##// END OF EJS Templates
modernize: python3 updates
super-admin -
r5096:a0018795 default
parent child Browse files
Show More
@@ -1,71 +1,69 b''
1
2
3 # Copyright (C) 2015-2023 RhodeCode GmbH
1 # Copyright (C) 2015-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 import atexit
19 import atexit
22 import logging
20 import logging
23 import os
21 import os
24 import signal
22 import signal
25 import sys
23 import sys
26
24
27 import rhodecode
25 import rhodecode
28
26
log = logging.getLogger(__name__)

# Per-process cache key prefixes (e.g. 'proc:<instance_id>') registered at
# runtime; free_cache_keys() deletes the matching DB rows on process exit.
cache_keys_by_pid = set()
32
30
33
31
def sigHandler(signo, frame):
    """
    Single signal handler: translate the received signal into sys.exit(0)
    so the atexit cleanup hooks run exactly once. No-op under tests.
    """
    if not rhodecode.is_test:
        sys.exit(0)
42
40
43
41
def free_cache_keys(*args):
    """
    Exit hook: delete from the DB every cache key registered by this process.

    Skipped for test runs and SSH-wrapper invocations, where no cleanup is
    wanted. Failures are logged, never raised, so shutdown stays graceful.
    """
    from rhodecode.model.db import CacheKey, Session

    if rhodecode.is_test:
        return

    # SSH wrapper processes must not purge the shared cache keys
    if os.environ.get('RC_CMD_SSH_WRAPPER'):
        return

    if not cache_keys_by_pid:
        return

    try:
        # iterate a snapshot so we can discard entries as they are cleared
        for proc_prefix in set(cache_keys_by_pid):
            pattern = f'{proc_prefix}%'
            query = CacheKey.query().filter(CacheKey.cache_key.like(pattern))
            total = query.count()
            log.info('Clearing %s: %s cache keys, total: %s',
                     proc_prefix, len(cache_keys_by_pid), total)
            query.delete(synchronize_session='fetch')
            cache_keys_by_pid.remove(proc_prefix)
        Session().commit()
    except Exception:
        log.exception('Failed to clear keys, exiting gracefully')
66
64
# Purge this process' DB-backed cache keys on normal interpreter shutdown.
atexit.register(free_cache_keys)

# Route termination signals through sys.exit so the atexit hook above fires.
signal.signal(signal.SIGTERM, sigHandler)
signal.signal(signal.SIGINT, sigHandler)
71
69
@@ -1,405 +1,405 b''
1 # Copyright (C) 2015-2023 RhodeCode GmbH
1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import functools
19 import functools
20 import logging
20 import logging
21 import os
21 import os
22 import threading
22 import threading
23 import time
23 import time
24
24
25 import decorator
25 import decorator
26 from dogpile.cache import CacheRegion
26 from dogpile.cache import CacheRegion
27
27
28 import rhodecode
28 import rhodecode
29 from rhodecode.lib.hash_utils import sha1
29 from rhodecode.lib.hash_utils import sha1
30 from rhodecode.lib.str_utils import safe_bytes
30 from rhodecode.lib.str_utils import safe_bytes
31 from rhodecode.lib.type_utils import str2bool
31 from rhodecode.lib.type_utils import str2bool
32
32
33 from . import region_meta, cache_key_meta
33 from . import region_meta, cache_key_meta
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37
37
def isCython(func):
    """
    Private helper that checks if a function is a compiled Cython
    function/method object, detected by its class name.
    """
    cls_name = func.__class__.__name__
    return cls_name == 'cython_function_or_method'
43
43
44
44
class RhodeCodeCacheRegion(CacheRegion):
    """
    dogpile CacheRegion subclass adding a *conditional* caching decorator
    that can bypass dogpile entirely when caching is disabled.
    """

    def __repr__(self):
        return f'{self.__class__}(name={self.name})'

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't meet. This works a bit different from should_cache_fn
        And it's faster in cases we don't ever want to compute cached values

        :param namespace: cache namespace; falls back to the region's
            ``_default_namespace`` when not given
        :param expiration_time: timeout in seconds, or a callable returning it
        :param should_cache_fn: dogpile hook deciding whether to store a value
        :param to_str: argument stringifier passed to the key generator
        :param function_key_generator: override for the region's key generator
        :param condition: when falsy, the decorated function is called
            directly and dogpile is never touched
        """
        expiration_time_is_callable = callable(expiration_time)
        if not namespace:
            namespace = getattr(self, '_default_namespace', None)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):

            if not condition:
                # caching disabled: call through, only measuring timing
                log.debug('Calling un-cached method:%s', user_func.__name__)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                return result

            key = func_key_generator(*arg, **kw)

            # resolve a callable expiration_time lazily, per call
            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached method:`%s`', user_func.__name__)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                key = key_generator(*arg, **kw)
                return self.get(key)

            # attach cache-management helpers onto the decorated function
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
127
127
128
128
def make_region(*arg, **kw):
    """Factory mirroring dogpile's make_region, returning our subclass."""
    region = RhodeCodeCacheRegion(*arg, **kw)
    return region
131
131
132
132
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache-related options from a settings mapping.

    For every key starting with one of *prefixes*, the prefix is stripped and
    the remainder (whitespace-trimmed) used as the option name. String values
    are also trimmed.

    :param settings: mapping of configuration keys to values
    :param prefixes: list of key prefixes to match; ``None``/empty matches nothing
    :return: dict of prefix-stripped option names to values
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # strip only the *leading* prefix; key.split(prefix)[1] would
                # truncate names that contain the prefix substring again
                name = key[len(prefix):].strip()
                val = settings[key]
                if isinstance(val, str):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
145
145
146
146
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined = "_".join(str(arg) for arg in args)
    return sha1(safe_bytes(joined))
152
152
153
153
def custom_key_generator(backend, namespace, fn):
    """
    Build a key generator for *fn*; produced keys have the form
    ``<backend_prefix>:<namespace>:<func_name>_<param_hash>``.
    """
    func_name = fn.__name__

    def generate_key(*args):
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        ns = namespace or 'default_namespace'
        arg_key = compute_key_from_params(*args)
        return f"{prefix}:{ns}:{func_name}_{arg_key}"

    return generate_key
166
166
167
167
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    # bind the backend; the result still accepts (namespace, fn)
    return functools.partial(custom_key_generator, backend)
175
175
176
176
def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
    """
    Return the configured dogpile region for *region_name*.

    For FileNamespaceBackend regions a per-namespace sub-region is created on
    demand (one ``.cache_db`` file per namespace) and memoized in
    ``region_meta.dogpile_cache_regions``.

    :param region_name: name of a region configured at startup
    :param region_namespace: required for file-backed regions; selects the db file
    :param use_async_runner: attach the async creation runner to the region
    :raises OSError: if *region_name* was never configured
    :raises ValueError: if a file-backed region is requested without a namespace
    """
    from .backends import FileNamespaceBackend
    from . import async_creation_runner

    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        reg_keys = list(region_meta.dogpile_cache_regions.keys())
        raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')

    region_uid_name = f'{region_name}:{region_namespace}'

    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        if not region_namespace:
            raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')

        # NOTE(review): namespaced regions are memoized under region_namespace,
        # not region_uid_name — assumes namespaces are unique across regions;
        # TODO confirm with callers
        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist

        expiration_time = region_obj.expiration_time

        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        namespace_cache_dir = cache_dir

        # we default the namespace_cache_dir to our default cache dir.
        # however if this backend is configured with filename= param, we prioritize that
        # so all caches within that particular region, even those namespaced end up in the same path
        if region_obj.actual_backend.filename:
            namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)

        if not os.path.isdir(namespace_cache_dir):
            os.makedirs(namespace_cache_dir)
        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )

        namespace_filename = os.path.join(
            namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    region_obj._default_namespace = region_namespace
    if use_async_runner:
        region_obj.async_creation_runner = async_creation_runner
    return region_obj
232
232
233
233
def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str):
    """
    Clear a cache namespace, either by process-local invalidation or by
    deleting all matching keys from the backend.

    :param cache_region: region object, or region name to resolve
    :param cache_namespace_uid: key prefix identifying the namespace
    :param method: CLEAR_INVALIDATE or CLEAR_DELETE
    :return: number of deleted keys for CLEAR_DELETE, otherwise None
    """
    from . import CLEAR_DELETE, CLEAR_INVALIDATE

    if not isinstance(cache_region, RhodeCodeCacheRegion):
        cache_region = get_or_create_region(cache_region, cache_namespace_uid)
    log.debug('clearing cache region: %s with method=%s', cache_region, method)

    affected = None

    if method == CLEAR_INVALIDATE:
        # NOTE: CacheRegion.invalidate() only sets an in-memory timestamp on
        # this CacheRegion in this Python process; other processes/regions
        # are unaffected.
        cache_region.invalidate(hard=True)
    elif method == CLEAR_DELETE:
        matching = cache_region.backend.list_keys(prefix=cache_namespace_uid)
        affected = len(matching)
        if affected:
            cache_region.delete_multi(matching)

    return affected
256
256
257
257
class ActiveRegionCache(object):
    """Returned by InvalidationContext when the cache key is still active."""

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # active key: the caller may serve the cached value as-is
        return False
265
265
266
266
class FreshRegionCache(object):
    """Returned by InvalidationContext when the value must be recomputed."""

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # missing/inactive key: the caller should refresh the cached value
        return True
274
274
275
275
class InvalidationContext(object):
    """
    Context manager coordinating cross-process cache invalidation via
    CacheKey rows in the database.

    usage::

        from rhodecode.lib import rc_cache

        cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
        def heavy_compute(cache_name, param1, param2):
            print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))

        # invalidation namespace is shared namespace key for all process caches
        # we use it to send a global signal
        invalidation_namespace = 'repo_cache:1'

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            args = ('one', 'two')
            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                result = heavy_compute.refresh(*args)
            else:
                result = heavy_compute(*args)

            compute_time = inv_context_manager.compute_time
            log.debug('result computed in %.4fs', compute_time)

        # To send global invalidation signal, simply run
        CacheKey.set_invalidate(invalidation_namespace)

    """

    def __repr__(self):
        return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'

    def __init__(self, uid, invalidation_namespace='',
                 raise_exception=False, thread_scoped=None):
        """
        :param uid: unique id of the cached computation; hashed into cache_key
        :param invalidation_namespace: shared namespace used to signal
            invalidation across processes
        :param raise_exception: re-raise unexpected commit errors in __exit__
        :param thread_scoped: scope the key to the current thread; ``None``
            defers to the `cache_thread_scoped` .ini setting
        """
        self.uid = uid
        self.invalidation_namespace = invalidation_namespace
        self.raise_exception = raise_exception
        self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
        self.thread_id = 'global'

        if thread_scoped is None:
            # if we set "default" we can override this via .ini settings
            thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')

        # Append the thread id to the cache key if this invalidation context
        # should be scoped to the current thread.
        if thread_scoped is True:
            self.thread_id = threading.current_thread().ident

        self.cache_key = compute_key_from_params(uid)
        self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
            self.proc_id, self.thread_id, self.cache_key)
        self.proc_key = f'proc:{self.proc_id}'
        # wall-clock seconds spent inside the `with` block; set on __exit__
        self.compute_time = 0

    def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
        """
        Fetch the CacheKey DB row for this context's cache_key, creating an
        (unpersisted) one if missing.
        """
        from rhodecode.model.db import CacheKey

        invalidation_namespace = invalidation_namespace or self.invalidation_namespace
        # fetch all cache keys for this namespace and convert them to a map to find if we
        # have specific cache_key object registered. We do this because we want to have
        # all consistent cache_state_uid for newly registered objects
        cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
        cache_obj = cache_obj_map.get(self.cache_key)
        log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)

        if not cache_obj:
            new_cache_args = invalidation_namespace
            # inherit cache_state_uid from any sibling key in the namespace
            first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
            cache_state_uid = None
            if first_cache_obj:
                cache_state_uid = first_cache_obj.cache_state_uid
            cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
                                 cache_state_uid=cache_state_uid)
            # register this process prefix so keys are purged on process exit
            cache_key_meta.cache_keys_by_pid.add(self.proc_key)

        return cache_obj

    def __enter__(self):
        """
        Test if current object is valid, and return CacheRegion function
        that does invalidation and calculation
        """
        log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
        # register or get a new key based on uid
        self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
        cache_data = self.cache_obj.get_dict()
        self._start_time = time.time()
        if self.cache_obj.cache_active:
            # means our cache obj is existing and marked as it's
            # cache is not outdated, we return ActiveRegionCache
            self.skip_cache_active_change = True

            return ActiveRegionCache(context=self, cache_data=cache_data)

        # the key is either not existing or set to False, we return
        # the real invalidator which re-computes value. We additionally set
        # the flag to actually update the Database objects
        self.skip_cache_active_change = False
        return FreshRegionCache(context=self, cache_data=cache_data)

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Persist the re-activated cache key unless nothing changed."""
        from rhodecode.model.db import IntegrityError, Session

        # save compute time
        self.compute_time = time.time() - self._start_time

        if self.skip_cache_active_change:
            return

        try:
            self.cache_obj.cache_active = True
            Session().add(self.cache_obj)
            Session().commit()
        except IntegrityError:
            # if we catch integrity error, it means we inserted this object
            # assumption is that's really an edge race-condition case and
            # it's safe is to skip it
            Session().rollback()
        except Exception:
            log.exception('Failed to commit on cache key update')
            Session().rollback()
            if self.raise_exception:
                raise
@@ -1,107 +1,105 b''
1
2
3 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 import sys
19 import sys
22 import logging
20 import logging
23
21
24 import click
22 import click
25
23
26 from rhodecode.lib.pyramid_utils import bootstrap
24 from rhodecode.lib.pyramid_utils import bootstrap
27 from rhodecode.model.db import Session, User, Repository
25 from rhodecode.model.db import Session, User, Repository
28 from rhodecode.model.user import UserModel
26 from rhodecode.model.user import UserModel
29 from rhodecode.apps.file_store import utils as store_utils
27 from rhodecode.apps.file_store import utils as store_utils
30
28
31 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
32
30
33
31
34 @click.command()
32 @click.command()
35 @click.argument('ini_path', type=click.Path(exists=True))
33 @click.argument('ini_path', type=click.Path(exists=True))
36 @click.option(
34 @click.option(
37 '--filename',
35 '--filename',
38 required=True,
36 required=True,
39 help='Filename for artifact.')
37 help='Filename for artifact.')
40 @click.option(
38 @click.option(
41 '--file-path',
39 '--file-path',
42 required=True,
40 required=True,
43 type=click.Path(exists=True, dir_okay=False, readable=True),
41 type=click.Path(exists=True, dir_okay=False, readable=True),
44 help='Path to a file to be added as artifact')
42 help='Path to a file to be added as artifact')
45 @click.option(
43 @click.option(
46 '--repo-id',
44 '--repo-id',
47 required=True,
45 required=True,
48 type=int,
46 type=int,
49 help='ID of repository to add this artifact to.')
47 help='ID of repository to add this artifact to.')
50 @click.option(
48 @click.option(
51 '--user-id',
49 '--user-id',
52 default=None,
50 default=None,
53 type=int,
51 type=int,
54 help='User ID for creator of artifact. '
52 help='User ID for creator of artifact. '
55 'Default would be first super admin.')
53 'Default would be first super admin.')
56 @click.option(
54 @click.option(
57 '--description',
55 '--description',
58 default=None,
56 default=None,
59 type=str,
57 type=str,
60 help='Add description to this artifact')
58 help='Add description to this artifact')
61 def main(ini_path, filename, file_path, repo_id, user_id, description):
59 def main(ini_path, filename, file_path, repo_id, user_id, description):
62 return command(ini_path, filename, file_path, repo_id, user_id, description)
60 return command(ini_path, filename, file_path, repo_id, user_id, description)
63
61
64
62
65 def command(ini_path, filename, file_path, repo_id, user_id, description):
63 def command(ini_path, filename, file_path, repo_id, user_id, description):
66 with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env:
64 with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env:
67 try:
65 try:
68 from rc_ee.api.views.store_api import _store_file
66 from rc_ee.api.views.store_api import _store_file
69 except ImportError:
67 except ImportError:
70 click.secho('ERROR: Unable to import store_api. '
68 click.secho('ERROR: Unable to import store_api. '
71 'store_api is only available in EE edition of RhodeCode',
69 'store_api is only available in EE edition of RhodeCode',
72 fg='red')
70 fg='red')
73 sys.exit(-1)
71 sys.exit(-1)
74
72
75 request = env['request']
73 request = env['request']
76
74
77 repo = Repository.get(repo_id)
75 repo = Repository.get(repo_id)
78 if not repo:
76 if not repo:
79 click.secho('ERROR: Unable to find repository with id `{}`'.format(repo_id),
77 click.secho(f'ERROR: Unable to find repository with id `{repo_id}`',
80 fg='red')
78 fg='red')
81 sys.exit(-1)
79 sys.exit(-1)
82
80
83 # if we don't give user, or it's "DEFAULT" user we pick super-admin
81 # if we don't give user, or it's "DEFAULT" user we pick super-admin
84 if user_id is not None or user_id == 1:
82 if user_id is not None or user_id == 1:
85 db_user = User.get(user_id)
83 db_user = User.get(user_id)
86 else:
84 else:
87 db_user = User.get_first_super_admin()
85 db_user = User.get_first_super_admin()
88
86
89 if not db_user:
87 if not db_user:
90 click.secho('ERROR: Unable to find user with id/username `{}`'.format(user_id),
88 click.secho(f'ERROR: Unable to find user with id/username `{user_id}`',
91 fg='red')
89 fg='red')
92 sys.exit(-1)
90 sys.exit(-1)
93
91
94 auth_user = db_user.AuthUser(ip_addr='127.0.0.1')
92 auth_user = db_user.AuthUser(ip_addr='127.0.0.1')
95
93
96 storage = store_utils.get_file_storage(request.registry.settings)
94 storage = store_utils.get_file_storage(request.registry.settings)
97
95
98 with open(file_path, 'rb') as f:
96 with open(file_path, 'rb') as f:
99 click.secho('Adding new artifact from path: `{}`'.format(file_path),
97 click.secho(f'Adding new artifact from path: `{file_path}`',
100 fg='green')
98 fg='green')
101
99
102 file_data = _store_file(
100 file_data = _store_file(
103 storage, auth_user, filename, content=None, check_acl=True,
101 storage, auth_user, filename, content=None, check_acl=True,
104 file_obj=f, description=description,
102 file_obj=f, description=description,
105 scope_repo_id=repo.repo_id)
103 scope_repo_id=repo.repo_id)
106 click.secho('File Data: {}'.format(file_data),
104 click.secho(f'File Data: {file_data}',
107 fg='green')
105 fg='green')
@@ -1,59 +1,57 b''
1
2
3 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 import click
19 import click
22
20
23 from rhodecode.lib.pyramid_utils import bootstrap
21 from rhodecode.lib.pyramid_utils import bootstrap
24 import pyramid.paster
22 import pyramid.paster
25
23
26 # imports, used in ipython shell
24 # imports, used in ipython shell
27 import os
25 import os
28 import sys
26 import sys
29 import time
27 import time
30 import shutil
28 import shutil
31 import datetime
29 import datetime
32 from rhodecode.model.db import *
30 from rhodecode.model.db import *
33
31
34 welcome_banner = """Welcome to RhodeCode iShell.
32 welcome_banner = """Welcome to RhodeCode iShell.
35 Type `exit` to exit the shell.
33 Type `exit` to exit the shell.
36 iShell is interactive shell to interact directly with the
34 iShell is interactive shell to interact directly with the
37 internal RhodeCode APIs. You can rescue your lost password,
35 internal RhodeCode APIs. You can rescue your lost password,
38 or reset some user/system settings.
36 or reset some user/system settings.
39 """
37 """
40
38
41
39
42 @click.command()
40 @click.command()
43 @click.argument('ini_path', type=click.Path(exists=True))
41 @click.argument('ini_path', type=click.Path(exists=True))
44 def main(ini_path):
42 def main(ini_path):
45 pyramid.paster.setup_logging(ini_path)
43 pyramid.paster.setup_logging(ini_path)
46
44
47 with bootstrap(ini_path) as env:
45 with bootstrap(ini_path) as env:
48
46
49 try:
47 try:
50 from IPython import embed
48 from IPython import embed
51 from traitlets.config import Config
49 from traitlets.config import Config
52 cfg = Config()
50 cfg = Config()
53 cfg.InteractiveShellEmbed.confirm_exit = False
51 cfg.InteractiveShellEmbed.confirm_exit = False
54 embed(config=cfg, banner1=welcome_banner)
52 embed(config=cfg, banner1=welcome_banner)
55 except ImportError:
53 except ImportError:
56 print('ipython installation required for ishell')
54 print('ipython installation required for ishell')
57 sys.exit(-1)
55 sys.exit(-1)
58
56
59
57
@@ -1,125 +1,123 b''
1
2
3 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import logging
18 import logging
21
19
22 import click
20 import click
23 import pyramid.paster
21 import pyramid.paster
24
22
25 from rhodecode.lib.pyramid_utils import bootstrap, get_app_config
23 from rhodecode.lib.pyramid_utils import bootstrap, get_app_config
26 from rhodecode.lib.db_manage import DbManage
24 from rhodecode.lib.db_manage import DbManage
27 from rhodecode.lib.utils2 import get_encryption_key
25 from rhodecode.lib.utils2 import get_encryption_key
28 from rhodecode.model.db import Session
26 from rhodecode.model.db import Session
29
27
30
28
31 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
32
30
33
31
34 @click.command()
32 @click.command()
35 @click.argument('ini_path', type=click.Path(exists=True))
33 @click.argument('ini_path', type=click.Path(exists=True))
36 @click.option(
34 @click.option(
37 '--force-yes/--force-no', default=None,
35 '--force-yes/--force-no', default=None,
38 help="Force yes/no to every question")
36 help="Force yes/no to every question")
39 @click.option(
37 @click.option(
40 '--user',
38 '--user',
41 default=None,
39 default=None,
42 help='Initial super-admin username')
40 help='Initial super-admin username')
43 @click.option(
41 @click.option(
44 '--email',
42 '--email',
45 default=None,
43 default=None,
46 help='Initial super-admin email address.')
44 help='Initial super-admin email address.')
47 @click.option(
45 @click.option(
48 '--password',
46 '--password',
49 default=None,
47 default=None,
50 help='Initial super-admin password. Minimum 6 chars.')
48 help='Initial super-admin password. Minimum 6 chars.')
51 @click.option(
49 @click.option(
52 '--api-key',
50 '--api-key',
53 help='Initial API key for the admin user')
51 help='Initial API key for the admin user')
54 @click.option(
52 @click.option(
55 '--repos',
53 '--repos',
56 default=None,
54 default=None,
57 help='Absolute path to storage location. This is storage for all '
55 help='Absolute path to storage location. This is storage for all '
58 'existing and future repositories, and repository groups.')
56 'existing and future repositories, and repository groups.')
59 @click.option(
57 @click.option(
60 '--public-access/--no-public-access',
58 '--public-access/--no-public-access',
61 default=None,
59 default=None,
62 help='Enable public access on this installation. '
60 help='Enable public access on this installation. '
63 'Default is public access enabled.')
61 'Default is public access enabled.')
64 @click.option(
62 @click.option(
65 '--skip-existing-db',
63 '--skip-existing-db',
66 default=False,
64 default=False,
67 is_flag=True,
65 is_flag=True,
68 help='Do not destroy and re-initialize the database if it already exist.')
66 help='Do not destroy and re-initialize the database if it already exist.')
69 @click.option(
67 @click.option(
70 '--apply-license-key',
68 '--apply-license-key',
71 default=False,
69 default=False,
72 is_flag=True,
70 is_flag=True,
73 help='Get the license key from a license file or ENV and apply during DB creation.')
71 help='Get the license key from a license file or ENV and apply during DB creation.')
74 def main(ini_path, force_yes, user, email, password, api_key, repos,
72 def main(ini_path, force_yes, user, email, password, api_key, repos,
75 public_access, skip_existing_db, apply_license_key):
73 public_access, skip_existing_db, apply_license_key):
76 return command(ini_path, force_yes, user, email, password, api_key,
74 return command(ini_path, force_yes, user, email, password, api_key,
77 repos, public_access, skip_existing_db, apply_license_key)
75 repos, public_access, skip_existing_db, apply_license_key)
78
76
79
77
80 def command(ini_path, force_yes, user, email, password, api_key, repos,
78 def command(ini_path, force_yes, user, email, password, api_key, repos,
81 public_access, skip_existing_db, apply_license_key):
79 public_access, skip_existing_db, apply_license_key):
82 # mapping of old parameters to new CLI from click
80 # mapping of old parameters to new CLI from click
83 options = dict(
81 options = dict(
84 username=user,
82 username=user,
85 email=email,
83 email=email,
86 password=password,
84 password=password,
87 api_key=api_key,
85 api_key=api_key,
88 repos_location=repos,
86 repos_location=repos,
89 force_ask=force_yes,
87 force_ask=force_yes,
90 public_access=public_access
88 public_access=public_access
91 )
89 )
92 pyramid.paster.setup_logging(ini_path)
90 pyramid.paster.setup_logging(ini_path)
93
91
94 config = get_app_config(ini_path)
92 config = get_app_config(ini_path)
95
93
96 db_uri = config['sqlalchemy.db1.url']
94 db_uri = config['sqlalchemy.db1.url']
97 enc_key = get_encryption_key(config)
95 enc_key = get_encryption_key(config)
98 dbmanage = DbManage(log_sql=True, dbconf=db_uri, root='.',
96 dbmanage = DbManage(log_sql=True, dbconf=db_uri, root='.',
99 tests=False, cli_args=options, enc_key=enc_key)
97 tests=False, cli_args=options, enc_key=enc_key)
100 if skip_existing_db and dbmanage.db_exists():
98 if skip_existing_db and dbmanage.db_exists():
101 return
99 return
102
100
103 dbmanage.create_tables(override=True)
101 dbmanage.create_tables(override=True)
104 dbmanage.set_db_version()
102 dbmanage.set_db_version()
105 opts = dbmanage.config_prompt(None)
103 opts = dbmanage.config_prompt(None)
106 dbmanage.create_settings(opts)
104 dbmanage.create_settings(opts)
107 dbmanage.create_default_user()
105 dbmanage.create_default_user()
108 dbmanage.create_admin_and_prompt()
106 dbmanage.create_admin_and_prompt()
109 dbmanage.create_permissions()
107 dbmanage.create_permissions()
110 dbmanage.populate_default_permissions()
108 dbmanage.populate_default_permissions()
111 if apply_license_key:
109 if apply_license_key:
112 try:
110 try:
113 from rc_license.models import apply_trial_license_if_missing
111 from rc_license.models import apply_trial_license_if_missing
114 apply_trial_license_if_missing(force=True)
112 apply_trial_license_if_missing(force=True)
115 except ImportError:
113 except ImportError:
116 pass
114 pass
117
115
118 Session().commit()
116 Session().commit()
119
117
120 with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env:
118 with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env:
121 msg = 'Successfully initialized database, schema and default data.'
119 msg = 'Successfully initialized database, schema and default data.'
122 print()
120 print()
123 print('*' * len(msg))
121 print('*' * len(msg))
124 print(msg.upper())
122 print(msg.upper())
125 print('*' * len(msg))
123 print('*' * len(msg))
@@ -1,56 +1,54 b''
1
2
3 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 import logging
19 import logging
22
20
23 import click
21 import click
24 import pyramid.paster
22 import pyramid.paster
25
23
26 from rhodecode.lib.pyramid_utils import bootstrap
24 from rhodecode.lib.pyramid_utils import bootstrap
27 from rhodecode.lib.db_manage import DbManage
25 from rhodecode.lib.db_manage import DbManage
28 from rhodecode.lib.utils2 import safe_int, get_encryption_key
26 from rhodecode.lib.utils2 import safe_int, get_encryption_key
29
27
30 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
31
29
32
30
33 @click.command()
31 @click.command()
34 @click.argument('ini_path', type=click.Path(exists=True))
32 @click.argument('ini_path', type=click.Path(exists=True))
35 @click.option('--force-yes/--force-no', default=None,
33 @click.option('--force-yes/--force-no', default=None,
36 help="Force yes/no to every question")
34 help="Force yes/no to every question")
37 @click.option('--force-version', default=None,
35 @click.option('--force-version', default=None,
38 help="Force upgrade from version")
36 help="Force upgrade from version")
39 def main(ini_path, force_yes, force_version):
37 def main(ini_path, force_yes, force_version):
40 return command(ini_path, force_yes, force_version)
38 return command(ini_path, force_yes, force_version)
41
39
42
40
43 def command(ini_path, force_yes, force_version):
41 def command(ini_path, force_yes, force_version):
44 pyramid.paster.setup_logging(ini_path)
42 pyramid.paster.setup_logging(ini_path)
45
43
46 with bootstrap(ini_path, env={'RC_CMD_UPGRADE_DB': '1'}) as env:
44 with bootstrap(ini_path, env={'RC_CMD_UPGRADE_DB': '1'}) as env:
47 config = env['registry'].settings
45 config = env['registry'].settings
48 db_uri = config['sqlalchemy.db1.url']
46 db_uri = config['sqlalchemy.db1.url']
49 enc_key = get_encryption_key(config)
47 enc_key = get_encryption_key(config)
50 options = {}
48 options = {}
51 if force_yes is not None:
49 if force_yes is not None:
52 options['force_ask'] = force_yes
50 options['force_ask'] = force_yes
53 dbmanage = DbManage(
51 dbmanage = DbManage(
54 log_sql=True, dbconf=db_uri, root='.', tests=False,
52 log_sql=True, dbconf=db_uri, root='.', tests=False,
55 cli_args=options, enc_key=enc_key)
53 cli_args=options, enc_key=enc_key)
56 dbmanage.upgrade(version=safe_int(force_version))
54 dbmanage.upgrade(version=safe_int(force_version))
@@ -1,193 +1,191 b''
1
2
3 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import logging
18 import logging
21
19
22 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
23
21
24
22
25 class MaintenanceTask(object):
23 class MaintenanceTask(object):
26 human_name = 'undefined'
24 human_name = 'undefined'
27
25
28 def __init__(self, db_repo):
26 def __init__(self, db_repo):
29 self.db_repo = db_repo
27 self.db_repo = db_repo
30
28
31 def run(self):
29 def run(self):
32 """Execute task and return task human value"""
30 """Execute task and return task human value"""
33 raise NotImplementedError()
31 raise NotImplementedError()
34
32
35
33
36 class GitGC(MaintenanceTask):
34 class GitGC(MaintenanceTask):
37 human_name = 'GIT Garbage collect'
35 human_name = 'GIT Garbage collect'
38
36
39 def _count_objects(self, repo):
37 def _count_objects(self, repo):
40 stdout, stderr = repo.run_git_command(
38 stdout, stderr = repo.run_git_command(
41 ['count-objects', '-v'], fail_on_stderr=False)
39 ['count-objects', '-v'], fail_on_stderr=False)
42
40
43 errors = ' '
41 errors = ' '
44 objects = ' '.join(stdout.splitlines())
42 objects = ' '.join(stdout.splitlines())
45
43
46 if stderr:
44 if stderr:
47 errors = '\nSTD ERR:' + '\n'.join(stderr.splitlines())
45 errors = '\nSTD ERR:' + '\n'.join(stderr.splitlines())
48 return objects + errors
46 return objects + errors
49
47
50 def run(self):
48 def run(self):
51 output = []
49 output = []
52 instance = self.db_repo.scm_instance()
50 instance = self.db_repo.scm_instance()
53
51
54 objects_before = self._count_objects(instance)
52 objects_before = self._count_objects(instance)
55
53
56 log.debug('GIT objects:%s', objects_before)
54 log.debug('GIT objects:%s', objects_before)
57 cmd = ['gc', '--aggressive']
55 cmd = ['gc', '--aggressive']
58 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
56 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
59
57
60 out = 'executed {}'.format(' '.join(cmd))
58 out = 'executed {}'.format(' '.join(cmd))
61 output.append(out)
59 output.append(out)
62
60
63 out = ''
61 out = ''
64 if stderr:
62 if stderr:
65 out += ''.join(stderr.splitlines())
63 out += ''.join(stderr.splitlines())
66
64
67 if stdout:
65 if stdout:
68 out += ''.join(stdout.splitlines())
66 out += ''.join(stdout.splitlines())
69
67
70 if out:
68 if out:
71 output.append(out)
69 output.append(out)
72
70
73 objects_after = self._count_objects(instance)
71 objects_after = self._count_objects(instance)
74 log.debug('GIT objects:%s', objects_after)
72 log.debug('GIT objects:%s', objects_after)
75 output.append('objects before :' + objects_before)
73 output.append('objects before :' + objects_before)
76 output.append('objects after :' + objects_after)
74 output.append('objects after :' + objects_after)
77
75
78 return '\n'.join(output)
76 return '\n'.join(output)
79
77
80
78
81 class GitFSCK(MaintenanceTask):
79 class GitFSCK(MaintenanceTask):
82 human_name = 'GIT FSCK'
80 human_name = 'GIT FSCK'
83
81
84 def run(self):
82 def run(self):
85 output = []
83 output = []
86 instance = self.db_repo.scm_instance()
84 instance = self.db_repo.scm_instance()
87
85
88 cmd = ['fsck', '--full']
86 cmd = ['fsck', '--full']
89 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
87 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
90
88
91 out = 'executed {}'.format(' '.join(cmd))
89 out = 'executed {}'.format(' '.join(cmd))
92 output.append(out)
90 output.append(out)
93
91
94 out = ''
92 out = ''
95 if stderr:
93 if stderr:
96 out += ''.join(stderr.splitlines())
94 out += ''.join(stderr.splitlines())
97
95
98 if stdout:
96 if stdout:
99 out += ''.join(stdout.splitlines())
97 out += ''.join(stdout.splitlines())
100
98
101 if out:
99 if out:
102 output.append(out)
100 output.append(out)
103
101
104 return '\n'.join(output)
102 return '\n'.join(output)
105
103
106
104
107 class GitRepack(MaintenanceTask):
105 class GitRepack(MaintenanceTask):
108 human_name = 'GIT Repack'
106 human_name = 'GIT Repack'
109
107
110 def run(self):
108 def run(self):
111 output = []
109 output = []
112 instance = self.db_repo.scm_instance()
110 instance = self.db_repo.scm_instance()
113 cmd = ['repack', '-a', '-d',
111 cmd = ['repack', '-a', '-d',
114 '--window-memory', '10m', '--max-pack-size', '100m']
112 '--window-memory', '10m', '--max-pack-size', '100m']
115 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
113 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
116
114
117 out = 'executed {}'.format(' '.join(cmd))
115 out = 'executed {}'.format(' '.join(cmd))
118 output.append(out)
116 output.append(out)
119 out = ''
117 out = ''
120
118
121 if stderr:
119 if stderr:
122 out += ''.join(stderr.splitlines())
120 out += ''.join(stderr.splitlines())
123
121
124 if stdout:
122 if stdout:
125 out += ''.join(stdout.splitlines())
123 out += ''.join(stdout.splitlines())
126
124
127 if out:
125 if out:
128 output.append(out)
126 output.append(out)
129
127
130 return '\n'.join(output)
128 return '\n'.join(output)
131
129
132
130
133 class HGVerify(MaintenanceTask):
131 class HGVerify(MaintenanceTask):
134 human_name = 'HG Verify repo'
132 human_name = 'HG Verify repo'
135
133
136 def run(self):
134 def run(self):
137 instance = self.db_repo.scm_instance()
135 instance = self.db_repo.scm_instance()
138 res = instance.verify()
136 res = instance.verify()
139 return res
137 return res
140
138
141
139
142 class HGUpdateCaches(MaintenanceTask):
140 class HGUpdateCaches(MaintenanceTask):
143 human_name = 'HG update caches'
141 human_name = 'HG update caches'
144
142
145 def run(self):
143 def run(self):
146 instance = self.db_repo.scm_instance()
144 instance = self.db_repo.scm_instance()
147 res = instance.hg_update_cache()
145 res = instance.hg_update_cache()
148 return res
146 return res
149
147
150
148
151 class HGRebuildFnCaches(MaintenanceTask):
149 class HGRebuildFnCaches(MaintenanceTask):
152 human_name = 'HG rebuild fn caches'
150 human_name = 'HG rebuild fn caches'
153
151
154 def run(self):
152 def run(self):
155 instance = self.db_repo.scm_instance()
153 instance = self.db_repo.scm_instance()
156 res = instance.hg_rebuild_fn_cache()
154 res = instance.hg_rebuild_fn_cache()
157 return res
155 return res
158
156
159
157
160 class SVNVerify(MaintenanceTask):
158 class SVNVerify(MaintenanceTask):
161 human_name = 'SVN Verify repo'
159 human_name = 'SVN Verify repo'
162
160
163 def run(self):
161 def run(self):
164 instance = self.db_repo.scm_instance()
162 instance = self.db_repo.scm_instance()
165 res = instance.verify()
163 res = instance.verify()
166 return res
164 return res
167
165
168
166
169 class RepoMaintenance(object):
167 class RepoMaintenance(object):
170 """
168 """
171 Performs maintenance of repository based on it's type
169 Performs maintenance of repository based on it's type
172 """
170 """
173 tasks = {
171 tasks = {
174 'hg': [HGVerify, HGUpdateCaches, HGRebuildFnCaches],
172 'hg': [HGVerify, HGUpdateCaches, HGRebuildFnCaches],
175 'git': [GitFSCK, GitGC, GitRepack],
173 'git': [GitFSCK, GitGC, GitRepack],
176 'svn': [SVNVerify],
174 'svn': [SVNVerify],
177 }
175 }
178
176
179 def get_tasks_for_repo(self, db_repo):
177 def get_tasks_for_repo(self, db_repo):
180 """
178 """
181 fetches human names of tasks pending for execution for given type of repo
179 fetches human names of tasks pending for execution for given type of repo
182 """
180 """
183 tasks = []
181 tasks = []
184 for task in self.tasks[db_repo.repo_type]:
182 for task in self.tasks[db_repo.repo_type]:
185 tasks.append(task.human_name)
183 tasks.append(task.human_name)
186 return tasks
184 return tasks
187
185
188 def execute(self, db_repo):
186 def execute(self, db_repo):
189 executed_tasks = []
187 executed_tasks = []
190 for task in self.tasks[db_repo.repo_type]:
188 for task in self.tasks[db_repo.repo_type]:
191 output = task.human_name + ':\n' + task(db_repo).run() + '\n--\n'
189 output = task.human_name + ':\n' + task(db_repo).run() + '\n--\n'
192 executed_tasks.append(output)
190 executed_tasks.append(output)
193 return executed_tasks
191 return executed_tasks
@@ -1,109 +1,107 b''
1
2
3 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 from uuid import uuid4
19 from uuid import uuid4
22 import pyramid.testing
20 import pyramid.testing
23 from pyramid.decorator import reify
21 from pyramid.decorator import reify
24 from pyramid.request import Request as _Request
22 from pyramid.request import Request as _Request
25 from rhodecode.lib.type_utils import StrictAttributeDict
23 from rhodecode.lib.type_utils import StrictAttributeDict
26
24
27
25
28 class TemplateArgs(StrictAttributeDict):
26 class TemplateArgs(StrictAttributeDict):
29 pass
27 pass
30
28
31
29
32 # Base Class with DummyMethods, testing / CLI scripts
30 # Base Class with DummyMethods, testing / CLI scripts
33 class RequestBase(object):
31 class RequestBase(object):
34 _req_id_bucket = list()
32 _req_id_bucket = list()
35 _call_context = TemplateArgs()
33 _call_context = TemplateArgs()
36 _call_context.visual = TemplateArgs()
34 _call_context.visual = TemplateArgs()
37 _call_context.visual.show_sha_length = 12
35 _call_context.visual.show_sha_length = 12
38 _call_context.visual.show_revision_number = True
36 _call_context.visual.show_revision_number = True
39
37
40 @reify
38 @reify
41 def req_id(self):
39 def req_id(self):
42 return str(uuid4())
40 return str(uuid4())
43
41
44 @property
42 @property
45 def req_id_bucket(self):
43 def req_id_bucket(self):
46 return self._req_id_bucket
44 return self._req_id_bucket
47
45
48 def req_id_records_init(self):
46 def req_id_records_init(self):
49 self._req_id_bucket = list()
47 self._req_id_bucket = list()
50
48
51 def translate(self, *args, **kwargs):
49 def translate(self, *args, **kwargs):
52 raise NotImplementedError()
50 raise NotImplementedError()
53
51
54 def plularize(self, *args, **kwargs):
52 def plularize(self, *args, **kwargs):
55 raise NotImplementedError()
53 raise NotImplementedError()
56
54
57 def get_partial_renderer(self, tmpl_name):
55 def get_partial_renderer(self, tmpl_name):
58 raise NotImplementedError()
56 raise NotImplementedError()
59
57
60 @property
58 @property
61 def call_context(self):
59 def call_context(self):
62 return self._call_context
60 return self._call_context
63
61
64 def set_call_context(self, new_context):
62 def set_call_context(self, new_context):
65 self._call_context = new_context
63 self._call_context = new_context
66
64
67
65
68 # for thin non-web/cli etc
66 # for thin non-web/cli etc
69 class ThinRequest(RequestBase, pyramid.testing.DummyRequest):
67 class ThinRequest(RequestBase, pyramid.testing.DummyRequest):
70
68
71 def translate(self, msg):
69 def translate(self, msg):
72 return msg
70 return msg
73
71
74 def plularize(self, singular, plural, n):
72 def plularize(self, singular, plural, n):
75 return singular
73 return singular
76
74
77 def get_partial_renderer(self, tmpl_name):
75 def get_partial_renderer(self, tmpl_name):
78 from rhodecode.lib.partial_renderer import get_partial_renderer
76 from rhodecode.lib.partial_renderer import get_partial_renderer
79 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
77 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
80
78
81
79
82 # for real-web-based
80 # for real-web-based
83 class RealRequest(RequestBase, _Request):
81 class RealRequest(RequestBase, _Request):
84 def get_partial_renderer(self, tmpl_name):
82 def get_partial_renderer(self, tmpl_name):
85 from rhodecode.lib.partial_renderer import get_partial_renderer
83 from rhodecode.lib.partial_renderer import get_partial_renderer
86 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
84 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
87
85
88 def request_count(self):
86 def request_count(self):
89 from rhodecode.lib.request_counter import get_request_counter
87 from rhodecode.lib.request_counter import get_request_counter
90 return get_request_counter()
88 return get_request_counter()
91
89
92 def plularize(self, *args, **kwargs):
90 def plularize(self, *args, **kwargs):
93 return self.localizer.pluralize(*args, **kwargs)
91 return self.localizer.pluralize(*args, **kwargs)
94
92
95 def translate(self, *args, **kwargs):
93 def translate(self, *args, **kwargs):
96 localizer = self.localizer
94 localizer = self.localizer
97 from rhodecode.translation import _ as tsf
95 from rhodecode.translation import _ as tsf
98
96
99 def auto_translate(*_args, **_kwargs):
97 def auto_translate(*_args, **_kwargs):
100 return localizer.translate(tsf(*_args, **_kwargs))
98 return localizer.translate(tsf(*_args, **_kwargs))
101
99
102 return auto_translate(*args, **kwargs)
100 return auto_translate(*args, **kwargs)
103
101
104
102
105 class Request(RealRequest):
103 class Request(RealRequest):
106 """
104 """
107 This is the main request object used in web-context
105 This is the main request object used in web-context
108 """
106 """
109 pass
107 pass
@@ -1,27 +1,25 b''
1
2
3 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 counter = 0
19 counter = 0
22
20
23
21
24 def get_request_counter():
22 def get_request_counter():
25 global counter
23 global counter
26 counter += 1
24 counter += 1
27 return counter
25 return counter
@@ -1,49 +1,49 b''
1 from rhodecode.lib._vendor.statsd import client_from_config
1 from rhodecode.lib._vendor.statsd import client_from_config
2
2
3
3
4 class StatsdClientNotInitialised(Exception):
4 class StatsdClientNotInitialised(Exception):
5 pass
5 pass
6
6
7
7
8 class _Singleton(type):
8 class _Singleton(type):
9 """A metaclass that creates a Singleton base class when called."""
9 """A metaclass that creates a Singleton base class when called."""
10
10
11 _instances = {}
11 _instances = {}
12
12
13 def __call__(cls, *args, **kwargs):
13 def __call__(cls, *args, **kwargs):
14 if cls not in cls._instances:
14 if cls not in cls._instances:
15 cls._instances[cls] = super(_Singleton, cls).__call__(*args, **kwargs)
15 cls._instances[cls] = super().__call__(*args, **kwargs)
16 return cls._instances[cls]
16 return cls._instances[cls]
17
17
18
18
19 class Singleton(_Singleton("SingletonMeta", (object,), {})):
19 class Singleton(_Singleton("SingletonMeta", (object,), {})):
20 pass
20 pass
21
21
22
22
23 class StatsdClientClass(Singleton):
23 class StatsdClientClass(Singleton):
24 setup_run = False
24 setup_run = False
25 statsd_client = None
25 statsd_client = None
26 statsd = None
26 statsd = None
27
27
28 def __getattribute__(self, name):
28 def __getattribute__(self, name):
29
29
30 if name.startswith("statsd"):
30 if name.startswith("statsd"):
31 if self.setup_run:
31 if self.setup_run:
32 return super(StatsdClientClass, self).__getattribute__(name)
32 return super().__getattribute__(name)
33 else:
33 else:
34 return None
34 return None
35 #raise StatsdClientNotInitialised("requested key was %s" % name)
35 #raise StatsdClientNotInitialised("requested key was %s" % name)
36
36
37 return super(StatsdClientClass, self).__getattribute__(name)
37 return super().__getattribute__(name)
38
38
39 def setup(self, settings):
39 def setup(self, settings):
40 """
40 """
41 Initialize the client
41 Initialize the client
42 """
42 """
43 statsd = client_from_config(settings)
43 statsd = client_from_config(settings)
44 self.statsd = statsd
44 self.statsd = statsd
45 self.statsd_client = statsd
45 self.statsd_client = statsd
46 self.setup_run = True
46 self.setup_run = True
47
47
48
48
49 StatsdClient = StatsdClientClass()
49 StatsdClient = StatsdClientClass()
@@ -1,170 +1,169 b''
1
2 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 import typing
19 import typing
21 import base64
20 import base64
22 import logging
21 import logging
23 from unidecode import unidecode
22 from unidecode import unidecode
24
23
25 import rhodecode
24 import rhodecode
26 from rhodecode.lib.type_utils import aslist
25 from rhodecode.lib.type_utils import aslist
27
26
28
27
29 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
30
29
31
30
32 def safe_int(val, default=None) -> int:
31 def safe_int(val, default=None) -> int:
33 """
32 """
34 Returns int() of val if val is not convertable to int use default
33 Returns int() of val if val is not convertable to int use default
35 instead
34 instead
36
35
37 :param val:
36 :param val:
38 :param default:
37 :param default:
39 """
38 """
40
39
41 try:
40 try:
42 val = int(val)
41 val = int(val)
43 except (ValueError, TypeError):
42 except (ValueError, TypeError):
44 val = default
43 val = default
45
44
46 return val
45 return val
47
46
48
47
49 def safe_float(val, default=None) -> float:
48 def safe_float(val, default=None) -> float:
50 """
49 """
51 Returns float() of val if val is not convertable to float use default
50 Returns float() of val if val is not convertable to float use default
52 instead
51 instead
53
52
54 :param val:
53 :param val:
55 :param default:
54 :param default:
56 """
55 """
57
56
58 try:
57 try:
59 val = float(val)
58 val = float(val)
60 except (ValueError, TypeError):
59 except (ValueError, TypeError):
61 val = default
60 val = default
62
61
63 return val
62 return val
64
63
65
64
66 def base64_to_str(text) -> str:
65 def base64_to_str(text) -> str:
67 return safe_str(base64.encodebytes(safe_bytes(text))).strip()
66 return safe_str(base64.encodebytes(safe_bytes(text))).strip()
68
67
69
68
70 def get_default_encodings() -> typing.List[str]:
69 def get_default_encodings() -> list[str]:
71 return aslist(rhodecode.CONFIG.get('default_encoding', 'utf8'), sep=',')
70 return aslist(rhodecode.CONFIG.get('default_encoding', 'utf8'), sep=',')
72
71
73
72
74 DEFAULT_ENCODINGS = get_default_encodings()
73 DEFAULT_ENCODINGS = get_default_encodings()
75
74
76
75
77 def safe_str(str_, to_encoding=None) -> str:
76 def safe_str(str_, to_encoding=None) -> str:
78 """
77 """
79 safe str function. Does few trick to turn unicode_ into string
78 safe str function. Does few trick to turn unicode_ into string
80
79
81 :param str_: str to encode
80 :param str_: str to encode
82 :param to_encoding: encode to this type UTF8 default
81 :param to_encoding: encode to this type UTF8 default
83 """
82 """
84 if isinstance(str_, str):
83 if isinstance(str_, str):
85 return str_
84 return str_
86
85
87 # if it's bytes cast to str
86 # if it's bytes cast to str
88 if not isinstance(str_, bytes):
87 if not isinstance(str_, bytes):
89 return str(str_)
88 return str(str_)
90
89
91 to_encoding = to_encoding or DEFAULT_ENCODINGS
90 to_encoding = to_encoding or DEFAULT_ENCODINGS
92 if not isinstance(to_encoding, (list, tuple)):
91 if not isinstance(to_encoding, (list, tuple)):
93 to_encoding = [to_encoding]
92 to_encoding = [to_encoding]
94
93
95 for enc in to_encoding:
94 for enc in to_encoding:
96 try:
95 try:
97 return str(str_, enc)
96 return str(str_, enc)
98 except UnicodeDecodeError:
97 except UnicodeDecodeError:
99 pass
98 pass
100
99
101 return str(str_, to_encoding[0], 'replace')
100 return str(str_, to_encoding[0], 'replace')
102
101
103
102
104 def safe_bytes(str_, from_encoding=None) -> bytes:
103 def safe_bytes(str_, from_encoding=None) -> bytes:
105 """
104 """
106 safe bytes function. Does few trick to turn str_ into bytes string:
105 safe bytes function. Does few trick to turn str_ into bytes string:
107
106
108 :param str_: string to decode
107 :param str_: string to decode
109 :param from_encoding: encode from this type UTF8 default
108 :param from_encoding: encode from this type UTF8 default
110 """
109 """
111 if isinstance(str_, bytes):
110 if isinstance(str_, bytes):
112 return str_
111 return str_
113
112
114 if not isinstance(str_, str):
113 if not isinstance(str_, str):
115 raise ValueError(f'safe_bytes cannot convert other types than str: got: {type(str_)}')
114 raise ValueError(f'safe_bytes cannot convert other types than str: got: {type(str_)}')
116
115
117 from_encoding = from_encoding or get_default_encodings()
116 from_encoding = from_encoding or get_default_encodings()
118 if not isinstance(from_encoding, (list, tuple)):
117 if not isinstance(from_encoding, (list, tuple)):
119 from_encoding = [from_encoding]
118 from_encoding = [from_encoding]
120
119
121 for enc in from_encoding:
120 for enc in from_encoding:
122 try:
121 try:
123 return str_.encode(enc)
122 return str_.encode(enc)
124 except UnicodeDecodeError:
123 except UnicodeDecodeError:
125 pass
124 pass
126
125
127 return str_.encode(from_encoding[0], 'replace')
126 return str_.encode(from_encoding[0], 'replace')
128
127
129
128
130 def ascii_bytes(str_, allow_bytes=False) -> bytes:
129 def ascii_bytes(str_, allow_bytes=False) -> bytes:
131 """
130 """
132 Simple conversion from str to bytes, with assumption that str_ is pure ASCII.
131 Simple conversion from str to bytes, with assumption that str_ is pure ASCII.
133 Fails with UnicodeError on invalid input.
132 Fails with UnicodeError on invalid input.
134 This should be used where encoding and "safe" ambiguity should be avoided.
133 This should be used where encoding and "safe" ambiguity should be avoided.
135 Where strings already have been encoded in other ways but still are unicode
134 Where strings already have been encoded in other ways but still are unicode
136 string - for example to hex, base64, json, urlencoding, or are known to be
135 string - for example to hex, base64, json, urlencoding, or are known to be
137 identifiers.
136 identifiers.
138 """
137 """
139 if allow_bytes and isinstance(str_, bytes):
138 if allow_bytes and isinstance(str_, bytes):
140 return str_
139 return str_
141
140
142 if not isinstance(str_, str):
141 if not isinstance(str_, str):
143 raise ValueError(f'ascii_bytes cannot convert other types than str: got: {type(str_)}')
142 raise ValueError(f'ascii_bytes cannot convert other types than str: got: {type(str_)}')
144 return str_.encode('ascii')
143 return str_.encode('ascii')
145
144
146
145
147 def ascii_str(str_) -> str:
146 def ascii_str(str_) -> str:
148 """
147 """
149 Simple conversion from bytes to str, with assumption that str_ is pure ASCII.
148 Simple conversion from bytes to str, with assumption that str_ is pure ASCII.
150 Fails with UnicodeError on invalid input.
149 Fails with UnicodeError on invalid input.
151 This should be used where encoding and "safe" ambiguity should be avoided.
150 This should be used where encoding and "safe" ambiguity should be avoided.
152 Where strings are encoded but also in other ways are known to be ASCII, and
151 Where strings are encoded but also in other ways are known to be ASCII, and
153 where a unicode string is wanted without caring about encoding. For example
152 where a unicode string is wanted without caring about encoding. For example
154 to hex, base64, urlencoding, or are known to be identifiers.
153 to hex, base64, urlencoding, or are known to be identifiers.
155 """
154 """
156
155
157 if not isinstance(str_, bytes):
156 if not isinstance(str_, bytes):
158 raise ValueError(f'ascii_str cannot convert other types than bytes: got: {type(str_)}')
157 raise ValueError(f'ascii_str cannot convert other types than bytes: got: {type(str_)}')
159 return str_.decode('ascii')
158 return str_.decode('ascii')
160
159
161
160
162 def convert_special_chars(str_) -> str:
161 def convert_special_chars(str_) -> str:
163 """
162 """
164 trie to replace non-ascii letters to their ascii representation eg::
163 trie to replace non-ascii letters to their ascii representation eg::
165
164
166 `żołw` converts into `zolw`
165 `żołw` converts into `zolw`
167 """
166 """
168 value = safe_str(str_)
167 value = safe_str(str_)
169 converted_value = unidecode(value)
168 converted_value = unidecode(value)
170 return converted_value
169 return converted_value
@@ -1,34 +1,33 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20
19
21 def html(info):
20 def html(info):
22 """
21 """
23 Custom string as html content_type renderer for pyramid
22 Custom string as html content_type renderer for pyramid
24 """
23 """
25 def _render(value, system):
24 def _render(value, system):
26 request = system.get('request')
25 request = system.get('request')
27 if request is not None:
26 if request is not None:
28 response = request.response
27 response = request.response
29 ct = response.content_type
28 ct = response.content_type
30 if ct == response.default_content_type:
29 if ct == response.default_content_type:
31 response.content_type = 'text/html'
30 response.content_type = 'text/html'
32 return value
31 return value
33
32
34 return _render
33 return _render
@@ -1,848 +1,846 b''
1
2
3 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21
19
22 import os
20 import os
23 import sys
21 import sys
24 import time
22 import time
25 import platform
23 import platform
26 import collections
24 import collections
27 import psutil
25 import psutil
28 from functools import wraps
26 from functools import wraps
29
27
30 import pkg_resources
28 import pkg_resources
31 import logging
29 import logging
32 import resource
30 import resource
33
31
34 import configparser
32 import configparser
35
33
36 from rhodecode.lib.str_utils import safe_str
34 from rhodecode.lib.str_utils import safe_str
37
35
38 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
39
37
40
38
41 _NA = 'NOT AVAILABLE'
39 _NA = 'NOT AVAILABLE'
42 _NA_FLOAT = 0.0
40 _NA_FLOAT = 0.0
43
41
44 STATE_OK = 'ok'
42 STATE_OK = 'ok'
45 STATE_ERR = 'error'
43 STATE_ERR = 'error'
46 STATE_WARN = 'warning'
44 STATE_WARN = 'warning'
47
45
48 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
46 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
49
47
50
48
51 registered_helpers = {}
49 registered_helpers = {}
52
50
53
51
54 def register_sysinfo(func):
52 def register_sysinfo(func):
55 """
53 """
56 @register_helper
54 @register_helper
57 def db_check():
55 def db_check():
58 pass
56 pass
59
57
60 db_check == registered_helpers['db_check']
58 db_check == registered_helpers['db_check']
61 """
59 """
62 global registered_helpers
60 global registered_helpers
63 registered_helpers[func.__name__] = func
61 registered_helpers[func.__name__] = func
64
62
65 @wraps(func)
63 @wraps(func)
66 def _wrapper(*args, **kwargs):
64 def _wrapper(*args, **kwargs):
67 return func(*args, **kwargs)
65 return func(*args, **kwargs)
68 return _wrapper
66 return _wrapper
69
67
70
68
71 # HELPERS
69 # HELPERS
72 def percentage(part: (int, float), whole: (int, float)):
70 def percentage(part: (int, float), whole: (int, float)):
73 whole = float(whole)
71 whole = float(whole)
74 if whole > 0:
72 if whole > 0:
75 return round(100 * float(part) / whole, 1)
73 return round(100 * float(part) / whole, 1)
76 return 0.0
74 return 0.0
77
75
78
76
79 def get_storage_size(storage_path):
77 def get_storage_size(storage_path):
80 sizes = []
78 sizes = []
81 for file_ in os.listdir(storage_path):
79 for file_ in os.listdir(storage_path):
82 storage_file = os.path.join(storage_path, file_)
80 storage_file = os.path.join(storage_path, file_)
83 if os.path.isfile(storage_file):
81 if os.path.isfile(storage_file):
84 try:
82 try:
85 sizes.append(os.path.getsize(storage_file))
83 sizes.append(os.path.getsize(storage_file))
86 except OSError:
84 except OSError:
87 log.exception('Failed to get size of storage file %s', storage_file)
85 log.exception('Failed to get size of storage file %s', storage_file)
88 pass
86 pass
89
87
90 return sum(sizes)
88 return sum(sizes)
91
89
92
90
93 def get_resource(resource_type):
91 def get_resource(resource_type):
94 try:
92 try:
95 return resource.getrlimit(resource_type)
93 return resource.getrlimit(resource_type)
96 except Exception:
94 except Exception:
97 return 'NOT_SUPPORTED'
95 return 'NOT_SUPPORTED'
98
96
99
97
100 def get_cert_path(ini_path):
98 def get_cert_path(ini_path):
101 default = '/etc/ssl/certs/ca-certificates.crt'
99 default = '/etc/ssl/certs/ca-certificates.crt'
102 control_ca_bundle = os.path.join(
100 control_ca_bundle = os.path.join(
103 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
101 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
104 '.rccontrol-profile/etc/ca-bundle.crt')
102 '.rccontrol-profile/etc/ca-bundle.crt')
105 if os.path.isfile(control_ca_bundle):
103 if os.path.isfile(control_ca_bundle):
106 default = control_ca_bundle
104 default = control_ca_bundle
107
105
108 return default
106 return default
109
107
110
108
111 class SysInfoRes(object):
109 class SysInfoRes(object):
112 def __init__(self, value, state=None, human_value=None):
110 def __init__(self, value, state=None, human_value=None):
113 self.value = value
111 self.value = value
114 self.state = state or STATE_OK_DEFAULT
112 self.state = state or STATE_OK_DEFAULT
115 self.human_value = human_value or value
113 self.human_value = human_value or value
116
114
117 def __json__(self):
115 def __json__(self):
118 return {
116 return {
119 'value': self.value,
117 'value': self.value,
120 'state': self.state,
118 'state': self.state,
121 'human_value': self.human_value,
119 'human_value': self.human_value,
122 }
120 }
123
121
124 def get_value(self):
122 def get_value(self):
125 return self.__json__()
123 return self.__json__()
126
124
127 def __str__(self):
125 def __str__(self):
128 return '<SysInfoRes({})>'.format(self.__json__())
126 return f'<SysInfoRes({self.__json__()})>'
129
127
130
128
131 class SysInfo(object):
129 class SysInfo(object):
132
130
133 def __init__(self, func_name, **kwargs):
131 def __init__(self, func_name, **kwargs):
134 self.function_name = func_name
132 self.function_name = func_name
135 self.value = _NA
133 self.value = _NA
136 self.state = None
134 self.state = None
137 self.kwargs = kwargs or {}
135 self.kwargs = kwargs or {}
138
136
139 def __call__(self):
137 def __call__(self):
140 computed = self.compute(**self.kwargs)
138 computed = self.compute(**self.kwargs)
141 if not isinstance(computed, SysInfoRes):
139 if not isinstance(computed, SysInfoRes):
142 raise ValueError(
140 raise ValueError(
143 'computed value for {} is not instance of '
141 'computed value for {} is not instance of '
144 '{}, got {} instead'.format(
142 '{}, got {} instead'.format(
145 self.function_name, SysInfoRes, type(computed)))
143 self.function_name, SysInfoRes, type(computed)))
146 return computed.__json__()
144 return computed.__json__()
147
145
148 def __str__(self):
146 def __str__(self):
149 return '<SysInfo({})>'.format(self.function_name)
147 return f'<SysInfo({self.function_name})>'
150
148
151 def compute(self, **kwargs):
149 def compute(self, **kwargs):
152 return self.function_name(**kwargs)
150 return self.function_name(**kwargs)
153
151
154
152
155 # SysInfo functions
153 # SysInfo functions
156 @register_sysinfo
154 @register_sysinfo
157 def python_info():
155 def python_info():
158 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
156 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
159 executable=sys.executable)
157 executable=sys.executable)
160 return SysInfoRes(value=value)
158 return SysInfoRes(value=value)
161
159
162
160
163 @register_sysinfo
161 @register_sysinfo
164 def py_modules():
162 def py_modules():
165 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
163 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
166 for p in pkg_resources.working_set])
164 for p in pkg_resources.working_set])
167
165
168 value = sorted(mods.items(), key=lambda k: k[0].lower())
166 value = sorted(mods.items(), key=lambda k: k[0].lower())
169 return SysInfoRes(value=value)
167 return SysInfoRes(value=value)
170
168
171
169
@register_sysinfo
def platform_type():
    """Report the OS platform string together with this installation's uuid."""
    from rhodecode.lib.utils import generate_platform_uuid

    value = dict(
        name=safe_str(platform.platform()),
        uuid=generate_platform_uuid(),
    )
    return SysInfoRes(value=value)
181
179
182
180
@register_sysinfo
def locale_info():
    """Collect locale configuration from the `locale` module and environment.

    Reports the default locale, the LC_ALL / LC_CTYPE categories, and the
    relevant environment variables (LANG, LC_ALL, LOCALE_ARCHIVE).
    """
    import locale

    def safe_get_locale(locale_name):
        # BUG FIX: the previous version dropped the successful result and
        # implicitly returned None; the computed locale must be returned.
        try:
            return locale.getlocale(locale_name)
        except TypeError:
            return f'FAILED_LOCALE_GET:{locale_name}'

    value = dict(
        locale_default=locale.getlocale(),
        locale_lc_all=safe_get_locale(locale.LC_ALL),
        locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
        lang_env=os.environ.get('LANG'),
        lc_all_env=os.environ.get('LC_ALL'),
        local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
    )
    human_value = (
        f"LANG: {value['lang_env']}, "
        f"locale LC_ALL: {value['locale_lc_all']}, "
        f"locale LC_CTYPE: {value['locale_lc_ctype']}, "
        f"Default locales: {value['locale_default']}"
    )

    return SysInfoRes(value=value, human_value=human_value)
208
206
209
207
210 @register_sysinfo
208 @register_sysinfo
211 def ulimit_info():
209 def ulimit_info():
212 data = collections.OrderedDict([
210 data = collections.OrderedDict([
213 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
211 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
214 ('file size', get_resource(resource.RLIMIT_FSIZE)),
212 ('file size', get_resource(resource.RLIMIT_FSIZE)),
215 ('stack size', get_resource(resource.RLIMIT_STACK)),
213 ('stack size', get_resource(resource.RLIMIT_STACK)),
216 ('core file size', get_resource(resource.RLIMIT_CORE)),
214 ('core file size', get_resource(resource.RLIMIT_CORE)),
217 ('address space size', get_resource(resource.RLIMIT_AS)),
215 ('address space size', get_resource(resource.RLIMIT_AS)),
218 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
216 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
219 ('heap size', get_resource(resource.RLIMIT_DATA)),
217 ('heap size', get_resource(resource.RLIMIT_DATA)),
220 ('rss size', get_resource(resource.RLIMIT_RSS)),
218 ('rss size', get_resource(resource.RLIMIT_RSS)),
221 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
219 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
222 ('open files', get_resource(resource.RLIMIT_NOFILE)),
220 ('open files', get_resource(resource.RLIMIT_NOFILE)),
223 ])
221 ])
224
222
225 text = ', '.join(f'{k}:{v}' for k, v in data.items())
223 text = ', '.join(f'{k}:{v}' for k, v in data.items())
226
224
227 value = {
225 value = {
228 'limits': data,
226 'limits': data,
229 'text': text,
227 'text': text,
230 }
228 }
231 return SysInfoRes(value=value)
229 return SysInfoRes(value=value)
232
230
233
231
234 @register_sysinfo
232 @register_sysinfo
235 def uptime():
233 def uptime():
236 from rhodecode.lib.helpers import age, time_to_datetime
234 from rhodecode.lib.helpers import age, time_to_datetime
237 from rhodecode.translation import TranslationString
235 from rhodecode.translation import TranslationString
238
236
239 value = dict(boot_time=0, uptime=0, text='')
237 value = dict(boot_time=0, uptime=0, text='')
240 state = STATE_OK_DEFAULT
238 state = STATE_OK_DEFAULT
241
239
242 boot_time = psutil.boot_time()
240 boot_time = psutil.boot_time()
243 value['boot_time'] = boot_time
241 value['boot_time'] = boot_time
244 value['uptime'] = time.time() - boot_time
242 value['uptime'] = time.time() - boot_time
245
243
246 date_or_age = age(time_to_datetime(boot_time))
244 date_or_age = age(time_to_datetime(boot_time))
247 if isinstance(date_or_age, TranslationString):
245 if isinstance(date_or_age, TranslationString):
248 date_or_age = date_or_age.interpolate()
246 date_or_age = date_or_age.interpolate()
249
247
250 human_value = value.copy()
248 human_value = value.copy()
251 human_value['boot_time'] = time_to_datetime(boot_time)
249 human_value['boot_time'] = time_to_datetime(boot_time)
252 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
250 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
253
251
254 human_value['text'] = 'Server started {}'.format(date_or_age)
252 human_value['text'] = f'Server started {date_or_age}'
255 return SysInfoRes(value=value, human_value=human_value)
253 return SysInfoRes(value=value, human_value=human_value)
256
254
257
255
258 @register_sysinfo
256 @register_sysinfo
259 def memory():
257 def memory():
260 from rhodecode.lib.helpers import format_byte_size_binary
258 from rhodecode.lib.helpers import format_byte_size_binary
261 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
259 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
262 percent_used=0, free=0, inactive=0, active=0, shared=0,
260 percent_used=0, free=0, inactive=0, active=0, shared=0,
263 total=0, buffers=0, text='')
261 total=0, buffers=0, text='')
264
262
265 state = STATE_OK_DEFAULT
263 state = STATE_OK_DEFAULT
266
264
267 value.update(dict(psutil.virtual_memory()._asdict()))
265 value.update(dict(psutil.virtual_memory()._asdict()))
268 value['used_real'] = value['total'] - value['available']
266 value['used_real'] = value['total'] - value['available']
269 value['percent_used'] = psutil._common.usage_percent(
267 value['percent_used'] = psutil._common.usage_percent(
270 value['used_real'], value['total'], 1)
268 value['used_real'], value['total'], 1)
271
269
272 human_value = value.copy()
270 human_value = value.copy()
273 human_value['text'] = '%s/%s, %s%% used' % (
271 human_value['text'] = '{}/{}, {}% used'.format(
274 format_byte_size_binary(value['used_real']),
272 format_byte_size_binary(value['used_real']),
275 format_byte_size_binary(value['total']),
273 format_byte_size_binary(value['total']),
276 value['percent_used'],)
274 value['percent_used'])
277
275
278 keys = list(value.keys())[::]
276 keys = list(value.keys())[::]
279 keys.pop(keys.index('percent'))
277 keys.pop(keys.index('percent'))
280 keys.pop(keys.index('percent_used'))
278 keys.pop(keys.index('percent_used'))
281 keys.pop(keys.index('text'))
279 keys.pop(keys.index('text'))
282 for k in keys:
280 for k in keys:
283 human_value[k] = format_byte_size_binary(value[k])
281 human_value[k] = format_byte_size_binary(value[k])
284
282
285 if state['type'] == STATE_OK and value['percent_used'] > 90:
283 if state['type'] == STATE_OK and value['percent_used'] > 90:
286 msg = 'Critical: your available RAM memory is very low.'
284 msg = 'Critical: your available RAM memory is very low.'
287 state = {'message': msg, 'type': STATE_ERR}
285 state = {'message': msg, 'type': STATE_ERR}
288
286
289 elif state['type'] == STATE_OK and value['percent_used'] > 70:
287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
290 msg = 'Warning: your available RAM memory is running low.'
288 msg = 'Warning: your available RAM memory is running low.'
291 state = {'message': msg, 'type': STATE_WARN}
289 state = {'message': msg, 'type': STATE_WARN}
292
290
293 return SysInfoRes(value=value, state=state, human_value=human_value)
291 return SysInfoRes(value=value, state=state, human_value=human_value)
294
292
295
293
296 @register_sysinfo
294 @register_sysinfo
297 def machine_load():
295 def machine_load():
298 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
296 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
299 state = STATE_OK_DEFAULT
297 state = STATE_OK_DEFAULT
300
298
301 # load averages
299 # load averages
302 if hasattr(psutil.os, 'getloadavg'):
300 if hasattr(psutil.os, 'getloadavg'):
303 value.update(dict(
301 value.update(dict(
304 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
302 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
305 ))
303 ))
306
304
307 human_value = value.copy()
305 human_value = value.copy()
308 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
306 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
309 value['1_min'], value['5_min'], value['15_min'])
307 value['1_min'], value['5_min'], value['15_min'])
310
308
311 if state['type'] == STATE_OK and value['15_min'] > 5.0:
309 if state['type'] == STATE_OK and value['15_min'] > 5.0:
312 msg = 'Warning: your machine load is very high.'
310 msg = 'Warning: your machine load is very high.'
313 state = {'message': msg, 'type': STATE_WARN}
311 state = {'message': msg, 'type': STATE_WARN}
314
312
315 return SysInfoRes(value=value, state=state, human_value=human_value)
313 return SysInfoRes(value=value, state=state, human_value=human_value)
316
314
317
315
318 @register_sysinfo
316 @register_sysinfo
319 def cpu():
317 def cpu():
320 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
318 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
321 state = STATE_OK_DEFAULT
319 state = STATE_OK_DEFAULT
322
320
323 value['cpu'] = psutil.cpu_percent(0.5)
321 value['cpu'] = psutil.cpu_percent(0.5)
324 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
322 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
325 value['cpu_count'] = psutil.cpu_count()
323 value['cpu_count'] = psutil.cpu_count()
326
324
327 human_value = value.copy()
325 human_value = value.copy()
328 human_value['text'] = '{} cores at {} %'.format(
326 human_value['text'] = '{} cores at {} %'.format(
329 value['cpu_count'], value['cpu'])
327 value['cpu_count'], value['cpu'])
330
328
331 return SysInfoRes(value=value, state=state, human_value=human_value)
329 return SysInfoRes(value=value, state=state, human_value=human_value)
332
330
333
331
334 @register_sysinfo
332 @register_sysinfo
335 def storage():
333 def storage():
336 from rhodecode.lib.helpers import format_byte_size_binary
334 from rhodecode.lib.helpers import format_byte_size_binary
337 from rhodecode.model.settings import VcsSettingsModel
335 from rhodecode.model.settings import VcsSettingsModel
338 path = VcsSettingsModel().get_repos_location()
336 path = VcsSettingsModel().get_repos_location()
339
337
340 value = dict(percent=0, used=0, total=0, path=path, text='')
338 value = dict(percent=0, used=0, total=0, path=path, text='')
341 state = STATE_OK_DEFAULT
339 state = STATE_OK_DEFAULT
342
340
343 try:
341 try:
344 value.update(dict(psutil.disk_usage(path)._asdict()))
342 value.update(dict(psutil.disk_usage(path)._asdict()))
345 except Exception as e:
343 except Exception as e:
346 log.exception('Failed to fetch disk info')
344 log.exception('Failed to fetch disk info')
347 state = {'message': str(e), 'type': STATE_ERR}
345 state = {'message': str(e), 'type': STATE_ERR}
348
346
349 human_value = value.copy()
347 human_value = value.copy()
350 human_value['used'] = format_byte_size_binary(value['used'])
348 human_value['used'] = format_byte_size_binary(value['used'])
351 human_value['total'] = format_byte_size_binary(value['total'])
349 human_value['total'] = format_byte_size_binary(value['total'])
352 human_value['text'] = "{}/{}, {}% used".format(
350 human_value['text'] = "{}/{}, {}% used".format(
353 format_byte_size_binary(value['used']),
351 format_byte_size_binary(value['used']),
354 format_byte_size_binary(value['total']),
352 format_byte_size_binary(value['total']),
355 value['percent'])
353 value['percent'])
356
354
357 if state['type'] == STATE_OK and value['percent'] > 90:
355 if state['type'] == STATE_OK and value['percent'] > 90:
358 msg = 'Critical: your disk space is very low.'
356 msg = 'Critical: your disk space is very low.'
359 state = {'message': msg, 'type': STATE_ERR}
357 state = {'message': msg, 'type': STATE_ERR}
360
358
361 elif state['type'] == STATE_OK and value['percent'] > 70:
359 elif state['type'] == STATE_OK and value['percent'] > 70:
362 msg = 'Warning: your disk space is running low.'
360 msg = 'Warning: your disk space is running low.'
363 state = {'message': msg, 'type': STATE_WARN}
361 state = {'message': msg, 'type': STATE_WARN}
364
362
365 return SysInfoRes(value=value, state=state, human_value=human_value)
363 return SysInfoRes(value=value, state=state, human_value=human_value)
366
364
367
365
368 @register_sysinfo
366 @register_sysinfo
369 def storage_inodes():
367 def storage_inodes():
370 from rhodecode.model.settings import VcsSettingsModel
368 from rhodecode.model.settings import VcsSettingsModel
371 path = VcsSettingsModel().get_repos_location()
369 path = VcsSettingsModel().get_repos_location()
372
370
373 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
371 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
374 state = STATE_OK_DEFAULT
372 state = STATE_OK_DEFAULT
375
373
376 try:
374 try:
377 i_stat = os.statvfs(path)
375 i_stat = os.statvfs(path)
378 value['free'] = i_stat.f_ffree
376 value['free'] = i_stat.f_ffree
379 value['used'] = i_stat.f_files-i_stat.f_favail
377 value['used'] = i_stat.f_files-i_stat.f_favail
380 value['total'] = i_stat.f_files
378 value['total'] = i_stat.f_files
381 value['percent'] = percentage(value['used'], value['total'])
379 value['percent'] = percentage(value['used'], value['total'])
382 except Exception as e:
380 except Exception as e:
383 log.exception('Failed to fetch disk inodes info')
381 log.exception('Failed to fetch disk inodes info')
384 state = {'message': str(e), 'type': STATE_ERR}
382 state = {'message': str(e), 'type': STATE_ERR}
385
383
386 human_value = value.copy()
384 human_value = value.copy()
387 human_value['text'] = "{}/{}, {}% used".format(
385 human_value['text'] = "{}/{}, {}% used".format(
388 value['used'], value['total'], value['percent'])
386 value['used'], value['total'], value['percent'])
389
387
390 if state['type'] == STATE_OK and value['percent'] > 90:
388 if state['type'] == STATE_OK and value['percent'] > 90:
391 msg = 'Critical: your disk free inodes are very low.'
389 msg = 'Critical: your disk free inodes are very low.'
392 state = {'message': msg, 'type': STATE_ERR}
390 state = {'message': msg, 'type': STATE_ERR}
393
391
394 elif state['type'] == STATE_OK and value['percent'] > 70:
392 elif state['type'] == STATE_OK and value['percent'] > 70:
395 msg = 'Warning: your disk free inodes are running low.'
393 msg = 'Warning: your disk free inodes are running low.'
396 state = {'message': msg, 'type': STATE_WARN}
394 state = {'message': msg, 'type': STATE_WARN}
397
395
398 return SysInfoRes(value=value, state=state, human_value=human_value)
396 return SysInfoRes(value=value, state=state, human_value=human_value)
399
397
400
398
401 @register_sysinfo
399 @register_sysinfo
402 def storage_archives():
400 def storage_archives():
403 import rhodecode
401 import rhodecode
404 from rhodecode.lib.utils import safe_str
402 from rhodecode.lib.utils import safe_str
405 from rhodecode.lib.helpers import format_byte_size_binary
403 from rhodecode.lib.helpers import format_byte_size_binary
406
404
407 msg = 'Archive cache storage is controlled by ' \
405 msg = 'Archive cache storage is controlled by ' \
408 'archive_cache.store_dir=/path/to/cache option in the .ini file'
406 'archive_cache.store_dir=/path/to/cache option in the .ini file'
409 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
407 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
410
408
411 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
409 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
412 state = STATE_OK_DEFAULT
410 state = STATE_OK_DEFAULT
413 try:
411 try:
414 items_count = 0
412 items_count = 0
415 used = 0
413 used = 0
416 for root, dirs, files in os.walk(path):
414 for root, dirs, files in os.walk(path):
417 if root == path:
415 if root == path:
418 items_count = len(dirs)
416 items_count = len(dirs)
419
417
420 for f in files:
418 for f in files:
421 try:
419 try:
422 used += os.path.getsize(os.path.join(root, f))
420 used += os.path.getsize(os.path.join(root, f))
423 except OSError:
421 except OSError:
424 pass
422 pass
425 value.update({
423 value.update({
426 'percent': 100,
424 'percent': 100,
427 'used': used,
425 'used': used,
428 'total': used,
426 'total': used,
429 'items': items_count
427 'items': items_count
430 })
428 })
431
429
432 except Exception as e:
430 except Exception as e:
433 log.exception('failed to fetch archive cache storage')
431 log.exception('failed to fetch archive cache storage')
434 state = {'message': str(e), 'type': STATE_ERR}
432 state = {'message': str(e), 'type': STATE_ERR}
435
433
436 human_value = value.copy()
434 human_value = value.copy()
437 human_value['used'] = format_byte_size_binary(value['used'])
435 human_value['used'] = format_byte_size_binary(value['used'])
438 human_value['total'] = format_byte_size_binary(value['total'])
436 human_value['total'] = format_byte_size_binary(value['total'])
439 human_value['text'] = "{} ({} items)".format(
437 human_value['text'] = "{} ({} items)".format(
440 human_value['used'], value['items'])
438 human_value['used'], value['items'])
441
439
442 return SysInfoRes(value=value, state=state, human_value=human_value)
440 return SysInfoRes(value=value, state=state, human_value=human_value)
443
441
444
442
445 @register_sysinfo
443 @register_sysinfo
446 def storage_gist():
444 def storage_gist():
447 from rhodecode.model.gist import GIST_STORE_LOC
445 from rhodecode.model.gist import GIST_STORE_LOC
448 from rhodecode.model.settings import VcsSettingsModel
446 from rhodecode.model.settings import VcsSettingsModel
449 from rhodecode.lib.utils import safe_str
447 from rhodecode.lib.utils import safe_str
450 from rhodecode.lib.helpers import format_byte_size_binary
448 from rhodecode.lib.helpers import format_byte_size_binary
451 path = safe_str(os.path.join(
449 path = safe_str(os.path.join(
452 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
450 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
453
451
454 # gist storage
452 # gist storage
455 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
453 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
456 state = STATE_OK_DEFAULT
454 state = STATE_OK_DEFAULT
457
455
458 try:
456 try:
459 items_count = 0
457 items_count = 0
460 used = 0
458 used = 0
461 for root, dirs, files in os.walk(path):
459 for root, dirs, files in os.walk(path):
462 if root == path:
460 if root == path:
463 items_count = len(dirs)
461 items_count = len(dirs)
464
462
465 for f in files:
463 for f in files:
466 try:
464 try:
467 used += os.path.getsize(os.path.join(root, f))
465 used += os.path.getsize(os.path.join(root, f))
468 except OSError:
466 except OSError:
469 pass
467 pass
470 value.update({
468 value.update({
471 'percent': 100,
469 'percent': 100,
472 'used': used,
470 'used': used,
473 'total': used,
471 'total': used,
474 'items': items_count
472 'items': items_count
475 })
473 })
476 except Exception as e:
474 except Exception as e:
477 log.exception('failed to fetch gist storage items')
475 log.exception('failed to fetch gist storage items')
478 state = {'message': str(e), 'type': STATE_ERR}
476 state = {'message': str(e), 'type': STATE_ERR}
479
477
480 human_value = value.copy()
478 human_value = value.copy()
481 human_value['used'] = format_byte_size_binary(value['used'])
479 human_value['used'] = format_byte_size_binary(value['used'])
482 human_value['total'] = format_byte_size_binary(value['total'])
480 human_value['total'] = format_byte_size_binary(value['total'])
483 human_value['text'] = "{} ({} items)".format(
481 human_value['text'] = "{} ({} items)".format(
484 human_value['used'], value['items'])
482 human_value['used'], value['items'])
485
483
486 return SysInfoRes(value=value, state=state, human_value=human_value)
484 return SysInfoRes(value=value, state=state, human_value=human_value)
487
485
488
486
489 @register_sysinfo
487 @register_sysinfo
490 def storage_temp():
488 def storage_temp():
491 import tempfile
489 import tempfile
492 from rhodecode.lib.helpers import format_byte_size_binary
490 from rhodecode.lib.helpers import format_byte_size_binary
493
491
494 path = tempfile.gettempdir()
492 path = tempfile.gettempdir()
495 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
493 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
496 state = STATE_OK_DEFAULT
494 state = STATE_OK_DEFAULT
497
495
498 if not psutil:
496 if not psutil:
499 return SysInfoRes(value=value, state=state)
497 return SysInfoRes(value=value, state=state)
500
498
501 try:
499 try:
502 value.update(dict(psutil.disk_usage(path)._asdict()))
500 value.update(dict(psutil.disk_usage(path)._asdict()))
503 except Exception as e:
501 except Exception as e:
504 log.exception('Failed to fetch temp dir info')
502 log.exception('Failed to fetch temp dir info')
505 state = {'message': str(e), 'type': STATE_ERR}
503 state = {'message': str(e), 'type': STATE_ERR}
506
504
507 human_value = value.copy()
505 human_value = value.copy()
508 human_value['used'] = format_byte_size_binary(value['used'])
506 human_value['used'] = format_byte_size_binary(value['used'])
509 human_value['total'] = format_byte_size_binary(value['total'])
507 human_value['total'] = format_byte_size_binary(value['total'])
510 human_value['text'] = "{}/{}, {}% used".format(
508 human_value['text'] = "{}/{}, {}% used".format(
511 format_byte_size_binary(value['used']),
509 format_byte_size_binary(value['used']),
512 format_byte_size_binary(value['total']),
510 format_byte_size_binary(value['total']),
513 value['percent'])
511 value['percent'])
514
512
515 return SysInfoRes(value=value, state=state, human_value=human_value)
513 return SysInfoRes(value=value, state=state, human_value=human_value)
516
514
517
515
518 @register_sysinfo
516 @register_sysinfo
519 def search_info():
517 def search_info():
520 import rhodecode
518 import rhodecode
521 from rhodecode.lib.index import searcher_from_config
519 from rhodecode.lib.index import searcher_from_config
522
520
523 backend = rhodecode.CONFIG.get('search.module', '')
521 backend = rhodecode.CONFIG.get('search.module', '')
524 location = rhodecode.CONFIG.get('search.location', '')
522 location = rhodecode.CONFIG.get('search.location', '')
525
523
526 try:
524 try:
527 searcher = searcher_from_config(rhodecode.CONFIG)
525 searcher = searcher_from_config(rhodecode.CONFIG)
528 searcher = searcher.__class__.__name__
526 searcher = searcher.__class__.__name__
529 except Exception:
527 except Exception:
530 searcher = None
528 searcher = None
531
529
532 value = dict(
530 value = dict(
533 backend=backend, searcher=searcher, location=location, text='')
531 backend=backend, searcher=searcher, location=location, text='')
534 state = STATE_OK_DEFAULT
532 state = STATE_OK_DEFAULT
535
533
536 human_value = value.copy()
534 human_value = value.copy()
537 human_value['text'] = "backend:`{}`".format(human_value['backend'])
535 human_value['text'] = "backend:`{}`".format(human_value['backend'])
538
536
539 return SysInfoRes(value=value, state=state, human_value=human_value)
537 return SysInfoRes(value=value, state=state, human_value=human_value)
540
538
541
539
542 @register_sysinfo
540 @register_sysinfo
543 def git_info():
541 def git_info():
544 from rhodecode.lib.vcs.backends import git
542 from rhodecode.lib.vcs.backends import git
545 state = STATE_OK_DEFAULT
543 state = STATE_OK_DEFAULT
546 value = human_value = ''
544 value = human_value = ''
547 try:
545 try:
548 value = git.discover_git_version(raise_on_exc=True)
546 value = git.discover_git_version(raise_on_exc=True)
549 human_value = 'version reported from VCSServer: {}'.format(value)
547 human_value = f'version reported from VCSServer: {value}'
550 except Exception as e:
548 except Exception as e:
551 state = {'message': str(e), 'type': STATE_ERR}
549 state = {'message': str(e), 'type': STATE_ERR}
552
550
553 return SysInfoRes(value=value, state=state, human_value=human_value)
551 return SysInfoRes(value=value, state=state, human_value=human_value)
554
552
555
553
556 @register_sysinfo
554 @register_sysinfo
557 def hg_info():
555 def hg_info():
558 from rhodecode.lib.vcs.backends import hg
556 from rhodecode.lib.vcs.backends import hg
559 state = STATE_OK_DEFAULT
557 state = STATE_OK_DEFAULT
560 value = human_value = ''
558 value = human_value = ''
561 try:
559 try:
562 value = hg.discover_hg_version(raise_on_exc=True)
560 value = hg.discover_hg_version(raise_on_exc=True)
563 human_value = 'version reported from VCSServer: {}'.format(value)
561 human_value = f'version reported from VCSServer: {value}'
564 except Exception as e:
562 except Exception as e:
565 state = {'message': str(e), 'type': STATE_ERR}
563 state = {'message': str(e), 'type': STATE_ERR}
566 return SysInfoRes(value=value, state=state, human_value=human_value)
564 return SysInfoRes(value=value, state=state, human_value=human_value)
567
565
568
566
569 @register_sysinfo
567 @register_sysinfo
570 def svn_info():
568 def svn_info():
571 from rhodecode.lib.vcs.backends import svn
569 from rhodecode.lib.vcs.backends import svn
572 state = STATE_OK_DEFAULT
570 state = STATE_OK_DEFAULT
573 value = human_value = ''
571 value = human_value = ''
574 try:
572 try:
575 value = svn.discover_svn_version(raise_on_exc=True)
573 value = svn.discover_svn_version(raise_on_exc=True)
576 human_value = 'version reported from VCSServer: {}'.format(value)
574 human_value = f'version reported from VCSServer: {value}'
577 except Exception as e:
575 except Exception as e:
578 state = {'message': str(e), 'type': STATE_ERR}
576 state = {'message': str(e), 'type': STATE_ERR}
579 return SysInfoRes(value=value, state=state, human_value=human_value)
577 return SysInfoRes(value=value, state=state, human_value=human_value)
580
578
581
579
582 @register_sysinfo
580 @register_sysinfo
583 def vcs_backends():
581 def vcs_backends():
584 import rhodecode
582 import rhodecode
585 value = rhodecode.CONFIG.get('vcs.backends')
583 value = rhodecode.CONFIG.get('vcs.backends')
586 human_value = 'Enabled backends in order: {}'.format(','.join(value))
584 human_value = 'Enabled backends in order: {}'.format(','.join(value))
587 return SysInfoRes(value=value, human_value=human_value)
585 return SysInfoRes(value=value, human_value=human_value)
588
586
589
587
590 @register_sysinfo
588 @register_sysinfo
591 def vcs_server():
589 def vcs_server():
592 import rhodecode
590 import rhodecode
593 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
591 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
594
592
595 server_url = rhodecode.CONFIG.get('vcs.server')
593 server_url = rhodecode.CONFIG.get('vcs.server')
596 enabled = rhodecode.CONFIG.get('vcs.server.enable')
594 enabled = rhodecode.CONFIG.get('vcs.server.enable')
597 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
595 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
598 state = STATE_OK_DEFAULT
596 state = STATE_OK_DEFAULT
599 version = None
597 version = None
600 workers = 0
598 workers = 0
601
599
602 try:
600 try:
603 data = get_vcsserver_service_data()
601 data = get_vcsserver_service_data()
604 if data and 'version' in data:
602 if data and 'version' in data:
605 version = data['version']
603 version = data['version']
606
604
607 if data and 'config' in data:
605 if data and 'config' in data:
608 conf = data['config']
606 conf = data['config']
609 workers = conf.get('workers', 'NOT AVAILABLE')
607 workers = conf.get('workers', 'NOT AVAILABLE')
610
608
611 connection = 'connected'
609 connection = 'connected'
612 except Exception as e:
610 except Exception as e:
613 connection = 'failed'
611 connection = 'failed'
614 state = {'message': str(e), 'type': STATE_ERR}
612 state = {'message': str(e), 'type': STATE_ERR}
615
613
616 value = dict(
614 value = dict(
617 url=server_url,
615 url=server_url,
618 enabled=enabled,
616 enabled=enabled,
619 protocol=protocol,
617 protocol=protocol,
620 connection=connection,
618 connection=connection,
621 version=version,
619 version=version,
622 text='',
620 text='',
623 )
621 )
624
622
625 human_value = value.copy()
623 human_value = value.copy()
626 human_value['text'] = \
624 human_value['text'] = \
627 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
625 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
628 url=server_url, ver=version, workers=workers, mode=protocol,
626 url=server_url, ver=version, workers=workers, mode=protocol,
629 conn=connection)
627 conn=connection)
630
628
631 return SysInfoRes(value=value, state=state, human_value=human_value)
629 return SysInfoRes(value=value, state=state, human_value=human_value)
632
630
633
631
634 @register_sysinfo
632 @register_sysinfo
635 def vcs_server_config():
633 def vcs_server_config():
636 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
634 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
637 state = STATE_OK_DEFAULT
635 state = STATE_OK_DEFAULT
638
636
639 value = {}
637 value = {}
640 try:
638 try:
641 data = get_vcsserver_service_data()
639 data = get_vcsserver_service_data()
642 value = data['app_config']
640 value = data['app_config']
643 except Exception as e:
641 except Exception as e:
644 state = {'message': str(e), 'type': STATE_ERR}
642 state = {'message': str(e), 'type': STATE_ERR}
645
643
646 human_value = value.copy()
644 human_value = value.copy()
647 human_value['text'] = 'VCS Server config'
645 human_value['text'] = 'VCS Server config'
648
646
649 return SysInfoRes(value=value, state=state, human_value=human_value)
647 return SysInfoRes(value=value, state=state, human_value=human_value)
650
648
651
649
652 @register_sysinfo
650 @register_sysinfo
653 def rhodecode_app_info():
651 def rhodecode_app_info():
654 import rhodecode
652 import rhodecode
655 edition = rhodecode.CONFIG.get('rhodecode.edition')
653 edition = rhodecode.CONFIG.get('rhodecode.edition')
656
654
657 value = dict(
655 value = dict(
658 rhodecode_version=rhodecode.__version__,
656 rhodecode_version=rhodecode.__version__,
659 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
657 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
660 text=''
658 text=''
661 )
659 )
662 human_value = value.copy()
660 human_value = value.copy()
663 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
661 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
664 edition=edition, ver=value['rhodecode_version']
662 edition=edition, ver=value['rhodecode_version']
665 )
663 )
666 return SysInfoRes(value=value, human_value=human_value)
664 return SysInfoRes(value=value, human_value=human_value)
667
665
668
666
@register_sysinfo
def rhodecode_config():
    """
    Expose the parsed .ini configuration for display, with all secret
    values removed or obfuscated, plus the config path and cert path.
    """
    import rhodecode

    path = rhodecode.CONFIG.get('__file__')
    rhodecode_ini_safe = rhodecode.CONFIG.copy()
    cert_path = get_cert_path(path)

    try:
        parser = configparser.ConfigParser()
        parser.read(path)
        parsed_ini = parser
        if parsed_ini.has_section('server:main'):
            parsed_ini = dict(parsed_ini.items('server:main'))
    except Exception:
        log.exception('Failed to read .ini file for display')
        parsed_ini = {}

    rhodecode_ini_safe['server:main'] = parsed_ini

    # plain keys are dropped entirely; (section, key) tuples obfuscate
    # the section's value in place
    blacklist = [
        'rhodecode_license_key',
        'routes.map',
        'sqlalchemy.db1.url',
        'channelstream.secret',
        'beaker.session.secret',
        'rhodecode.encrypted_values.secret',
        'rhodecode_auth_github_consumer_key',
        'rhodecode_auth_github_consumer_secret',
        'rhodecode_auth_google_consumer_key',
        'rhodecode_auth_google_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_key',
        'rhodecode_auth_twitter_consumer_secret',
        'rhodecode_auth_twitter_consumer_key',

        'rhodecode_auth_twitter_secret',
        'rhodecode_auth_github_secret',
        'rhodecode_auth_google_secret',
        'rhodecode_auth_bitbucket_secret',

        'appenlight.api_key',
        ('app_conf', 'sqlalchemy.db1.url')
    ]
    for entry in blacklist:
        if isinstance(entry, tuple):
            section, _key = entry
            if section in rhodecode_ini_safe:
                # NOTE(review): this replaces the whole section, not just
                # the single key -- preserved as-is
                rhodecode_ini_safe[section] = '**OBFUSCATED**'
        else:
            rhodecode_ini_safe.pop(entry, None)

    # TODO: maybe put some CONFIG checks here ?
    return SysInfoRes(value={'config': rhodecode_ini_safe,
                             'path': path, 'cert_path': cert_path})
723
721
724
722
@register_sysinfo
def database_info():
    """
    Report database backend, server version, connection URL and the current
    schema migration version; flags an error state on schema mismatch.
    """
    import rhodecode
    from sqlalchemy.engine import url as engine_url
    from rhodecode.model import meta
    from rhodecode.model.meta import Session
    from rhodecode.model.db import DbMigrateVersion

    state = STATE_OK_DEFAULT

    db_migrate = DbMigrateVersion.query().filter(
        DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()

    db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])

    try:
        engine = meta.get_engine()
        db_server_info = engine.dialect._get_server_version_info(
            Session.connection(bind=engine))
        db_version = '.'.join(str(part) for part in db_server_info)
    except Exception:
        log.exception('failed to fetch db version')
        db_version = 'UNKNOWN'

    db_info = {
        'migrate_version': db_migrate.version,
        'type': db_url_obj.get_backend_name(),
        'version': db_version,
        'url': repr(db_url_obj),
    }
    current_version = db_migrate.version
    expected_version = rhodecode.__dbversion__
    if state['type'] == STATE_OK and current_version != expected_version:
        msg = ('Critical: database schema mismatch, '
               f'expected version {expected_version}, got {current_version}. '
               'Please run migrations on your database.')
        state = {'message': msg, 'type': STATE_ERR}

    human_value = dict(db_info)
    human_value['url'] = "{} @ migration version: {}".format(
        db_info['url'], db_info['migrate_version'])
    human_value['version'] = f"{db_info['type']} {db_info['version']}"
    return SysInfoRes(value=db_info, state=state, human_value=human_value)
769
767
770
768
@register_sysinfo
def server_info(environ):
    """Report the server ip:port (taken from the WSGI environ) and instance id."""
    import rhodecode
    from rhodecode.lib.base import get_server_ip_addr, get_server_port

    ip_addr = get_server_ip_addr(environ, log_errors=False)
    port = get_server_port(environ)
    value = {
        'server_ip': f'{ip_addr}:{port}',
        'server_id': rhodecode.CONFIG.get('instance_id'),
    }
    return SysInfoRes(value=value)
784
782
785
783
@register_sysinfo
def usage_info():
    """Report user and repository counts, broken down per repository type."""
    from rhodecode.model.db import User, Repository

    def _repo_count(repo_type):
        # count repositories of a single VCS type
        return Repository.query().filter(
            Repository.repo_type == repo_type).count()

    value = {
        'users': User.query().count(),
        # SQLAlchemy requires the `== True` comparison form here
        'users_active': User.query().filter(User.active == True).count(),
        'repositories': Repository.query().count(),
        'repository_types': {
            'hg': _repo_count('hg'),
            'git': _repo_count('git'),
            'svn': _repo_count('svn'),
        },
    }
    return SysInfoRes(value=value)
803
801
804
802
def get_system_info(environ):
    """Collect every sysinfo entry into a single dict keyed by entry name."""
    environ = environ or {}

    def _run(helper, **kwargs):
        # execute a single sysinfo helper and return its result
        return SysInfo(helper, **kwargs)()

    return {
        'rhodecode_app': _run(rhodecode_app_info),
        'rhodecode_config': _run(rhodecode_config),
        'rhodecode_usage': _run(usage_info),
        'python': _run(python_info),
        'py_modules': _run(py_modules),

        'platform': _run(platform_type),
        'locale': _run(locale_info),
        'server': _run(server_info, environ=environ),
        'database': _run(database_info),
        'ulimit': _run(ulimit_info),
        'storage': _run(storage),
        'storage_inodes': _run(storage_inodes),
        'storage_archive': _run(storage_archives),
        'storage_gist': _run(storage_gist),
        'storage_temp': _run(storage_temp),

        'search': _run(search_info),

        'uptime': _run(uptime),
        'load': _run(machine_load),
        'cpu': _run(cpu),
        'memory': _run(memory),

        'vcs_backends': _run(vcs_backends),
        'vcs_server': _run(vcs_server),

        'vcs_server_config': _run(vcs_server_config),

        'git': _run(git_info),
        'hg': _run(hg_info),
        'svn': _run(svn_info),
    }
841
839
842
840
def load_system_info(key):
    """
    Run a single registered sysinfo helper selected by key, e.g.::

        get_sys_info('vcs_server')
        get_sys_info('database')
    """
    helper = registered_helpers[key]
    return SysInfo(helper)()
@@ -1,93 +1,91 b''
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 import logging
19 import logging
22
20
23 log = logging.getLogger(__name__)
21 log = logging.getLogger(__name__)
24
22
25
23
def str2bool(str_):
    """
    Translate the given value into a boolean.

    :param str_: value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if str_ is None:
        return False
    if str_ in [True, False]:
        # already boolean (or an int comparing equal to one) -- pass through
        return str_
    normalized = str(str_).strip().lower()
    return normalized in {'t', 'true', 'y', 'yes', 'on', '1'}
41
39
42
40
def aslist(obj, sep=None, strip=True) -> list:
    """
    Return the given object as a list.

    Strings are split on `sep` (whitespace when sep is None, matching
    ``str.split``); ``None`` and the empty string yield ``[]``; lists pass
    through unchanged; tuples are converted to a real list (the signature
    promises ``list``); any other value is wrapped in a one-element list.

    :param obj: value to coerce into a list
    :param sep: separator used when `obj` is a string
    :param strip: when True, strip whitespace from each split element
    """
    if isinstance(obj, str):
        if obj == '':
            return []
        lst = obj.split(sep)
        if strip:
            lst = [v.strip() for v in lst]
        return lst
    elif isinstance(obj, list):
        return obj
    elif isinstance(obj, tuple):
        # bugfix: previously a tuple leaked through unchanged, violating
        # the declared `-> list` return type
        return list(obj)
    elif obj is None:
        return []
    else:
        return [obj]
65
63
66
64
class AttributeDictBase(dict):
    """Dict subclass that exposes its keys as attributes; pickle-friendly."""

    def __getstate__(self):
        # expose the instance attribute dict for pickling
        return self.__dict__

    def __setstate__(self, state):
        self.__dict__ = state

    # attribute writes/deletes map straight onto item access
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
77
75
78
76
class StrictAttributeDict(AttributeDictBase):
    """
    Strict Version of Attribute dict which raises an Attribute error when
    requested attribute is not set
    """

    def __getattr__(self, attr):
        # missing keys surface as AttributeError, not KeyError
        if attr in self:
            return self[attr]
        raise AttributeError(f'{self.__class__} object has no attribute {attr}')
89
87
90
88
class AttributeDict(AttributeDictBase):
    """Attribute dict where missing attributes resolve to None."""

    def __getattr__(self, attr):
        return dict.get(self, attr, None)
@@ -1,128 +1,127 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 import logging
19 import logging
21
20
22 from whoosh.qparser.default import QueryParser, query
21 from whoosh.qparser.default import QueryParser, query
23 from whoosh.qparser.dateparse import DateParserPlugin
22 from whoosh.qparser.dateparse import DateParserPlugin
24 from whoosh.fields import (TEXT, Schema, DATETIME, KEYWORD)
23 from whoosh.fields import (TEXT, Schema, DATETIME, KEYWORD)
25 from sqlalchemy.sql.expression import or_, and_, not_, func
24 from sqlalchemy.sql.expression import or_, and_, not_, func
26
25
27 from rhodecode.model.db import UserLog
26 from rhodecode.model.db import UserLog
28 from rhodecode.lib.utils2 import remove_prefix, remove_suffix
27 from rhodecode.lib.utils2 import remove_prefix, remove_suffix
29 from rhodecode.lib.str_utils import safe_str
28 from rhodecode.lib.str_utils import safe_str
30
29
31
30
# JOURNAL SCHEMA used only to generate queries in journal. We use whoosh
# querylang to build sql queries and filter journals
# (per the note above, this schema only drives the query parser -- it is not
# used to build an index)
AUDIT_LOG_SCHEMA = Schema(
    username=KEYWORD(),      # exact-match field
    repository=KEYWORD(),    # exact-match field; also the default query field

    date=DATETIME(),         # enables date-range terms via DateParserPlugin
    action=TEXT(),
    ip=TEXT(),
)
42
41
43 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
44
43
45
44
def user_log_filter(user_log, search_term):
    """
    Filters sqlalchemy user_log based on search_term with whoosh Query language
    http://packages.python.org/Whoosh/querylang.html

    :param user_log: SQLAlchemy query over UserLog to be narrowed down
    :param search_term: whoosh query-language string typed by the user
    :returns: the (possibly) filtered user_log query
    """
    log.debug('Initial search term: %r', search_term)
    qry = None
    if search_term:
        # parse the raw term with whoosh; 'repository' is the default field
        qp = QueryParser('repository', schema=AUDIT_LOG_SCHEMA)
        qp.add_plugin(DateParserPlugin())
        qry = qp.parse(safe_str(search_term))
        log.debug('Filtering using parsed query %r', qry)

    def wildcard_handler(col, wc_term):
        # translate a whoosh wildcard term into an SQL LIKE-style filter
        if wc_term.startswith('*') and not wc_term.endswith('*'):
            # postfix == endswith
            wc_term = remove_prefix(wc_term, prefix='*')
            return func.lower(col).endswith(wc_term)
        elif wc_term.startswith('*') and wc_term.endswith('*'):
            # wildcard == ilike
            wc_term = remove_prefix(wc_term, prefix='*')
            wc_term = remove_suffix(wc_term, suffix='*')
            return func.lower(col).contains(wc_term)
        # NOTE(review): a trailing-only wildcard ('abc*') falls through and
        # returns None here -- presumably covered by the query.Prefix branch
        # below; confirm before relying on it.

    def get_filterion(field, val, term):
        # map a whoosh field name onto the matching UserLog column, then
        # build the SQLAlchemy filter expression for this term type

        if field == 'repository':
            field = getattr(UserLog, 'repository_name')
        elif field == 'ip':
            field = getattr(UserLog, 'user_ip')
        elif field == 'date':
            field = getattr(UserLog, 'action_date')
        elif field == 'username':
            field = getattr(UserLog, 'username')
        else:
            field = getattr(UserLog, field)
        log.debug('filter field: %s val=>%s', field, val)

        # sql filtering
        if isinstance(term, query.Wildcard):
            return wildcard_handler(field, val)
        elif isinstance(term, query.Prefix):
            return func.lower(field).startswith(func.lower(val))
        elif isinstance(term, query.DateRange):
            # val is a [startdate, enddate] pair for range terms
            return and_(field >= val[0], field <= val[1])
        elif isinstance(term, query.Not):
            return not_(field == val)
        # default: case-insensitive equality
        return func.lower(field) == func.lower(val)

    if isinstance(qry, (query.And, query.Not, query.Term, query.Prefix,
                        query.Wildcard, query.DateRange)):
        if not isinstance(qry, query.And):
            # normalize a single term into a list so one loop handles both
            qry = [qry]

        for term in qry:
            # extract (field, val) from each term according to its type
            if isinstance(term, query.Not):
                not_term = [z for z in term.leaves()][0]
                field = not_term.fieldname
                val = not_term.text
            elif isinstance(term, query.DateRange):
                field = term.fieldname
                val = [term.startdate, term.enddate]
            elif isinstance(term, query.NullQuery.__class__):
                # empty/unparsable sub-query -- skipped via the falsy field
                field = ''
                val = ''
            else:
                field = term.fieldname
                val = term.text
            if field:
                user_log = user_log.filter(get_filterion(field, val, term))
    elif isinstance(qry, query.Or):
        # OR queries: collect all sub-filters and combine with or_()
        filters = []
        for term in qry:
            field = term.fieldname
            val = (term.text if not isinstance(term, query.DateRange)
                   else [term.startdate, term.enddate])
            filters.append(get_filterion(field, val, term))
        user_log = user_log.filter(or_(*filters))

    return user_log
@@ -1,264 +1,261 b''
1
2
3 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 import os
19 import os
22 import re
20 import re
23 import time
21 import time
24 import datetime
22 import datetime
25 import dateutil
23 import dateutil
26 import pickle
24 import pickle
27
25
28 from rhodecode.model.db import DbSession, Session
26 from rhodecode.model.db import DbSession, Session
29
27
30
28
class CleanupCommand(Exception):
    """Exception type used by the auth-session cleanup machinery."""
33
31
34
32
class BaseAuthSessions(object):
    """
    Common interface for counting and cleaning authentication sessions
    stored in a specific backend (database / files / memcached / ...).
    """
    SESSION_TYPE = None
    NOT_AVAILABLE = 'NOT AVAILABLE'

    def __init__(self, config):
        # keep only the beaker session settings from the full app config
        self.config = {
            key: val for key, val in config.items()
            if key.startswith('beaker.session')
        }

    def get_count(self):
        """Return the total number of stored sessions."""
        raise NotImplementedError

    def get_expired_count(self, older_than_seconds=None):
        """Return the number of sessions older than the given age."""
        raise NotImplementedError

    def clean_sessions(self, older_than_seconds=None):
        """Remove sessions older than the given age."""
        raise NotImplementedError

    def _seconds_to_date(self, seconds):
        # translate an age in seconds into an absolute UTC cutoff datetime
        return datetime.datetime.utcnow() - dateutil.relativedelta.relativedelta(
            seconds=seconds)
58
56
59
57
class DbAuthSessions(BaseAuthSessions):
    """Session accounting/cleanup for beaker sessions kept in the database."""
    SESSION_TYPE = 'ext:database'

    def get_count(self):
        # total number of rows in the sessions table
        return DbSession.query().count()

    def get_expired_count(self, older_than_seconds=None):
        cutoff = self._seconds_to_date(older_than_seconds)
        return DbSession.query().filter(DbSession.accessed < cutoff).count()

    def clean_sessions(self, older_than_seconds=None):
        # count first, then bulk-delete the same set and commit
        cutoff = self._seconds_to_date(older_than_seconds)
        expired = DbSession.query().filter(DbSession.accessed < cutoff)
        removed = expired.count()
        expired.delete()
        Session().commit()
        return removed
76
74
77
75
class FileAuthSessions(BaseAuthSessions):
    """
    Session accounting/cleanup for beaker sessions stored as files on disk
    (under ``beaker.session.data_dir``).
    """
    SESSION_TYPE = 'file sessions'

    def _get_sessions_dir(self):
        # directory holding the beaker file sessions
        data_dir = self.config.get('beaker.session.data_dir')
        return data_dir

    def _count_on_filesystem(self, path, older_than=0, callback=None):
        """
        Walk `path` and count files older than `older_than` seconds.

        When `callback` is given it is invoked with each matching file path
        (e.g. to delete it) and counted in ``callbacks``; otherwise the file
        sizes are accumulated in ``used``. Unreadable files are skipped.
        """
        value = dict(percent=0, used=0, total=0, items=0, callbacks=0,
                     path=path, text='')
        items_count = 0
        used = 0
        callbacks = 0
        cur_time = time.time()
        for root, dirs, files in os.walk(path):
            for f in files:
                final_path = os.path.join(root, f)
                try:
                    mtime = os.stat(final_path).st_mtime
                    if (cur_time - mtime) > older_than:
                        items_count += 1
                        if callback:
                            # fix: the callback's return value was previously
                            # stored in an unused local; just invoke and count
                            callback(final_path)
                            callbacks += 1
                        else:
                            used += os.path.getsize(final_path)
                except OSError:
                    # file vanished or is unreadable -- best-effort, skip it
                    pass
        value.update({
            'percent': 100,
            'used': used,
            'total': used,
            'items': items_count,
            'callbacks': callbacks
        })
        return value

    def get_count(self):
        """Return total number of session files, or NOT_AVAILABLE on error."""
        try:
            sessions_dir = self._get_sessions_dir()
            items_count = self._count_on_filesystem(sessions_dir)['items']
        except Exception:
            items_count = self.NOT_AVAILABLE
        return items_count

    def get_expired_count(self, older_than_seconds=0):
        """Return number of session files older than the given age."""
        try:
            sessions_dir = self._get_sessions_dir()
            items_count = self._count_on_filesystem(
                sessions_dir, older_than=older_than_seconds)['items']
        except Exception:
            items_count = self.NOT_AVAILABLE
        return items_count

    def clean_sessions(self, older_than_seconds=0):
        """Delete session files older than the given age; return the count."""
        # find . -mtime +60 -exec rm {} \;

        sessions_dir = self._get_sessions_dir()

        def remove_item(path):
            os.remove(path)

        stats = self._count_on_filesystem(
            sessions_dir, older_than=older_than_seconds,
            callback=remove_item)
        return stats['callbacks']
144
142
145
143
146 class MemcachedAuthSessions(BaseAuthSessions):
144 class MemcachedAuthSessions(BaseAuthSessions):
147 SESSION_TYPE = 'ext:memcached'
145 SESSION_TYPE = 'ext:memcached'
148 _key_regex = re.compile(r'ITEM (.*_session) \[(.*); (.*)\]')
146 _key_regex = re.compile(r'ITEM (.*_session) \[(.*); (.*)\]')
149
147
150 def _get_client(self):
148 def _get_client(self):
151 import memcache
149 import memcache
152 client = memcache.Client([self.config.get('beaker.session.url')])
150 client = memcache.Client([self.config.get('beaker.session.url')])
153 return client
151 return client
154
152
155 def _get_telnet_client(self, host, port):
153 def _get_telnet_client(self, host, port):
156 import telnetlib
154 import telnetlib
157 client = telnetlib.Telnet(host, port, None)
155 client = telnetlib.Telnet(host, port, None)
158 return client
156 return client
159
157
160 def _run_telnet_cmd(self, client, cmd):
158 def _run_telnet_cmd(self, client, cmd):
161 client.write("%s\n" % cmd)
159 client.write("%s\n" % cmd)
162 return client.read_until('END')
160 return client.read_until('END')
163
161
164 def key_details(self, client, slab_ids, limit=100):
162 def key_details(self, client, slab_ids, limit=100):
165 """ Return a list of tuples containing keys and details """
163 """ Return a list of tuples containing keys and details """
166 cmd = 'stats cachedump %s %s'
164 cmd = 'stats cachedump %s %s'
167 for slab_id in slab_ids:
165 for slab_id in slab_ids:
168 for key in self._key_regex.finditer(
166 yield from self._key_regex.finditer(
169 self._run_telnet_cmd(client, cmd % (slab_id, limit))):
167 self._run_telnet_cmd(client, cmd % (slab_id, limit)))
170 yield key
171
168
172 def get_count(self):
169 def get_count(self):
173 client = self._get_client()
170 client = self._get_client()
174 count = self.NOT_AVAILABLE
171 count = self.NOT_AVAILABLE
175 try:
172 try:
176 slabs = []
173 slabs = []
177 for server, slabs_data in client.get_slabs():
174 for server, slabs_data in client.get_slabs():
178 slabs.extend(list(slabs_data.keys()))
175 slabs.extend(list(slabs_data.keys()))
179
176
180 host, port = client.servers[0].address
177 host, port = client.servers[0].address
181 telnet_client = self._get_telnet_client(host, port)
178 telnet_client = self._get_telnet_client(host, port)
182 keys = self.key_details(telnet_client, slabs)
179 keys = self.key_details(telnet_client, slabs)
183 count = 0
180 count = 0
184 for _k in keys:
181 for _k in keys:
185 count += 1
182 count += 1
186 except Exception:
183 except Exception:
187 return count
184 return count
188
185
189 return count
186 return count
190
187
191 def get_expired_count(self, older_than_seconds=None):
188 def get_expired_count(self, older_than_seconds=None):
192 return self.NOT_AVAILABLE
189 return self.NOT_AVAILABLE
193
190
194 def clean_sessions(self, older_than_seconds=None):
191 def clean_sessions(self, older_than_seconds=None):
195 raise CleanupCommand('Cleanup for this session type not yet available')
192 raise CleanupCommand('Cleanup for this session type not yet available')
196
193
197
194
198 class RedisAuthSessions(BaseAuthSessions):
195 class RedisAuthSessions(BaseAuthSessions):
199 SESSION_TYPE = 'ext:redis'
196 SESSION_TYPE = 'ext:redis'
200
197
201 def _get_client(self):
198 def _get_client(self):
202 import redis
199 import redis
203 args = {
200 args = {
204 'socket_timeout': 60,
201 'socket_timeout': 60,
205 'url': self.config.get('beaker.session.url')
202 'url': self.config.get('beaker.session.url')
206 }
203 }
207
204
208 client = redis.StrictRedis.from_url(**args)
205 client = redis.StrictRedis.from_url(**args)
209 return client
206 return client
210
207
211 def get_count(self):
208 def get_count(self):
212 client = self._get_client()
209 client = self._get_client()
213 return len(client.keys('beaker_cache:*'))
210 return len(client.keys('beaker_cache:*'))
214
211
215 def get_expired_count(self, older_than_seconds=None):
212 def get_expired_count(self, older_than_seconds=None):
216 expiry_date = self._seconds_to_date(older_than_seconds)
213 expiry_date = self._seconds_to_date(older_than_seconds)
217 return self.NOT_AVAILABLE
214 return self.NOT_AVAILABLE
218
215
219 def clean_sessions(self, older_than_seconds=None):
216 def clean_sessions(self, older_than_seconds=None):
220 client = self._get_client()
217 client = self._get_client()
221 expiry_time = time.time() - older_than_seconds
218 expiry_time = time.time() - older_than_seconds
222 deleted_keys = 0
219 deleted_keys = 0
223 for key in client.keys('beaker_cache:*'):
220 for key in client.keys('beaker_cache:*'):
224 data = client.get(key)
221 data = client.get(key)
225 if data:
222 if data:
226 json_data = pickle.loads(data)
223 json_data = pickle.loads(data)
227 try:
224 try:
228 accessed_time = json_data['_accessed_time']
225 accessed_time = json_data['_accessed_time']
229 except KeyError:
226 except KeyError:
230 accessed_time = 0
227 accessed_time = 0
231 if accessed_time < expiry_time:
228 if accessed_time < expiry_time:
232 client.delete(key)
229 client.delete(key)
233 deleted_keys += 1
230 deleted_keys += 1
234
231
235 return deleted_keys
232 return deleted_keys
236
233
237
234
238 class MemoryAuthSessions(BaseAuthSessions):
235 class MemoryAuthSessions(BaseAuthSessions):
239 SESSION_TYPE = 'memory'
236 SESSION_TYPE = 'memory'
240
237
241 def get_count(self):
238 def get_count(self):
242 return self.NOT_AVAILABLE
239 return self.NOT_AVAILABLE
243
240
244 def get_expired_count(self, older_than_seconds=None):
241 def get_expired_count(self, older_than_seconds=None):
245 return self.NOT_AVAILABLE
242 return self.NOT_AVAILABLE
246
243
247 def clean_sessions(self, older_than_seconds=None):
244 def clean_sessions(self, older_than_seconds=None):
248 raise CleanupCommand('Cleanup for this session type not yet available')
245 raise CleanupCommand('Cleanup for this session type not yet available')
249
246
250
247
251 def get_session_handler(session_type):
248 def get_session_handler(session_type):
252 types = {
249 types = {
253 'file': FileAuthSessions,
250 'file': FileAuthSessions,
254 'ext:memcached': MemcachedAuthSessions,
251 'ext:memcached': MemcachedAuthSessions,
255 'ext:redis': RedisAuthSessions,
252 'ext:redis': RedisAuthSessions,
256 'ext:database': DbAuthSessions,
253 'ext:database': DbAuthSessions,
257 'memory': MemoryAuthSessions
254 'memory': MemoryAuthSessions
258 }
255 }
259
256
260 try:
257 try:
261 return types[session_type]
258 return types[session_type]
262 except KeyError:
259 except KeyError:
263 raise ValueError(
260 raise ValueError(
264 'This type {} is not supported'.format(session_type))
261 f'This type {session_type} is not supported')
@@ -1,809 +1,807 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 """
19 """
21 Utilities library for RhodeCode
20 Utilities library for RhodeCode
22 """
21 """
23
22
24 import datetime
23 import datetime
25 import decorator
24 import decorator
26 import logging
25 import logging
27 import os
26 import os
28 import re
27 import re
29 import sys
28 import sys
30 import shutil
29 import shutil
31 import socket
30 import socket
32 import tempfile
31 import tempfile
33 import traceback
32 import traceback
34 import tarfile
33 import tarfile
35 import warnings
34 import warnings
36 from os.path import join as jn
35 from os.path import join as jn
37
36
38 import paste
37 import paste
39 import pkg_resources
38 import pkg_resources
40 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
39 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
41
40
42 from mako import exceptions
41 from mako import exceptions
43
42
44 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
43 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
45 from rhodecode.lib.str_utils import safe_bytes, safe_str
44 from rhodecode.lib.str_utils import safe_bytes, safe_str
46 from rhodecode.lib.vcs.backends.base import Config
45 from rhodecode.lib.vcs.backends.base import Config
47 from rhodecode.lib.vcs.exceptions import VCSError
46 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
47 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 from rhodecode.lib.ext_json import sjson as json
48 from rhodecode.lib.ext_json import sjson as json
50 from rhodecode.model import meta
49 from rhodecode.model import meta
51 from rhodecode.model.db import (
50 from rhodecode.model.db import (
52 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
51 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
53 from rhodecode.model.meta import Session
52 from rhodecode.model.meta import Session
54
53
55
54
56 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
57
56
58 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
57 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
59
58
60 # String which contains characters that are not allowed in slug names for
59 # String which contains characters that are not allowed in slug names for
61 # repositories or repository groups. It is properly escaped to use it in
60 # repositories or repository groups. It is properly escaped to use it in
62 # regular expressions.
61 # regular expressions.
63 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
62 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
64
63
65 # Regex that matches forbidden characters in repo/group slugs.
64 # Regex that matches forbidden characters in repo/group slugs.
66 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
65 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
67
66
68 # Regex that matches allowed characters in repo/group slugs.
67 # Regex that matches allowed characters in repo/group slugs.
69 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
68 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
70
69
71 # Regex that matches whole repo/group slugs.
70 # Regex that matches whole repo/group slugs.
72 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
71 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
73
72
74 _license_cache = None
73 _license_cache = None
75
74
76
75
77 def repo_name_slug(value):
76 def repo_name_slug(value):
78 """
77 """
79 Return slug of name of repository
78 Return slug of name of repository
80 This function is called on each creation/modification
79 This function is called on each creation/modification
81 of repository to prevent bad names in repo
80 of repository to prevent bad names in repo
82 """
81 """
83
82
84 replacement_char = '-'
83 replacement_char = '-'
85
84
86 slug = strip_tags(value)
85 slug = strip_tags(value)
87 slug = convert_accented_entities(slug)
86 slug = convert_accented_entities(slug)
88 slug = convert_misc_entities(slug)
87 slug = convert_misc_entities(slug)
89
88
90 slug = SLUG_BAD_CHAR_RE.sub('', slug)
89 slug = SLUG_BAD_CHAR_RE.sub('', slug)
91 slug = re.sub(r'[\s]+', '-', slug)
90 slug = re.sub(r'[\s]+', '-', slug)
92 slug = collapse(slug, replacement_char)
91 slug = collapse(slug, replacement_char)
93
92
94 return slug
93 return slug
95
94
96
95
97 #==============================================================================
96 #==============================================================================
98 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
97 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
99 #==============================================================================
98 #==============================================================================
100 def get_repo_slug(request):
99 def get_repo_slug(request):
101 _repo = ''
100 _repo = ''
102
101
103 if hasattr(request, 'db_repo_name'):
102 if hasattr(request, 'db_repo_name'):
104 # if our requests has set db reference use it for name, this
103 # if our requests has set db reference use it for name, this
105 # translates the example.com/_<id> into proper repo names
104 # translates the example.com/_<id> into proper repo names
106 _repo = request.db_repo_name
105 _repo = request.db_repo_name
107 elif getattr(request, 'matchdict', None):
106 elif getattr(request, 'matchdict', None):
108 # pyramid
107 # pyramid
109 _repo = request.matchdict.get('repo_name')
108 _repo = request.matchdict.get('repo_name')
110
109
111 if _repo:
110 if _repo:
112 _repo = _repo.rstrip('/')
111 _repo = _repo.rstrip('/')
113 return _repo
112 return _repo
114
113
115
114
116 def get_repo_group_slug(request):
115 def get_repo_group_slug(request):
117 _group = ''
116 _group = ''
118 if hasattr(request, 'db_repo_group'):
117 if hasattr(request, 'db_repo_group'):
119 # if our requests has set db reference use it for name, this
118 # if our requests has set db reference use it for name, this
120 # translates the example.com/_<id> into proper repo group names
119 # translates the example.com/_<id> into proper repo group names
121 _group = request.db_repo_group.group_name
120 _group = request.db_repo_group.group_name
122 elif getattr(request, 'matchdict', None):
121 elif getattr(request, 'matchdict', None):
123 # pyramid
122 # pyramid
124 _group = request.matchdict.get('repo_group_name')
123 _group = request.matchdict.get('repo_group_name')
125
124
126 if _group:
125 if _group:
127 _group = _group.rstrip('/')
126 _group = _group.rstrip('/')
128 return _group
127 return _group
129
128
130
129
131 def get_user_group_slug(request):
130 def get_user_group_slug(request):
132 _user_group = ''
131 _user_group = ''
133
132
134 if hasattr(request, 'db_user_group'):
133 if hasattr(request, 'db_user_group'):
135 _user_group = request.db_user_group.users_group_name
134 _user_group = request.db_user_group.users_group_name
136 elif getattr(request, 'matchdict', None):
135 elif getattr(request, 'matchdict', None):
137 # pyramid
136 # pyramid
138 _user_group = request.matchdict.get('user_group_id')
137 _user_group = request.matchdict.get('user_group_id')
139 _user_group_name = request.matchdict.get('user_group_name')
138 _user_group_name = request.matchdict.get('user_group_name')
140 try:
139 try:
141 if _user_group:
140 if _user_group:
142 _user_group = UserGroup.get(_user_group)
141 _user_group = UserGroup.get(_user_group)
143 elif _user_group_name:
142 elif _user_group_name:
144 _user_group = UserGroup.get_by_group_name(_user_group_name)
143 _user_group = UserGroup.get_by_group_name(_user_group_name)
145
144
146 if _user_group:
145 if _user_group:
147 _user_group = _user_group.users_group_name
146 _user_group = _user_group.users_group_name
148 except Exception:
147 except Exception:
149 log.exception('Failed to get user group by id and name')
148 log.exception('Failed to get user group by id and name')
150 # catch all failures here
149 # catch all failures here
151 return None
150 return None
152
151
153 return _user_group
152 return _user_group
154
153
155
154
156 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
155 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
157 """
156 """
158 Scans given path for repos and return (name,(type,path)) tuple
157 Scans given path for repos and return (name,(type,path)) tuple
159
158
160 :param path: path to scan for repositories
159 :param path: path to scan for repositories
161 :param recursive: recursive search and return names with subdirs in front
160 :param recursive: recursive search and return names with subdirs in front
162 """
161 """
163
162
164 # remove ending slash for better results
163 # remove ending slash for better results
165 path = path.rstrip(os.sep)
164 path = path.rstrip(os.sep)
166 log.debug('now scanning in %s location recursive:%s...', path, recursive)
165 log.debug('now scanning in %s location recursive:%s...', path, recursive)
167
166
168 def _get_repos(p):
167 def _get_repos(p):
169 dirpaths = get_dirpaths(p)
168 dirpaths = get_dirpaths(p)
170 if not _is_dir_writable(p):
169 if not _is_dir_writable(p):
171 log.warning('repo path without write access: %s', p)
170 log.warning('repo path without write access: %s', p)
172
171
173 for dirpath in dirpaths:
172 for dirpath in dirpaths:
174 if os.path.isfile(os.path.join(p, dirpath)):
173 if os.path.isfile(os.path.join(p, dirpath)):
175 continue
174 continue
176 cur_path = os.path.join(p, dirpath)
175 cur_path = os.path.join(p, dirpath)
177
176
178 # skip removed repos
177 # skip removed repos
179 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
178 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
180 continue
179 continue
181
180
182 #skip .<somethin> dirs
181 #skip .<somethin> dirs
183 if dirpath.startswith('.'):
182 if dirpath.startswith('.'):
184 continue
183 continue
185
184
186 try:
185 try:
187 scm_info = get_scm(cur_path)
186 scm_info = get_scm(cur_path)
188 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
187 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
189 except VCSError:
188 except VCSError:
190 if not recursive:
189 if not recursive:
191 continue
190 continue
192 #check if this dir containts other repos for recursive scan
191 #check if this dir containts other repos for recursive scan
193 rec_path = os.path.join(p, dirpath)
192 rec_path = os.path.join(p, dirpath)
194 if os.path.isdir(rec_path):
193 if os.path.isdir(rec_path):
195 for inner_scm in _get_repos(rec_path):
194 yield from _get_repos(rec_path)
196 yield inner_scm
197
195
198 return _get_repos(path)
196 return _get_repos(path)
199
197
200
198
201 def get_dirpaths(p: str) -> list:
199 def get_dirpaths(p: str) -> list:
202 try:
200 try:
203 # OS-independable way of checking if we have at least read-only
201 # OS-independable way of checking if we have at least read-only
204 # access or not.
202 # access or not.
205 dirpaths = os.listdir(p)
203 dirpaths = os.listdir(p)
206 except OSError:
204 except OSError:
207 log.warning('ignoring repo path without read access: %s', p)
205 log.warning('ignoring repo path without read access: %s', p)
208 return []
206 return []
209
207
210 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
208 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
211 # decode paths and suddenly returns unicode objects itself. The items it
209 # decode paths and suddenly returns unicode objects itself. The items it
212 # cannot decode are returned as strings and cause issues.
210 # cannot decode are returned as strings and cause issues.
213 #
211 #
214 # Those paths are ignored here until a solid solution for path handling has
212 # Those paths are ignored here until a solid solution for path handling has
215 # been built.
213 # been built.
216 expected_type = type(p)
214 expected_type = type(p)
217
215
218 def _has_correct_type(item):
216 def _has_correct_type(item):
219 if type(item) is not expected_type:
217 if type(item) is not expected_type:
220 log.error(
218 log.error(
221 "Ignoring path %s since it cannot be decoded into str.",
219 "Ignoring path %s since it cannot be decoded into str.",
222 # Using "repr" to make sure that we see the byte value in case
220 # Using "repr" to make sure that we see the byte value in case
223 # of support.
221 # of support.
224 repr(item))
222 repr(item))
225 return False
223 return False
226 return True
224 return True
227
225
228 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
229
227
230 return dirpaths
228 return dirpaths
231
229
232
230
233 def _is_dir_writable(path):
231 def _is_dir_writable(path):
234 """
232 """
235 Probe if `path` is writable.
233 Probe if `path` is writable.
236
234
237 Due to trouble on Cygwin / Windows, this is actually probing if it is
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
238 possible to create a file inside of `path`, stat does not produce reliable
236 possible to create a file inside of `path`, stat does not produce reliable
239 results in this case.
237 results in this case.
240 """
238 """
241 try:
239 try:
242 with tempfile.TemporaryFile(dir=path):
240 with tempfile.TemporaryFile(dir=path):
243 pass
241 pass
244 except OSError:
242 except OSError:
245 return False
243 return False
246 return True
244 return True
247
245
248
246
249 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
247 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
250 """
248 """
251 Returns True if given path is a valid repository False otherwise.
249 Returns True if given path is a valid repository False otherwise.
252 If expect_scm param is given also, compare if given scm is the same
250 If expect_scm param is given also, compare if given scm is the same
253 as expected from scm parameter. If explicit_scm is given don't try to
251 as expected from scm parameter. If explicit_scm is given don't try to
254 detect the scm, just use the given one to check if repo is valid
252 detect the scm, just use the given one to check if repo is valid
255
253
256 :param repo_name:
254 :param repo_name:
257 :param base_path:
255 :param base_path:
258 :param expect_scm:
256 :param expect_scm:
259 :param explicit_scm:
257 :param explicit_scm:
260 :param config:
258 :param config:
261
259
262 :return True: if given path is a valid repository
260 :return True: if given path is a valid repository
263 """
261 """
264 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
262 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
265 log.debug('Checking if `%s` is a valid path for repository. '
263 log.debug('Checking if `%s` is a valid path for repository. '
266 'Explicit type: %s', repo_name, explicit_scm)
264 'Explicit type: %s', repo_name, explicit_scm)
267
265
268 try:
266 try:
269 if explicit_scm:
267 if explicit_scm:
270 detected_scms = [get_scm_backend(explicit_scm)(
268 detected_scms = [get_scm_backend(explicit_scm)(
271 full_path, config=config).alias]
269 full_path, config=config).alias]
272 else:
270 else:
273 detected_scms = get_scm(full_path)
271 detected_scms = get_scm(full_path)
274
272
275 if expect_scm:
273 if expect_scm:
276 return detected_scms[0] == expect_scm
274 return detected_scms[0] == expect_scm
277 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
275 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
278 return True
276 return True
279 except VCSError:
277 except VCSError:
280 log.debug('path: %s is not a valid repo !', full_path)
278 log.debug('path: %s is not a valid repo !', full_path)
281 return False
279 return False
282
280
283
281
284 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
282 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
285 """
283 """
286 Returns True if given path is a repository group, False otherwise
284 Returns True if given path is a repository group, False otherwise
287
285
288 :param repo_name:
286 :param repo_name:
289 :param base_path:
287 :param base_path:
290 """
288 """
291 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
289 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
292 log.debug('Checking if `%s` is a valid path for repository group',
290 log.debug('Checking if `%s` is a valid path for repository group',
293 repo_group_name)
291 repo_group_name)
294
292
295 # check if it's not a repo
293 # check if it's not a repo
296 if is_valid_repo(repo_group_name, base_path):
294 if is_valid_repo(repo_group_name, base_path):
297 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
295 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
298 return False
296 return False
299
297
300 try:
298 try:
301 # we need to check bare git repos at higher level
299 # we need to check bare git repos at higher level
302 # since we might match branches/hooks/info/objects or possible
300 # since we might match branches/hooks/info/objects or possible
303 # other things inside bare git repo
301 # other things inside bare git repo
304 maybe_repo = os.path.dirname(full_path)
302 maybe_repo = os.path.dirname(full_path)
305 if maybe_repo == base_path:
303 if maybe_repo == base_path:
306 # skip root level repo check, we know root location CANNOT BE a repo group
304 # skip root level repo check, we know root location CANNOT BE a repo group
307 return False
305 return False
308
306
309 scm_ = get_scm(maybe_repo)
307 scm_ = get_scm(maybe_repo)
310 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
308 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
311 return False
309 return False
312 except VCSError:
310 except VCSError:
313 pass
311 pass
314
312
315 # check if it's a valid path
313 # check if it's a valid path
316 if skip_path_check or os.path.isdir(full_path):
314 if skip_path_check or os.path.isdir(full_path):
317 log.debug('path: %s is a valid repo group !', full_path)
315 log.debug('path: %s is a valid repo group !', full_path)
318 return True
316 return True
319
317
320 log.debug('path: %s is not a valid repo group !', full_path)
318 log.debug('path: %s is not a valid repo group !', full_path)
321 return False
319 return False
322
320
323
321
324 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
322 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
325 while True:
323 while True:
326 ok = eval(input(prompt))
324 ok = eval(input(prompt))
327 if ok.lower() in ('y', 'ye', 'yes'):
325 if ok.lower() in ('y', 'ye', 'yes'):
328 return True
326 return True
329 if ok.lower() in ('n', 'no', 'nop', 'nope'):
327 if ok.lower() in ('n', 'no', 'nop', 'nope'):
330 return False
328 return False
331 retries = retries - 1
329 retries = retries - 1
332 if retries < 0:
330 if retries < 0:
333 raise IOError
331 raise OSError
334 print(complaint)
332 print(complaint)
335
333
336 # propagated from mercurial documentation
334 # propagated from mercurial documentation
337 ui_sections = [
335 ui_sections = [
338 'alias', 'auth',
336 'alias', 'auth',
339 'decode/encode', 'defaults',
337 'decode/encode', 'defaults',
340 'diff', 'email',
338 'diff', 'email',
341 'extensions', 'format',
339 'extensions', 'format',
342 'merge-patterns', 'merge-tools',
340 'merge-patterns', 'merge-tools',
343 'hooks', 'http_proxy',
341 'hooks', 'http_proxy',
344 'smtp', 'patch',
342 'smtp', 'patch',
345 'paths', 'profiling',
343 'paths', 'profiling',
346 'server', 'trusted',
344 'server', 'trusted',
347 'ui', 'web', ]
345 'ui', 'web', ]
348
346
349
347
350 def config_data_from_db(clear_session=True, repo=None):
348 def config_data_from_db(clear_session=True, repo=None):
351 """
349 """
352 Read the configuration data from the database and return configuration
350 Read the configuration data from the database and return configuration
353 tuples.
351 tuples.
354 """
352 """
355 from rhodecode.model.settings import VcsSettingsModel
353 from rhodecode.model.settings import VcsSettingsModel
356
354
357 config = []
355 config = []
358
356
359 sa = meta.Session()
357 sa = meta.Session()
360 settings_model = VcsSettingsModel(repo=repo, sa=sa)
358 settings_model = VcsSettingsModel(repo=repo, sa=sa)
361
359
362 ui_settings = settings_model.get_ui_settings()
360 ui_settings = settings_model.get_ui_settings()
363
361
364 ui_data = []
362 ui_data = []
365 for setting in ui_settings:
363 for setting in ui_settings:
366 if setting.active:
364 if setting.active:
367 ui_data.append((setting.section, setting.key, setting.value))
365 ui_data.append((setting.section, setting.key, setting.value))
368 config.append((
366 config.append((
369 safe_str(setting.section), safe_str(setting.key),
367 safe_str(setting.section), safe_str(setting.key),
370 safe_str(setting.value)))
368 safe_str(setting.value)))
371 if setting.key == 'push_ssl':
369 if setting.key == 'push_ssl':
372 # force set push_ssl requirement to False, rhodecode
370 # force set push_ssl requirement to False, rhodecode
373 # handles that
371 # handles that
374 config.append((
372 config.append((
375 safe_str(setting.section), safe_str(setting.key), False))
373 safe_str(setting.section), safe_str(setting.key), False))
376 log.debug(
374 log.debug(
377 'settings ui from db@repo[%s]: %s',
375 'settings ui from db@repo[%s]: %s',
378 repo,
376 repo,
379 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
377 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
380 if clear_session:
378 if clear_session:
381 meta.Session.remove()
379 meta.Session.remove()
382
380
383 # TODO: mikhail: probably it makes no sense to re-read hooks information.
381 # TODO: mikhail: probably it makes no sense to re-read hooks information.
384 # It's already there and activated/deactivated
382 # It's already there and activated/deactivated
385 skip_entries = []
383 skip_entries = []
386 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
384 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
387 if 'pull' not in enabled_hook_classes:
385 if 'pull' not in enabled_hook_classes:
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
389 if 'push' not in enabled_hook_classes:
387 if 'push' not in enabled_hook_classes:
390 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
391 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
389 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
392 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
390 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
393
391
394 config = [entry for entry in config if entry[:2] not in skip_entries]
392 config = [entry for entry in config if entry[:2] not in skip_entries]
395
393
396 return config
394 return config
397
395
398
396
399 def make_db_config(clear_session=True, repo=None):
397 def make_db_config(clear_session=True, repo=None):
400 """
398 """
401 Create a :class:`Config` instance based on the values in the database.
399 Create a :class:`Config` instance based on the values in the database.
402 """
400 """
403 config = Config()
401 config = Config()
404 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
402 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
405 for section, option, value in config_data:
403 for section, option, value in config_data:
406 config.set(section, option, value)
404 config.set(section, option, value)
407 return config
405 return config
408
406
409
407
410 def get_enabled_hook_classes(ui_settings):
408 def get_enabled_hook_classes(ui_settings):
411 """
409 """
412 Return the enabled hook classes.
410 Return the enabled hook classes.
413
411
414 :param ui_settings: List of ui_settings as returned
412 :param ui_settings: List of ui_settings as returned
415 by :meth:`VcsSettingsModel.get_ui_settings`
413 by :meth:`VcsSettingsModel.get_ui_settings`
416
414
417 :return: a list with the enabled hook classes. The order is not guaranteed.
415 :return: a list with the enabled hook classes. The order is not guaranteed.
418 :rtype: list
416 :rtype: list
419 """
417 """
420 enabled_hooks = []
418 enabled_hooks = []
421 active_hook_keys = [
419 active_hook_keys = [
422 key for section, key, value, active in ui_settings
420 key for section, key, value, active in ui_settings
423 if section == 'hooks' and active]
421 if section == 'hooks' and active]
424
422
425 hook_names = {
423 hook_names = {
426 RhodeCodeUi.HOOK_PUSH: 'push',
424 RhodeCodeUi.HOOK_PUSH: 'push',
427 RhodeCodeUi.HOOK_PULL: 'pull',
425 RhodeCodeUi.HOOK_PULL: 'pull',
428 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
426 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
429 }
427 }
430
428
431 for key in active_hook_keys:
429 for key in active_hook_keys:
432 hook = hook_names.get(key)
430 hook = hook_names.get(key)
433 if hook:
431 if hook:
434 enabled_hooks.append(hook)
432 enabled_hooks.append(hook)
435
433
436 return enabled_hooks
434 return enabled_hooks
437
435
438
436
439 def set_rhodecode_config(config):
437 def set_rhodecode_config(config):
440 """
438 """
441 Updates pyramid config with new settings from database
439 Updates pyramid config with new settings from database
442
440
443 :param config:
441 :param config:
444 """
442 """
445 from rhodecode.model.settings import SettingsModel
443 from rhodecode.model.settings import SettingsModel
446 app_settings = SettingsModel().get_all_settings()
444 app_settings = SettingsModel().get_all_settings()
447
445
448 for k, v in list(app_settings.items()):
446 for k, v in list(app_settings.items()):
449 config[k] = v
447 config[k] = v
450
448
451
449
452 def get_rhodecode_realm():
450 def get_rhodecode_realm():
453 """
451 """
454 Return the rhodecode realm from database.
452 Return the rhodecode realm from database.
455 """
453 """
456 from rhodecode.model.settings import SettingsModel
454 from rhodecode.model.settings import SettingsModel
457 realm = SettingsModel().get_setting_by_name('realm')
455 realm = SettingsModel().get_setting_by_name('realm')
458 return safe_str(realm.app_settings_value)
456 return safe_str(realm.app_settings_value)
459
457
460
458
461 def get_rhodecode_base_path():
459 def get_rhodecode_base_path():
462 """
460 """
463 Returns the base path. The base path is the filesystem path which points
461 Returns the base path. The base path is the filesystem path which points
464 to the repository store.
462 to the repository store.
465 """
463 """
466
464
467 import rhodecode
465 import rhodecode
468 return rhodecode.CONFIG['default_base_path']
466 return rhodecode.CONFIG['default_base_path']
469
467
470
468
471 def map_groups(path):
469 def map_groups(path):
472 """
470 """
473 Given a full path to a repository, create all nested groups that this
471 Given a full path to a repository, create all nested groups that this
474 repo is inside. This function creates parent-child relationships between
472 repo is inside. This function creates parent-child relationships between
475 groups and creates default perms for all new groups.
473 groups and creates default perms for all new groups.
476
474
477 :param paths: full path to repository
475 :param paths: full path to repository
478 """
476 """
479 from rhodecode.model.repo_group import RepoGroupModel
477 from rhodecode.model.repo_group import RepoGroupModel
480 sa = meta.Session()
478 sa = meta.Session()
481 groups = path.split(Repository.NAME_SEP)
479 groups = path.split(Repository.NAME_SEP)
482 parent = None
480 parent = None
483 group = None
481 group = None
484
482
485 # last element is repo in nested groups structure
483 # last element is repo in nested groups structure
486 groups = groups[:-1]
484 groups = groups[:-1]
487 rgm = RepoGroupModel(sa)
485 rgm = RepoGroupModel(sa)
488 owner = User.get_first_super_admin()
486 owner = User.get_first_super_admin()
489 for lvl, group_name in enumerate(groups):
487 for lvl, group_name in enumerate(groups):
490 group_name = '/'.join(groups[:lvl] + [group_name])
488 group_name = '/'.join(groups[:lvl] + [group_name])
491 group = RepoGroup.get_by_group_name(group_name)
489 group = RepoGroup.get_by_group_name(group_name)
492 desc = '%s group' % group_name
490 desc = '%s group' % group_name
493
491
494 # skip folders that are now removed repos
492 # skip folders that are now removed repos
495 if REMOVED_REPO_PAT.match(group_name):
493 if REMOVED_REPO_PAT.match(group_name):
496 break
494 break
497
495
498 if group is None:
496 if group is None:
499 log.debug('creating group level: %s group_name: %s',
497 log.debug('creating group level: %s group_name: %s',
500 lvl, group_name)
498 lvl, group_name)
501 group = RepoGroup(group_name, parent)
499 group = RepoGroup(group_name, parent)
502 group.group_description = desc
500 group.group_description = desc
503 group.user = owner
501 group.user = owner
504 sa.add(group)
502 sa.add(group)
505 perm_obj = rgm._create_default_perms(group)
503 perm_obj = rgm._create_default_perms(group)
506 sa.add(perm_obj)
504 sa.add(perm_obj)
507 sa.flush()
505 sa.flush()
508
506
509 parent = group
507 parent = group
510 return group
508 return group
511
509
512
510
513 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
511 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
514 """
512 """
515 maps all repos given in initial_repo_list, non existing repositories
513 maps all repos given in initial_repo_list, non existing repositories
516 are created, if remove_obsolete is True it also checks for db entries
514 are created, if remove_obsolete is True it also checks for db entries
517 that are not in initial_repo_list and removes them.
515 that are not in initial_repo_list and removes them.
518
516
519 :param initial_repo_list: list of repositories found by scanning methods
517 :param initial_repo_list: list of repositories found by scanning methods
520 :param remove_obsolete: check for obsolete entries in database
518 :param remove_obsolete: check for obsolete entries in database
521 """
519 """
522 from rhodecode.model.repo import RepoModel
520 from rhodecode.model.repo import RepoModel
523 from rhodecode.model.repo_group import RepoGroupModel
521 from rhodecode.model.repo_group import RepoGroupModel
524 from rhodecode.model.settings import SettingsModel
522 from rhodecode.model.settings import SettingsModel
525
523
526 sa = meta.Session()
524 sa = meta.Session()
527 repo_model = RepoModel()
525 repo_model = RepoModel()
528 user = User.get_first_super_admin()
526 user = User.get_first_super_admin()
529 added = []
527 added = []
530
528
531 # creation defaults
529 # creation defaults
532 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
530 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
533 enable_statistics = defs.get('repo_enable_statistics')
531 enable_statistics = defs.get('repo_enable_statistics')
534 enable_locking = defs.get('repo_enable_locking')
532 enable_locking = defs.get('repo_enable_locking')
535 enable_downloads = defs.get('repo_enable_downloads')
533 enable_downloads = defs.get('repo_enable_downloads')
536 private = defs.get('repo_private')
534 private = defs.get('repo_private')
537
535
538 for name, repo in list(initial_repo_list.items()):
536 for name, repo in list(initial_repo_list.items()):
539 group = map_groups(name)
537 group = map_groups(name)
540 str_name = safe_str(name)
538 str_name = safe_str(name)
541 db_repo = repo_model.get_by_repo_name(str_name)
539 db_repo = repo_model.get_by_repo_name(str_name)
542 # found repo that is on filesystem not in RhodeCode database
540 # found repo that is on filesystem not in RhodeCode database
543 if not db_repo:
541 if not db_repo:
544 log.info('repository %s not found, creating now', name)
542 log.info('repository %s not found, creating now', name)
545 added.append(name)
543 added.append(name)
546 desc = (repo.description
544 desc = (repo.description
547 if repo.description != 'unknown'
545 if repo.description != 'unknown'
548 else '%s repository' % name)
546 else '%s repository' % name)
549
547
550 db_repo = repo_model._create_repo(
548 db_repo = repo_model._create_repo(
551 repo_name=name,
549 repo_name=name,
552 repo_type=repo.alias,
550 repo_type=repo.alias,
553 description=desc,
551 description=desc,
554 repo_group=getattr(group, 'group_id', None),
552 repo_group=getattr(group, 'group_id', None),
555 owner=user,
553 owner=user,
556 enable_locking=enable_locking,
554 enable_locking=enable_locking,
557 enable_downloads=enable_downloads,
555 enable_downloads=enable_downloads,
558 enable_statistics=enable_statistics,
556 enable_statistics=enable_statistics,
559 private=private,
557 private=private,
560 state=Repository.STATE_CREATED
558 state=Repository.STATE_CREATED
561 )
559 )
562 sa.commit()
560 sa.commit()
563 # we added that repo just now, and make sure we updated server info
561 # we added that repo just now, and make sure we updated server info
564 if db_repo.repo_type == 'git':
562 if db_repo.repo_type == 'git':
565 git_repo = db_repo.scm_instance()
563 git_repo = db_repo.scm_instance()
566 # update repository server-info
564 # update repository server-info
567 log.debug('Running update server info')
565 log.debug('Running update server info')
568 git_repo._update_server_info()
566 git_repo._update_server_info()
569
567
570 db_repo.update_commit_cache()
568 db_repo.update_commit_cache()
571
569
572 config = db_repo._config
570 config = db_repo._config
573 config.set('extensions', 'largefiles', '')
571 config.set('extensions', 'largefiles', '')
574 repo = db_repo.scm_instance(config=config)
572 repo = db_repo.scm_instance(config=config)
575 repo.install_hooks()
573 repo.install_hooks()
576
574
577 removed = []
575 removed = []
578 if remove_obsolete:
576 if remove_obsolete:
579 # remove from database those repositories that are not in the filesystem
577 # remove from database those repositories that are not in the filesystem
580 for repo in sa.query(Repository).all():
578 for repo in sa.query(Repository).all():
581 if repo.repo_name not in list(initial_repo_list.keys()):
579 if repo.repo_name not in list(initial_repo_list.keys()):
582 log.debug("Removing non-existing repository found in db `%s`",
580 log.debug("Removing non-existing repository found in db `%s`",
583 repo.repo_name)
581 repo.repo_name)
584 try:
582 try:
585 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
583 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
586 sa.commit()
584 sa.commit()
587 removed.append(repo.repo_name)
585 removed.append(repo.repo_name)
588 except Exception:
586 except Exception:
589 # don't hold further removals on error
587 # don't hold further removals on error
590 log.error(traceback.format_exc())
588 log.error(traceback.format_exc())
591 sa.rollback()
589 sa.rollback()
592
590
593 def splitter(full_repo_name):
591 def splitter(full_repo_name):
594 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
592 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
595 gr_name = None
593 gr_name = None
596 if len(_parts) == 2:
594 if len(_parts) == 2:
597 gr_name = _parts[0]
595 gr_name = _parts[0]
598 return gr_name
596 return gr_name
599
597
600 initial_repo_group_list = [splitter(x) for x in
598 initial_repo_group_list = [splitter(x) for x in
601 list(initial_repo_list.keys()) if splitter(x)]
599 list(initial_repo_list.keys()) if splitter(x)]
602
600
603 # remove from database those repository groups that are not in the
601 # remove from database those repository groups that are not in the
604 # filesystem due to parent child relationships we need to delete them
602 # filesystem due to parent child relationships we need to delete them
605 # in a specific order of most nested first
603 # in a specific order of most nested first
606 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
604 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
607 def nested_sort(gr):
605 def nested_sort(gr):
608 return len(gr.split('/'))
606 return len(gr.split('/'))
609 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
607 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
610 if group_name not in initial_repo_group_list:
608 if group_name not in initial_repo_group_list:
611 repo_group = RepoGroup.get_by_group_name(group_name)
609 repo_group = RepoGroup.get_by_group_name(group_name)
612 if (repo_group.children.all() or
610 if (repo_group.children.all() or
613 not RepoGroupModel().check_exist_filesystem(
611 not RepoGroupModel().check_exist_filesystem(
614 group_name=group_name, exc_on_failure=False)):
612 group_name=group_name, exc_on_failure=False)):
615 continue
613 continue
616
614
617 log.info(
615 log.info(
618 'Removing non-existing repository group found in db `%s`',
616 'Removing non-existing repository group found in db `%s`',
619 group_name)
617 group_name)
620 try:
618 try:
621 RepoGroupModel(sa).delete(group_name, fs_remove=False)
619 RepoGroupModel(sa).delete(group_name, fs_remove=False)
622 sa.commit()
620 sa.commit()
623 removed.append(group_name)
621 removed.append(group_name)
624 except Exception:
622 except Exception:
625 # don't hold further removals on error
623 # don't hold further removals on error
626 log.exception(
624 log.exception(
627 'Unable to remove repository group `%s`',
625 'Unable to remove repository group `%s`',
628 group_name)
626 group_name)
629 sa.rollback()
627 sa.rollback()
630 raise
628 raise
631
629
632 return added, removed
630 return added, removed
633
631
634
632
635 def load_rcextensions(root_path):
633 def load_rcextensions(root_path):
636 import rhodecode
634 import rhodecode
637 from rhodecode.config import conf
635 from rhodecode.config import conf
638
636
639 path = os.path.join(root_path)
637 path = os.path.join(root_path)
640 sys.path.append(path)
638 sys.path.append(path)
641
639
642 try:
640 try:
643 rcextensions = __import__('rcextensions')
641 rcextensions = __import__('rcextensions')
644 except ImportError:
642 except ImportError:
645 if os.path.isdir(os.path.join(path, 'rcextensions')):
643 if os.path.isdir(os.path.join(path, 'rcextensions')):
646 log.warning('Unable to load rcextensions from %s', path)
644 log.warning('Unable to load rcextensions from %s', path)
647 rcextensions = None
645 rcextensions = None
648
646
649 if rcextensions:
647 if rcextensions:
650 log.info('Loaded rcextensions from %s...', rcextensions)
648 log.info('Loaded rcextensions from %s...', rcextensions)
651 rhodecode.EXTENSIONS = rcextensions
649 rhodecode.EXTENSIONS = rcextensions
652
650
653 # Additional mappings that are not present in the pygments lexers
651 # Additional mappings that are not present in the pygments lexers
654 conf.LANGUAGES_EXTENSIONS_MAP.update(
652 conf.LANGUAGES_EXTENSIONS_MAP.update(
655 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
653 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
656
654
657
655
658 def get_custom_lexer(extension):
656 def get_custom_lexer(extension):
659 """
657 """
660 returns a custom lexer if it is defined in rcextensions module, or None
658 returns a custom lexer if it is defined in rcextensions module, or None
661 if there's no custom lexer defined
659 if there's no custom lexer defined
662 """
660 """
663 import rhodecode
661 import rhodecode
664 from pygments import lexers
662 from pygments import lexers
665
663
666 # custom override made by RhodeCode
664 # custom override made by RhodeCode
667 if extension in ['mako']:
665 if extension in ['mako']:
668 return lexers.get_lexer_by_name('html+mako')
666 return lexers.get_lexer_by_name('html+mako')
669
667
670 # check if we didn't define this extension as other lexer
668 # check if we didn't define this extension as other lexer
671 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
669 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
672 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
670 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
673 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
671 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
674 return lexers.get_lexer_by_name(_lexer_name)
672 return lexers.get_lexer_by_name(_lexer_name)
675
673
676
674
677 #==============================================================================
675 #==============================================================================
678 # TEST FUNCTIONS AND CREATORS
676 # TEST FUNCTIONS AND CREATORS
679 #==============================================================================
677 #==============================================================================
680 def create_test_index(repo_location, config):
678 def create_test_index(repo_location, config):
681 """
679 """
682 Makes default test index.
680 Makes default test index.
683 """
681 """
684 try:
682 try:
685 import rc_testdata
683 import rc_testdata
686 except ImportError:
684 except ImportError:
687 raise ImportError('Failed to import rc_testdata, '
685 raise ImportError('Failed to import rc_testdata, '
688 'please make sure this package is installed from requirements_test.txt')
686 'please make sure this package is installed from requirements_test.txt')
689 rc_testdata.extract_search_index(
687 rc_testdata.extract_search_index(
690 'vcs_search_index', os.path.dirname(config['search.location']))
688 'vcs_search_index', os.path.dirname(config['search.location']))
691
689
692
690
693 def create_test_directory(test_path):
691 def create_test_directory(test_path):
694 """
692 """
695 Create test directory if it doesn't exist.
693 Create test directory if it doesn't exist.
696 """
694 """
697 if not os.path.isdir(test_path):
695 if not os.path.isdir(test_path):
698 log.debug('Creating testdir %s', test_path)
696 log.debug('Creating testdir %s', test_path)
699 os.makedirs(test_path)
697 os.makedirs(test_path)
700
698
701
699
702 def create_test_database(test_path, config):
700 def create_test_database(test_path, config):
703 """
701 """
704 Makes a fresh database.
702 Makes a fresh database.
705 """
703 """
706 from rhodecode.lib.db_manage import DbManage
704 from rhodecode.lib.db_manage import DbManage
707 from rhodecode.lib.utils2 import get_encryption_key
705 from rhodecode.lib.utils2 import get_encryption_key
708
706
709 # PART ONE create db
707 # PART ONE create db
710 dbconf = config['sqlalchemy.db1.url']
708 dbconf = config['sqlalchemy.db1.url']
711 enc_key = get_encryption_key(config)
709 enc_key = get_encryption_key(config)
712
710
713 log.debug('making test db %s', dbconf)
711 log.debug('making test db %s', dbconf)
714
712
715 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
713 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
716 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
714 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
717 dbmanage.create_tables(override=True)
715 dbmanage.create_tables(override=True)
718 dbmanage.set_db_version()
716 dbmanage.set_db_version()
719 # for tests dynamically set new root paths based on generated content
717 # for tests dynamically set new root paths based on generated content
720 dbmanage.create_settings(dbmanage.config_prompt(test_path))
718 dbmanage.create_settings(dbmanage.config_prompt(test_path))
721 dbmanage.create_default_user()
719 dbmanage.create_default_user()
722 dbmanage.create_test_admin_and_users()
720 dbmanage.create_test_admin_and_users()
723 dbmanage.create_permissions()
721 dbmanage.create_permissions()
724 dbmanage.populate_default_permissions()
722 dbmanage.populate_default_permissions()
725 Session().commit()
723 Session().commit()
726
724
727
725
728 def create_test_repositories(test_path, config):
726 def create_test_repositories(test_path, config):
729 """
727 """
730 Creates test repositories in the temporary directory. Repositories are
728 Creates test repositories in the temporary directory. Repositories are
731 extracted from archives within the rc_testdata package.
729 extracted from archives within the rc_testdata package.
732 """
730 """
733 import rc_testdata
731 import rc_testdata
734 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
732 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
735
733
736 log.debug('making test vcs repositories')
734 log.debug('making test vcs repositories')
737
735
738 idx_path = config['search.location']
736 idx_path = config['search.location']
739 data_path = config['cache_dir']
737 data_path = config['cache_dir']
740
738
741 # clean index and data
739 # clean index and data
742 if idx_path and os.path.exists(idx_path):
740 if idx_path and os.path.exists(idx_path):
743 log.debug('remove %s', idx_path)
741 log.debug('remove %s', idx_path)
744 shutil.rmtree(idx_path)
742 shutil.rmtree(idx_path)
745
743
746 if data_path and os.path.exists(data_path):
744 if data_path and os.path.exists(data_path):
747 log.debug('remove %s', data_path)
745 log.debug('remove %s', data_path)
748 shutil.rmtree(data_path)
746 shutil.rmtree(data_path)
749
747
750 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
748 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
751 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
749 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
752
750
753 # Note: Subversion is in the process of being integrated with the system,
751 # Note: Subversion is in the process of being integrated with the system,
754 # until we have a properly packed version of the test svn repository, this
752 # until we have a properly packed version of the test svn repository, this
755 # tries to copy over the repo from a package "rc_testdata"
753 # tries to copy over the repo from a package "rc_testdata"
756 svn_repo_path = rc_testdata.get_svn_repo_archive()
754 svn_repo_path = rc_testdata.get_svn_repo_archive()
757 with tarfile.open(svn_repo_path) as tar:
755 with tarfile.open(svn_repo_path) as tar:
758 tar.extractall(jn(test_path, SVN_REPO))
756 tar.extractall(jn(test_path, SVN_REPO))
759
757
760
758
761 def password_changed(auth_user, session):
759 def password_changed(auth_user, session):
762 # Never report password change in case of default user or anonymous user.
760 # Never report password change in case of default user or anonymous user.
763 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
761 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
764 return False
762 return False
765
763
766 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
764 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
767 rhodecode_user = session.get('rhodecode_user', {})
765 rhodecode_user = session.get('rhodecode_user', {})
768 session_password_hash = rhodecode_user.get('password', '')
766 session_password_hash = rhodecode_user.get('password', '')
769 return password_hash != session_password_hash
767 return password_hash != session_password_hash
770
768
771
769
772 def read_opensource_licenses():
770 def read_opensource_licenses():
773 global _license_cache
771 global _license_cache
774
772
775 if not _license_cache:
773 if not _license_cache:
776 licenses = pkg_resources.resource_string(
774 licenses = pkg_resources.resource_string(
777 'rhodecode', 'config/licenses.json')
775 'rhodecode', 'config/licenses.json')
778 _license_cache = json.loads(licenses)
776 _license_cache = json.loads(licenses)
779
777
780 return _license_cache
778 return _license_cache
781
779
782
780
783 def generate_platform_uuid():
781 def generate_platform_uuid():
784 """
782 """
785 Generates platform UUID based on it's name
783 Generates platform UUID based on it's name
786 """
784 """
787 import platform
785 import platform
788
786
789 try:
787 try:
790 uuid_list = [platform.platform()]
788 uuid_list = [platform.platform()]
791 return sha256_safe(':'.join(uuid_list))
789 return sha256_safe(':'.join(uuid_list))
792 except Exception as e:
790 except Exception as e:
793 log.error('Failed to generate host uuid: %s', e)
791 log.error('Failed to generate host uuid: %s', e)
794 return 'UNDEFINED'
792 return 'UNDEFINED'
795
793
796
794
797 def send_test_email(recipients, email_body='TEST EMAIL'):
795 def send_test_email(recipients, email_body='TEST EMAIL'):
798 """
796 """
799 Simple code for generating test emails.
797 Simple code for generating test emails.
800 Usage::
798 Usage::
801
799
802 from rhodecode.lib import utils
800 from rhodecode.lib import utils
803 utils.send_test_email()
801 utils.send_test_email()
804 """
802 """
805 from rhodecode.lib.celerylib import tasks, run_task
803 from rhodecode.lib.celerylib import tasks, run_task
806
804
807 email_body = email_body_plaintext = email_body
805 email_body = email_body_plaintext = email_body
808 subject = 'SUBJECT FROM: {}'.format(socket.gethostname())
806 subject = f'SUBJECT FROM: {socket.gethostname()}'
809 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
807 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
@@ -1,989 +1,987 b''
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21
19
22 """
20 """
23 Some simple helper functions
21 Some simple helper functions
24 """
22 """
25
23
26 import collections
24 import collections
27 import datetime
25 import datetime
28 import dateutil.relativedelta
26 import dateutil.relativedelta
29 import logging
27 import logging
30 import re
28 import re
31 import sys
29 import sys
32 import time
30 import time
33 import urllib.request
31 import urllib.request
34 import urllib.parse
32 import urllib.parse
35 import urllib.error
33 import urllib.error
36 import urlobject
34 import urlobject
37 import uuid
35 import uuid
38 import getpass
36 import getpass
39 import socket
37 import socket
40 import errno
38 import errno
41 import random
39 import random
42 import functools
40 import functools
43 from contextlib import closing
41 from contextlib import closing
44
42
45 import pygments.lexers
43 import pygments.lexers
46 import sqlalchemy
44 import sqlalchemy
47 import sqlalchemy.event
45 import sqlalchemy.event
48 import sqlalchemy.engine.url
46 import sqlalchemy.engine.url
49 import sqlalchemy.exc
47 import sqlalchemy.exc
50 import sqlalchemy.sql
48 import sqlalchemy.sql
51 import webob
49 import webob
52 from pyramid.settings import asbool
50 from pyramid.settings import asbool
53
51
54 import rhodecode
52 import rhodecode
55 from rhodecode.translation import _, _pluralize
53 from rhodecode.translation import _, _pluralize
56 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
54 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
57 from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
55 from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
58 from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict
56 from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict
59
57
60
58
61 def __get_lem(extra_mapping=None):
59 def __get_lem(extra_mapping=None):
62 """
60 """
63 Get language extension map based on what's inside pygments lexers
61 Get language extension map based on what's inside pygments lexers
64 """
62 """
65 d = collections.defaultdict(lambda: [])
63 d = collections.defaultdict(lambda: [])
66
64
67 def __clean(s):
65 def __clean(s):
68 s = s.lstrip('*')
66 s = s.lstrip('*')
69 s = s.lstrip('.')
67 s = s.lstrip('.')
70
68
71 if s.find('[') != -1:
69 if s.find('[') != -1:
72 exts = []
70 exts = []
73 start, stop = s.find('['), s.find(']')
71 start, stop = s.find('['), s.find(']')
74
72
75 for suffix in s[start + 1:stop]:
73 for suffix in s[start + 1:stop]:
76 exts.append(s[:s.find('[')] + suffix)
74 exts.append(s[:s.find('[')] + suffix)
77 return [e.lower() for e in exts]
75 return [e.lower() for e in exts]
78 else:
76 else:
79 return [s.lower()]
77 return [s.lower()]
80
78
81 for lx, t in sorted(pygments.lexers.LEXERS.items()):
79 for lx, t in sorted(pygments.lexers.LEXERS.items()):
82 m = list(map(__clean, t[-2]))
80 m = list(map(__clean, t[-2]))
83 if m:
81 if m:
84 m = functools.reduce(lambda x, y: x + y, m)
82 m = functools.reduce(lambda x, y: x + y, m)
85 for ext in m:
83 for ext in m:
86 desc = lx.replace('Lexer', '')
84 desc = lx.replace('Lexer', '')
87 d[ext].append(desc)
85 d[ext].append(desc)
88
86
89 data = dict(d)
87 data = dict(d)
90
88
91 extra_mapping = extra_mapping or {}
89 extra_mapping = extra_mapping or {}
92 if extra_mapping:
90 if extra_mapping:
93 for k, v in list(extra_mapping.items()):
91 for k, v in list(extra_mapping.items()):
94 if k not in data:
92 if k not in data:
95 # register new mapping2lexer
93 # register new mapping2lexer
96 data[k] = [v]
94 data[k] = [v]
97
95
98 return data
96 return data
99
97
100
98
101 def convert_line_endings(line: str, mode) -> str:
99 def convert_line_endings(line: str, mode) -> str:
102 """
100 """
103 Converts a given line "line end" accordingly to given mode
101 Converts a given line "line end" accordingly to given mode
104
102
105 Available modes are::
103 Available modes are::
106 0 - Unix
104 0 - Unix
107 1 - Mac
105 1 - Mac
108 2 - DOS
106 2 - DOS
109
107
110 :param line: given line to convert
108 :param line: given line to convert
111 :param mode: mode to convert to
109 :param mode: mode to convert to
112 :return: converted line according to mode
110 :return: converted line according to mode
113 """
111 """
114 if mode == 0:
112 if mode == 0:
115 line = line.replace('\r\n', '\n')
113 line = line.replace('\r\n', '\n')
116 line = line.replace('\r', '\n')
114 line = line.replace('\r', '\n')
117 elif mode == 1:
115 elif mode == 1:
118 line = line.replace('\r\n', '\r')
116 line = line.replace('\r\n', '\r')
119 line = line.replace('\n', '\r')
117 line = line.replace('\n', '\r')
120 elif mode == 2:
118 elif mode == 2:
121 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
119 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
122 return line
120 return line
123
121
124
122
125 def detect_mode(line: str, default) -> int:
123 def detect_mode(line: str, default) -> int:
126 """
124 """
127 Detects line break for given line, if line break couldn't be found
125 Detects line break for given line, if line break couldn't be found
128 given default value is returned
126 given default value is returned
129
127
130 :param line: str line
128 :param line: str line
131 :param default: default
129 :param default: default
132 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
130 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
133 """
131 """
134 if line.endswith('\r\n'):
132 if line.endswith('\r\n'):
135 return 2
133 return 2
136 elif line.endswith('\n'):
134 elif line.endswith('\n'):
137 return 0
135 return 0
138 elif line.endswith('\r'):
136 elif line.endswith('\r'):
139 return 1
137 return 1
140 else:
138 else:
141 return default
139 return default
142
140
143
141
144 def remove_suffix(s, suffix):
142 def remove_suffix(s, suffix):
145 if s.endswith(suffix):
143 if s.endswith(suffix):
146 s = s[:-1 * len(suffix)]
144 s = s[:-1 * len(suffix)]
147 return s
145 return s
148
146
149
147
150 def remove_prefix(s, prefix):
148 def remove_prefix(s, prefix):
151 if s.startswith(prefix):
149 if s.startswith(prefix):
152 s = s[len(prefix):]
150 s = s[len(prefix):]
153 return s
151 return s
154
152
155
153
156 def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True):
154 def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True):
157 """
155 """
158 Look through the calling stack and return the frame which called
156 Look through the calling stack and return the frame which called
159 this function and is part of core module ( ie. rhodecode.* )
157 this function and is part of core module ( ie. rhodecode.* )
160
158
161 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
159 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
162 :param depth:
160 :param depth:
163 :param output_writer:
161 :param output_writer:
164 :param indent:
162 :param indent:
165
163
166 usage::
164 usage::
167
165
168 from rhodecode.lib.utils2 import find_calling_context
166 from rhodecode.lib.utils2 import find_calling_context
169
167
170 calling_context = find_calling_context(ignore_modules=[
168 calling_context = find_calling_context(ignore_modules=[
171 'rhodecode.lib.caching_query',
169 'rhodecode.lib.caching_query',
172 'rhodecode.model.settings',
170 'rhodecode.model.settings',
173 ])
171 ])
174
172
175 """
173 """
176 import inspect
174 import inspect
177 if not output_writer:
175 if not output_writer:
178 try:
176 try:
179 from rich import print as pprint
177 from rich import print as pprint
180 except ImportError:
178 except ImportError:
181 pprint = print
179 pprint = print
182 output_writer = pprint
180 output_writer = pprint
183
181
184 frame = inspect.currentframe()
182 frame = inspect.currentframe()
185 cc = []
183 cc = []
186 try:
184 try:
187 for i in range(depth): # current frame + 3 callers
185 for i in range(depth): # current frame + 3 callers
188 frame = frame.f_back
186 frame = frame.f_back
189 if not frame:
187 if not frame:
190 break
188 break
191
189
192 info = inspect.getframeinfo(frame)
190 info = inspect.getframeinfo(frame)
193 name = frame.f_globals.get('__name__')
191 name = frame.f_globals.get('__name__')
194 if name not in ignore_modules:
192 if name not in ignore_modules:
195 cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}')
193 cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}')
196 finally:
194 finally:
197 # Avoids a reference cycle
195 # Avoids a reference cycle
198 del frame
196 del frame
199
197
200 output_writer('* INFO: This code was called from: *')
198 output_writer('* INFO: This code was called from: *')
201 for cnt, frm_info in enumerate(cc):
199 for cnt, frm_info in enumerate(cc):
202 if not indent:
200 if not indent:
203 cnt = 1
201 cnt = 1
204 output_writer(' ' * cnt + frm_info)
202 output_writer(' ' * cnt + frm_info)
205
203
206
204
207 def ping_connection(connection, branch):
205 def ping_connection(connection, branch):
208 if branch:
206 if branch:
209 # "branch" refers to a sub-connection of a connection,
207 # "branch" refers to a sub-connection of a connection,
210 # we don't want to bother pinging on these.
208 # we don't want to bother pinging on these.
211 return
209 return
212
210
213 # turn off "close with result". This flag is only used with
211 # turn off "close with result". This flag is only used with
214 # "connectionless" execution, otherwise will be False in any case
212 # "connectionless" execution, otherwise will be False in any case
215 save_should_close_with_result = connection.should_close_with_result
213 save_should_close_with_result = connection.should_close_with_result
216 connection.should_close_with_result = False
214 connection.should_close_with_result = False
217
215
218 try:
216 try:
219 # run a SELECT 1. use a core select() so that
217 # run a SELECT 1. use a core select() so that
220 # the SELECT of a scalar value without a table is
218 # the SELECT of a scalar value without a table is
221 # appropriately formatted for the backend
219 # appropriately formatted for the backend
222 connection.scalar(sqlalchemy.sql.select([1]))
220 connection.scalar(sqlalchemy.sql.select([1]))
223 except sqlalchemy.exc.DBAPIError as err:
221 except sqlalchemy.exc.DBAPIError as err:
224 # catch SQLAlchemy's DBAPIError, which is a wrapper
222 # catch SQLAlchemy's DBAPIError, which is a wrapper
225 # for the DBAPI's exception. It includes a .connection_invalidated
223 # for the DBAPI's exception. It includes a .connection_invalidated
226 # attribute which specifies if this connection is a "disconnect"
224 # attribute which specifies if this connection is a "disconnect"
227 # condition, which is based on inspection of the original exception
225 # condition, which is based on inspection of the original exception
228 # by the dialect in use.
226 # by the dialect in use.
229 if err.connection_invalidated:
227 if err.connection_invalidated:
230 # run the same SELECT again - the connection will re-validate
228 # run the same SELECT again - the connection will re-validate
231 # itself and establish a new connection. The disconnect detection
229 # itself and establish a new connection. The disconnect detection
232 # here also causes the whole connection pool to be invalidated
230 # here also causes the whole connection pool to be invalidated
233 # so that all stale connections are discarded.
231 # so that all stale connections are discarded.
234 connection.scalar(sqlalchemy.sql.select([1]))
232 connection.scalar(sqlalchemy.sql.select([1]))
235 else:
233 else:
236 raise
234 raise
237 finally:
235 finally:
238 # restore "close with result"
236 # restore "close with result"
239 connection.should_close_with_result = save_should_close_with_result
237 connection.should_close_with_result = save_should_close_with_result
240
238
241
239
242 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
240 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
243 """Custom engine_from_config functions."""
241 """Custom engine_from_config functions."""
244 log = logging.getLogger('sqlalchemy.engine')
242 log = logging.getLogger('sqlalchemy.engine')
245 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
243 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
246 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
244 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
247
245
248 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
246 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
249
247
250 def color_sql(sql):
248 def color_sql(sql):
251 color_seq = '\033[1;33m' # This is yellow: code 33
249 color_seq = '\033[1;33m' # This is yellow: code 33
252 normal = '\x1b[0m'
250 normal = '\x1b[0m'
253 return ''.join([color_seq, sql, normal])
251 return ''.join([color_seq, sql, normal])
254
252
255 if use_ping_connection:
253 if use_ping_connection:
256 log.debug('Adding ping_connection on the engine config.')
254 log.debug('Adding ping_connection on the engine config.')
257 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
255 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
258
256
259 if debug:
257 if debug:
260 # attach events only for debug configuration
258 # attach events only for debug configuration
261 def before_cursor_execute(conn, cursor, statement,
259 def before_cursor_execute(conn, cursor, statement,
262 parameters, context, executemany):
260 parameters, context, executemany):
263 setattr(conn, 'query_start_time', time.time())
261 setattr(conn, 'query_start_time', time.time())
264 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
262 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
265 find_calling_context(ignore_modules=[
263 find_calling_context(ignore_modules=[
266 'rhodecode.lib.caching_query',
264 'rhodecode.lib.caching_query',
267 'rhodecode.model.settings',
265 'rhodecode.model.settings',
268 ], output_writer=log.info)
266 ], output_writer=log.info)
269
267
270 def after_cursor_execute(conn, cursor, statement,
268 def after_cursor_execute(conn, cursor, statement,
271 parameters, context, executemany):
269 parameters, context, executemany):
272 delattr(conn, 'query_start_time')
270 delattr(conn, 'query_start_time')
273
271
274 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
272 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
275 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
273 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
276
274
277 return engine
275 return engine
278
276
279
277
280 def get_encryption_key(config) -> bytes:
278 def get_encryption_key(config) -> bytes:
281 secret = config.get('rhodecode.encrypted_values.secret')
279 secret = config.get('rhodecode.encrypted_values.secret')
282 default = config['beaker.session.secret']
280 default = config['beaker.session.secret']
283 enc_key = secret or default
281 enc_key = secret or default
284
282
285 return safe_bytes(enc_key)
283 return safe_bytes(enc_key)
286
284
287
285
288 def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
286 def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
289 """
287 """
290 Turns a datetime into an age string.
288 Turns a datetime into an age string.
291 If show_short_version is True, this generates a shorter string with
289 If show_short_version is True, this generates a shorter string with
292 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
290 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
293
291
294 * IMPORTANT*
292 * IMPORTANT*
295 Code of this function is written in special way so it's easier to
293 Code of this function is written in special way so it's easier to
296 backport it to javascript. If you mean to update it, please also update
294 backport it to javascript. If you mean to update it, please also update
297 `jquery.timeago-extension.js` file
295 `jquery.timeago-extension.js` file
298
296
299 :param prevdate: datetime object
297 :param prevdate: datetime object
300 :param now: get current time, if not define we use
298 :param now: get current time, if not define we use
301 `datetime.datetime.now()`
299 `datetime.datetime.now()`
302 :param show_short_version: if it should approximate the date and
300 :param show_short_version: if it should approximate the date and
303 return a shorter string
301 return a shorter string
304 :param show_suffix:
302 :param show_suffix:
305 :param short_format: show short format, eg 2D instead of 2 days
303 :param short_format: show short format, eg 2D instead of 2 days
306 :rtype: unicode
304 :rtype: unicode
307 :returns: unicode words describing age
305 :returns: unicode words describing age
308 """
306 """
309
307
310 def _get_relative_delta(now, prevdate):
308 def _get_relative_delta(now, prevdate):
311 base = dateutil.relativedelta.relativedelta(now, prevdate)
309 base = dateutil.relativedelta.relativedelta(now, prevdate)
312 return {
310 return {
313 'year': base.years,
311 'year': base.years,
314 'month': base.months,
312 'month': base.months,
315 'day': base.days,
313 'day': base.days,
316 'hour': base.hours,
314 'hour': base.hours,
317 'minute': base.minutes,
315 'minute': base.minutes,
318 'second': base.seconds,
316 'second': base.seconds,
319 }
317 }
320
318
321 def _is_leap_year(year):
319 def _is_leap_year(year):
322 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
320 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
323
321
324 def get_month(prevdate):
322 def get_month(prevdate):
325 return prevdate.month
323 return prevdate.month
326
324
327 def get_year(prevdate):
325 def get_year(prevdate):
328 return prevdate.year
326 return prevdate.year
329
327
330 now = now or datetime.datetime.now()
328 now = now or datetime.datetime.now()
331 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
329 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
332 deltas = {}
330 deltas = {}
333 future = False
331 future = False
334
332
335 if prevdate > now:
333 if prevdate > now:
336 now_old = now
334 now_old = now
337 now = prevdate
335 now = prevdate
338 prevdate = now_old
336 prevdate = now_old
339 future = True
337 future = True
340 if future:
338 if future:
341 prevdate = prevdate.replace(microsecond=0)
339 prevdate = prevdate.replace(microsecond=0)
342 # Get date parts deltas
340 # Get date parts deltas
343 for part in order:
341 for part in order:
344 rel_delta = _get_relative_delta(now, prevdate)
342 rel_delta = _get_relative_delta(now, prevdate)
345 deltas[part] = rel_delta[part]
343 deltas[part] = rel_delta[part]
346
344
347 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
345 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
348 # not 1 hour, -59 minutes and -59 seconds)
346 # not 1 hour, -59 minutes and -59 seconds)
349 offsets = [[5, 60], [4, 60], [3, 24]]
347 offsets = [[5, 60], [4, 60], [3, 24]]
350 for element in offsets: # seconds, minutes, hours
348 for element in offsets: # seconds, minutes, hours
351 num = element[0]
349 num = element[0]
352 length = element[1]
350 length = element[1]
353
351
354 part = order[num]
352 part = order[num]
355 carry_part = order[num - 1]
353 carry_part = order[num - 1]
356
354
357 if deltas[part] < 0:
355 if deltas[part] < 0:
358 deltas[part] += length
356 deltas[part] += length
359 deltas[carry_part] -= 1
357 deltas[carry_part] -= 1
360
358
361 # Same thing for days except that the increment depends on the (variable)
359 # Same thing for days except that the increment depends on the (variable)
362 # number of days in the month
360 # number of days in the month
363 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
361 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
364 if deltas['day'] < 0:
362 if deltas['day'] < 0:
365 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
363 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
366 deltas['day'] += 29
364 deltas['day'] += 29
367 else:
365 else:
368 deltas['day'] += month_lengths[get_month(prevdate) - 1]
366 deltas['day'] += month_lengths[get_month(prevdate) - 1]
369
367
370 deltas['month'] -= 1
368 deltas['month'] -= 1
371
369
372 if deltas['month'] < 0:
370 if deltas['month'] < 0:
373 deltas['month'] += 12
371 deltas['month'] += 12
374 deltas['year'] -= 1
372 deltas['year'] -= 1
375
373
376 # Format the result
374 # Format the result
377 if short_format:
375 if short_format:
378 fmt_funcs = {
376 fmt_funcs = {
379 'year': lambda d: '%dy' % d,
377 'year': lambda d: '%dy' % d,
380 'month': lambda d: '%dm' % d,
378 'month': lambda d: '%dm' % d,
381 'day': lambda d: '%dd' % d,
379 'day': lambda d: '%dd' % d,
382 'hour': lambda d: '%dh' % d,
380 'hour': lambda d: '%dh' % d,
383 'minute': lambda d: '%dmin' % d,
381 'minute': lambda d: '%dmin' % d,
384 'second': lambda d: '%dsec' % d,
382 'second': lambda d: '%dsec' % d,
385 }
383 }
386 else:
384 else:
387 fmt_funcs = {
385 fmt_funcs = {
388 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
386 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
389 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
387 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
390 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
388 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
391 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
389 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
392 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
390 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
393 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
391 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
394 }
392 }
395
393
396 i = 0
394 i = 0
397 for part in order:
395 for part in order:
398 value = deltas[part]
396 value = deltas[part]
399 if value != 0:
397 if value != 0:
400
398
401 if i < 5:
399 if i < 5:
402 sub_part = order[i + 1]
400 sub_part = order[i + 1]
403 sub_value = deltas[sub_part]
401 sub_value = deltas[sub_part]
404 else:
402 else:
405 sub_value = 0
403 sub_value = 0
406
404
407 if sub_value == 0 or show_short_version:
405 if sub_value == 0 or show_short_version:
408 _val = fmt_funcs[part](value)
406 _val = fmt_funcs[part](value)
409 if future:
407 if future:
410 if show_suffix:
408 if show_suffix:
411 return _('in ${ago}', mapping={'ago': _val})
409 return _('in ${ago}', mapping={'ago': _val})
412 else:
410 else:
413 return _(_val)
411 return _(_val)
414
412
415 else:
413 else:
416 if show_suffix:
414 if show_suffix:
417 return _('${ago} ago', mapping={'ago': _val})
415 return _('${ago} ago', mapping={'ago': _val})
418 else:
416 else:
419 return _(_val)
417 return _(_val)
420
418
421 val = fmt_funcs[part](value)
419 val = fmt_funcs[part](value)
422 val_detail = fmt_funcs[sub_part](sub_value)
420 val_detail = fmt_funcs[sub_part](sub_value)
423 mapping = {'val': val, 'detail': val_detail}
421 mapping = {'val': val, 'detail': val_detail}
424
422
425 if short_format:
423 if short_format:
426 datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
424 datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
427 if show_suffix:
425 if show_suffix:
428 datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
426 datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
429 if future:
427 if future:
430 datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
428 datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
431 else:
429 else:
432 datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
430 datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
433 if show_suffix:
431 if show_suffix:
434 datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
432 datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
435 if future:
433 if future:
436 datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)
434 datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)
437
435
438 return datetime_tmpl
436 return datetime_tmpl
439 i += 1
437 i += 1
440 return _('just now')
438 return _('just now')
441
439
442
440
443 def age_from_seconds(seconds):
441 def age_from_seconds(seconds):
444 seconds = safe_int(seconds) or 0
442 seconds = safe_int(seconds) or 0
445 prevdate = time_to_datetime(time.time() + seconds)
443 prevdate = time_to_datetime(time.time() + seconds)
446 return age(prevdate, show_suffix=False, show_short_version=True)
444 return age(prevdate, show_suffix=False, show_short_version=True)
447
445
448
446
449 def cleaned_uri(uri):
447 def cleaned_uri(uri):
450 """
448 """
451 Quotes '[' and ']' from uri if there is only one of them.
449 Quotes '[' and ']' from uri if there is only one of them.
452 according to RFC3986 we cannot use such chars in uri
450 according to RFC3986 we cannot use such chars in uri
453 :param uri:
451 :param uri:
454 :return: uri without this chars
452 :return: uri without this chars
455 """
453 """
456 return urllib.parse.quote(uri, safe='@$:/')
454 return urllib.parse.quote(uri, safe='@$:/')
457
455
458
456
459 def credentials_filter(uri):
457 def credentials_filter(uri):
460 """
458 """
461 Returns a url with removed credentials
459 Returns a url with removed credentials
462
460
463 :param uri:
461 :param uri:
464 """
462 """
465 import urlobject
463 import urlobject
466 if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
464 if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
467 return 'InvalidDecryptionKey'
465 return 'InvalidDecryptionKey'
468
466
469 url_obj = urlobject.URLObject(cleaned_uri(uri))
467 url_obj = urlobject.URLObject(cleaned_uri(uri))
470 url_obj = url_obj.without_password().without_username()
468 url_obj = url_obj.without_password().without_username()
471
469
472 return url_obj
470 return url_obj
473
471
474
472
475 def get_host_info(request):
473 def get_host_info(request):
476 """
474 """
477 Generate host info, to obtain full url e.g https://server.com
475 Generate host info, to obtain full url e.g https://server.com
478 use this
476 use this
479 `{scheme}://{netloc}`
477 `{scheme}://{netloc}`
480 """
478 """
481 if not request:
479 if not request:
482 return {}
480 return {}
483
481
484 qualified_home_url = request.route_url('home')
482 qualified_home_url = request.route_url('home')
485 parsed_url = urlobject.URLObject(qualified_home_url)
483 parsed_url = urlobject.URLObject(qualified_home_url)
486 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
484 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
487
485
488 return {
486 return {
489 'scheme': parsed_url.scheme,
487 'scheme': parsed_url.scheme,
490 'netloc': parsed_url.netloc+decoded_path,
488 'netloc': parsed_url.netloc+decoded_path,
491 'hostname': parsed_url.hostname,
489 'hostname': parsed_url.hostname,
492 }
490 }
493
491
494
492
495 def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
493 def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
496 qualified_home_url = request.route_url('home')
494 qualified_home_url = request.route_url('home')
497 parsed_url = urlobject.URLObject(qualified_home_url)
495 parsed_url = urlobject.URLObject(qualified_home_url)
498 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
496 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
499
497
500 args = {
498 args = {
501 'scheme': parsed_url.scheme,
499 'scheme': parsed_url.scheme,
502 'user': '',
500 'user': '',
503 'sys_user': getpass.getuser(),
501 'sys_user': getpass.getuser(),
504 # path if we use proxy-prefix
502 # path if we use proxy-prefix
505 'netloc': parsed_url.netloc+decoded_path,
503 'netloc': parsed_url.netloc+decoded_path,
506 'hostname': parsed_url.hostname,
504 'hostname': parsed_url.hostname,
507 'prefix': decoded_path,
505 'prefix': decoded_path,
508 'repo': repo_name,
506 'repo': repo_name,
509 'repoid': str(repo_id),
507 'repoid': str(repo_id),
510 'repo_type': repo_type
508 'repo_type': repo_type
511 }
509 }
512 args.update(override)
510 args.update(override)
513 args['user'] = urllib.parse.quote(safe_str(args['user']))
511 args['user'] = urllib.parse.quote(safe_str(args['user']))
514
512
515 for k, v in list(args.items()):
513 for k, v in list(args.items()):
516 tmpl_key = '{%s}' % k
514 tmpl_key = '{%s}' % k
517 uri_tmpl = uri_tmpl.replace(tmpl_key, v)
515 uri_tmpl = uri_tmpl.replace(tmpl_key, v)
518
516
519 # special case for SVN clone url
517 # special case for SVN clone url
520 if repo_type == 'svn':
518 if repo_type == 'svn':
521 uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')
519 uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')
522
520
523 # remove leading @ sign if it's present. Case of empty user
521 # remove leading @ sign if it's present. Case of empty user
524 url_obj = urlobject.URLObject(uri_tmpl)
522 url_obj = urlobject.URLObject(uri_tmpl)
525 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
523 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
526
524
527 return safe_str(url)
525 return safe_str(url)
528
526
529
527
530 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
528 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
531 maybe_unreachable=False, reference_obj=None):
529 maybe_unreachable=False, reference_obj=None):
532 """
530 """
533 Safe version of get_commit if this commit doesn't exists for a
531 Safe version of get_commit if this commit doesn't exists for a
534 repository it returns a Dummy one instead
532 repository it returns a Dummy one instead
535
533
536 :param repo: repository instance
534 :param repo: repository instance
537 :param commit_id: commit id as str
535 :param commit_id: commit id as str
538 :param commit_idx: numeric commit index
536 :param commit_idx: numeric commit index
539 :param pre_load: optional list of commit attributes to load
537 :param pre_load: optional list of commit attributes to load
540 :param maybe_unreachable: translate unreachable commits on git repos
538 :param maybe_unreachable: translate unreachable commits on git repos
541 :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
539 :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
542 """
540 """
543 # TODO(skreft): remove these circular imports
541 # TODO(skreft): remove these circular imports
544 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
542 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
545 from rhodecode.lib.vcs.exceptions import RepositoryError
543 from rhodecode.lib.vcs.exceptions import RepositoryError
546 if not isinstance(repo, BaseRepository):
544 if not isinstance(repo, BaseRepository):
547 raise Exception('You must pass an Repository '
545 raise Exception('You must pass an Repository '
548 'object as first argument got %s', type(repo))
546 'object as first argument got %s', type(repo))
549
547
550 try:
548 try:
551 commit = repo.get_commit(
549 commit = repo.get_commit(
552 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
550 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
553 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
551 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
554 except (RepositoryError, LookupError):
552 except (RepositoryError, LookupError):
555 commit = EmptyCommit()
553 commit = EmptyCommit()
556 return commit
554 return commit
557
555
558
556
559 def datetime_to_time(dt):
557 def datetime_to_time(dt):
560 if dt:
558 if dt:
561 return time.mktime(dt.timetuple())
559 return time.mktime(dt.timetuple())
562
560
563
561
564 def time_to_datetime(tm):
562 def time_to_datetime(tm):
565 if tm:
563 if tm:
566 if isinstance(tm, str):
564 if isinstance(tm, str):
567 try:
565 try:
568 tm = float(tm)
566 tm = float(tm)
569 except ValueError:
567 except ValueError:
570 return
568 return
571 return datetime.datetime.fromtimestamp(tm)
569 return datetime.datetime.fromtimestamp(tm)
572
570
573
571
574 def time_to_utcdatetime(tm):
572 def time_to_utcdatetime(tm):
575 if tm:
573 if tm:
576 if isinstance(tm, str):
574 if isinstance(tm, str):
577 try:
575 try:
578 tm = float(tm)
576 tm = float(tm)
579 except ValueError:
577 except ValueError:
580 return
578 return
581 return datetime.datetime.utcfromtimestamp(tm)
579 return datetime.datetime.utcfromtimestamp(tm)
582
580
583
581
584 MENTIONS_REGEX = re.compile(
582 MENTIONS_REGEX = re.compile(
585 # ^@ or @ without any special chars in front
583 # ^@ or @ without any special chars in front
586 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
584 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
587 # main body starts with letter, then can be . - _
585 # main body starts with letter, then can be . - _
588 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
586 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
589 re.VERBOSE | re.MULTILINE)
587 re.VERBOSE | re.MULTILINE)
590
588
591
589
592 def extract_mentioned_users(s):
590 def extract_mentioned_users(s):
593 """
591 """
594 Returns unique usernames from given string s that have @mention
592 Returns unique usernames from given string s that have @mention
595
593
596 :param s: string to get mentions
594 :param s: string to get mentions
597 """
595 """
598 usrs = set()
596 usrs = set()
599 for username in MENTIONS_REGEX.findall(s):
597 for username in MENTIONS_REGEX.findall(s):
600 usrs.add(username)
598 usrs.add(username)
601
599
602 return sorted(list(usrs), key=lambda k: k.lower())
600 return sorted(list(usrs), key=lambda k: k.lower())
603
601
604
602
605 def fix_PATH(os_=None):
603 def fix_PATH(os_=None):
606 """
604 """
607 Get current active python path, and append it to PATH variable to fix
605 Get current active python path, and append it to PATH variable to fix
608 issues of subprocess calls and different python versions
606 issues of subprocess calls and different python versions
609 """
607 """
610 if os_ is None:
608 if os_ is None:
611 import os
609 import os
612 else:
610 else:
613 os = os_
611 os = os_
614
612
615 cur_path = os.path.split(sys.executable)[0]
613 cur_path = os.path.split(sys.executable)[0]
616 os_path = os.environ['PATH']
614 os_path = os.environ['PATH']
617 if not os.environ['PATH'].startswith(cur_path):
615 if not os.environ['PATH'].startswith(cur_path):
618 os.environ['PATH'] = f'{cur_path}:{os_path}'
616 os.environ['PATH'] = f'{cur_path}:{os_path}'
619
617
620
618
621 def obfuscate_url_pw(engine):
619 def obfuscate_url_pw(engine):
622 _url = engine or ''
620 _url = engine or ''
623 try:
621 try:
624 _url = sqlalchemy.engine.url.make_url(engine)
622 _url = sqlalchemy.engine.url.make_url(engine)
625 except Exception:
623 except Exception:
626 pass
624 pass
627 return repr(_url)
625 return repr(_url)
628
626
629
627
630 def get_server_url(environ):
628 def get_server_url(environ):
631 req = webob.Request(environ)
629 req = webob.Request(environ)
632 return req.host_url + req.script_name
630 return req.host_url + req.script_name
633
631
634
632
635 def unique_id(hexlen=32):
633 def unique_id(hexlen=32):
636 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
634 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
637 return suuid(truncate_to=hexlen, alphabet=alphabet)
635 return suuid(truncate_to=hexlen, alphabet=alphabet)
638
636
639
637
640 def suuid(url=None, truncate_to=22, alphabet=None):
638 def suuid(url=None, truncate_to=22, alphabet=None):
641 """
639 """
642 Generate and return a short URL safe UUID.
640 Generate and return a short URL safe UUID.
643
641
644 If the url parameter is provided, set the namespace to the provided
642 If the url parameter is provided, set the namespace to the provided
645 URL and generate a UUID.
643 URL and generate a UUID.
646
644
647 :param url to get the uuid for
645 :param url to get the uuid for
648 :truncate_to: truncate the basic 22 UUID to shorter version
646 :truncate_to: truncate the basic 22 UUID to shorter version
649
647
650 The IDs won't be universally unique any longer, but the probability of
648 The IDs won't be universally unique any longer, but the probability of
651 a collision will still be very low.
649 a collision will still be very low.
652 """
650 """
653 # Define our alphabet.
651 # Define our alphabet.
654 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
652 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
655
653
656 # If no URL is given, generate a random UUID.
654 # If no URL is given, generate a random UUID.
657 if url is None:
655 if url is None:
658 unique_id = uuid.uuid4().int
656 unique_id = uuid.uuid4().int
659 else:
657 else:
660 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
658 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
661
659
662 alphabet_length = len(_ALPHABET)
660 alphabet_length = len(_ALPHABET)
663 output = []
661 output = []
664 while unique_id > 0:
662 while unique_id > 0:
665 digit = unique_id % alphabet_length
663 digit = unique_id % alphabet_length
666 output.append(_ALPHABET[digit])
664 output.append(_ALPHABET[digit])
667 unique_id = int(unique_id / alphabet_length)
665 unique_id = int(unique_id / alphabet_length)
668 return "".join(output)[:truncate_to]
666 return "".join(output)[:truncate_to]
669
667
670
668
671 def get_current_rhodecode_user(request=None):
669 def get_current_rhodecode_user(request=None):
672 """
670 """
673 Gets rhodecode user from request
671 Gets rhodecode user from request
674 """
672 """
675 import pyramid.threadlocal
673 import pyramid.threadlocal
676 pyramid_request = request or pyramid.threadlocal.get_current_request()
674 pyramid_request = request or pyramid.threadlocal.get_current_request()
677
675
678 # web case
676 # web case
679 if pyramid_request and hasattr(pyramid_request, 'user'):
677 if pyramid_request and hasattr(pyramid_request, 'user'):
680 return pyramid_request.user
678 return pyramid_request.user
681
679
682 # api case
680 # api case
683 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
681 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
684 return pyramid_request.rpc_user
682 return pyramid_request.rpc_user
685
683
686 return None
684 return None
687
685
688
686
689 def action_logger_generic(action, namespace=''):
687 def action_logger_generic(action, namespace=''):
690 """
688 """
691 A generic logger for actions useful to the system overview, tries to find
689 A generic logger for actions useful to the system overview, tries to find
692 an acting user for the context of the call otherwise reports unknown user
690 an acting user for the context of the call otherwise reports unknown user
693
691
694 :param action: logging message eg 'comment 5 deleted'
692 :param action: logging message eg 'comment 5 deleted'
695 :param type: string
693 :param type: string
696
694
697 :param namespace: namespace of the logging message eg. 'repo.comments'
695 :param namespace: namespace of the logging message eg. 'repo.comments'
698 :param type: string
696 :param type: string
699
697
700 """
698 """
701
699
702 logger_name = 'rhodecode.actions'
700 logger_name = 'rhodecode.actions'
703
701
704 if namespace:
702 if namespace:
705 logger_name += '.' + namespace
703 logger_name += '.' + namespace
706
704
707 log = logging.getLogger(logger_name)
705 log = logging.getLogger(logger_name)
708
706
709 # get a user if we can
707 # get a user if we can
710 user = get_current_rhodecode_user()
708 user = get_current_rhodecode_user()
711
709
712 logfunc = log.info
710 logfunc = log.info
713
711
714 if not user:
712 if not user:
715 user = '<unknown user>'
713 user = '<unknown user>'
716 logfunc = log.warning
714 logfunc = log.warning
717
715
718 logfunc('Logging action by {}: {}'.format(user, action))
716 logfunc(f'Logging action by {user}: {action}')
719
717
720
718
721 def escape_split(text, sep=',', maxsplit=-1):
719 def escape_split(text, sep=',', maxsplit=-1):
722 r"""
720 r"""
723 Allows for escaping of the separator: e.g. arg='foo\, bar'
721 Allows for escaping of the separator: e.g. arg='foo\, bar'
724
722
725 It should be noted that the way bash et. al. do command line parsing, those
723 It should be noted that the way bash et. al. do command line parsing, those
726 single quotes are required.
724 single quotes are required.
727 """
725 """
728 escaped_sep = r'\%s' % sep
726 escaped_sep = r'\%s' % sep
729
727
730 if escaped_sep not in text:
728 if escaped_sep not in text:
731 return text.split(sep, maxsplit)
729 return text.split(sep, maxsplit)
732
730
733 before, _mid, after = text.partition(escaped_sep)
731 before, _mid, after = text.partition(escaped_sep)
734 startlist = before.split(sep, maxsplit) # a regular split is fine here
732 startlist = before.split(sep, maxsplit) # a regular split is fine here
735 unfinished = startlist[-1]
733 unfinished = startlist[-1]
736 startlist = startlist[:-1]
734 startlist = startlist[:-1]
737
735
738 # recurse because there may be more escaped separators
736 # recurse because there may be more escaped separators
739 endlist = escape_split(after, sep, maxsplit)
737 endlist = escape_split(after, sep, maxsplit)
740
738
741 # finish building the escaped value. we use endlist[0] becaue the first
739 # finish building the escaped value. we use endlist[0] becaue the first
742 # part of the string sent in recursion is the rest of the escaped value.
740 # part of the string sent in recursion is the rest of the escaped value.
743 unfinished += sep + endlist[0]
741 unfinished += sep + endlist[0]
744
742
745 return startlist + [unfinished] + endlist[1:] # put together all the parts
743 return startlist + [unfinished] + endlist[1:] # put together all the parts
746
744
747
745
748 class OptionalAttr(object):
746 class OptionalAttr(object):
749 """
747 """
750 Special Optional Option that defines other attribute. Example::
748 Special Optional Option that defines other attribute. Example::
751
749
752 def test(apiuser, userid=Optional(OAttr('apiuser')):
750 def test(apiuser, userid=Optional(OAttr('apiuser')):
753 user = Optional.extract(userid)
751 user = Optional.extract(userid)
754 # calls
752 # calls
755
753
756 """
754 """
757
755
758 def __init__(self, attr_name):
756 def __init__(self, attr_name):
759 self.attr_name = attr_name
757 self.attr_name = attr_name
760
758
761 def __repr__(self):
759 def __repr__(self):
762 return '<OptionalAttr:%s>' % self.attr_name
760 return '<OptionalAttr:%s>' % self.attr_name
763
761
764 def __call__(self):
762 def __call__(self):
765 return self
763 return self
766
764
767
765
768 # alias
766 # alias
769 OAttr = OptionalAttr
767 OAttr = OptionalAttr
770
768
771
769
772 class Optional(object):
770 class Optional(object):
773 """
771 """
774 Defines an optional parameter::
772 Defines an optional parameter::
775
773
776 param = param.getval() if isinstance(param, Optional) else param
774 param = param.getval() if isinstance(param, Optional) else param
777 param = param() if isinstance(param, Optional) else param
775 param = param() if isinstance(param, Optional) else param
778
776
779 is equivalent of::
777 is equivalent of::
780
778
781 param = Optional.extract(param)
779 param = Optional.extract(param)
782
780
783 """
781 """
784
782
785 def __init__(self, type_):
783 def __init__(self, type_):
786 self.type_ = type_
784 self.type_ = type_
787
785
788 def __repr__(self):
786 def __repr__(self):
789 return '<Optional:%s>' % self.type_.__repr__()
787 return '<Optional:%s>' % self.type_.__repr__()
790
788
791 def __call__(self):
789 def __call__(self):
792 return self.getval()
790 return self.getval()
793
791
794 def getval(self):
792 def getval(self):
795 """
793 """
796 returns value from this Optional instance
794 returns value from this Optional instance
797 """
795 """
798 if isinstance(self.type_, OAttr):
796 if isinstance(self.type_, OAttr):
799 # use params name
797 # use params name
800 return self.type_.attr_name
798 return self.type_.attr_name
801 return self.type_
799 return self.type_
802
800
803 @classmethod
801 @classmethod
804 def extract(cls, val):
802 def extract(cls, val):
805 """
803 """
806 Extracts value from Optional() instance
804 Extracts value from Optional() instance
807
805
808 :param val:
806 :param val:
809 :return: original value if it's not Optional instance else
807 :return: original value if it's not Optional instance else
810 value of instance
808 value of instance
811 """
809 """
812 if isinstance(val, cls):
810 if isinstance(val, cls):
813 return val.getval()
811 return val.getval()
814 return val
812 return val
815
813
816
814
817 def glob2re(pat):
815 def glob2re(pat):
818 import fnmatch
816 import fnmatch
819 return fnmatch.translate(pat)
817 return fnmatch.translate(pat)
820
818
821
819
822 def parse_byte_string(size_str):
820 def parse_byte_string(size_str):
823 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
821 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
824 if not match:
822 if not match:
825 raise ValueError(f'Given size:{size_str} is invalid, please make sure '
823 raise ValueError(f'Given size:{size_str} is invalid, please make sure '
826 f'to use format of <num>(MB|KB)')
824 f'to use format of <num>(MB|KB)')
827
825
828 _parts = match.groups()
826 _parts = match.groups()
829 num, type_ = _parts
827 num, type_ = _parts
830 return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
828 return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
831
829
832
830
833 class CachedProperty(object):
831 class CachedProperty(object):
834 """
832 """
835 Lazy Attributes. With option to invalidate the cache by running a method
833 Lazy Attributes. With option to invalidate the cache by running a method
836
834
837 >>> class Foo(object):
835 >>> class Foo(object):
838 ...
836 ...
839 ... @CachedProperty
837 ... @CachedProperty
840 ... def heavy_func(self):
838 ... def heavy_func(self):
841 ... return 'super-calculation'
839 ... return 'super-calculation'
842 ...
840 ...
843 ... foo = Foo()
841 ... foo = Foo()
844 ... foo.heavy_func() # first computation
842 ... foo.heavy_func() # first computation
845 ... foo.heavy_func() # fetch from cache
843 ... foo.heavy_func() # fetch from cache
846 ... foo._invalidate_prop_cache('heavy_func')
844 ... foo._invalidate_prop_cache('heavy_func')
847
845
848 # at this point calling foo.heavy_func() will be re-computed
846 # at this point calling foo.heavy_func() will be re-computed
849 """
847 """
850
848
851 def __init__(self, func, func_name=None):
849 def __init__(self, func, func_name=None):
852
850
853 if func_name is None:
851 if func_name is None:
854 func_name = func.__name__
852 func_name = func.__name__
855 self.data = (func, func_name)
853 self.data = (func, func_name)
856 functools.update_wrapper(self, func)
854 functools.update_wrapper(self, func)
857
855
858 def __get__(self, inst, class_):
856 def __get__(self, inst, class_):
859 if inst is None:
857 if inst is None:
860 return self
858 return self
861
859
862 func, func_name = self.data
860 func, func_name = self.data
863 value = func(inst)
861 value = func(inst)
864 inst.__dict__[func_name] = value
862 inst.__dict__[func_name] = value
865 if '_invalidate_prop_cache' not in inst.__dict__:
863 if '_invalidate_prop_cache' not in inst.__dict__:
866 inst.__dict__['_invalidate_prop_cache'] = functools.partial(
864 inst.__dict__['_invalidate_prop_cache'] = functools.partial(
867 self._invalidate_prop_cache, inst)
865 self._invalidate_prop_cache, inst)
868 return value
866 return value
869
867
870 def _invalidate_prop_cache(self, inst, name):
868 def _invalidate_prop_cache(self, inst, name):
871 inst.__dict__.pop(name, None)
869 inst.__dict__.pop(name, None)
872
870
873
871
874 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
872 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
875 """
873 """
876 Retry decorator with exponential backoff.
874 Retry decorator with exponential backoff.
877
875
878 Parameters
876 Parameters
879 ----------
877 ----------
880 func : typing.Callable, optional
878 func : typing.Callable, optional
881 Callable on which the decorator is applied, by default None
879 Callable on which the decorator is applied, by default None
882 exception : Exception or tuple of Exceptions, optional
880 exception : Exception or tuple of Exceptions, optional
883 Exception(s) that invoke retry, by default Exception
881 Exception(s) that invoke retry, by default Exception
884 n_tries : int, optional
882 n_tries : int, optional
885 Number of tries before giving up, by default 5
883 Number of tries before giving up, by default 5
886 delay : int, optional
884 delay : int, optional
887 Initial delay between retries in seconds, by default 5
885 Initial delay between retries in seconds, by default 5
888 backoff : int, optional
886 backoff : int, optional
889 Backoff multiplier e.g. value of 2 will double the delay, by default 1
887 Backoff multiplier e.g. value of 2 will double the delay, by default 1
890 logger : bool, optional
888 logger : bool, optional
891 Option to log or print, by default False
889 Option to log or print, by default False
892
890
893 Returns
891 Returns
894 -------
892 -------
895 typing.Callable
893 typing.Callable
896 Decorated callable that calls itself when exception(s) occur.
894 Decorated callable that calls itself when exception(s) occur.
897
895
898 Examples
896 Examples
899 --------
897 --------
900 >>> import random
898 >>> import random
901 >>> @retry(exception=Exception, n_tries=3)
899 >>> @retry(exception=Exception, n_tries=3)
902 ... def test_random(text):
900 ... def test_random(text):
903 ... x = random.random()
901 ... x = random.random()
904 ... if x < 0.5:
902 ... if x < 0.5:
905 ... raise Exception("Fail")
903 ... raise Exception("Fail")
906 ... else:
904 ... else:
907 ... print("Success: ", text)
905 ... print("Success: ", text)
908 >>> test_random("It works!")
906 >>> test_random("It works!")
909 """
907 """
910
908
911 if func is None:
909 if func is None:
912 return functools.partial(
910 return functools.partial(
913 retry,
911 retry,
914 exception=exception,
912 exception=exception,
915 n_tries=n_tries,
913 n_tries=n_tries,
916 delay=delay,
914 delay=delay,
917 backoff=backoff,
915 backoff=backoff,
918 logger=logger,
916 logger=logger,
919 )
917 )
920
918
921 @functools.wraps(func)
919 @functools.wraps(func)
922 def wrapper(*args, **kwargs):
920 def wrapper(*args, **kwargs):
923 _n_tries, n_delay = n_tries, delay
921 _n_tries, n_delay = n_tries, delay
924 log = logging.getLogger('rhodecode.retry')
922 log = logging.getLogger('rhodecode.retry')
925
923
926 while _n_tries > 1:
924 while _n_tries > 1:
927 try:
925 try:
928 return func(*args, **kwargs)
926 return func(*args, **kwargs)
929 except exception as e:
927 except exception as e:
930 e_details = repr(e)
928 e_details = repr(e)
931 msg = "Exception on calling func {func}: {e}, " \
929 msg = "Exception on calling func {func}: {e}, " \
932 "Retrying in {n_delay} seconds..."\
930 "Retrying in {n_delay} seconds..."\
933 .format(func=func, e=e_details, n_delay=n_delay)
931 .format(func=func, e=e_details, n_delay=n_delay)
934 if logger:
932 if logger:
935 log.warning(msg)
933 log.warning(msg)
936 else:
934 else:
937 print(msg)
935 print(msg)
938 time.sleep(n_delay)
936 time.sleep(n_delay)
939 _n_tries -= 1
937 _n_tries -= 1
940 n_delay *= backoff
938 n_delay *= backoff
941
939
942 return func(*args, **kwargs)
940 return func(*args, **kwargs)
943
941
944 return wrapper
942 return wrapper
945
943
946
944
947 def user_agent_normalizer(user_agent_raw, safe=True):
945 def user_agent_normalizer(user_agent_raw, safe=True):
948 log = logging.getLogger('rhodecode.user_agent_normalizer')
946 log = logging.getLogger('rhodecode.user_agent_normalizer')
949 ua = (user_agent_raw or '').strip().lower()
947 ua = (user_agent_raw or '').strip().lower()
950 ua = ua.replace('"', '')
948 ua = ua.replace('"', '')
951
949
952 try:
950 try:
953 if 'mercurial/proto-1.0' in ua:
951 if 'mercurial/proto-1.0' in ua:
954 ua = ua.replace('mercurial/proto-1.0', '')
952 ua = ua.replace('mercurial/proto-1.0', '')
955 ua = ua.replace('(', '').replace(')', '').strip()
953 ua = ua.replace('(', '').replace(')', '').strip()
956 ua = ua.replace('mercurial ', 'mercurial/')
954 ua = ua.replace('mercurial ', 'mercurial/')
957 elif ua.startswith('git'):
955 elif ua.startswith('git'):
958 parts = ua.split(' ')
956 parts = ua.split(' ')
959 if parts:
957 if parts:
960 ua = parts[0]
958 ua = parts[0]
961 ua = re.sub(r'\.windows\.\d', '', ua).strip()
959 ua = re.sub(r'\.windows\.\d', '', ua).strip()
962
960
963 return ua
961 return ua
964 except Exception:
962 except Exception:
965 log.exception('Failed to parse scm user-agent')
963 log.exception('Failed to parse scm user-agent')
966 if not safe:
964 if not safe:
967 raise
965 raise
968
966
969 return ua
967 return ua
970
968
971
969
972 def get_available_port(min_port=40000, max_port=55555, use_range=False):
970 def get_available_port(min_port=40000, max_port=55555, use_range=False):
973 hostname = ''
971 hostname = ''
974 for _ in range(min_port, max_port):
972 for _ in range(min_port, max_port):
975 pick_port = 0
973 pick_port = 0
976 if use_range:
974 if use_range:
977 pick_port = random.randint(min_port, max_port)
975 pick_port = random.randint(min_port, max_port)
978
976
979 with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
977 with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
980 try:
978 try:
981 s.bind((hostname, pick_port))
979 s.bind((hostname, pick_port))
982 s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
980 s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
983 return s.getsockname()[1]
981 return s.getsockname()[1]
984 except OSError:
982 except OSError:
985 continue
983 continue
986 except socket.error as e:
984 except socket.error as e:
987 if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]:
985 if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]:
988 continue
986 continue
989 raise
987 raise
@@ -1,1987 +1,1985 b''
1
2
3 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 """
19 """
22 Base module for all VCS systems
20 Base module for all VCS systems
23 """
21 """
24 import os
22 import os
25 import re
23 import re
26 import time
24 import time
27 import shutil
25 import shutil
28 import datetime
26 import datetime
29 import fnmatch
27 import fnmatch
30 import itertools
28 import itertools
31 import logging
29 import logging
32 import dataclasses
30 import dataclasses
33 import warnings
31 import warnings
34
32
35 from zope.cachedescriptors.property import Lazy as LazyProperty
33 from zope.cachedescriptors.property import Lazy as LazyProperty
36
34
37
35
38 import rhodecode
36 import rhodecode
39 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
40 from rhodecode.lib.utils2 import safe_str, CachedProperty
38 from rhodecode.lib.utils2 import safe_str, CachedProperty
41 from rhodecode.lib.vcs.utils import author_name, author_email
39 from rhodecode.lib.vcs.utils import author_name, author_email
42 from rhodecode.lib.vcs.conf import settings
40 from rhodecode.lib.vcs.conf import settings
43 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
48 RepositoryError)
46 RepositoryError)
49
47
50
48
51 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
52
50
53
51
54 FILEMODE_DEFAULT = 0o100644
52 FILEMODE_DEFAULT = 0o100644
55 FILEMODE_EXECUTABLE = 0o100755
53 FILEMODE_EXECUTABLE = 0o100755
56 EMPTY_COMMIT_ID = '0' * 40
54 EMPTY_COMMIT_ID = '0' * 40
57
55
58
56
59 @dataclasses.dataclass
57 @dataclasses.dataclass
60 class Reference:
58 class Reference:
61 type: str
59 type: str
62 name: str
60 name: str
63 commit_id: str
61 commit_id: str
64
62
65 def __iter__(self):
63 def __iter__(self):
66 yield self.type
64 yield self.type
67 yield self.name
65 yield self.name
68 yield self.commit_id
66 yield self.commit_id
69
67
70 @property
68 @property
71 def branch(self):
69 def branch(self):
72 if self.type == 'branch':
70 if self.type == 'branch':
73 return self.name
71 return self.name
74
72
75 @property
73 @property
76 def bookmark(self):
74 def bookmark(self):
77 if self.type == 'book':
75 if self.type == 'book':
78 return self.name
76 return self.name
79
77
80 @property
78 @property
81 def to_str(self):
79 def to_str(self):
82 return reference_to_unicode(self)
80 return reference_to_unicode(self)
83
81
84 def asdict(self):
82 def asdict(self):
85 return dict(
83 return dict(
86 type=self.type,
84 type=self.type,
87 name=self.name,
85 name=self.name,
88 commit_id=self.commit_id
86 commit_id=self.commit_id
89 )
87 )
90
88
91
89
92 def unicode_to_reference(raw: str):
90 def unicode_to_reference(raw: str):
93 """
91 """
94 Convert a unicode (or string) to a reference object.
92 Convert a unicode (or string) to a reference object.
95 If unicode evaluates to False it returns None.
93 If unicode evaluates to False it returns None.
96 """
94 """
97 if raw:
95 if raw:
98 refs = raw.split(':')
96 refs = raw.split(':')
99 return Reference(*refs)
97 return Reference(*refs)
100 else:
98 else:
101 return None
99 return None
102
100
103
101
104 def reference_to_unicode(ref: Reference):
102 def reference_to_unicode(ref: Reference):
105 """
103 """
106 Convert a reference object to unicode.
104 Convert a reference object to unicode.
107 If reference is None it returns None.
105 If reference is None it returns None.
108 """
106 """
109 if ref:
107 if ref:
110 return ':'.join(ref)
108 return ':'.join(ref)
111 else:
109 else:
112 return None
110 return None
113
111
114
112
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
166
164
167
165
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
196
194
197
195
class MergeResponse(object):
    """
    Outcome of a server-side merge attempt: whether it is possible, whether
    it was executed, the resulting ref and a failure reason with metadata.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled exception. '
            '{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to '
            'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target '
            '`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains '
            'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target `{target_ref.name}` '
            'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository is '
            'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be merged because the target '
            'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be merged because the source '
            'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts related '
            'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        # Only instances of the exact same class can compare equal;
        # merge_ref and metadata deliberately do not take part in equality.
        if not isinstance(other, self.__class__):
            return False
        return (self.possible == other.possible
                and self.executed == other.executed
                and self.failure_reason == other.failure_reason)

    @property
    def label(self):
        # Reverse-map the numeric failure reason back to its constant name,
        # skipping private/deprecated entries.
        label_dict = {v: k for k, v in MergeFailureReason.__dict__.items()
                      if not k.startswith('_')}
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_str(self.MERGE_STATUS_MESSAGES[self.failure_reason])

        try:
            return msg.format(**self.metadata)
        except Exception:
            # Missing metadata keys must not break callers; fall back to
            # the unformatted template.
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        # Plain-dict view, including the computed merge_status_message.
        keys = ('possible', 'executed', 'merge_ref', 'failure_reason',
                'merge_status_message')
        return {k: getattr(self, k) for k in keys}
284
282
285
283
class TargetRefMissing(ValueError):
    """Raised when the target reference of a merge/compare cannot be resolved."""
    pass
288
286
289
287
class SourceRefMissing(ValueError):
    """Raised when the source reference of a merge/compare cannot be resolved."""
    pass
292
290
293
291
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

       name of default branch (i.e. "trunk" for svn, "master" for git etc.

    .. attribute:: commit_ids

       list of all available commit ids, in ascending order

    .. attribute:: path

       absolute path to the repository

    .. attribute:: bookmarks

       Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
       there are no bookmarks or the backend implementation does not support
       bookmarks.

    .. attribute:: tags

       Mapping from name to :term:`Commit ID` of the tag.

    """

    # Backend-specific defaults; subclasses override where applicable.
    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = "Unknown"
    DEFAULT_DESCRIPTION = "unknown"
    # 40 zeros: sha-shaped sentinel id for the "no commit" state.
    EMPTY_COMMIT_ID = '0' * 40
    # Matches a full 40-hex-digit commit id.
    COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')

    path = None

    # Cached emptiness flag; None means "not determined yet" (see is_empty()).
    _is_empty = None
    # NOTE(review): mutable class-level default — presumably always rebound
    # per instance by backends; verify nothing mutates the shared dict.
    _commit_ids = {}
332
330
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path``, or if a directory at
        ``repo_path`` exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError
348
346
349 def __repr__(self):
347 def __repr__(self):
350 return '<{} at {}>'.format(self.__class__.__name__, self.path)
348 return f'<{self.__class__.__name__} at {self.path}>'
351
349
    def __len__(self):
        # Number of commits in the repository (delegates to count()).
        return self.count()
354
352
355 def __eq__(self, other):
353 def __eq__(self, other):
356 same_instance = isinstance(other, self.__class__)
354 same_instance = isinstance(other, self.__class__)
357 return same_instance and other.path == self.path
355 return same_instance and other.path == self.path
358
356
    def __ne__(self, other):
        # Explicit inverse of __eq__ (carried over from python2-era code).
        return not self.__eq__(other)
361
359
362 def get_create_shadow_cache_pr_path(self, db_repo):
360 def get_create_shadow_cache_pr_path(self, db_repo):
363 path = db_repo.cached_diffs_dir
361 path = db_repo.cached_diffs_dir
364 if not os.path.exists(path):
362 if not os.path.exists(path):
365 os.makedirs(path, 0o755)
363 os.makedirs(path, 0o755)
366 return path
364 return path
367
365
368 @classmethod
366 @classmethod
369 def get_default_config(cls, default=None):
367 def get_default_config(cls, default=None):
370 config = Config()
368 config = Config()
371 if default and isinstance(default, list):
369 if default and isinstance(default, list):
372 for section, key, val in default:
370 for section, key, val in default:
373 config.set(section, key, val)
371 config.set(section, key, val)
374 return config
372 return config
375
373
    @LazyProperty
    def _remote(self):
        # Backend-specific remote proxy; concrete backends must provide it.
        raise NotImplementedError
379
377
    def _heads(self, branch=None):
        # Default implementation: no heads; backends may override.
        return []
382
380
    @LazyProperty
    def EMPTY_COMMIT(self):
        # Sentinel commit object representing the empty repository state.
        return EmptyCommit(self.EMPTY_COMMIT_ID)
386
384
387 @LazyProperty
385 @LazyProperty
388 def alias(self):
386 def alias(self):
389 for k, v in settings.BACKENDS.items():
387 for k, v in settings.BACKENDS.items():
390 if v.split('.')[-1] == str(self.__class__.__name__):
388 if v.split('.')[-1] == str(self.__class__.__name__):
391 return k
389 return k
392
390
    @LazyProperty
    def name(self):
        # Repository name is the last path segment, coerced to str.
        return safe_str(os.path.basename(self.path))
396
394
    @LazyProperty
    def description(self):
        # Human-readable repository description; backend-specific.
        raise NotImplementedError
400
398
401 def refs(self):
399 def refs(self):
402 """
400 """
403 returns a `dict` with branches, bookmarks, tags, and closed_branches
401 returns a `dict` with branches, bookmarks, tags, and closed_branches
404 for this repository
402 for this repository
405 """
403 """
406 return dict(
404 return dict(
407 branches=self.branches,
405 branches=self.branches,
408 branches_closed=self.branches_closed,
406 branches_closed=self.branches_closed,
409 tags=self.tags,
407 tags=self.tags,
410 bookmarks=self.bookmarks
408 bookmarks=self.bookmarks
411 )
409 )
412
410
    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError
419
417
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError
426
424
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError
433
431
    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tags names to commit ids.
        """
        raise NotImplementedError
440
438
441 @LazyProperty
439 @LazyProperty
442 def size(self):
440 def size(self):
443 """
441 """
444 Returns combined size in bytes for all repository files
442 Returns combined size in bytes for all repository files
445 """
443 """
446 tip = self.get_commit()
444 tip = self.get_commit()
447 return tip.size
445 return tip.size
448
446
449 def size_at_commit(self, commit_id):
447 def size_at_commit(self, commit_id):
450 commit = self.get_commit(commit_id)
448 commit = self.get_commit(commit_id)
451 return commit.size
449 return commit.size
452
450
453 def _check_for_empty(self):
451 def _check_for_empty(self):
454 no_commits = len(self._commit_ids) == 0
452 no_commits = len(self._commit_ids) == 0
455 if no_commits:
453 if no_commits:
456 # check on remote to be sure
454 # check on remote to be sure
457 return self._remote.is_empty()
455 return self._remote.is_empty()
458 else:
456 else:
459 return False
457 return False
460
458
461 def is_empty(self):
459 def is_empty(self):
462 if rhodecode.is_test:
460 if rhodecode.is_test:
463 return self._check_for_empty()
461 return self._check_for_empty()
464
462
465 if self._is_empty is None:
463 if self._is_empty is None:
466 # cache empty for production, but not tests
464 # cache empty for production, but not tests
467 self._is_empty = self._check_for_empty()
465 self._is_empty = self._check_for_empty()
468
466
469 return self._is_empty
467 return self._is_empty
470
468
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError
478
476
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError
485
483
486 # ==========================================================================
484 # ==========================================================================
487 # COMMITS
485 # COMMITS
488 # ==========================================================================
486 # ==========================================================================
489
487
    @CachedProperty
    def commit_ids(self):
        # List of all commit ids in ascending order; computed once and cached.
        raise NotImplementedError
493
491
    def append_commit_id(self, commit_id):
        # Register a newly created commit id without a full rescan; the
        # rebuild must happen before the property cache is invalidated.
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

        # clear cache
        self._invalidate_prop_cache('commit_ids')
        self._is_empty = False
501
499
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError
513
511
514 def __iter__(self):
512 def __iter__(self):
515 for commit_id in self.commit_ids:
513 for commit_id in self.commit_ids:
516 yield self.get_commit(commit_id=commit_id)
514 yield self.get_commit(commit_id=commit_id)
517
515
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        :param translate_tags:
        """
        raise NotImplementedError
536
534
537 def __getitem__(self, key):
535 def __getitem__(self, key):
538 """
536 """
539 Allows index based access to the commit objects of this repository.
537 Allows index based access to the commit objects of this repository.
540 """
538 """
541 pre_load = ["author", "branch", "date", "message", "parents"]
539 pre_load = ["author", "branch", "date", "message", "parents"]
542 if isinstance(key, slice):
540 if isinstance(key, slice):
543 return self._get_range(key, pre_load)
541 return self._get_range(key, pre_load)
544 return self.get_commit(commit_idx=key, pre_load=pre_load)
542 return self.get_commit(commit_idx=key, pre_load=pre_load)
545
543
546 def _get_range(self, slice_obj, pre_load):
544 def _get_range(self, slice_obj, pre_load):
547 for commit_id in self.commit_ids.__getitem__(slice_obj):
545 for commit_id in self.commit_ids.__getitem__(slice_obj):
548 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
546 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
549
547
    def count(self):
        # Total number of commits in this repository.
        return len(self.commit_ids)
552
550
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError
566
564
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        raise NotImplementedError
579
577
    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        """
        raise NotImplementedError
605
603
    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError
611
609
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError
623
621
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError
641
639
642 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
640 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
643 user_name='', user_email='', message='', dry_run=False,
641 user_name='', user_email='', message='', dry_run=False,
644 use_rebase=False, close_branch=False):
642 use_rebase=False, close_branch=False):
645 """
643 """
646 Merge the revisions specified in `source_ref` from `source_repo`
644 Merge the revisions specified in `source_ref` from `source_repo`
647 onto the `target_ref` of this repository.
645 onto the `target_ref` of this repository.
648
646
649 `source_ref` and `target_ref` are named tupls with the following
647 `source_ref` and `target_ref` are named tupls with the following
650 fields `type`, `name` and `commit_id`.
648 fields `type`, `name` and `commit_id`.
651
649
652 Returns a MergeResponse named tuple with the following fields
650 Returns a MergeResponse named tuple with the following fields
653 'possible', 'executed', 'source_commit', 'target_commit',
651 'possible', 'executed', 'source_commit', 'target_commit',
654 'merge_commit'.
652 'merge_commit'.
655
653
656 :param repo_id: `repo_id` target repo id.
654 :param repo_id: `repo_id` target repo id.
657 :param workspace_id: `workspace_id` unique identifier.
655 :param workspace_id: `workspace_id` unique identifier.
658 :param target_ref: `target_ref` points to the commit on top of which
656 :param target_ref: `target_ref` points to the commit on top of which
659 the `source_ref` should be merged.
657 the `source_ref` should be merged.
660 :param source_repo: The repository that contains the commits to be
658 :param source_repo: The repository that contains the commits to be
661 merged.
659 merged.
662 :param source_ref: `source_ref` points to the topmost commit from
660 :param source_ref: `source_ref` points to the topmost commit from
663 the `source_repo` which should be merged.
661 the `source_repo` which should be merged.
664 :param user_name: Merge commit `user_name`.
662 :param user_name: Merge commit `user_name`.
665 :param user_email: Merge commit `user_email`.
663 :param user_email: Merge commit `user_email`.
666 :param message: Merge commit `message`.
664 :param message: Merge commit `message`.
667 :param dry_run: If `True` the merge will not take place.
665 :param dry_run: If `True` the merge will not take place.
668 :param use_rebase: If `True` commits from the source will be rebased
666 :param use_rebase: If `True` commits from the source will be rebased
669 on top of the target instead of being merged.
667 on top of the target instead of being merged.
670 :param close_branch: If `True` branch will be close before merging it
668 :param close_branch: If `True` branch will be close before merging it
671 """
669 """
672 if dry_run:
670 if dry_run:
673 message = message or settings.MERGE_DRY_RUN_MESSAGE
671 message = message or settings.MERGE_DRY_RUN_MESSAGE
674 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
672 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
675 user_name = user_name or settings.MERGE_DRY_RUN_USER
673 user_name = user_name or settings.MERGE_DRY_RUN_USER
676 else:
674 else:
677 if not user_name:
675 if not user_name:
678 raise ValueError('user_name cannot be empty')
676 raise ValueError('user_name cannot be empty')
679 if not user_email:
677 if not user_email:
680 raise ValueError('user_email cannot be empty')
678 raise ValueError('user_email cannot be empty')
681 if not message:
679 if not message:
682 raise ValueError('message cannot be empty')
680 raise ValueError('message cannot be empty')
683
681
684 try:
682 try:
685 return self._merge_repo(
683 return self._merge_repo(
686 repo_id, workspace_id, target_ref, source_repo,
684 repo_id, workspace_id, target_ref, source_repo,
687 source_ref, message, user_name, user_email, dry_run=dry_run,
685 source_ref, message, user_name, user_email, dry_run=dry_run,
688 use_rebase=use_rebase, close_branch=close_branch)
686 use_rebase=use_rebase, close_branch=close_branch)
689 except RepositoryError as exc:
687 except RepositoryError as exc:
690 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
688 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
691 return MergeResponse(
689 return MergeResponse(
692 False, False, None, MergeFailureReason.UNKNOWN,
690 False, False, None, MergeFailureReason.UNKNOWN,
693 metadata={'exception': str(exc)})
691 metadata={'exception': str(exc)})
694
692
695 def _merge_repo(self, repo_id, workspace_id, target_ref,
693 def _merge_repo(self, repo_id, workspace_id, target_ref,
696 source_repo, source_ref, merge_message,
694 source_repo, source_ref, merge_message,
697 merger_name, merger_email, dry_run=False,
695 merger_name, merger_email, dry_run=False,
698 use_rebase=False, close_branch=False):
696 use_rebase=False, close_branch=False):
699 """Internal implementation of merge."""
697 """Internal implementation of merge."""
700 raise NotImplementedError
698 raise NotImplementedError
701
699
702 def _maybe_prepare_merge_workspace(
700 def _maybe_prepare_merge_workspace(
703 self, repo_id, workspace_id, target_ref, source_ref):
701 self, repo_id, workspace_id, target_ref, source_ref):
704 """
702 """
705 Create the merge workspace.
703 Create the merge workspace.
706
704
707 :param workspace_id: `workspace_id` unique identifier.
705 :param workspace_id: `workspace_id` unique identifier.
708 """
706 """
709 raise NotImplementedError
707 raise NotImplementedError
710
708
711 @classmethod
709 @classmethod
712 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
710 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
713 """
711 """
714 Legacy version that was used before. We still need it for
712 Legacy version that was used before. We still need it for
715 backward compat
713 backward compat
716 """
714 """
717 return os.path.join(
715 return os.path.join(
718 os.path.dirname(repo_path),
716 os.path.dirname(repo_path),
719 '.__shadow_{}_{}'.format(os.path.basename(repo_path), workspace_id))
717 f'.__shadow_{os.path.basename(repo_path)}_{workspace_id}')
720
718
721 @classmethod
719 @classmethod
722 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
720 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
723 # The name of the shadow repository must start with '.', so it is
721 # The name of the shadow repository must start with '.', so it is
724 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
722 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
725 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
723 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
726 if os.path.exists(legacy_repository_path):
724 if os.path.exists(legacy_repository_path):
727 return legacy_repository_path
725 return legacy_repository_path
728 else:
726 else:
729 return os.path.join(
727 return os.path.join(
730 os.path.dirname(repo_path),
728 os.path.dirname(repo_path),
731 '.__shadow_repo_{}_{}'.format(repo_id, workspace_id))
729 f'.__shadow_repo_{repo_id}_{workspace_id}')
732
730
733 def cleanup_merge_workspace(self, repo_id, workspace_id):
731 def cleanup_merge_workspace(self, repo_id, workspace_id):
734 """
732 """
735 Remove merge workspace.
733 Remove merge workspace.
736
734
737 This function MUST not fail in case there is no workspace associated to
735 This function MUST not fail in case there is no workspace associated to
738 the given `workspace_id`.
736 the given `workspace_id`.
739
737
740 :param workspace_id: `workspace_id` unique identifier.
738 :param workspace_id: `workspace_id` unique identifier.
741 """
739 """
742 shadow_repository_path = self._get_shadow_repository_path(
740 shadow_repository_path = self._get_shadow_repository_path(
743 self.path, repo_id, workspace_id)
741 self.path, repo_id, workspace_id)
744 shadow_repository_path_del = '{}.{}.delete'.format(
742 shadow_repository_path_del = '{}.{}.delete'.format(
745 shadow_repository_path, time.time())
743 shadow_repository_path, time.time())
746
744
747 # move the shadow repo, so it never conflicts with the one used.
745 # move the shadow repo, so it never conflicts with the one used.
748 # we use this method because shutil.rmtree had some edge case problems
746 # we use this method because shutil.rmtree had some edge case problems
749 # removing symlinked repositories
747 # removing symlinked repositories
750 if not os.path.isdir(shadow_repository_path):
748 if not os.path.isdir(shadow_repository_path):
751 return
749 return
752
750
753 shutil.move(shadow_repository_path, shadow_repository_path_del)
751 shutil.move(shadow_repository_path, shadow_repository_path_del)
754 try:
752 try:
755 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
753 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
756 except Exception:
754 except Exception:
757 log.exception('Failed to gracefully remove shadow repo under %s',
755 log.exception('Failed to gracefully remove shadow repo under %s',
758 shadow_repository_path_del)
756 shadow_repository_path_del)
759 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
757 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
760
758
761 # ========== #
759 # ========== #
762 # COMMIT API #
760 # COMMIT API #
763 # ========== #
761 # ========== #
764
762
765 @LazyProperty
763 @LazyProperty
766 def in_memory_commit(self):
764 def in_memory_commit(self):
767 """
765 """
768 Returns :class:`InMemoryCommit` object for this repository.
766 Returns :class:`InMemoryCommit` object for this repository.
769 """
767 """
770 raise NotImplementedError
768 raise NotImplementedError
771
769
772 # ======================== #
770 # ======================== #
773 # UTILITIES FOR SUBCLASSES #
771 # UTILITIES FOR SUBCLASSES #
774 # ======================== #
772 # ======================== #
775
773
776 def _validate_diff_commits(self, commit1, commit2):
774 def _validate_diff_commits(self, commit1, commit2):
777 """
775 """
778 Validates that the given commits are related to this repository.
776 Validates that the given commits are related to this repository.
779
777
780 Intended as a utility for sub classes to have a consistent validation
778 Intended as a utility for sub classes to have a consistent validation
781 of input parameters in methods like :meth:`get_diff`.
779 of input parameters in methods like :meth:`get_diff`.
782 """
780 """
783 self._validate_commit(commit1)
781 self._validate_commit(commit1)
784 self._validate_commit(commit2)
782 self._validate_commit(commit2)
785 if (isinstance(commit1, EmptyCommit) and
783 if (isinstance(commit1, EmptyCommit) and
786 isinstance(commit2, EmptyCommit)):
784 isinstance(commit2, EmptyCommit)):
787 raise ValueError("Cannot compare two empty commits")
785 raise ValueError("Cannot compare two empty commits")
788
786
789 def _validate_commit(self, commit):
787 def _validate_commit(self, commit):
790 if not isinstance(commit, BaseCommit):
788 if not isinstance(commit, BaseCommit):
791 raise TypeError(
789 raise TypeError(
792 "%s is not of type BaseCommit" % repr(commit))
790 "%s is not of type BaseCommit" % repr(commit))
793 if commit.repository != self and not isinstance(commit, EmptyCommit):
791 if commit.repository != self and not isinstance(commit, EmptyCommit):
794 raise ValueError(
792 raise ValueError(
795 "Commit %s must be a valid commit from this repository %s, "
793 "Commit %s must be a valid commit from this repository %s, "
796 "related to this repository instead %s." %
794 "related to this repository instead %s." %
797 (commit, self, commit.repository))
795 (commit, self, commit.repository))
798
796
799 def _validate_commit_id(self, commit_id):
797 def _validate_commit_id(self, commit_id):
800 if not isinstance(commit_id, str):
798 if not isinstance(commit_id, str):
801 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
799 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
802
800
803 def _validate_commit_idx(self, commit_idx):
801 def _validate_commit_idx(self, commit_idx):
804 if not isinstance(commit_idx, int):
802 if not isinstance(commit_idx, int):
805 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
803 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
806
804
807 def _validate_branch_name(self, branch_name):
805 def _validate_branch_name(self, branch_name):
808 if branch_name and branch_name not in self.branches_all:
806 if branch_name and branch_name not in self.branches_all:
809 msg = ("Branch {} not found in {}".format(branch_name, self))
807 msg = (f"Branch {branch_name} not found in {self}")
810 raise BranchDoesNotExistError(msg)
808 raise BranchDoesNotExistError(msg)
811
809
812 #
810 #
813 # Supporting deprecated API parts
811 # Supporting deprecated API parts
814 # TODO: johbo: consider to move this into a mixin
812 # TODO: johbo: consider to move this into a mixin
815 #
813 #
816
814
817 @property
815 @property
818 def EMPTY_CHANGESET(self):
816 def EMPTY_CHANGESET(self):
819 warnings.warn(
817 warnings.warn(
820 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
818 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
821 return self.EMPTY_COMMIT_ID
819 return self.EMPTY_COMMIT_ID
822
820
823 @property
821 @property
824 def revisions(self):
822 def revisions(self):
825 warnings.warn("Use commits attribute instead", DeprecationWarning)
823 warnings.warn("Use commits attribute instead", DeprecationWarning)
826 return self.commit_ids
824 return self.commit_ids
827
825
828 @revisions.setter
826 @revisions.setter
829 def revisions(self, value):
827 def revisions(self, value):
830 warnings.warn("Use commits attribute instead", DeprecationWarning)
828 warnings.warn("Use commits attribute instead", DeprecationWarning)
831 self.commit_ids = value
829 self.commit_ids = value
832
830
833 def get_changeset(self, revision=None, pre_load=None):
831 def get_changeset(self, revision=None, pre_load=None):
834 warnings.warn("Use get_commit instead", DeprecationWarning)
832 warnings.warn("Use get_commit instead", DeprecationWarning)
835 commit_id = None
833 commit_id = None
836 commit_idx = None
834 commit_idx = None
837 if isinstance(revision, str):
835 if isinstance(revision, str):
838 commit_id = revision
836 commit_id = revision
839 else:
837 else:
840 commit_idx = revision
838 commit_idx = revision
841 return self.get_commit(
839 return self.get_commit(
842 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
840 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
843
841
844 def get_changesets(
842 def get_changesets(
845 self, start=None, end=None, start_date=None, end_date=None,
843 self, start=None, end=None, start_date=None, end_date=None,
846 branch_name=None, pre_load=None):
844 branch_name=None, pre_load=None):
847 warnings.warn("Use get_commits instead", DeprecationWarning)
845 warnings.warn("Use get_commits instead", DeprecationWarning)
848 start_id = self._revision_to_commit(start)
846 start_id = self._revision_to_commit(start)
849 end_id = self._revision_to_commit(end)
847 end_id = self._revision_to_commit(end)
850 return self.get_commits(
848 return self.get_commits(
851 start_id=start_id, end_id=end_id, start_date=start_date,
849 start_id=start_id, end_id=end_id, start_date=start_date,
852 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
850 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
853
851
854 def _revision_to_commit(self, revision):
852 def _revision_to_commit(self, revision):
855 """
853 """
856 Translates a revision to a commit_id
854 Translates a revision to a commit_id
857
855
858 Helps to support the old changeset based API which allows to use
856 Helps to support the old changeset based API which allows to use
859 commit ids and commit indices interchangeable.
857 commit ids and commit indices interchangeable.
860 """
858 """
861 if revision is None:
859 if revision is None:
862 return revision
860 return revision
863
861
864 if isinstance(revision, str):
862 if isinstance(revision, str):
865 commit_id = revision
863 commit_id = revision
866 else:
864 else:
867 commit_id = self.commit_ids[revision]
865 commit_id = self.commit_ids[revision]
868 return commit_id
866 return commit_id
869
867
870 @property
868 @property
871 def in_memory_changeset(self):
869 def in_memory_changeset(self):
872 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
870 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
873 return self.in_memory_commit
871 return self.in_memory_commit
874
872
875 def get_path_permissions(self, username):
873 def get_path_permissions(self, username):
876 """
874 """
877 Returns a path permission checker or None if not supported
875 Returns a path permission checker or None if not supported
878
876
879 :param username: session user name
877 :param username: session user name
880 :return: an instance of BasePathPermissionChecker or None
878 :return: an instance of BasePathPermissionChecker or None
881 """
879 """
882 return None
880 return None
883
881
884 def install_hooks(self, force=False):
882 def install_hooks(self, force=False):
885 return self._remote.install_hooks(force)
883 return self._remote.install_hooks(force)
886
884
887 def get_hooks_info(self):
885 def get_hooks_info(self):
888 return self._remote.get_hooks_info()
886 return self._remote.get_hooks_info()
889
887
890 def vcsserver_invalidate_cache(self, delete=False):
888 def vcsserver_invalidate_cache(self, delete=False):
891 return self._remote.vcsserver_invalidate_cache(delete)
889 return self._remote.vcsserver_invalidate_cache(delete)
892
890
893
891
894 class BaseCommit(object):
892 class BaseCommit(object):
895 """
893 """
    Each backend should implement its commit representation.
897
895
898 **Attributes**
896 **Attributes**
899
897
900 ``repository``
898 ``repository``
901 repository object within which commit exists
899 repository object within which commit exists
902
900
903 ``id``
901 ``id``
904 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
902 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
905 just ``tip``.
903 just ``tip``.
906
904
907 ``raw_id``
905 ``raw_id``
908 raw commit representation (i.e. full 40 length sha for git
906 raw commit representation (i.e. full 40 length sha for git
909 backend)
907 backend)
910
908
911 ``short_id``
909 ``short_id``
912 shortened (if apply) version of ``raw_id``; it would be simple
910 shortened (if apply) version of ``raw_id``; it would be simple
913 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
911 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
914 as ``raw_id`` for subversion
912 as ``raw_id`` for subversion
915
913
916 ``idx``
914 ``idx``
917 commit index
915 commit index
918
916
919 ``files``
917 ``files``
920 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
918 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
921
919
922 ``dirs``
920 ``dirs``
923 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
921 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
924
922
925 ``nodes``
923 ``nodes``
926 combined list of ``Node`` objects
924 combined list of ``Node`` objects
927
925
928 ``author``
926 ``author``
929 author of the commit, as unicode
927 author of the commit, as unicode
930
928
931 ``message``
929 ``message``
932 message of the commit, as unicode
930 message of the commit, as unicode
933
931
934 ``parents``
932 ``parents``
935 list of parent commits
933 list of parent commits
936
934
937 """
935 """
938 repository = None
936 repository = None
939 branch = None
937 branch = None
940
938
941 """
939 """
942 Depending on the backend this should be set to the branch name of the
940 Depending on the backend this should be set to the branch name of the
943 commit. Backends not supporting branches on commits should leave this
941 commit. Backends not supporting branches on commits should leave this
944 value as ``None``.
942 value as ``None``.
945 """
943 """
946
944
947 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
945 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
948 """
946 """
949 This template is used to generate a default prefix for repository archives
947 This template is used to generate a default prefix for repository archives
950 if no prefix has been specified.
948 if no prefix has been specified.
951 """
949 """
952
950
953 def __repr__(self):
951 def __repr__(self):
954 return self.__str__()
952 return self.__str__()
955
953
956 def __str__(self):
954 def __str__(self):
957 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
955 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
958
956
959 def __eq__(self, other):
957 def __eq__(self, other):
960 same_instance = isinstance(other, self.__class__)
958 same_instance = isinstance(other, self.__class__)
961 return same_instance and self.raw_id == other.raw_id
959 return same_instance and self.raw_id == other.raw_id
962
960
963 def __json__(self):
961 def __json__(self):
964 parents = []
962 parents = []
965 try:
963 try:
966 for parent in self.parents:
964 for parent in self.parents:
967 parents.append({'raw_id': parent.raw_id})
965 parents.append({'raw_id': parent.raw_id})
968 except NotImplementedError:
966 except NotImplementedError:
969 # empty commit doesn't have parents implemented
967 # empty commit doesn't have parents implemented
970 pass
968 pass
971
969
972 return {
970 return {
973 'short_id': self.short_id,
971 'short_id': self.short_id,
974 'raw_id': self.raw_id,
972 'raw_id': self.raw_id,
975 'revision': self.idx,
973 'revision': self.idx,
976 'message': self.message,
974 'message': self.message,
977 'date': self.date,
975 'date': self.date,
978 'author': self.author,
976 'author': self.author,
979 'parents': parents,
977 'parents': parents,
980 'branch': self.branch
978 'branch': self.branch
981 }
979 }
982
980
983 def __getstate__(self):
981 def __getstate__(self):
984 d = self.__dict__.copy()
982 d = self.__dict__.copy()
985 d.pop('_remote', None)
983 d.pop('_remote', None)
986 d.pop('repository', None)
984 d.pop('repository', None)
987 return d
985 return d
988
986
989 def get_remote(self):
987 def get_remote(self):
990 return self._remote
988 return self._remote
991
989
992 def serialize(self):
990 def serialize(self):
993 return self.__json__()
991 return self.__json__()
994
992
995 def _get_refs(self):
993 def _get_refs(self):
996 return {
994 return {
997 'branches': [self.branch] if self.branch else [],
995 'branches': [self.branch] if self.branch else [],
998 'bookmarks': getattr(self, 'bookmarks', []),
996 'bookmarks': getattr(self, 'bookmarks', []),
999 'tags': self.tags
997 'tags': self.tags
1000 }
998 }
1001
999
1002 @LazyProperty
1000 @LazyProperty
1003 def last(self):
1001 def last(self):
1004 """
1002 """
1005 ``True`` if this is last commit in repository, ``False``
1003 ``True`` if this is last commit in repository, ``False``
1006 otherwise; trying to access this attribute while there is no
1004 otherwise; trying to access this attribute while there is no
1007 commits would raise `EmptyRepositoryError`
1005 commits would raise `EmptyRepositoryError`
1008 """
1006 """
1009 if self.repository is None:
1007 if self.repository is None:
1010 raise CommitError("Cannot check if it's most recent commit")
1008 raise CommitError("Cannot check if it's most recent commit")
1011 return self.raw_id == self.repository.commit_ids[-1]
1009 return self.raw_id == self.repository.commit_ids[-1]
1012
1010
1013 @LazyProperty
1011 @LazyProperty
1014 def parents(self):
1012 def parents(self):
1015 """
1013 """
1016 Returns list of parent commits.
1014 Returns list of parent commits.
1017 """
1015 """
1018 raise NotImplementedError
1016 raise NotImplementedError
1019
1017
1020 @LazyProperty
1018 @LazyProperty
1021 def first_parent(self):
1019 def first_parent(self):
1022 """
1020 """
1023 Returns list of parent commits.
1021 Returns list of parent commits.
1024 """
1022 """
1025 return self.parents[0] if self.parents else EmptyCommit()
1023 return self.parents[0] if self.parents else EmptyCommit()
1026
1024
1027 @property
1025 @property
1028 def merge(self):
1026 def merge(self):
1029 """
1027 """
1030 Returns boolean if commit is a merge.
1028 Returns boolean if commit is a merge.
1031 """
1029 """
1032 return len(self.parents) > 1
1030 return len(self.parents) > 1
1033
1031
1034 @LazyProperty
1032 @LazyProperty
1035 def children(self):
1033 def children(self):
1036 """
1034 """
1037 Returns list of child commits.
1035 Returns list of child commits.
1038 """
1036 """
1039 raise NotImplementedError
1037 raise NotImplementedError
1040
1038
1041 @LazyProperty
1039 @LazyProperty
1042 def id(self):
1040 def id(self):
1043 """
1041 """
1044 Returns string identifying this commit.
1042 Returns string identifying this commit.
1045 """
1043 """
1046 raise NotImplementedError
1044 raise NotImplementedError
1047
1045
1048 @LazyProperty
1046 @LazyProperty
1049 def raw_id(self):
1047 def raw_id(self):
1050 """
1048 """
1051 Returns raw string identifying this commit.
1049 Returns raw string identifying this commit.
1052 """
1050 """
1053 raise NotImplementedError
1051 raise NotImplementedError
1054
1052
1055 @LazyProperty
1053 @LazyProperty
1056 def short_id(self):
1054 def short_id(self):
1057 """
1055 """
1058 Returns shortened version of ``raw_id`` attribute, as string,
1056 Returns shortened version of ``raw_id`` attribute, as string,
1059 identifying this commit, useful for presentation to users.
1057 identifying this commit, useful for presentation to users.
1060 """
1058 """
1061 raise NotImplementedError
1059 raise NotImplementedError
1062
1060
1063 @LazyProperty
1061 @LazyProperty
1064 def idx(self):
1062 def idx(self):
1065 """
1063 """
1066 Returns integer identifying this commit.
1064 Returns integer identifying this commit.
1067 """
1065 """
1068 raise NotImplementedError
1066 raise NotImplementedError
1069
1067
1070 @LazyProperty
1068 @LazyProperty
1071 def committer(self):
1069 def committer(self):
1072 """
1070 """
1073 Returns committer for this commit
1071 Returns committer for this commit
1074 """
1072 """
1075 raise NotImplementedError
1073 raise NotImplementedError
1076
1074
1077 @LazyProperty
1075 @LazyProperty
1078 def committer_name(self):
1076 def committer_name(self):
1079 """
1077 """
1080 Returns committer name for this commit
1078 Returns committer name for this commit
1081 """
1079 """
1082
1080
1083 return author_name(self.committer)
1081 return author_name(self.committer)
1084
1082
1085 @LazyProperty
1083 @LazyProperty
1086 def committer_email(self):
1084 def committer_email(self):
1087 """
1085 """
1088 Returns committer email address for this commit
1086 Returns committer email address for this commit
1089 """
1087 """
1090
1088
1091 return author_email(self.committer)
1089 return author_email(self.committer)
1092
1090
1093 @LazyProperty
1091 @LazyProperty
1094 def author(self):
1092 def author(self):
1095 """
1093 """
1096 Returns author for this commit
1094 Returns author for this commit
1097 """
1095 """
1098
1096
1099 raise NotImplementedError
1097 raise NotImplementedError
1100
1098
1101 @LazyProperty
1099 @LazyProperty
1102 def author_name(self):
1100 def author_name(self):
1103 """
1101 """
1104 Returns author name for this commit
1102 Returns author name for this commit
1105 """
1103 """
1106
1104
1107 return author_name(self.author)
1105 return author_name(self.author)
1108
1106
1109 @LazyProperty
1107 @LazyProperty
1110 def author_email(self):
1108 def author_email(self):
1111 """
1109 """
1112 Returns author email address for this commit
1110 Returns author email address for this commit
1113 """
1111 """
1114
1112
1115 return author_email(self.author)
1113 return author_email(self.author)
1116
1114
1117 def get_file_mode(self, path: bytes):
1115 def get_file_mode(self, path: bytes):
1118 """
1116 """
1119 Returns stat mode of the file at `path`.
1117 Returns stat mode of the file at `path`.
1120 """
1118 """
1121 raise NotImplementedError
1119 raise NotImplementedError
1122
1120
1123 def is_link(self, path):
1121 def is_link(self, path):
1124 """
1122 """
1125 Returns ``True`` if given `path` is a symlink
1123 Returns ``True`` if given `path` is a symlink
1126 """
1124 """
1127 raise NotImplementedError
1125 raise NotImplementedError
1128
1126
1129 def is_node_binary(self, path):
1127 def is_node_binary(self, path):
1130 """
1128 """
1131 Returns ``True`` is given path is a binary file
1129 Returns ``True`` is given path is a binary file
1132 """
1130 """
1133 raise NotImplementedError
1131 raise NotImplementedError
1134
1132
1135 def node_md5_hash(self, path):
1133 def node_md5_hash(self, path):
1136 """
1134 """
1137 Returns md5 hash of a node data
1135 Returns md5 hash of a node data
1138 """
1136 """
1139 raise NotImplementedError
1137 raise NotImplementedError
1140
1138
1141 def get_file_content(self, path) -> bytes:
1139 def get_file_content(self, path) -> bytes:
1142 """
1140 """
1143 Returns content of the file at the given `path`.
1141 Returns content of the file at the given `path`.
1144 """
1142 """
1145 raise NotImplementedError
1143 raise NotImplementedError
1146
1144
1147 def get_file_content_streamed(self, path):
1145 def get_file_content_streamed(self, path):
1148 """
1146 """
1149 returns a streaming response from vcsserver with file content
1147 returns a streaming response from vcsserver with file content
1150 """
1148 """
1151 raise NotImplementedError
1149 raise NotImplementedError
1152
1150
1153 def get_file_size(self, path):
1151 def get_file_size(self, path):
1154 """
1152 """
1155 Returns size of the file at the given `path`.
1153 Returns size of the file at the given `path`.
1156 """
1154 """
1157 raise NotImplementedError
1155 raise NotImplementedError
1158
1156
1159 def get_path_commit(self, path, pre_load=None):
1157 def get_path_commit(self, path, pre_load=None):
1160 """
1158 """
1161 Returns last commit of the file at the given `path`.
1159 Returns last commit of the file at the given `path`.
1162
1160
1163 :param pre_load: Optional. List of commit attributes to load.
1161 :param pre_load: Optional. List of commit attributes to load.
1164 """
1162 """
1165 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1163 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1166 if not commits:
1164 if not commits:
1167 raise RepositoryError(
1165 raise RepositoryError(
1168 'Failed to fetch history for path {}. '
1166 'Failed to fetch history for path {}. '
1169 'Please check if such path exists in your repository'.format(
1167 'Please check if such path exists in your repository'.format(
1170 path))
1168 path))
1171 return commits[0]
1169 return commits[0]
1172
1170
def get_path_history(self, path, limit=None, pre_load=None):
    """Return the history of the file at ``path`` as a reversed list of
    :class:`BaseCommit` objects (commits that modified the file).

    :param limit: Optional. Allows to limit the size of the returned
        history. This is intended as a hint to the underlying backend, so
        that it can apply optimizations depending on the limit.
    :param pre_load: Optional. List of commit attributes to load.
    """
    raise NotImplementedError
1184
1182
def get_file_annotate(self, path, pre_load=None):
    """Yield four-element tuples ``(lineno, sha, commit_lazy_loader, line)``
    annotating each line of the file at ``path``.

    :param pre_load: Optional. List of commit attributes to load.
    """
    raise NotImplementedError
1193
1191
def get_nodes(self, path, pre_load=None):
    """Return a combined list of ``DirNode`` and ``FileNode`` objects
    describing the commit's state at ``path``.

    :param pre_load: Optional. List of commit attributes to load.
    :raises ``CommitError``: if node at the given ``path`` is not
        instance of ``DirNode``
    """
    raise NotImplementedError
1203
1201
def get_node(self, path):
    """Return the ``Node`` located at ``path``.

    :raises ``NodeDoesNotExistError``: if there is no node at the given
        ``path``
    """
    raise NotImplementedError
1212
1210
def get_largefile_node(self, path):
    """Return the path to a largefile stored in Mercurial-largefiles or
    Git-LFS storage, or ``None`` when ``path`` is not a largefile node.

    The base implementation knows nothing about largefiles and always
    answers ``None``; backends with largefile support override this.
    """
    return None
1219
1217
def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
                 archive_dir_name=None, write_metadata=False, mtime=None,
                 archive_at_path='/', cache_config=None):
    """
    Creates an archive containing the contents of the repository.

    :param archive_name_key: unique key under this archive should be generated
    :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
    :param archive_dir_name: name of root directory in archive.
        Default is repository name and commit's short_id joined with dash:
        ``"{repo_name}-{short_id}"``.
    :param write_metadata: write a metadata file into archive.
    :param mtime: custom modification time for archive creation, defaults
        to the commit date if not given.
    :param archive_at_path: pack files at this path (default '/')
    :param cache_config: config spec to send to vcsserver to configure the backend to store files

    :raise VCSError: If prefix has a problem.
    :raise ImproperArchiveTypeError: if ``kind`` is not a supported format.
    """
    cache_config = cache_config or {}
    allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
    if kind not in allowed_kinds:
        raise ImproperArchiveTypeError(
            'Archive kind (%s) not supported use one of %s' %
            (kind, allowed_kinds))

    archive_dir_name = self._validate_archive_prefix(archive_dir_name)
    # BUG FIX: the previous code did ``mtime = mtime is not None or ...``,
    # which replaced any caller-supplied mtime with the boolean True.
    # Keep the caller's value and only fall back to the commit date.
    if mtime is None:
        mtime = time.mktime(self.date.timetuple())
    commit_id = self.raw_id

    return self.repository._remote.archive_repo(
        archive_name_key, kind, mtime, archive_at_path,
        archive_dir_name, commit_id, cache_config)
1253
1251
def _validate_archive_prefix(self, archive_dir_name):
    """Normalize and validate the archive root-directory name.

    ``None`` is replaced with the rendered ``_ARCHIVE_PREFIX_TEMPLATE``;
    any other value must be a non-empty, ascii-only, relative ``str``.

    :raises ValueError: when a non-string object is supplied.
    :raises VCSError: when the prefix is absolute, empty or non-ascii.
    """
    if archive_dir_name is None:
        return self._ARCHIVE_PREFIX_TEMPLATE.format(
            repo_name=safe_str(self.repository.name),
            short_id=self.short_id)
    if not isinstance(archive_dir_name, str):
        raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
    if archive_dir_name.startswith('/'):
        raise VCSError("Prefix cannot start with leading slash")
    if archive_dir_name.strip() == '':
        raise VCSError("Prefix cannot be empty")
    if not archive_dir_name.isascii():
        raise VCSError("Prefix cannot contain non ascii characters")
    return archive_dir_name
1268
1266
@LazyProperty
def root(self):
    """``RootNode`` of this commit — the node at the empty path."""
    return self.get_node('')
1275
1273
def next(self, branch=None):
    """Return the commit following this one, optionally restricted to a
    named branch.

    :param branch: show commits within the given named branch
    """
    upcoming = range(self.idx + 1, self.repository.count())
    return self._find_next(upcoming, branch)
1285
1283
def prev(self, branch=None):
    """Return the commit preceding this one, optionally restricted to a
    named branch.

    :param branch: show commit within the given named branch
    """
    preceding = range(self.idx - 1, -1, -1)
    return self._find_next(preceding, branch)
1295
1293
1296 def _find_next(self, indexes, branch=None):
1294 def _find_next(self, indexes, branch=None):
1297 if branch and self.branch != branch:
1295 if branch and self.branch != branch:
1298 raise VCSError('Branch option used on commit not belonging '
1296 raise VCSError('Branch option used on commit not belonging '
1299 'to that branch')
1297 'to that branch')
1300
1298
1301 for next_idx in indexes:
1299 for next_idx in indexes:
1302 commit = self.repository.get_commit(commit_idx=next_idx)
1300 commit = self.repository.get_commit(commit_idx=next_idx)
1303 if branch and branch != commit.branch:
1301 if branch and branch != commit.branch:
1304 continue
1302 continue
1305 return commit
1303 return commit
1306 raise CommitDoesNotExistError
1304 raise CommitDoesNotExistError
1307
1305
def diff(self, ignore_whitespace=True, context=3):
    """Return a `Diff` object describing the change this commit makes
    relative to its first parent.
    """
    return self.repository.get_diff(
        self.first_parent, self,
        ignore_whitespace=ignore_whitespace,
        context=context)
1318
1316
@LazyProperty
def added(self):
    """List of ``FileNode`` objects added by this commit. Abstract."""
    raise NotImplementedError
1325
1323
@LazyProperty
def changed(self):
    """List of ``FileNode`` objects modified by this commit. Abstract."""
    raise NotImplementedError
1332
1330
@LazyProperty
def removed(self):
    """List of ``FileNode`` objects removed by this commit. Abstract."""
    raise NotImplementedError
1339
1337
@LazyProperty
def size(self):
    """Total size in bytes of the contents of every file node."""
    return sum(node.size for node in self.get_filenodes_generator())
1346
1344
def walk(self, topurl=''):
    """Walk the commit tree like ``os.walk`` walks a filesystem, starting
    at ``topurl`` (a path string or an already-resolved ``DirNode``).

    Yields ``(top_node, dirnodes, filenodes)`` tuples; stops immediately
    when the start node is not a directory.
    """
    from rhodecode.lib.vcs.nodes import DirNode

    start = topurl if isinstance(topurl, DirNode) else self.get_node(topurl)

    # Propagate the start node's pre-load defaults into every child dir
    # so the whole walk shares the same loading behaviour.
    propagate_pre_load = isinstance(start, DirNode)
    inherited_pre_load = start.default_pre_load if propagate_pre_load else None

    if not start.is_dir():
        return
    yield start, start.dirs, start.files
    for child_dir in start.dirs:
        if propagate_pre_load:
            child_dir.default_pre_load = inherited_pre_load
        yield from self.walk(child_dir)
1373
1371
def get_filenodes_generator(self):
    """Yield every ``FileNode`` reachable from the commit root."""
    for _top, _dirs, file_nodes in self.walk():
        yield from file_nodes
1380
1378
1381 #
1379 #
1382 # Utilities for sub classes to support consistent behavior
1380 # Utilities for sub classes to support consistent behavior
1383 #
1381 #
1384
1382
def no_node_at_path(self, path):
    """Build (not raise) a ``NodeDoesNotExistError`` describing a missing
    node at ``path`` for this commit.
    """
    return NodeDoesNotExistError(
        f"There is no file nor directory at the given path: "
        f"`{safe_str(path)}` at commit {self.short_id}")
1389
1387
def _fix_path(self, path: str) -> str:
    """Strip any trailing slash — paths are stored without one."""
    return safe_str(path).rstrip('/')
1396
1394
1397 #
1395 #
1398 # Deprecated API based on changesets
1396 # Deprecated API based on changesets
1399 #
1397 #
1400
1398
@property
def revision(self):
    """Deprecated accessor for :attr:`idx` (changeset-era API)."""
    warnings.warn("Use idx instead", DeprecationWarning)
    return self.idx
1405
1403
@revision.setter
def revision(self, value):
    """Deprecated setter for :attr:`idx` (changeset-era API)."""
    warnings.warn("Use idx instead", DeprecationWarning)
    self.idx = value
1410
1408
def get_file_changeset(self, path):
    """Deprecated alias of :meth:`get_path_commit`."""
    warnings.warn("Use get_path_commit instead", DeprecationWarning)
    return self.get_path_commit(path)
1414
1412
1415
1413
class BaseChangesetClass(type):
    """Metaclass that makes ``isinstance(obj, BaseChangeset)`` accept any
    :class:`BaseCommit`, easing the changeset→commit API migration."""

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1420
1418
1421
1419
class BaseChangeset(BaseCommit, metaclass=BaseChangesetClass):
    """Deprecated alias of :class:`BaseCommit`; warns on instantiation."""

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super().__new__(cls, *args, **kwargs)
1428
1426
1429
1427
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        # staging areas populated by add()/change()/remove()
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
            latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
            marked as *added*
        """
        # Validate the whole batch first so a failure leaves nothing staged.
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        # Changing files requires at least one existing commit.
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
            be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
            be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added: they must not exist in any parent.
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        f"Node `{node.path}` already exists at {p}")

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # BUG FIX: previously this reported the stale loop variable
            # ``node`` (the last node iterated), which may not be one of the
            # missing nodes; report an actually-missing node instead.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (missing.pop().path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed: each must exist in some parent.
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1656
1654
1657
1655
class BaseInMemoryChangesetClass(type):
    """Metaclass that makes ``isinstance(obj, BaseInMemoryChangeset)``
    accept any :class:`BaseInMemoryCommit`."""

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1662
1660
1663
1661
class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass):
    """Deprecated alias of :class:`BaseInMemoryCommit`; warns on creation."""

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super().__new__(cls, *args, **kwargs)
1670
1668
1671
1669
class EmptyCommit(BaseCommit):
    """
    A dummy, empty commit. It is possible to pass a commit hash when
    creating an EmptyCommit.
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        # Hash reported by ``raw_id``; defaults to the EMPTY_COMMIT_ID sentinel.
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # Fall back to the UNIX epoch so ``date`` is always a datetime.
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # Resolve the backend's default branch name from the alias;
        # implicitly returns None when no alias was given.
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        # Abbreviated commit id: first 12 characters of the raw id.
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path, pre_load=None):
        # An empty commit is its own "last commit" for every path.
        return self

    def get_file_content(self, path) -> bytes:
        # No files exist in an empty commit.
        return b''

    def get_file_content_streamed(self, path):
        # Streamed variant of ``get_file_content``; yields one empty chunk.
        yield self.get_file_content(path)

    def get_file_size(self, path):
        return 0
1725
1723
1726
1724
class EmptyChangesetClass(type):
    # Metaclass that makes ``isinstance(x, EmptyChangeset)`` succeed for any
    # ``EmptyCommit`` instance, supporting the deprecated alias below.

    def __instancecheck__(self, instance):
        # Delegate the instance check to the modern class.
        return isinstance(instance, EmptyCommit)
1731
1729
1732
1730
class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass):
    # Deprecated alias of ``EmptyCommit`` kept for backward compatibility.

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # NOTE(review): ``super(EmptyCommit, cls)`` deliberately skips
        # EmptyCommit in the MRO — confirm this is intended before changing.
        return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        # ``requested_revision`` is accepted only to warn legacy callers.
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super().__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        # Deprecated accessor; ``idx`` is the modern attribute name.
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1759
1757
1760
1758
class EmptyRepository(BaseRepository):
    # Null-object repository: satisfies the BaseRepository interface
    # without touching any on-disk state.

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        # Intentionally skip BaseRepository initialization: there is no
        # backing repository to open or create.
        pass

    def get_diff(self, *args, **kwargs):
        """Return an empty diff regardless of the requested arguments."""
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff(b'')
1768
1766
1769
1767
class CollectionGenerator:
    """
    Lazy collection of commits for a repository.

    Wraps a sequence of commit ids and materializes commit objects on
    demand via ``repo.get_commit``. Fixes py2-era style: drops the
    ``object`` base, uses ``len()`` over ``__len__()`` and an f-string
    in ``__repr__`` — consistent with the file's python3 modernization.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # Optional explicit size; lets callers report a total count that
        # differs from ``len(commit_ids)`` (e.g. for paged results).
        self.collection_size = collection_size
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getitem__(self, key):
        """Return either a single element by index, or a sliced collection."""

        if isinstance(key, slice):
            # NOTE: a slice step, if given, is intentionally ignored
            # (matches historical behavior).
            commit_ids = self.commit_ids[key.start:key.stop]
        else:
            # single item
            commit_ids = self.commit_ids[key]

        return self.__class__(
            self.repo, commit_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return f'<CollectionGenerator[len:{len(self)}]>'
1813
1811
1814
1812
class Config:
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # Mapping of section name -> {option: value}.
        self._values = {}

    def copy(self):
        """Return a copy; section dicts are copied, values are shared."""
        clone = Config()
        for section, values in self._values.items():
            clone._values[section] = values.copy()
        return clone

    def __repr__(self):
        # Modernized: f-string instead of str.format, matching file style.
        return f'<Config({len(self._values)} sections) at {hex(id(self))}>'

    def items(self, section):
        """Return the (option, value) pairs of *section*; empty if missing."""
        return self._values.get(section, {}).items()

    def get(self, section, option):
        """Return the value of *option* in *section*, or None if unset."""
        return self._values.get(section, {}).get(option)

    def set(self, section, option, value):
        """Set *option* to *value*, creating *section* when needed."""
        section_values = self._values.setdefault(section, {})
        section_values[option] = value

    def clear_section(self, section):
        """Drop all options of *section* (the section key itself remains)."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        items = []
        for section in self._values:
            for option, value in self._values[section].items():
                items.append(
                    (safe_str(section), safe_str(option), safe_str(value)))
        return items
1860
1858
1861
1859
class Diff:
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    # Fixed annotation: the attribute holds a compiled bytes pattern,
    # not raw bytes (the old ``bytes`` annotation was incorrect).
    _header_re: re.Pattern = re.compile(br"")

    def __init__(self, raw_diff: bytes):
        if not isinstance(raw_diff, bytes):
            raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}')

        # memoryview avoids copying potentially large diff payloads.
        self.raw = memoryview(raw_diff)

    def get_header_re(self):
        """Return the compiled per-chunk header regex for this backend."""
        return self._header_re

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """

        diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git')

        # First part is anything before the first "diff --git" marker.
        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        def diff_iter(_chunks):
            for cur_chunk, chunk in enumerate(_chunks, start=1):
                yield DiffChunk(chunk, self, cur_chunk == total_chunks)
        return diff_iter(chunks)
1897
1895
1898
1896
class DiffChunk(object):
    # One per-file chunk of a Diff, i.e. the text following a single
    # ``diff --git a/... b/...`` marker line.

    def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool):
        self.diff_obj = diff_obj

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not is_last_chunk:
            chunk += b'\n'
        header_re = self.diff_obj.get_header_re()
        # NOTE(review): ``match`` can be None for a malformed chunk, which
        # would raise AttributeError below — presumably backend header
        # regexes always match; confirm before relying on it.
        match = header_re.match(chunk)
        self.header = match.groupdict()
        # Body of the chunk: everything after the matched header.
        self.diff = chunk[match.end():]
        self.raw = chunk

    @property
    def header_as_str(self):
        # Header dict with keys and bytes values decoded to str for display.
        if self.header:
            def safe_str_on_bytes(val):
                if isinstance(val, bytes):
                    return safe_str(val)
                return val
            return {safe_str(k): safe_str_on_bytes(v) for k, v in self.header.items()}

    def __repr__(self):
        return f'DiffChunk({self.header_as_str})'
1925
1923
1926
1924
class BasePathPermissionChecker:
    """
    Interface for checking access to repository paths.

    Modernized: dropped the redundant py2-style ``object`` base class,
    consistent with the file's python3 updates.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Build the cheapest checker for the given glob patterns: full
        access, no access, or pattern-based matching.
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # True when every path is accessible (lets callers skip per-path checks).
        raise NotImplementedError()

    def has_access(self, path):
        # True when *path* is accessible.
        raise NotImplementedError()
1944
1942
1945
1943
class AllPathPermissionChecker(BasePathPermissionChecker):
    # Checker that grants access to every path.

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        return True
1954
1952
1955
1953
class NonePathPermissionChecker(BasePathPermissionChecker):
    # Checker that denies access to every path.

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        return False
1964
1962
1965
1963
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """
    Glob-pattern based path access checker.

    Exclusion patterns take precedence over inclusion patterns; a path
    is accessible only when it matches an include and no exclude.
    """

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # Pre-compile each glob into a regex once, at construction time.
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # Excludes win: any matching exclude denies access outright.
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        # Otherwise the path must match at least one include.
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,1053 +1,1053 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 GIT repository module
20 GIT repository module
21 """
21 """
22
22
23 import logging
23 import logging
24 import os
24 import os
25 import re
25 import re
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from collections import OrderedDict
29 from collections import OrderedDict
30 from rhodecode.lib.datelib import (
30 from rhodecode.lib.datelib import (
31 utcdate_fromtimestamp, makedate, date_astimestamp)
31 utcdate_fromtimestamp, makedate, date_astimestamp)
32 from rhodecode.lib.hash_utils import safe_str
32 from rhodecode.lib.hash_utils import safe_str
33 from rhodecode.lib.utils2 import CachedProperty
33 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.vcs import connection, path as vcspath
34 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs.backends.base import (
35 from rhodecode.lib.vcs.backends.base import (
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 MergeFailureReason, Reference)
37 MergeFailureReason, Reference)
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitDoesNotExistError, EmptyRepositoryError,
42 CommitDoesNotExistError, EmptyRepositoryError,
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
44
44
45
45
# Matches an abbreviated (12 hex chars) or full (40 hex chars) Git SHA-1.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
49
49
50
50
51 class GitRepository(BaseRepository):
51 class GitRepository(BaseRepository):
52 """
52 """
53 Git repository backend.
53 Git repository backend.
54 """
54 """
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62
62
63 self.path = safe_str(os.path.abspath(repo_path))
63 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
64 self.config = config if config else self.get_default_config()
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
66
66
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68
68
69 # caches
69 # caches
70 self._commit_ids = {}
70 self._commit_ids = {}
71
71
72 @LazyProperty
72 @LazyProperty
73 def _remote(self):
73 def _remote(self):
74 repo_id = self.path
74 repo_id = self.path
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76
76
77 @LazyProperty
77 @LazyProperty
78 def bare(self):
78 def bare(self):
79 return self._remote.bare()
79 return self._remote.bare()
80
80
81 @LazyProperty
81 @LazyProperty
82 def head(self):
82 def head(self):
83 return self._remote.head()
83 return self._remote.head()
84
84
85 @CachedProperty
85 @CachedProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns list of commit ids, in ascending order. Being lazy
88 Returns list of commit ids, in ascending order. Being lazy
89 attribute allows external tools to inject commit ids from cache.
89 attribute allows external tools to inject commit ids from cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = {commit_id: index
96 self._commit_ids = {commit_id: index
97 for index, commit_id in enumerate(commit_ids)}
97 for index, commit_id in enumerate(commit_ids)}
98
98
99 def run_git_command(self, cmd, **opts):
99 def run_git_command(self, cmd, **opts):
100 """
100 """
101 Runs given ``cmd`` as git command and returns tuple
101 Runs given ``cmd`` as git command and returns tuple
102 (stdout, stderr).
102 (stdout, stderr).
103
103
104 :param cmd: git command to be executed
104 :param cmd: git command to be executed
105 :param opts: env options to pass into Subprocess command
105 :param opts: env options to pass into Subprocess command
106 """
106 """
107 if not isinstance(cmd, list):
107 if not isinstance(cmd, list):
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
109
109
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 out, err = self._remote.run_git_command(cmd, **opts)
111 out, err = self._remote.run_git_command(cmd, **opts)
112 if err and not skip_stderr_log:
112 if err and not skip_stderr_log:
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 return out, err
114 return out, err
115
115
116 @staticmethod
116 @staticmethod
117 def check_url(url, config):
117 def check_url(url, config):
118 """
118 """
119 Function will check given url and try to verify if it's a valid
119 Function will check given url and try to verify if it's a valid
120 link. Sometimes it may happened that git will issue basic
120 link. Sometimes it may happened that git will issue basic
121 auth request that can cause whole API to hang when used from python
121 auth request that can cause whole API to hang when used from python
122 or other external calls.
122 or other external calls.
123
123
124 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 On failures it'll raise urllib2.HTTPError, exception is also thrown
125 when the return code is non 200
125 when the return code is non 200
126 """
126 """
127 # check first if it's not an url
127 # check first if it's not an url
128 if os.path.isdir(url) or url.startswith('file:'):
128 if os.path.isdir(url) or url.startswith('file:'):
129 return True
129 return True
130
130
131 if '+' in url.split('://', 1)[0]:
131 if '+' in url.split('://', 1)[0]:
132 url = url.split('+', 1)[1]
132 url = url.split('+', 1)[1]
133
133
134 # Request the _remote to verify the url
134 # Request the _remote to verify the url
135 return connection.Git.check_url(url, config.serialize())
135 return connection.Git.check_url(url, config.serialize())
136
136
137 @staticmethod
137 @staticmethod
138 def is_valid_repository(path):
138 def is_valid_repository(path):
139 if os.path.isdir(os.path.join(path, '.git')):
139 if os.path.isdir(os.path.join(path, '.git')):
140 return True
140 return True
141 # check case of bare repository
141 # check case of bare repository
142 try:
142 try:
143 GitRepository(path)
143 GitRepository(path)
144 return True
144 return True
145 except VCSError:
145 except VCSError:
146 pass
146 pass
147 return False
147 return False
148
148
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 bare=False):
150 bare=False):
151 if create and os.path.exists(self.path):
151 if create and os.path.exists(self.path):
152 raise RepositoryError(
152 raise RepositoryError(
153 "Cannot create repository at %s, location already exist"
153 "Cannot create repository at %s, location already exist"
154 % self.path)
154 % self.path)
155
155
156 if bare and do_workspace_checkout:
156 if bare and do_workspace_checkout:
157 raise RepositoryError("Cannot update a bare repository")
157 raise RepositoryError("Cannot update a bare repository")
158 try:
158 try:
159
159
160 if src_url:
160 if src_url:
161 # check URL before any actions
161 # check URL before any actions
162 GitRepository.check_url(src_url, self.config)
162 GitRepository.check_url(src_url, self.config)
163
163
164 if create:
164 if create:
165 os.makedirs(self.path, mode=0o755)
165 os.makedirs(self.path, mode=0o755)
166
166
167 if bare:
167 if bare:
168 self._remote.init_bare()
168 self._remote.init_bare()
169 else:
169 else:
170 self._remote.init()
170 self._remote.init()
171
171
172 if src_url and bare:
172 if src_url and bare:
173 # bare repository only allows a fetch and checkout is not allowed
173 # bare repository only allows a fetch and checkout is not allowed
174 self.fetch(src_url, commit_ids=None)
174 self.fetch(src_url, commit_ids=None)
175 elif src_url:
175 elif src_url:
176 self.pull(src_url, commit_ids=None,
176 self.pull(src_url, commit_ids=None,
177 update_after=do_workspace_checkout)
177 update_after=do_workspace_checkout)
178
178
179 else:
179 else:
180 if not self._remote.assert_correct_path():
180 if not self._remote.assert_correct_path():
181 raise RepositoryError(
181 raise RepositoryError(
182 'Path "%s" does not contain a Git repository' %
182 'Path "%s" does not contain a Git repository' %
183 (self.path,))
183 (self.path,))
184
184
185 # TODO: johbo: check if we have to translate the OSError here
185 # TODO: johbo: check if we have to translate the OSError here
186 except OSError as err:
186 except OSError as err:
187 raise RepositoryError(err)
187 raise RepositoryError(err)
188
188
189 def _get_all_commit_ids(self):
189 def _get_all_commit_ids(self):
190 return self._remote.get_all_commit_ids()
190 return self._remote.get_all_commit_ids()
191
191
192 def _get_commit_ids(self, filters=None):
192 def _get_commit_ids(self, filters=None):
193 # we must check if this repo is not empty, since later command
193 # we must check if this repo is not empty, since later command
194 # fails if it is. And it's cheaper to ask than throw the subprocess
194 # fails if it is. And it's cheaper to ask than throw the subprocess
195 # errors
195 # errors
196
196
197 head = self._remote.head(show_exc=False)
197 head = self._remote.head(show_exc=False)
198
198
199 if not head:
199 if not head:
200 return []
200 return []
201
201
202 rev_filter = ['--branches', '--tags']
202 rev_filter = ['--branches', '--tags']
203 extra_filter = []
203 extra_filter = []
204
204
205 if filters:
205 if filters:
206 if filters.get('since'):
206 if filters.get('since'):
207 extra_filter.append('--since=%s' % (filters['since']))
207 extra_filter.append('--since=%s' % (filters['since']))
208 if filters.get('until'):
208 if filters.get('until'):
209 extra_filter.append('--until=%s' % (filters['until']))
209 extra_filter.append('--until=%s' % (filters['until']))
210 if filters.get('branch_name'):
210 if filters.get('branch_name'):
211 rev_filter = []
211 rev_filter = []
212 extra_filter.append(filters['branch_name'])
212 extra_filter.append(filters['branch_name'])
213 rev_filter.extend(extra_filter)
213 rev_filter.extend(extra_filter)
214
214
215 # if filters.get('start') or filters.get('end'):
215 # if filters.get('start') or filters.get('end'):
216 # # skip is offset, max-count is limit
216 # # skip is offset, max-count is limit
217 # if filters.get('start'):
217 # if filters.get('start'):
218 # extra_filter += ' --skip=%s' % filters['start']
218 # extra_filter += ' --skip=%s' % filters['start']
219 # if filters.get('end'):
219 # if filters.get('end'):
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221
221
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 try:
223 try:
224 output, __ = self.run_git_command(cmd)
224 output, __ = self.run_git_command(cmd)
225 except RepositoryError:
225 except RepositoryError:
226 # Can be raised for empty repositories
226 # Can be raised for empty repositories
227 return []
227 return []
228 return output.splitlines()
228 return output.splitlines()
229
229
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
        """
        Resolve a commit reference (sha, symbolic name, numeric index, or
        null-sha) to a full commit id string.

        Resolution order: symbolic head names -> remote object lookup (for
        strings) -> numeric index into ``commit_ids``.

        :param commit_id_or_idx: sha/branch/tag string, int index, or one of
            the symbolic head aliases (None, '', 'tip', 'HEAD', 'head', -1)
        :param translate_tag: NOTE(review): not used in this body — presumably
            handled by callers such as get_commit; confirm before removing
        :param maybe_unreachable: forwarded to the remote object lookup
        :param reference_obj: optional reference; when it is a branch, the
            numeric-index fallback is skipped
        :raises CommitDoesNotExistError: when no resolution succeeds
        """

        def is_null(value):
            # true when the value consists solely of '0' characters
            # (the git "null sha" placeholder)
            return len(value) == commit_id_or_idx.count('0')

        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            # any head alias resolves to the most recent commit
            return self.commit_ids[-1]

        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

        is_bstr = isinstance(commit_id_or_idx, str)
        is_branch = reference_obj and reference_obj.branch

        lookup_ok = False
        if is_bstr:
            # Need to call remote to translate id for tagging scenarios,
            # or branch that are numeric
            try:
                remote_data = self._remote.get_object(commit_id_or_idx,
                                                      maybe_unreachable=maybe_unreachable)
                commit_id_or_idx = remote_data["commit_id"]
                lookup_ok = True
            except (CommitDoesNotExistError,):
                lookup_ok = False

        if lookup_ok is False:
            # fall back to treating the value as a numeric index; short
            # all-digit strings (< 12 chars) are indexes, not abbreviated shas
            is_numeric_idx = \
                (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
                or isinstance(commit_id_or_idx, int)
            if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
                try:
                    commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
                    lookup_ok = True
                except Exception:
                    raise CommitDoesNotExistError(commit_missing_err)

        # we failed regular lookup, and by integer number lookup
        if lookup_ok is False:
            raise CommitDoesNotExistError(commit_missing_err)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
276
276
277 def get_hook_location(self):
277 def get_hook_location(self):
278 """
278 """
279 returns absolute path to location where hooks are stored
279 returns absolute path to location where hooks are stored
280 """
280 """
281 loc = os.path.join(self.path, 'hooks')
281 loc = os.path.join(self.path, 'hooks')
282 if not self.bare:
282 if not self.bare:
283 loc = os.path.join(self.path, '.git', 'hooks')
283 loc = os.path.join(self.path, '.git', 'hooks')
284 return loc
284 return loc
285
285
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # repository is empty (or head unresolvable): fall back to the
            # modification time of the control files on disk
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
297
297
298 def _get_fs_mtime(self):
298 def _get_fs_mtime(self):
299 idx_loc = '' if self.bare else '.git'
299 idx_loc = '' if self.bare else '.git'
300 # fallback to filesystem
300 # fallback to filesystem
301 in_path = os.path.join(self.path, idx_loc, "index")
301 in_path = os.path.join(self.path, idx_loc, "index")
302 he_path = os.path.join(self.path, idx_loc, "HEAD")
302 he_path = os.path.join(self.path, idx_loc, "HEAD")
303 if os.path.exists(in_path):
303 if os.path.exists(in_path):
304 return os.stat(in_path).st_mtime
304 return os.stat(in_path).st_mtime
305 else:
305 else:
306 return os.stat(he_path).st_mtime
306 return os.stat(he_path).st_mtime
307
307
    @LazyProperty
    def description(self):
        # repository description as stored by git, falling back to the
        # class-level default when unset
        description = self._remote.get_description()
        return safe_str(description or self.DEFAULT_DESCRIPTION)
312
312
313 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
313 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
314 if self.is_empty():
314 if self.is_empty():
315 return OrderedDict()
315 return OrderedDict()
316
316
317 result = []
317 result = []
318 for ref, sha in self._refs.items():
318 for ref, sha in self._refs.items():
319 if ref.startswith(prefix):
319 if ref.startswith(prefix):
320 ref_name = ref
320 ref_name = ref
321 if strip_prefix:
321 if strip_prefix:
322 ref_name = ref[len(prefix):]
322 ref_name = ref[len(prefix):]
323 result.append((safe_str(ref_name), sha))
323 result.append((safe_str(ref_name), sha))
324
324
325 def get_name(entry):
325 def get_name(entry):
326 return entry[0]
326 return entry[0]
327
327
328 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
328 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
329
329
    def _get_branches(self):
        # branches live under refs/heads/; strip the prefix so the keys
        # are plain branch names
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
332
332
    @CachedProperty
    def branches(self):
        # cached {branch name: commit id} mapping; refreshed via
        # _invalidate_prop_cache('branches')
        return self._get_branches()
336
336
    @CachedProperty
    def branches_closed(self):
        # "closed branches" are a Mercurial concept; the git backend
        # returns an empty mapping to satisfy the shared VCS interface
        return {}
340
340
    @CachedProperty
    def bookmarks(self):
        # bookmarks are a Mercurial concept; the git backend exposes an
        # empty mapping to satisfy the shared VCS interface
        return {}
344
344
345 @CachedProperty
345 @CachedProperty
346 def branches_all(self):
346 def branches_all(self):
347 all_branches = {}
347 all_branches = {}
348 all_branches.update(self.branches)
348 all_branches.update(self.branches)
349 all_branches.update(self.branches_closed)
349 all_branches.update(self.branches_closed)
350 return all_branches
350 return all_branches
351
351
    @CachedProperty
    def tags(self):
        # cached {tag name: commit id} mapping; refreshed via
        # _invalidate_prop_cache('tags')
        return self._get_tags()
355
355
    def _get_tags(self):
        # tags live under refs/tags/; reverse name order (newest-style first)
        return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
358
358
359 def tag(self, name, user, commit_id=None, message=None, date=None,
359 def tag(self, name, user, commit_id=None, message=None, date=None,
360 **kwargs):
360 **kwargs):
361 # TODO: fix this method to apply annotated tags correct with message
361 # TODO: fix this method to apply annotated tags correct with message
362 """
362 """
363 Creates and returns a tag for the given ``commit_id``.
363 Creates and returns a tag for the given ``commit_id``.
364
364
365 :param name: name for new tag
365 :param name: name for new tag
366 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
366 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
367 :param commit_id: commit id for which new tag would be created
367 :param commit_id: commit id for which new tag would be created
368 :param message: message of the tag's commit
368 :param message: message of the tag's commit
369 :param date: date of tag's commit
369 :param date: date of tag's commit
370
370
371 :raises TagAlreadyExistError: if tag with same name already exists
371 :raises TagAlreadyExistError: if tag with same name already exists
372 """
372 """
373 if name in self.tags:
373 if name in self.tags:
374 raise TagAlreadyExistError("Tag %s already exists" % name)
374 raise TagAlreadyExistError("Tag %s already exists" % name)
375 commit = self.get_commit(commit_id=commit_id)
375 commit = self.get_commit(commit_id=commit_id)
376 message = message or "Added tag {} for commit {}".format(name, commit.raw_id)
376 message = message or f"Added tag {name} for commit {commit.raw_id}"
377
377
378 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
378 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
379
379
380 self._invalidate_prop_cache('tags')
380 self._invalidate_prop_cache('tags')
381 self._invalidate_prop_cache('_refs')
381 self._invalidate_prop_cache('_refs')
382
382
383 return commit
383 return commit
384
384
385 def remove_tag(self, name, user, message=None, date=None):
385 def remove_tag(self, name, user, message=None, date=None):
386 """
386 """
387 Removes tag with the given ``name``.
387 Removes tag with the given ``name``.
388
388
389 :param name: name of the tag to be removed
389 :param name: name of the tag to be removed
390 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
390 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
391 :param message: message of the tag's removal commit
391 :param message: message of the tag's removal commit
392 :param date: date of tag's removal commit
392 :param date: date of tag's removal commit
393
393
394 :raises TagDoesNotExistError: if tag with given name does not exists
394 :raises TagDoesNotExistError: if tag with given name does not exists
395 """
395 """
396 if name not in self.tags:
396 if name not in self.tags:
397 raise TagDoesNotExistError("Tag %s does not exist" % name)
397 raise TagDoesNotExistError("Tag %s does not exist" % name)
398
398
399 self._remote.tag_remove(name)
399 self._remote.tag_remove(name)
400 self._invalidate_prop_cache('tags')
400 self._invalidate_prop_cache('tags')
401 self._invalidate_prop_cache('_refs')
401 self._invalidate_prop_cache('_refs')
402
402
    def _get_refs(self):
        # uncached {ref path: sha} mapping straight from the remote
        return self._remote.get_refs()
405
405
    @CachedProperty
    def _refs(self):
        # cached {ref path: sha} mapping; invalidated via
        # _invalidate_prop_cache('_refs') after any ref mutation
        return self._get_refs()
409
409
410 @property
410 @property
411 def _ref_tree(self):
411 def _ref_tree(self):
412 node = tree = {}
412 node = tree = {}
413 for ref, sha in self._refs.items():
413 for ref, sha in self._refs.items():
414 path = ref.split('/')
414 path = ref.split('/')
415 for bit in path[:-1]:
415 for bit in path[:-1]:
416 node = node.setdefault(bit, {})
416 node = node.setdefault(bit, {})
417 node[path[-1]] = sha
417 node[path[-1]] = sha
418 node = tree
418 node = tree
419 return tree
419 return tree
420
420
    def get_remote_ref(self, ref_name):
        # resolve a ref as fetched from the default 'origin' remote;
        # best-effort: returns the sha, or None when lookup fails for any reason
        ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
        try:
            return self._refs[ref_key]
        except Exception:
            return
427
427
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=True, maybe_unreachable=False, reference_obj=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_id: commit id (sha or symbolic name) to look up
        :param commit_idx: alternatively, numeric index into ``commit_ids``
        :param pre_load: optional attributes to eagerly load on the commit
        :param translate_tag: when True, resolve symbolic names through
            ``_lookup_commit`` before constructing the commit object
        :param maybe_unreachable: forwarded to ``_lookup_commit``
        :param reference_obj: forwarded to ``_lookup_commit``
        :raises EmptyRepositoryError: when the repository has no commits
        """

        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # not cached: fall through to the lookup path below
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indexes to their positive position
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # out-of-range index: let _lookup_commit raise a proper error
                commit_id = commit_idx
        else:
            # neither id nor idx given: resolve the head commit
            commit_id = "tip"

        if translate_tag:
            commit_id = self._lookup_commit(
                commit_id, maybe_unreachable=maybe_unreachable,
                reference_obj=reference_obj)

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            # unknown position; -1 marks "index not cached"
            idx = -1

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
470
470
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # resolve the boundary ids to positions in commit_ids
        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end boundary inclusive for the slice below
            end_pos += 1

        # collect which filter kinds are active; the actual values are
        # assembled into `revfilters` below
        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            # date strings in the month/day/year form accepted by git
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_commit_ids(filters=revfilters)

        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)
550
550
551 def get_diff(
551 def get_diff(
552 self, commit1, commit2, path='', ignore_whitespace=False,
552 self, commit1, commit2, path='', ignore_whitespace=False,
553 context=3, path1=None):
553 context=3, path1=None):
554 """
554 """
555 Returns (git like) *diff*, as plain text. Shows changes introduced by
555 Returns (git like) *diff*, as plain text. Shows changes introduced by
556 ``commit2`` since ``commit1``.
556 ``commit2`` since ``commit1``.
557
557
558 :param commit1: Entry point from which diff is shown. Can be
558 :param commit1: Entry point from which diff is shown. Can be
559 ``self.EMPTY_COMMIT`` - in this case, patch showing all
559 ``self.EMPTY_COMMIT`` - in this case, patch showing all
560 the changes since empty state of the repository until ``commit2``
560 the changes since empty state of the repository until ``commit2``
561 :param commit2: Until which commits changes should be shown.
561 :param commit2: Until which commits changes should be shown.
562 :param path:
562 :param path:
563 :param ignore_whitespace: If set to ``True``, would not show whitespace
563 :param ignore_whitespace: If set to ``True``, would not show whitespace
564 changes. Defaults to ``False``.
564 changes. Defaults to ``False``.
565 :param context: How many lines before/after changed lines should be
565 :param context: How many lines before/after changed lines should be
566 shown. Defaults to ``3``.
566 shown. Defaults to ``3``.
567 :param path1:
567 :param path1:
568 """
568 """
569 self._validate_diff_commits(commit1, commit2)
569 self._validate_diff_commits(commit1, commit2)
570 if path1 is not None and path1 != path:
570 if path1 is not None and path1 != path:
571 raise ValueError("Diff of two different paths not supported.")
571 raise ValueError("Diff of two different paths not supported.")
572
572
573 if path:
573 if path:
574 file_filter = path
574 file_filter = path
575 else:
575 else:
576 file_filter = None
576 file_filter = None
577
577
578 diff = self._remote.diff(
578 diff = self._remote.diff(
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
580 opt_ignorews=ignore_whitespace,
580 opt_ignorews=ignore_whitespace,
581 context=context)
581 context=context)
582
582
583 return GitDiff(diff)
583 return GitDiff(diff)
584
584
585 def strip(self, commit_id, branch_name):
585 def strip(self, commit_id, branch_name):
586 commit = self.get_commit(commit_id=commit_id)
586 commit = self.get_commit(commit_id=commit_id)
587 if commit.merge:
587 if commit.merge:
588 raise Exception('Cannot reset to merge commit')
588 raise Exception('Cannot reset to merge commit')
589
589
590 # parent is going to be the new head now
590 # parent is going to be the new head now
591 commit = commit.parents[0]
591 commit = commit.parents[0]
592 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
592 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
593
593
594 # clear cached properties
594 # clear cached properties
595 self._invalidate_prop_cache('commit_ids')
595 self._invalidate_prop_cache('commit_ids')
596 self._invalidate_prop_cache('_refs')
596 self._invalidate_prop_cache('_refs')
597 self._invalidate_prop_cache('branches')
597 self._invalidate_prop_cache('branches')
598
598
599 return len(self.commit_ids)
599 return len(self.commit_ids)
600
600
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the id of the common ancestor of ``commit_id1`` (in this repo)
        and ``commit_id2`` (in ``repo2``), or None when none can be derived.
        """
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            # cross-repo case: find commits missing on our side; the parent
            # of the oldest missing commit is the shared ancestor
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    # missing chain starts at a root commit: no ancestor
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            # same repo: git can compute the merge-base directly
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]

        log.debug('Found common ancestor with sha: %s', ancestor_id)

        return ancestor_id
628
628
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the list of commits reachable from ``commit_id2`` but not from
        ``commit_id1``, oldest first.

        :param repo2: repository holding ``commit_id2`` (may be this repo)
        :param merge: NOTE(review): unused in this body — presumably kept for
            interface parity with other backends; confirm before removing
        :param pre_load: optional attributes to eagerly load on each commit
        """
        repo1 = self
        ancestor_id = None

        if commit_id1 == commit_id2:
            commits = []
        elif repo1 != repo2:
            # cross-repo: ask the remote which revs are missing on our side
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            # same repo: list the range commit_id1..commit_id2 via git log
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 f'{commit_id1}..{commit_id2}'])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in self.COMMIT_ID_PAT.findall(output)]

        return commits
650
650
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        # staging area for building commits without touching the working copy
        return GitInMemoryCommit(self)
657
657
658 def pull(self, url, commit_ids=None, update_after=False):
658 def pull(self, url, commit_ids=None, update_after=False):
659 """
659 """
660 Pull changes from external location. Pull is different in GIT
660 Pull changes from external location. Pull is different in GIT
661 that fetch since it's doing a checkout
661 that fetch since it's doing a checkout
662
662
663 :param commit_ids: Optional. Can be set to a list of commit ids
663 :param commit_ids: Optional. Can be set to a list of commit ids
664 which shall be pulled from the other repository.
664 which shall be pulled from the other repository.
665 """
665 """
666 refs = None
666 refs = None
667 if commit_ids is not None:
667 if commit_ids is not None:
668 remote_refs = self._remote.get_remote_refs(url)
668 remote_refs = self._remote.get_remote_refs(url)
669 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
670 self._remote.pull(url, refs=refs, update_after=update_after)
670 self._remote.pull(url, refs=refs, update_after=update_after)
671 self._remote.invalidate_vcs_cache()
671 self._remote.invalidate_vcs_cache()
672
672
    def fetch(self, url, commit_ids=None):
        """
        Fetch all git objects from external location.
        """
        # NOTE(review): commit_ids is forwarded as `refs` to the remote
        # fetch — presumably ref names/ids are accepted; confirm with callers
        self._remote.sync_fetch(url, refs=commit_ids)
        self._remote.invalidate_vcs_cache()
679
679
680 def push(self, url):
680 def push(self, url):
681 refs = None
681 refs = None
682 self._remote.sync_push(url, refs=refs)
682 self._remote.sync_push(url, refs=refs)
683
683
    def set_refs(self, ref_name, commit_id):
        # point ref_name at commit_id, then drop the cached ref mapping
        self._remote.set_refs(ref_name, commit_id)
        self._invalidate_prop_cache('_refs')
687
687
    def remove_ref(self, ref_name):
        # delete the ref, then drop the cached ref mapping
        self._remote.remove_ref(ref_name)
        self._invalidate_prop_cache('_refs')
691
691
692 def run_gc(self, prune=True):
692 def run_gc(self, prune=True):
693 cmd = ['gc', '--aggressive']
693 cmd = ['gc', '--aggressive']
694 if prune:
694 if prune:
695 cmd += ['--prune=now']
695 cmd += ['--prune=now']
696 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
697 return stderr
697 return stderr
698
698
    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        # keeps auxiliary info files current (needed for dumb-protocol access)
        self._remote.update_server_info()
704
704
    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non bare repositories (i.e. repositories with a
        working copy)

        :raises RepositoryError: for bare repositories
        :return: branch name string, or None for an empty repository
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        # symbolic name of HEAD, e.g. 'master'
        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()
720
720
721 def _checkout(self, branch_name, create=False, force=False):
721 def _checkout(self, branch_name, create=False, force=False):
722 """
722 """
723 Checkout a branch in the working directory.
723 Checkout a branch in the working directory.
724
724
725 It tries to create the branch if create is True, failing if the branch
725 It tries to create the branch if create is True, failing if the branch
726 already exists.
726 already exists.
727
727
728 It only works for non bare repositories (i.e. repositories with a
728 It only works for non bare repositories (i.e. repositories with a
729 working copy)
729 working copy)
730 """
730 """
731 if self.bare:
731 if self.bare:
732 raise RepositoryError('Cannot checkout branches in a bare git repo')
732 raise RepositoryError('Cannot checkout branches in a bare git repo')
733
733
734 cmd = ['checkout']
734 cmd = ['checkout']
735 if force:
735 if force:
736 cmd.append('-f')
736 cmd.append('-f')
737 if create:
737 if create:
738 cmd.append('-b')
738 cmd.append('-b')
739 cmd.append(branch_name)
739 cmd.append(branch_name)
740 self.run_git_command(cmd, fail_on_stderr=False)
740 self.run_git_command(cmd, fail_on_stderr=False)
741
741
742 def _create_branch(self, branch_name, commit_id):
742 def _create_branch(self, branch_name, commit_id):
743 """
743 """
744 creates a branch in a GIT repo
744 creates a branch in a GIT repo
745 """
745 """
746 self._remote.create_branch(branch_name, commit_id)
746 self._remote.create_branch(branch_name, commit_id)
747
747
748 def _identify(self):
748 def _identify(self):
749 """
749 """
750 Return the current state of the working directory.
750 Return the current state of the working directory.
751 """
751 """
752 if self.bare:
752 if self.bare:
753 raise RepositoryError('Bare git repos do not have active branches')
753 raise RepositoryError('Bare git repos do not have active branches')
754
754
755 if self.is_empty():
755 if self.is_empty():
756 return None
756 return None
757
757
758 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
759 return stdout.strip()
759 return stdout.strip()
760
760
761 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 def _local_clone(self, clone_path, branch_name, source_branch=None):
762 """
762 """
763 Create a local clone of the current repo.
763 Create a local clone of the current repo.
764 """
764 """
765 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 # N.B.(skreft): the --branch option is required as otherwise the shallow
766 # clone will only fetch the active branch.
766 # clone will only fetch the active branch.
767 cmd = ['clone', '--branch', branch_name,
767 cmd = ['clone', '--branch', branch_name,
768 self.path, os.path.abspath(clone_path)]
768 self.path, os.path.abspath(clone_path)]
769
769
770 self.run_git_command(cmd, fail_on_stderr=False)
770 self.run_git_command(cmd, fail_on_stderr=False)
771
771
772 # if we get the different source branch, make sure we also fetch it for
772 # if we get the different source branch, make sure we also fetch it for
773 # merge conditions
773 # merge conditions
774 if source_branch and source_branch != branch_name:
774 if source_branch and source_branch != branch_name:
775 # check if the ref exists.
775 # check if the ref exists.
776 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 shadow_repo = GitRepository(os.path.abspath(clone_path))
777 if shadow_repo.get_remote_ref(source_branch):
777 if shadow_repo.get_remote_ref(source_branch):
778 cmd = ['fetch', self.path, source_branch]
778 cmd = ['fetch', self.path, source_branch]
779 self.run_git_command(cmd, fail_on_stderr=False)
779 self.run_git_command(cmd, fail_on_stderr=False)
780
780
781 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 def _local_fetch(self, repository_path, branch_name, use_origin=False):
782 """
782 """
783 Fetch a branch from a local repository.
783 Fetch a branch from a local repository.
784 """
784 """
785 repository_path = os.path.abspath(repository_path)
785 repository_path = os.path.abspath(repository_path)
786 if repository_path == self.path:
786 if repository_path == self.path:
787 raise ValueError('Cannot fetch from the same repository')
787 raise ValueError('Cannot fetch from the same repository')
788
788
789 if use_origin:
789 if use_origin:
790 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch_name = '+{branch}:refs/heads/{branch}'.format(
791 branch=branch_name)
791 branch=branch_name)
792
792
793 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 cmd = ['fetch', '--no-tags', '--update-head-ok',
794 repository_path, branch_name]
794 repository_path, branch_name]
795 self.run_git_command(cmd, fail_on_stderr=False)
795 self.run_git_command(cmd, fail_on_stderr=False)
796
796
797 def _local_reset(self, branch_name):
797 def _local_reset(self, branch_name):
798 branch_name = f'{branch_name}'
798 branch_name = f'{branch_name}'
799 cmd = ['reset', '--hard', branch_name, '--']
799 cmd = ['reset', '--hard', branch_name, '--']
800 self.run_git_command(cmd, fail_on_stderr=False)
800 self.run_git_command(cmd, fail_on_stderr=False)
801
801
802 def _last_fetch_heads(self):
802 def _last_fetch_heads(self):
803 """
803 """
804 Return the last fetched heads that need merging.
804 Return the last fetched heads that need merging.
805
805
806 The algorithm is defined at
806 The algorithm is defined at
807 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
808 """
808 """
809 if not self.bare:
809 if not self.bare:
810 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
811 else:
811 else:
812 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
813
813
814 heads = []
814 heads = []
815 with open(fetch_heads_path) as f:
815 with open(fetch_heads_path) as f:
816 for line in f:
816 for line in f:
817 if ' not-for-merge ' in line:
817 if ' not-for-merge ' in line:
818 continue
818 continue
819 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 line = re.sub('\t.*', '', line, flags=re.DOTALL)
820 heads.append(line)
820 heads.append(line)
821
821
822 return heads
822 return heads
823
823
824 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
824 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
825 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
826
826
827 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 def _local_pull(self, repository_path, branch_name, ff_only=True):
828 """
828 """
829 Pull a branch from a local repository.
829 Pull a branch from a local repository.
830 """
830 """
831 if self.bare:
831 if self.bare:
832 raise RepositoryError('Cannot pull into a bare git repository')
832 raise RepositoryError('Cannot pull into a bare git repository')
833 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # N.B.(skreft): The --ff-only option is to make sure this is a
834 # fast-forward (i.e., we are only pulling new changes and there are no
834 # fast-forward (i.e., we are only pulling new changes and there are no
835 # conflicts with our current branch)
835 # conflicts with our current branch)
836 # Additionally, that option needs to go before --no-tags, otherwise git
836 # Additionally, that option needs to go before --no-tags, otherwise git
837 # pull complains about it being an unknown flag.
837 # pull complains about it being an unknown flag.
838 cmd = ['pull']
838 cmd = ['pull']
839 if ff_only:
839 if ff_only:
840 cmd.append('--ff-only')
840 cmd.append('--ff-only')
841 cmd.extend(['--no-tags', repository_path, branch_name])
841 cmd.extend(['--no-tags', repository_path, branch_name])
842 self.run_git_command(cmd, fail_on_stderr=False)
842 self.run_git_command(cmd, fail_on_stderr=False)
843
843
844 def _local_merge(self, merge_message, user_name, user_email, heads):
844 def _local_merge(self, merge_message, user_name, user_email, heads):
845 """
845 """
846 Merge the given head into the checked out branch.
846 Merge the given head into the checked out branch.
847
847
848 It will force a merge commit.
848 It will force a merge commit.
849
849
850 Currently it raises an error if the repo is empty, as it is not possible
850 Currently it raises an error if the repo is empty, as it is not possible
851 to create a merge commit in an empty repo.
851 to create a merge commit in an empty repo.
852
852
853 :param merge_message: The message to use for the merge commit.
853 :param merge_message: The message to use for the merge commit.
854 :param heads: the heads to merge.
854 :param heads: the heads to merge.
855 """
855 """
856 if self.bare:
856 if self.bare:
857 raise RepositoryError('Cannot merge into a bare git repository')
857 raise RepositoryError('Cannot merge into a bare git repository')
858
858
859 if not heads:
859 if not heads:
860 return
860 return
861
861
862 if self.is_empty():
862 if self.is_empty():
863 # TODO(skreft): do something more robust in this case.
863 # TODO(skreft): do something more robust in this case.
864 raise RepositoryError('Do not know how to merge into empty repositories yet')
864 raise RepositoryError('Do not know how to merge into empty repositories yet')
865 unresolved = None
865 unresolved = None
866
866
867 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
867 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
868 # commit message. We also specify the user who is doing the merge.
868 # commit message. We also specify the user who is doing the merge.
869 cmd = ['-c', f'user.name="{user_name}"',
869 cmd = ['-c', f'user.name="{user_name}"',
870 '-c', f'user.email={user_email}',
870 '-c', f'user.email={user_email}',
871 'merge', '--no-ff', '-m', safe_str(merge_message)]
871 'merge', '--no-ff', '-m', safe_str(merge_message)]
872
872
873 merge_cmd = cmd + heads
873 merge_cmd = cmd + heads
874
874
875 try:
875 try:
876 self.run_git_command(merge_cmd, fail_on_stderr=False)
876 self.run_git_command(merge_cmd, fail_on_stderr=False)
877 except RepositoryError:
877 except RepositoryError:
878 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
878 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
879 fail_on_stderr=False)[0].splitlines()
879 fail_on_stderr=False)[0].splitlines()
880 # NOTE(marcink): we add U notation for consistent with HG backend output
880 # NOTE(marcink): we add U notation for consistent with HG backend output
881 unresolved = [f'U {f}' for f in files]
881 unresolved = [f'U {f}' for f in files]
882
882
883 # Cleanup any merge leftovers
883 # Cleanup any merge leftovers
884 self._remote.invalidate_vcs_cache()
884 self._remote.invalidate_vcs_cache()
885 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
885 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
886
886
887 if unresolved:
887 if unresolved:
888 raise UnresolvedFilesInRepo(unresolved)
888 raise UnresolvedFilesInRepo(unresolved)
889 else:
889 else:
890 raise
890 raise
891
891
892 def _local_push(
892 def _local_push(
893 self, source_branch, repository_path, target_branch,
893 self, source_branch, repository_path, target_branch,
894 enable_hooks=False, rc_scm_data=None):
894 enable_hooks=False, rc_scm_data=None):
895 """
895 """
896 Push the source_branch to the given repository and target_branch.
896 Push the source_branch to the given repository and target_branch.
897
897
898 Currently it if the target_branch is not master and the target repo is
898 Currently it if the target_branch is not master and the target repo is
899 empty, the push will work, but then GitRepository won't be able to find
899 empty, the push will work, but then GitRepository won't be able to find
900 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
900 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
901 pointing to master, which does not exist).
901 pointing to master, which does not exist).
902
902
903 It does not run the hooks in the target repo.
903 It does not run the hooks in the target repo.
904 """
904 """
905 # TODO(skreft): deal with the case in which the target repo is empty,
905 # TODO(skreft): deal with the case in which the target repo is empty,
906 # and the target_branch is not master.
906 # and the target_branch is not master.
907 target_repo = GitRepository(repository_path)
907 target_repo = GitRepository(repository_path)
908 if (not target_repo.bare and
908 if (not target_repo.bare and
909 target_repo._current_branch() == target_branch):
909 target_repo._current_branch() == target_branch):
910 # Git prevents pushing to the checked out branch, so simulate it by
910 # Git prevents pushing to the checked out branch, so simulate it by
911 # pulling into the target repository.
911 # pulling into the target repository.
912 target_repo._local_pull(self.path, source_branch)
912 target_repo._local_pull(self.path, source_branch)
913 else:
913 else:
914 cmd = ['push', os.path.abspath(repository_path),
914 cmd = ['push', os.path.abspath(repository_path),
915 '{}:{}'.format(source_branch, target_branch)]
915 f'{source_branch}:{target_branch}']
916 gitenv = {}
916 gitenv = {}
917 if rc_scm_data:
917 if rc_scm_data:
918 gitenv.update({'RC_SCM_DATA': rc_scm_data})
918 gitenv.update({'RC_SCM_DATA': rc_scm_data})
919
919
920 if not enable_hooks:
920 if not enable_hooks:
921 gitenv['RC_SKIP_HOOKS'] = '1'
921 gitenv['RC_SKIP_HOOKS'] = '1'
922 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
922 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
923
923
924 def _get_new_pr_branch(self, source_branch, target_branch):
924 def _get_new_pr_branch(self, source_branch, target_branch):
925 prefix = 'pr_{}-{}_'.format(source_branch, target_branch)
925 prefix = f'pr_{source_branch}-{target_branch}_'
926 pr_branches = []
926 pr_branches = []
927 for branch in self.branches:
927 for branch in self.branches:
928 if branch.startswith(prefix):
928 if branch.startswith(prefix):
929 pr_branches.append(int(branch[len(prefix):]))
929 pr_branches.append(int(branch[len(prefix):]))
930
930
931 if not pr_branches:
931 if not pr_branches:
932 branch_id = 0
932 branch_id = 0
933 else:
933 else:
934 branch_id = max(pr_branches) + 1
934 branch_id = max(pr_branches) + 1
935
935
936 return '%s%d' % (prefix, branch_id)
936 return '%s%d' % (prefix, branch_id)
937
937
938 def _maybe_prepare_merge_workspace(
938 def _maybe_prepare_merge_workspace(
939 self, repo_id, workspace_id, target_ref, source_ref):
939 self, repo_id, workspace_id, target_ref, source_ref):
940 shadow_repository_path = self._get_shadow_repository_path(
940 shadow_repository_path = self._get_shadow_repository_path(
941 self.path, repo_id, workspace_id)
941 self.path, repo_id, workspace_id)
942 if not os.path.exists(shadow_repository_path):
942 if not os.path.exists(shadow_repository_path):
943 self._local_clone(
943 self._local_clone(
944 shadow_repository_path, target_ref.name, source_ref.name)
944 shadow_repository_path, target_ref.name, source_ref.name)
945 log.debug('Prepared %s shadow repository in %s',
945 log.debug('Prepared %s shadow repository in %s',
946 self.alias, shadow_repository_path)
946 self.alias, shadow_repository_path)
947
947
948 return shadow_repository_path
948 return shadow_repository_path
949
949
950 def _merge_repo(self, repo_id, workspace_id, target_ref,
950 def _merge_repo(self, repo_id, workspace_id, target_ref,
951 source_repo, source_ref, merge_message,
951 source_repo, source_ref, merge_message,
952 merger_name, merger_email, dry_run=False,
952 merger_name, merger_email, dry_run=False,
953 use_rebase=False, close_branch=False):
953 use_rebase=False, close_branch=False):
954
954
955 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
955 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
956 'rebase' if use_rebase else 'merge', dry_run)
956 'rebase' if use_rebase else 'merge', dry_run)
957 if target_ref.commit_id != self.branches[target_ref.name]:
957 if target_ref.commit_id != self.branches[target_ref.name]:
958 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
958 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
959 target_ref.commit_id, self.branches[target_ref.name])
959 target_ref.commit_id, self.branches[target_ref.name])
960 return MergeResponse(
960 return MergeResponse(
961 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
961 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
962 metadata={'target_ref': target_ref})
962 metadata={'target_ref': target_ref})
963
963
964 shadow_repository_path = self._maybe_prepare_merge_workspace(
964 shadow_repository_path = self._maybe_prepare_merge_workspace(
965 repo_id, workspace_id, target_ref, source_ref)
965 repo_id, workspace_id, target_ref, source_ref)
966 shadow_repo = self.get_shadow_instance(shadow_repository_path)
966 shadow_repo = self.get_shadow_instance(shadow_repository_path)
967
967
968 # checkout source, if it's different. Otherwise we could not
968 # checkout source, if it's different. Otherwise we could not
969 # fetch proper commits for merge testing
969 # fetch proper commits for merge testing
970 if source_ref.name != target_ref.name:
970 if source_ref.name != target_ref.name:
971 if shadow_repo.get_remote_ref(source_ref.name):
971 if shadow_repo.get_remote_ref(source_ref.name):
972 shadow_repo._checkout(source_ref.name, force=True)
972 shadow_repo._checkout(source_ref.name, force=True)
973
973
974 # checkout target, and fetch changes
974 # checkout target, and fetch changes
975 shadow_repo._checkout(target_ref.name, force=True)
975 shadow_repo._checkout(target_ref.name, force=True)
976
976
977 # fetch/reset pull the target, in case it is changed
977 # fetch/reset pull the target, in case it is changed
978 # this handles even force changes
978 # this handles even force changes
979 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
979 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
980 shadow_repo._local_reset(target_ref.name)
980 shadow_repo._local_reset(target_ref.name)
981
981
982 # Need to reload repo to invalidate the cache, or otherwise we cannot
982 # Need to reload repo to invalidate the cache, or otherwise we cannot
983 # retrieve the last target commit.
983 # retrieve the last target commit.
984 shadow_repo = self.get_shadow_instance(shadow_repository_path)
984 shadow_repo = self.get_shadow_instance(shadow_repository_path)
985 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
985 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
986 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
986 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
987 target_ref, target_ref.commit_id,
987 target_ref, target_ref.commit_id,
988 shadow_repo.branches[target_ref.name])
988 shadow_repo.branches[target_ref.name])
989 return MergeResponse(
989 return MergeResponse(
990 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
990 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
991 metadata={'target_ref': target_ref})
991 metadata={'target_ref': target_ref})
992
992
993 # calculate new branch
993 # calculate new branch
994 pr_branch = shadow_repo._get_new_pr_branch(
994 pr_branch = shadow_repo._get_new_pr_branch(
995 source_ref.name, target_ref.name)
995 source_ref.name, target_ref.name)
996 log.debug('using pull-request merge branch: `%s`', pr_branch)
996 log.debug('using pull-request merge branch: `%s`', pr_branch)
997 # checkout to temp branch, and fetch changes
997 # checkout to temp branch, and fetch changes
998 shadow_repo._checkout(pr_branch, create=True)
998 shadow_repo._checkout(pr_branch, create=True)
999 try:
999 try:
1000 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1000 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1001 except RepositoryError:
1001 except RepositoryError:
1002 log.exception('Failure when doing local fetch on '
1002 log.exception('Failure when doing local fetch on '
1003 'shadow repo: %s', shadow_repo)
1003 'shadow repo: %s', shadow_repo)
1004 return MergeResponse(
1004 return MergeResponse(
1005 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1005 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1006 metadata={'source_ref': source_ref})
1006 metadata={'source_ref': source_ref})
1007
1007
1008 merge_ref = None
1008 merge_ref = None
1009 merge_failure_reason = MergeFailureReason.NONE
1009 merge_failure_reason = MergeFailureReason.NONE
1010 metadata = {}
1010 metadata = {}
1011 try:
1011 try:
1012 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1012 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1013 [source_ref.commit_id])
1013 [source_ref.commit_id])
1014 merge_possible = True
1014 merge_possible = True
1015
1015
1016 # Need to invalidate the cache, or otherwise we
1016 # Need to invalidate the cache, or otherwise we
1017 # cannot retrieve the merge commit.
1017 # cannot retrieve the merge commit.
1018 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1018 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1019 merge_commit_id = shadow_repo.branches[pr_branch]
1019 merge_commit_id = shadow_repo.branches[pr_branch]
1020
1020
1021 # Set a reference pointing to the merge commit. This reference may
1021 # Set a reference pointing to the merge commit. This reference may
1022 # be used to easily identify the last successful merge commit in
1022 # be used to easily identify the last successful merge commit in
1023 # the shadow repository.
1023 # the shadow repository.
1024 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1024 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1025 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1025 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1026 except RepositoryError as e:
1026 except RepositoryError as e:
1027 log.exception('Failure when doing local merge on git shadow repo')
1027 log.exception('Failure when doing local merge on git shadow repo')
1028 if isinstance(e, UnresolvedFilesInRepo):
1028 if isinstance(e, UnresolvedFilesInRepo):
1029 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1029 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1030
1030
1031 merge_possible = False
1031 merge_possible = False
1032 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1032 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1033
1033
1034 if merge_possible and not dry_run:
1034 if merge_possible and not dry_run:
1035 try:
1035 try:
1036 shadow_repo._local_push(
1036 shadow_repo._local_push(
1037 pr_branch, self.path, target_ref.name, enable_hooks=True,
1037 pr_branch, self.path, target_ref.name, enable_hooks=True,
1038 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1038 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1039 merge_succeeded = True
1039 merge_succeeded = True
1040 except RepositoryError:
1040 except RepositoryError:
1041 log.exception(
1041 log.exception(
1042 'Failure when doing local push from the shadow '
1042 'Failure when doing local push from the shadow '
1043 'repository to the target repository at %s.', self.path)
1043 'repository to the target repository at %s.', self.path)
1044 merge_succeeded = False
1044 merge_succeeded = False
1045 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1045 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1046 metadata['target'] = 'git shadow repo'
1046 metadata['target'] = 'git shadow repo'
1047 metadata['merge_commit'] = pr_branch
1047 metadata['merge_commit'] = pr_branch
1048 else:
1048 else:
1049 merge_succeeded = False
1049 merge_succeeded = False
1050
1050
1051 return MergeResponse(
1051 return MergeResponse(
1052 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1052 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1053 metadata=metadata)
1053 metadata=metadata)
@@ -1,403 +1,403 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 HG commit module
20 HG commit module
21 """
21 """
22
22
23 import os
23 import os
24
24
25 from zope.cachedescriptors.property import Lazy as LazyProperty
25 from zope.cachedescriptors.property import Lazy as LazyProperty
26
26
27 from rhodecode.lib.datelib import utcdate_fromtimestamp
27 from rhodecode.lib.datelib import utcdate_fromtimestamp
28 from rhodecode.lib.str_utils import safe_bytes, safe_str
28 from rhodecode.lib.str_utils import safe_bytes, safe_str
29 from rhodecode.lib.vcs import path as vcspath
29 from rhodecode.lib.vcs import path as vcspath
30 from rhodecode.lib.vcs.backends import base
30 from rhodecode.lib.vcs.backends import base
31 from rhodecode.lib.vcs.exceptions import CommitError
31 from rhodecode.lib.vcs.exceptions import CommitError
32 from rhodecode.lib.vcs.nodes import (
32 from rhodecode.lib.vcs.nodes import (
33 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
33 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
34 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
34 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
35 LargeFileNode)
35 LargeFileNode)
36 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
36 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
37
37
38
38
39 class MercurialCommit(base.BaseCommit):
39 class MercurialCommit(base.BaseCommit):
40 """
40 """
41 Represents state of the repository at the single commit.
41 Represents state of the repository at the single commit.
42 """
42 """
43
43
44 _filter_pre_load = [
44 _filter_pre_load = [
45 # git specific property not supported here
45 # git specific property not supported here
46 "_commit",
46 "_commit",
47 ]
47 ]
48
48
49 def __init__(self, repository, raw_id, idx, pre_load=None):
49 def __init__(self, repository, raw_id, idx, pre_load=None):
50 raw_id = safe_str(raw_id)
50 raw_id = safe_str(raw_id)
51
51
52 self.repository = repository
52 self.repository = repository
53 self._remote = repository._remote
53 self._remote = repository._remote
54
54
55 self.raw_id = raw_id
55 self.raw_id = raw_id
56 self.idx = idx
56 self.idx = idx
57
57
58 self._set_bulk_properties(pre_load)
58 self._set_bulk_properties(pre_load)
59
59
60 # caches
60 # caches
61 self.nodes = {}
61 self.nodes = {}
62 self._stat_modes = {} # stat info for paths
62 self._stat_modes = {} # stat info for paths
63
63
64 def _set_bulk_properties(self, pre_load):
64 def _set_bulk_properties(self, pre_load):
65 if not pre_load:
65 if not pre_load:
66 return
66 return
67 pre_load = [entry for entry in pre_load
67 pre_load = [entry for entry in pre_load
68 if entry not in self._filter_pre_load]
68 if entry not in self._filter_pre_load]
69 if not pre_load:
69 if not pre_load:
70 return
70 return
71
71
72 result = self._remote.bulk_request(self.raw_id, pre_load)
72 result = self._remote.bulk_request(self.raw_id, pre_load)
73
73
74 for attr, value in result.items():
74 for attr, value in result.items():
75 if attr in ["author", "branch", "message"]:
75 if attr in ["author", "branch", "message"]:
76 value = safe_str(value)
76 value = safe_str(value)
77 elif attr == "affected_files":
77 elif attr == "affected_files":
78 value = list(map(safe_str, value))
78 value = list(map(safe_str, value))
79 elif attr == "date":
79 elif attr == "date":
80 value = utcdate_fromtimestamp(*value)
80 value = utcdate_fromtimestamp(*value)
81 elif attr in ["children", "parents"]:
81 elif attr in ["children", "parents"]:
82 value = self._make_commits(value)
82 value = self._make_commits(value)
83 elif attr in ["phase"]:
83 elif attr in ["phase"]:
84 value = self._get_phase_text(value)
84 value = self._get_phase_text(value)
85 self.__dict__[attr] = value
85 self.__dict__[attr] = value
86
86
87 @LazyProperty
87 @LazyProperty
88 def tags(self):
88 def tags(self):
89 tags = [name for name, commit_id in self.repository.tags.items()
89 tags = [name for name, commit_id in self.repository.tags.items()
90 if commit_id == self.raw_id]
90 if commit_id == self.raw_id]
91 return tags
91 return tags
92
92
93 @LazyProperty
93 @LazyProperty
94 def branch(self):
94 def branch(self):
95 return safe_str(self._remote.ctx_branch(self.raw_id))
95 return safe_str(self._remote.ctx_branch(self.raw_id))
96
96
97 @LazyProperty
97 @LazyProperty
98 def bookmarks(self):
98 def bookmarks(self):
99 bookmarks = [
99 bookmarks = [
100 name for name, commit_id in self.repository.bookmarks.items()
100 name for name, commit_id in self.repository.bookmarks.items()
101 if commit_id == self.raw_id]
101 if commit_id == self.raw_id]
102 return bookmarks
102 return bookmarks
103
103
104 @LazyProperty
104 @LazyProperty
105 def message(self):
105 def message(self):
106 return safe_str(self._remote.ctx_description(self.raw_id))
106 return safe_str(self._remote.ctx_description(self.raw_id))
107
107
108 @LazyProperty
108 @LazyProperty
109 def committer(self):
109 def committer(self):
110 return safe_str(self.author)
110 return safe_str(self.author)
111
111
112 @LazyProperty
112 @LazyProperty
113 def author(self):
113 def author(self):
114 return safe_str(self._remote.ctx_user(self.raw_id))
114 return safe_str(self._remote.ctx_user(self.raw_id))
115
115
116 @LazyProperty
116 @LazyProperty
117 def date(self):
117 def date(self):
118 return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))
118 return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))
119
119
    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        # Indexed tuple consumed by changed_paths (0), added_paths (1)
        # and removed_paths (2) below.
        return self._remote.ctx_status(self.raw_id)
126
126
    @LazyProperty
    def _file_paths(self):
        """All file paths present in this commit's manifest."""
        return self._remote.ctx_list(self.raw_id)
130
130
131 @LazyProperty
131 @LazyProperty
132 def _dir_paths(self):
132 def _dir_paths(self):
133 dir_paths = ['']
133 dir_paths = ['']
134 dir_paths.extend(list(set(get_dirs_for_path(*self._file_paths))))
134 dir_paths.extend(list(set(get_dirs_for_path(*self._file_paths))))
135
135
136 return dir_paths
136 return dir_paths
137
137
    @LazyProperty
    def _paths(self):
        """All paths of this commit: directories first, then files."""
        return self._dir_paths + self._file_paths
141
141
142 @LazyProperty
142 @LazyProperty
143 def id(self):
143 def id(self):
144 if self.last:
144 if self.last:
145 return 'tip'
145 return 'tip'
146 return self.short_id
146 return self.short_id
147
147
    @LazyProperty
    def short_id(self):
        """First 12 characters of the full commit hash."""
        return self.raw_id[:12]
151
151
152 def _make_commits(self, commit_ids, pre_load=None):
152 def _make_commits(self, commit_ids, pre_load=None):
153 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
153 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
154 for commit_id in commit_ids]
154 for commit_id in commit_ids]
155
155
    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parents = self._remote.ctx_parents(self.raw_id)
        return self._make_commits(parents)
163
163
164 def _get_phase_text(self, phase_id):
164 def _get_phase_text(self, phase_id):
165 return {
165 return {
166 0: 'public',
166 0: 'public',
167 1: 'draft',
167 1: 'draft',
168 2: 'secret',
168 2: 'secret',
169 }.get(phase_id) or ''
169 }.get(phase_id) or ''
170
170
171 @LazyProperty
171 @LazyProperty
172 def phase(self):
172 def phase(self):
173 phase_id = self._remote.ctx_phase(self.raw_id)
173 phase_id = self._remote.ctx_phase(self.raw_id)
174 phase_text = self._get_phase_text(phase_id)
174 phase_text = self._get_phase_text(phase_id)
175
175
176 return safe_str(phase_text)
176 return safe_str(phase_text)
177
177
178 @LazyProperty
178 @LazyProperty
179 def obsolete(self):
179 def obsolete(self):
180 obsolete = self._remote.ctx_obsolete(self.raw_id)
180 obsolete = self._remote.ctx_obsolete(self.raw_id)
181 return obsolete
181 return obsolete
182
182
183 @LazyProperty
183 @LazyProperty
184 def hidden(self):
184 def hidden(self):
185 hidden = self._remote.ctx_hidden(self.raw_id)
185 hidden = self._remote.ctx_hidden(self.raw_id)
186 return hidden
186 return hidden
187
187
    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        children = self._remote.ctx_children(self.raw_id)
        return self._make_commits(children)
195
195
196 def _get_kind(self, path):
196 def _get_kind(self, path):
197 path = self._fix_path(path)
197 path = self._fix_path(path)
198 if path in self._file_paths:
198 if path in self._file_paths:
199 return NodeKind.FILE
199 return NodeKind.FILE
200 elif path in self._dir_paths:
200 elif path in self._dir_paths:
201 return NodeKind.DIR
201 return NodeKind.DIR
202 else:
202 else:
203 raise CommitError(f"Node does not exist at the given path '{path}'")
203 raise CommitError(f"Node does not exist at the given path '{path}'")
204
204
    def _assert_is_path(self, path) -> str:
        """
        Normalize *path* and ensure it is a file in this commit.

        :raises CommitError: if *path* does not resolve to a FILE node.
        :return: the normalized path.
        """
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(f"File does not exist for commit {self.raw_id} at '{path}'")

        return path
211
211
    def get_file_mode(self, path: bytes):
        """
        Returns stat mode of the file at the given ``path``.
        """
        # NOTE(review): the annotation says bytes, but _assert_is_path is
        # annotated to return str and the flags cache is keyed by that value
        # — confirm the expected input type against callers.
        path = self._assert_is_path(path)

        # cache fctx_flags results per path to avoid repeated remote calls
        if path not in self._stat_modes:
            self._stat_modes[path] = self._remote.fctx_flags(self.raw_id, path)

        # 'x' flag marks an executable file in mercurial
        if 'x' in self._stat_modes[path]:
            return base.FILEMODE_EXECUTABLE
        return base.FILEMODE_DEFAULT
224
224
225 def is_link(self, path):
225 def is_link(self, path):
226 path = self._assert_is_path(path)
226 path = self._assert_is_path(path)
227 if path not in self._stat_modes:
227 if path not in self._stat_modes:
228 self._stat_modes[path] = self._remote.fctx_flags(self.raw_id, path)
228 self._stat_modes[path] = self._remote.fctx_flags(self.raw_id, path)
229
229
230 return 'l' in self._stat_modes[path]
230 return 'l' in self._stat_modes[path]
231
231
    def is_node_binary(self, path):
        """True when the remote considers the file at *path* binary."""
        path = self._assert_is_path(path)
        return self._remote.is_binary(self.raw_id, path)
235
235
    def node_md5_hash(self, path):
        """Return the remote-computed md5 hash of the file at *path*."""
        path = self._assert_is_path(path)
        return self._remote.md5_hash(self.raw_id, path)
239
239
    def get_file_content(self, path):
        """
        Returns content of the file at given ``path``.
        """
        path = self._assert_is_path(path)
        return self._remote.fctx_node_data(self.raw_id, path)
246
246
    def get_file_content_streamed(self, path):
        """Return file content via the remote's streaming variant of fctx_node_data."""
        path = self._assert_is_path(path)
        # 'stream:' prefixed methods are not valid identifiers, hence getattr
        stream_method = getattr(self._remote, 'stream:fctx_node_data')
        return stream_method(self.raw_id, path)
251
251
    def get_file_size(self, path):
        """
        Returns size of the file at given ``path``.
        """
        path = self._assert_is_path(path)
        return self._remote.fctx_size(self.raw_id, path)
258
258
259 def get_path_history(self, path, limit=None, pre_load=None):
259 def get_path_history(self, path, limit=None, pre_load=None):
260 """
260 """
261 Returns history of file as reversed list of `MercurialCommit` objects
261 Returns history of file as reversed list of `MercurialCommit` objects
262 for which file at given ``path`` has been modified.
262 for which file at given ``path`` has been modified.
263 """
263 """
264 path = self._assert_is_path(path)
264 path = self._assert_is_path(path)
265 hist = self._remote.node_history(self.raw_id, path, limit)
265 hist = self._remote.node_history(self.raw_id, path, limit)
266 return [
266 return [
267 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
267 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
268 for commit_id in hist]
268 for commit_id in hist]
269
269
270 def get_file_annotate(self, path, pre_load=None):
270 def get_file_annotate(self, path, pre_load=None):
271 """
271 """
272 Returns a generator of four element tuples with
272 Returns a generator of four element tuples with
273 lineno, commit_id, commit lazy loader and line
273 lineno, commit_id, commit lazy loader and line
274 """
274 """
275 result = self._remote.fctx_annotate(self.raw_id, path)
275 result = self._remote.fctx_annotate(self.raw_id, path)
276
276
277 for ln_no, commit_id, content in result:
277 for ln_no, commit_id, content in result:
278 yield (
278 yield (
279 ln_no, commit_id,
279 ln_no, commit_id,
280 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
280 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
281 content)
281 content)
282
282
283 def get_nodes(self, path, pre_load=None):
283 def get_nodes(self, path, pre_load=None):
284 """
284 """
285 Returns combined ``DirNode`` and ``FileNode`` objects list representing
285 Returns combined ``DirNode`` and ``FileNode`` objects list representing
286 state of commit at the given ``path``. If node at the given ``path``
286 state of commit at the given ``path``. If node at the given ``path``
287 is not instance of ``DirNode``, CommitError would be raised.
287 is not instance of ``DirNode``, CommitError would be raised.
288 """
288 """
289
289
290 if self._get_kind(path) != NodeKind.DIR:
290 if self._get_kind(path) != NodeKind.DIR:
291 raise CommitError(
291 raise CommitError(
292 "Directory does not exist for idx {} at '{}'".format(self.raw_id, path))
292 f"Directory does not exist for idx {self.raw_id} at '{path}'")
293 path = self._fix_path(path)
293 path = self._fix_path(path)
294
294
295 filenodes = [
295 filenodes = [
296 FileNode(safe_bytes(f), commit=self, pre_load=pre_load) for f in self._file_paths
296 FileNode(safe_bytes(f), commit=self, pre_load=pre_load) for f in self._file_paths
297 if os.path.dirname(f) == path]
297 if os.path.dirname(f) == path]
298 # TODO: johbo: Check if this can be done in a more obvious way
298 # TODO: johbo: Check if this can be done in a more obvious way
299 dirs = path == '' and '' or [
299 dirs = path == '' and '' or [
300 d for d in self._dir_paths
300 d for d in self._dir_paths
301 if d and vcspath.dirname(d) == path]
301 if d and vcspath.dirname(d) == path]
302 dirnodes = [
302 dirnodes = [
303 DirNode(safe_bytes(d), commit=self) for d in dirs
303 DirNode(safe_bytes(d), commit=self) for d in dirs
304 if os.path.dirname(d) == path]
304 if os.path.dirname(d) == path]
305
305
306 alias = self.repository.alias
306 alias = self.repository.alias
307 for k, vals in self._submodules.items():
307 for k, vals in self._submodules.items():
308 if vcspath.dirname(k) == path:
308 if vcspath.dirname(k) == path:
309 loc = vals[0]
309 loc = vals[0]
310 commit = vals[1]
310 commit = vals[1]
311 dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))
311 dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))
312
312
313 nodes = dirnodes + filenodes
313 nodes = dirnodes + filenodes
314 for node in nodes:
314 for node in nodes:
315 if node.path not in self.nodes:
315 if node.path not in self.nodes:
316 self.nodes[node.path] = node
316 self.nodes[node.path] = node
317 nodes.sort()
317 nodes.sort()
318
318
319 return nodes
319 return nodes
320
320
    def get_node(self, path, pre_load=None):
        """
        Returns `Node` object from the given `path`. If there is no node at
        the given `path`, `NodeDoesNotExistError` would be raised.
        """
        path = self._fix_path(path)

        if path not in self.nodes:
            if path in self._file_paths:
                node = FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
            elif path in self._dir_paths:
                # '' denotes the repository root
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(safe_bytes(path), commit=self)
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node
        return self.nodes[path]
342
342
    def get_largefile_node(self, path):
        """
        Return a ``LargeFileNode`` for *path* if it is a mercurial largefile
        pointer, linking it into the store from the user cache if needed.

        Implicitly returns ``None`` when *path* is not a largefile pointer or
        the referenced file is in neither the store nor the user cache.
        """
        pointer_spec = self._remote.is_large_file(self.raw_id, path)
        if pointer_spec:
            # content of that file regular FileNode is the hash of largefile
            file_id = self.get_file_content(path).strip()

            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
            elif self._remote.in_user_cache(file_id):
                lf_path = self._remote.store_path(file_id)
                # materialize the largefile from the user cache into the store
                self._remote.link(file_id, path)
                return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
356
356
    @LazyProperty
    def _submodules(self):
        """
        Returns a dictionary with submodule information from substate file
        of hg repository.
        """
        return self._remote.ctx_substate(self.raw_id)
364
364
    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given commit
        """
        return self._remote.ctx_files(self.raw_id)
371
371
    @property
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        return AddedFileNodesGenerator(self.added_paths, self)
378
378
379 @LazyProperty
379 @LazyProperty
380 def added_paths(self):
380 def added_paths(self):
381 return [n for n in self.status[1]]
381 return [n for n in self.status[1]]
382
382
    @property
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        return ChangedFileNodesGenerator(self.changed_paths, self)
389
389
390 @LazyProperty
390 @LazyProperty
391 def changed_paths(self):
391 def changed_paths(self):
392 return [n for n in self.status[0]]
392 return [n for n in self.status[0]]
393
393
    @property
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        return RemovedFileNodesGenerator(self.removed_paths, self)
400
400
401 @LazyProperty
401 @LazyProperty
402 def removed_paths(self):
402 def removed_paths(self):
403 return [n for n in self.status[2]]
403 return [n for n in self.status[2]]
@@ -1,1013 +1,1013 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 HG repository module
20 HG repository module
21 """
21 """
22 import os
22 import os
23 import logging
23 import logging
24 import binascii
24 import binascii
25 import configparser
25 import configparser
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from collections import OrderedDict
32 from collections import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47
47
# local alias kept for historical call sites
hexlify = binascii.hexlify
# mercurial's null revision id (20 NUL characters)
# NOTE(review): this is a str, not bytes — presumably what the remote
# tag() API expects on removal; confirm against the vcsserver side.
nullid = "\0" * 20

log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
           making a clone
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches
        self._commit_ids = {}
88
88
    @LazyProperty
    def _remote(self):
        """Lazy handle to the vcsserver-side Hg repository."""
        # repo_id doubles as the identifier for the remote connection
        repo_id = self.path
        return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order.  Being lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the id -> index lookup in sync with the freshly fetched list
        self._rebuild_cache(commit_ids)
        return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = {commit_id: index
105 self._commit_ids = {commit_id: index
106 for index, commit_id in enumerate(commit_ids)}
106 for index, commit_id in enumerate(commit_ids)}
107
107
    @CachedProperty
    def branches(self):
        """Active (not closed) branches, mapped name -> head commit id."""
        return self._get_branches()
111
111
    @CachedProperty
    def branches_closed(self):
        """Closed branches only, mapped name -> head commit id."""
        return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(n, h,) for n, h in
138 _branches = [(n, h,) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
    @CachedProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(n, h,) for n, h in
157 _tags = [(n, h,) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        # 'local' kwarg controls whether the tag is stored in .hg/localtags
        # instead of being committed to .hgtags
        local = kwargs.setdefault('local', False)

        if message is None:
            message = f"Added tag {name} for commit {commit.short_id}"

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
193
193
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # tagging the null revision removes the tag in mercurial
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')
217
217
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (n, h) for n, h in
233 (n, h) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
    def _get_all_commit_ids(self):
        """All commit ids of the 'visible' (non-hidden) revset, ascending."""
        return self._remote.get_all_commit_ids('visible')
240
240
def get_diff(
        self, commit1, commit2, path='', ignore_whitespace=False,
        context=3, path1=None):
    """
    Return a (git-like) *diff* as plain text, showing changes introduced
    by `commit2` since `commit1`.

    :param commit1: Entry point from which diff is shown. Can be
        ``self.EMPTY_COMMIT`` — in this case, patch showing all
        the changes since empty state of the repository until `commit2`
    :param commit2: Until which commit changes should be shown.
    :param path: Restrict the diff to this path (optional).
    :param ignore_whitespace: If set to ``True``, would not show whitespace
        changes. Defaults to ``False``.
    :param context: How many lines before/after changed lines should be
        shown. Defaults to ``3``.
    :param path1: Must equal `path` if given — diffing two different
        paths is not supported.
    :raises ValueError: if `path1` differs from `path`.
    """
    self._validate_diff_commits(commit1, commit2)
    if path1 is not None and path1 != path:
        raise ValueError("Diff of two different paths not supported.")

    # the remote expects a [repo_path, file_path] pair, or None for "all files"
    file_filter = [self.path, path] if path else None

    raw_diff = self._remote.diff(
        commit1.raw_id, commit2.raw_id, file_filter=file_filter,
        opt_git=True, opt_ignorews=ignore_whitespace,
        context=context)
    return MercurialDiff(raw_diff)
271
271
def strip(self, commit_id, branch=None):
    """
    Strip `commit_id` (and its descendants) from the repository.

    No working-copy update and no backup bundle is kept. Invalidates
    the remote VCS cache and the cached commit-id list.

    :return: number of commits remaining after the strip.
    """
    self._remote.strip(commit_id, update=False, backup="none")
    self._remote.invalidate_vcs_cache()
    # cached ids are stale after a strip — force a re-read
    self._invalidate_prop_cache('commit_ids')
    return len(self.commit_ids)
280
280
def verify(self):
    """Run repository integrity verification on the remote; return its result."""
    result = self._remote.verify()
    self._remote.invalidate_vcs_cache()
    return result
286
286
def hg_update_cache(self):
    """Trigger a Mercurial cache update on the remote; return its result."""
    result = self._remote.hg_update_cache()
    self._remote.invalidate_vcs_cache()
    return result
292
292
def hg_rebuild_fn_cache(self):
    """Rebuild the Mercurial filename cache on the remote; return its result."""
    result = self._remote.hg_rebuild_fn_cache()
    self._remote.invalidate_vcs_cache()
    return result
298
298
def get_common_ancestor(self, commit_id1, commit_id2, repo2):
    """
    Return the raw id of the common ancestor of `commit_id1` (in this
    repo) and `commit_id2` (in `repo2`), or ``None`` if there is none.

    Identical ids short-circuit to `commit_id1` without a remote call.
    """
    log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
              self, commit_id1, repo2, commit_id2)

    if commit_id1 == commit_id2:
        return commit_id1

    found = self._remote.revs_from_revspec(
        "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
        other_path=repo2.path)

    ancestor_id = repo2[found[0]].raw_id if found else None

    log.debug('Found common ancestor with sha: %s', ancestor_id)
    return ancestor_id
314
314
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
    """
    Return the list of commits in `repo2` that `commit_id2` adds on top
    of `commit_id1`.

    With ``merge`` set, ancestry-based revspec is used (everything
    reachable from commit2 but not commit1); otherwise a plain range
    revspec. Equal ids yield an empty list without any remote call.
    """
    if commit_id1 == commit_id2:
        return []

    if merge:
        indexes = self._remote.revs_from_revspec(
            "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
            commit_id2, commit_id1, commit_id1, other_path=repo2.path)
    else:
        indexes = self._remote.revs_from_revspec(
            "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
            commit_id1, other_path=repo2.path)

    return [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
            for idx in indexes]
332
332
333 @staticmethod
333 @staticmethod
334 def check_url(url, config):
334 def check_url(url, config):
335 """
335 """
336 Function will check given url and try to verify if it's a valid
336 Function will check given url and try to verify if it's a valid
337 link. Sometimes it may happened that mercurial will issue basic
337 link. Sometimes it may happened that mercurial will issue basic
338 auth request that can cause whole API to hang when used from python
338 auth request that can cause whole API to hang when used from python
339 or other external calls.
339 or other external calls.
340
340
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
342 when the return code is non 200
342 when the return code is non 200
343 """
343 """
344 # check first if it's not an local url
344 # check first if it's not an local url
345 if os.path.isdir(url) or url.startswith('file:'):
345 if os.path.isdir(url) or url.startswith('file:'):
346 return True
346 return True
347
347
348 # Request the _remote to verify the url
348 # Request the _remote to verify the url
349 return connection.Hg.check_url(url, config.serialize())
349 return connection.Hg.check_url(url, config.serialize())
350
350
351 @staticmethod
351 @staticmethod
352 def is_valid_repository(path):
352 def is_valid_repository(path):
353 return os.path.isdir(os.path.join(path, '.hg'))
353 return os.path.isdir(os.path.join(path, '.hg'))
354
354
def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
    """
    Check for a mercurial repository at ``self.path``.

    If there is no repository at that path an exception is raised,
    unless `create` is True — in that case the repository is created.

    If `src_url` is given, the repository is cloned from that location;
    `do_workspace_checkout` controls whether the working copy is
    updated after the clone.

    :raises RepositoryError: when asked to create over an existing path.
    """
    if create and os.path.exists(self.path):
        raise RepositoryError(
            f"Cannot create repository at {self.path}, location already exist")

    if src_url:
        url = str(self._get_url(src_url))
        MercurialRepository.check_url(url, self.config)

        self._remote.clone(url, self.path, do_workspace_checkout)

        # cloning already created the repo — skip creation below
        create = False

    if create:
        os.makedirs(self.path, mode=0o755)

    self._remote.localrepository(create)
383
383
@LazyProperty
def in_memory_commit(self):
    """Return an in-memory commit helper bound to this repository."""
    return MercurialInMemoryCommit(self)
387
387
@LazyProperty
def description(self):
    """
    Repository description taken from the hgrc ``[web] description``
    setting, falling back to ``DEFAULT_DESCRIPTION``.
    """
    configured = self._remote.get_config_value(
        'web', 'description', untrusted=True)
    return safe_str(configured or self.DEFAULT_DESCRIPTION)
393
393
@LazyProperty
def contact(self):
    """
    Repository contact: ``[web] contact``, then ``[ui] username``,
    then ``DEFAULT_CONTACT`` as last resort.
    """
    configured = (
        self._remote.get_config_value("web", "contact") or
        self._remote.get_config_value("ui", "username"))
    return safe_str(configured or self.DEFAULT_CONTACT)
400
400
@LazyProperty
def last_change(self):
    """
    Return the last change made on this repository as a
    `datetime.datetime` object.

    Falls back to the filesystem mtime of the store when the repository
    has no commits (tip lookup raises ``RepositoryError``).
    """
    try:
        return self.get_commit().date
    except RepositoryError:
        tzoffset = makedate()[1]
        return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412
412
def _get_fs_mtime(self):
    """
    Return a filesystem-based modification time for the repository:
    the changelog's mtime when present, otherwise the store directory's.
    """
    changelog = os.path.join(self.path, '.hg', "00changelog.i")
    store = os.path.join(self.path, '.hg', "store")
    target = changelog if os.path.exists(changelog) else store
    return os.stat(target).st_mtime
421
421
def _get_url(self, url):
    """
    Return a normalized url. If no schema is given, fall back to the
    filesystem (``file:///``) schema. The special alias ``default``
    is passed through untouched.
    """
    needs_file_schema = url != 'default' and '://' not in url
    if needs_file_schema:
        url = "file:" + urllib.request.pathname2url(url)
    return url
431
431
def get_hook_location(self):
    """Return the absolute path to the file where hooks are stored (.hg/.hgrc)."""
    return os.path.join(self.path, '.hg', '.hgrc')
437
437
def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
               translate_tag=None, maybe_unreachable=False, reference_obj=None):
    """
    Return a ``MercurialCommit`` object representing the repository's
    commit at the given `commit_id` or `commit_idx`.

    With neither given, "tip" is resolved. Cached commit-id/index
    mappings are consulted first to avoid a remote round-trip.

    :raises EmptyRepositoryError: when the repository has no commits.
    :raises CommitDoesNotExistError: when the lookup fails.
    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    if commit_id is not None:
        self._validate_commit_id(commit_id)
        try:
            # fast path: cached index — no remote contact required
            cached_idx = self._commit_ids[commit_id]
            return MercurialCommit(self, commit_id, cached_idx, pre_load=pre_load)
        except KeyError:
            pass  # not cached — fall through to remote lookup

    elif commit_idx is not None:
        self._validate_commit_idx(commit_idx)
        try:
            _commit_id = self.commit_ids[commit_idx]
            if commit_idx < 0:
                # normalize negative indexes to their positive position
                commit_idx = self.commit_ids.index(_commit_id)
            return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
        except IndexError:
            # out of cached range — let the remote resolve it
            commit_id = commit_idx
    else:
        commit_id = "tip"

    # no cached version available, do an actual remote lookup
    try:
        raw_id, idx = self._remote.lookup(commit_id, both=True)
    except CommitDoesNotExistError:
        msg = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id, self.name]))
        raise CommitDoesNotExistError(msg)

    return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
478
478
def get_commits(
        self, start_id=None, end_id=None, start_date=None, end_date=None,
        branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
    """
    Returns generator of ``MercurialCommit`` objects from start to end
    (both are inclusive)

    :param start_id: None, str(commit_id)
    :param end_id: None, str(commit_id)
    :param start_date: if specified, commits with commit date less than
        ``start_date`` would be filtered out from returned set
    :param end_date: if specified, commits with commit date greater than
        ``end_date`` would be filtered out from returned set
    :param branch_name: if specified, commits not reachable from given
        branch would be filtered out from returned set
    :param show_hidden: Show hidden commits such as obsolete or hidden from
        Mercurial evolve
    :raise BranchDoesNotExistError: If given ``branch_name`` does not
        exist.
    :raise CommitDoesNotExistError: If commit for given ``start`` or
        ``end`` could not be found.
    """
    # actually we should check now if it's not an empty repo
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")
    self._validate_branch_name(branch_name)

    branch_ancestors = False
    if start_id is not None:
        self._validate_commit_id(start_id)
        c_start = self.get_commit(commit_id=start_id)
        start_pos = self._commit_ids[c_start.raw_id]
    else:
        start_pos = None

    if end_id is not None:
        self._validate_commit_id(end_id)
        c_end = self.get_commit(commit_id=end_id)
        end_pos = max(0, self._commit_ids[c_end.raw_id])
    else:
        end_pos = None

    if None not in [start_id, end_id] and start_pos > end_pos:
        # NOTE: f-string for consistency with the other messages in this
        # method; output is identical to the old %-format.
        raise RepositoryError(
            f"Start commit '{start_id}' cannot be after end commit '{end_id}'")

    if end_pos is not None:
        end_pos += 1  # make the end boundary inclusive for slicing

    commit_filter = []

    if branch_name and not branch_ancestors:
        commit_filter.append(f'branch("{branch_name}")')
    elif branch_name and branch_ancestors:
        commit_filter.append(f'ancestors(branch("{branch_name}"))')

    if start_date and not end_date:
        commit_filter.append(f'date(">{start_date}")')
    if end_date and not start_date:
        commit_filter.append(f'date("<{end_date}")')
    if start_date and end_date:
        commit_filter.append(
            f'date(">{start_date}") and date("<{end_date}")')

    if not show_hidden:
        commit_filter.append('not obsolete()')
        commit_filter.append('not hidden()')

    # TODO: johbo: Figure out a simpler way for this solution
    collection_generator = CollectionGenerator
    if commit_filter:
        commit_filter = ' and '.join(map(safe_str, commit_filter))
        revisions = self._remote.rev_range([commit_filter])
        collection_generator = MercurialIndexBasedCollectionGenerator
    else:
        revisions = self.commit_ids

    if start_pos or end_pos:
        revisions = revisions[start_pos:end_pos]

    return collection_generator(self, revisions, pre_load=pre_load)
561
561
def pull(self, url, commit_ids=None):
    """
    Pull changes from an external location into this repository.

    :param url: source location; normalized via :meth:`_get_url`.
    :param commit_ids: Optional list of commit ids restricting what is
        pulled from the other repository.
    """
    self._remote.pull(self._get_url(url), commit_ids=commit_ids)
    self._remote.invalidate_vcs_cache()
572
572
def fetch(self, url, commit_ids=None):
    """Backward compatibility with GIT, where fetch == pull."""
    return self.pull(url, commit_ids=commit_ids)
578
578
def push(self, url):
    """Push this repository to the (normalized) external `url`."""
    self._remote.sync_push(self._get_url(url))
582
582
def _local_clone(self, clone_path):
    """
    Create a local clone of the current repo at `clone_path`,
    with an updated working copy and without hooks.
    """
    self._remote.clone(self.path, clone_path, update_after_clone=True,
                       hooks=False)
589
589
def _update(self, revision, clean=False):
    """Update the working copy to `revision`; `clean` discards local changes."""
    log.debug('Doing checkout to commit: `%s` for %s', revision, self)
    self._remote.update(revision, clean=clean)
596
596
def _identify(self):
    """
    Return the commit id of the current working-directory state,
    with the trailing '+' (dirty marker) stripped.
    """
    return self._remote.identify().strip().rstrip('+')
602
602
def _heads(self, branch=None):
    """Return the commit ids of the repository heads, optionally per branch."""
    return self._remote.heads(branch=branch).strip().split(' ')
608
608
def _ancestor(self, revision1, revision2):
    """Return the common ancestor of the two revisions."""
    return self._remote.ancestor(revision1, revision2)
614
614
def _local_push(
        self, revision, repository_path, push_branches=False,
        enable_hooks=False):
    """
    Push the given revision to the specified repository.

    :param push_branches: allow creating new branches in the target repo.
    :param enable_hooks: run hooks on the target side when True.
    """
    self._remote.push(
        [revision], repository_path, hooks=enable_hooks,
        push_branches=push_branches)
626
626
def _local_merge(self, target_ref, merge_message, user_name, user_email,
                 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
    """
    Merge the given source revision into the checked out revision.

    Returns a tuple ``(commit_id, needs_push)`` — the id of the merge
    result and a boolean indicating whether it must still be pushed.
    Uses rebase instead of merge when `use_rebase` is set; on conflict,
    leftovers are cleaned up and ``UnresolvedFilesInRepo`` is raised.
    """
    source_ref_commit_id = source_ref.commit_id
    target_ref_commit_id = target_ref.commit_id

    # the merge happens in the working dir — check out the target first
    self._update(target_ref_commit_id, clean=True)

    ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
    is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

    if close_commit_id:
        # NOTE(marcink): if we get the close commit, this is our new source
        # which will include the close commit itself.
        source_ref_commit_id = close_commit_id

    if ancestor == source_ref_commit_id:
        # changes were already integrated — nothing to do
        return target_ref_commit_id, False

    if ancestor == target_ref_commit_id and is_the_same_branch:
        # fast-forward case; a commit message should still be forced
        return source_ref_commit_id, True

    unresolved = None
    if use_rebase:
        try:
            bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
            self.bookmark(bookmark_name, revision=source_ref.commit_id)
            self._remote.rebase(
                source=source_ref_commit_id, dest=target_ref_commit_id)
            self._remote.invalidate_vcs_cache()
            self._update(bookmark_name, clean=True)
            return self._identify(), True
        except RepositoryError as e:
            # The rebase-abort may raise another exception which 'hides'
            # the original one, therefore we log it here.
            log.exception('Error while rebasing shadow repo during merge.')
            if 'unresolved conflicts' in safe_str(e):
                unresolved = self._remote.get_unresolved_files()
                log.debug('unresolved files: %s', unresolved)

            # Cleanup any rebase leftovers
            self._remote.invalidate_vcs_cache()
            self._remote.rebase(abort=True)
            self._remote.invalidate_vcs_cache()
            self._remote.update(clean=True)
            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            raise
    else:
        try:
            self._remote.merge(source_ref_commit_id)
            self._remote.invalidate_vcs_cache()
            self._remote.commit(
                message=safe_str(merge_message),
                username=safe_str(f'{user_name} <{user_email}>'))
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError as e:
            # The merge-abort may raise another exception which 'hides'
            # the original one, therefore we log it here.
            log.exception('Error while merging shadow repo during merge.')
            if 'unresolved merge conflicts' in safe_str(e):
                unresolved = self._remote.get_unresolved_files()
                log.debug('unresolved files: %s', unresolved)

            # Cleanup any merge leftovers
            self._remote.update(clean=True)
            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            raise
707
707
def _local_close(self, target_ref, user_name, user_email,
                 source_ref, close_message=''):
    """
    Close the branch of the given source revision.

    Returns a tuple ``(commit_id, needs_push)`` — the id of the closing
    commit and a boolean indicating whether it must still be pushed.
    """
    self._update(source_ref.commit_id)
    message = close_message or f"Closing branch: `{source_ref.name}`"
    try:
        self._remote.commit(
            message=safe_str(message),
            username=safe_str(f'{user_name} <{user_email}>'),
            close_branch=True)
        self._remote.invalidate_vcs_cache()
        return self._identify(), True
    except RepositoryError:
        # Cleanup any commit leftovers before re-raising
        self._remote.update(clean=True)
        raise
729
729
730 def _is_the_same_branch(self, target_ref, source_ref):
730 def _is_the_same_branch(self, target_ref, source_ref):
731 return (
731 return (
732 self._get_branch_name(target_ref) ==
732 self._get_branch_name(target_ref) ==
733 self._get_branch_name(source_ref))
733 self._get_branch_name(source_ref))
734
734
735 def _get_branch_name(self, ref):
735 def _get_branch_name(self, ref):
736 if ref.type == 'branch':
736 if ref.type == 'branch':
737 return ref.name
737 return ref.name
738 return self._remote.ctx_branch(ref.commit_id)
738 return self._remote.ctx_branch(ref.commit_id)
739
739
740 def _maybe_prepare_merge_workspace(
740 def _maybe_prepare_merge_workspace(
741 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
741 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
742 shadow_repository_path = self._get_shadow_repository_path(
742 shadow_repository_path = self._get_shadow_repository_path(
743 self.path, repo_id, workspace_id)
743 self.path, repo_id, workspace_id)
744 if not os.path.exists(shadow_repository_path):
744 if not os.path.exists(shadow_repository_path):
745 self._local_clone(shadow_repository_path)
745 self._local_clone(shadow_repository_path)
746 log.debug(
746 log.debug(
747 'Prepared shadow repository in %s', shadow_repository_path)
747 'Prepared shadow repository in %s', shadow_repository_path)
748
748
749 return shadow_repository_path
749 return shadow_repository_path
750
750
751 def _merge_repo(self, repo_id, workspace_id, target_ref,
751 def _merge_repo(self, repo_id, workspace_id, target_ref,
752 source_repo, source_ref, merge_message,
752 source_repo, source_ref, merge_message,
753 merger_name, merger_email, dry_run=False,
753 merger_name, merger_email, dry_run=False,
754 use_rebase=False, close_branch=False):
754 use_rebase=False, close_branch=False):
755
755
756 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
756 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
757 'rebase' if use_rebase else 'merge', dry_run)
757 'rebase' if use_rebase else 'merge', dry_run)
758 if target_ref.commit_id not in self._heads():
758 if target_ref.commit_id not in self._heads():
759 return MergeResponse(
759 return MergeResponse(
760 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
760 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
761 metadata={'target_ref': target_ref})
761 metadata={'target_ref': target_ref})
762
762
763 try:
763 try:
764 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
764 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
765 heads_all = self._heads(target_ref.name)
765 heads_all = self._heads(target_ref.name)
766 max_heads = 10
766 max_heads = 10
767 if len(heads_all) > max_heads:
767 if len(heads_all) > max_heads:
768 heads = '\n,'.join(
768 heads = '\n,'.join(
769 heads_all[:max_heads] +
769 heads_all[:max_heads] +
770 ['and {} more.'.format(len(heads_all)-max_heads)])
770 [f'and {len(heads_all)-max_heads} more.'])
771 else:
771 else:
772 heads = '\n,'.join(heads_all)
772 heads = '\n,'.join(heads_all)
773 metadata = {
773 metadata = {
774 'target_ref': target_ref,
774 'target_ref': target_ref,
775 'source_ref': source_ref,
775 'source_ref': source_ref,
776 'heads': heads
776 'heads': heads
777 }
777 }
778 return MergeResponse(
778 return MergeResponse(
779 False, False, None,
779 False, False, None,
780 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
780 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
781 metadata=metadata)
781 metadata=metadata)
782 except CommitDoesNotExistError:
782 except CommitDoesNotExistError:
783 log.exception('Failure when looking up branch heads on hg target')
783 log.exception('Failure when looking up branch heads on hg target')
784 return MergeResponse(
784 return MergeResponse(
785 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
785 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
786 metadata={'target_ref': target_ref})
786 metadata={'target_ref': target_ref})
787
787
788 shadow_repository_path = self._maybe_prepare_merge_workspace(
788 shadow_repository_path = self._maybe_prepare_merge_workspace(
789 repo_id, workspace_id, target_ref, source_ref)
789 repo_id, workspace_id, target_ref, source_ref)
790 shadow_repo = self.get_shadow_instance(shadow_repository_path)
790 shadow_repo = self.get_shadow_instance(shadow_repository_path)
791
791
792 log.debug('Pulling in target reference %s', target_ref)
792 log.debug('Pulling in target reference %s', target_ref)
793 self._validate_pull_reference(target_ref)
793 self._validate_pull_reference(target_ref)
794 shadow_repo._local_pull(self.path, target_ref)
794 shadow_repo._local_pull(self.path, target_ref)
795
795
796 try:
796 try:
797 log.debug('Pulling in source reference %s', source_ref)
797 log.debug('Pulling in source reference %s', source_ref)
798 source_repo._validate_pull_reference(source_ref)
798 source_repo._validate_pull_reference(source_ref)
799 shadow_repo._local_pull(source_repo.path, source_ref)
799 shadow_repo._local_pull(source_repo.path, source_ref)
800 except CommitDoesNotExistError:
800 except CommitDoesNotExistError:
801 log.exception('Failure when doing local pull on hg shadow repo')
801 log.exception('Failure when doing local pull on hg shadow repo')
802 return MergeResponse(
802 return MergeResponse(
803 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
803 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
804 metadata={'source_ref': source_ref})
804 metadata={'source_ref': source_ref})
805
805
806 merge_ref = None
806 merge_ref = None
807 merge_commit_id = None
807 merge_commit_id = None
808 close_commit_id = None
808 close_commit_id = None
809 merge_failure_reason = MergeFailureReason.NONE
809 merge_failure_reason = MergeFailureReason.NONE
810 metadata = {}
810 metadata = {}
811
811
812 # enforce that close branch should be used only in case we source from
812 # enforce that close branch should be used only in case we source from
813 # an actual Branch
813 # an actual Branch
814 close_branch = close_branch and source_ref.type == 'branch'
814 close_branch = close_branch and source_ref.type == 'branch'
815
815
816 # don't allow to close branch if source and target are the same
816 # don't allow to close branch if source and target are the same
817 close_branch = close_branch and source_ref.name != target_ref.name
817 close_branch = close_branch and source_ref.name != target_ref.name
818
818
819 needs_push_on_close = False
819 needs_push_on_close = False
820 if close_branch and not use_rebase and not dry_run:
820 if close_branch and not use_rebase and not dry_run:
821 try:
821 try:
822 close_commit_id, needs_push_on_close = shadow_repo._local_close(
822 close_commit_id, needs_push_on_close = shadow_repo._local_close(
823 target_ref, merger_name, merger_email, source_ref)
823 target_ref, merger_name, merger_email, source_ref)
824 merge_possible = True
824 merge_possible = True
825 except RepositoryError:
825 except RepositoryError:
826 log.exception('Failure when doing close branch on '
826 log.exception('Failure when doing close branch on '
827 'shadow repo: %s', shadow_repo)
827 'shadow repo: %s', shadow_repo)
828 merge_possible = False
828 merge_possible = False
829 merge_failure_reason = MergeFailureReason.MERGE_FAILED
829 merge_failure_reason = MergeFailureReason.MERGE_FAILED
830 else:
830 else:
831 merge_possible = True
831 merge_possible = True
832
832
833 needs_push = False
833 needs_push = False
834 if merge_possible:
834 if merge_possible:
835
835
836 try:
836 try:
837 merge_commit_id, needs_push = shadow_repo._local_merge(
837 merge_commit_id, needs_push = shadow_repo._local_merge(
838 target_ref, merge_message, merger_name, merger_email,
838 target_ref, merge_message, merger_name, merger_email,
839 source_ref, use_rebase=use_rebase,
839 source_ref, use_rebase=use_rebase,
840 close_commit_id=close_commit_id, dry_run=dry_run)
840 close_commit_id=close_commit_id, dry_run=dry_run)
841 merge_possible = True
841 merge_possible = True
842
842
843 # read the state of the close action, if it
843 # read the state of the close action, if it
844 # maybe required a push
844 # maybe required a push
845 needs_push = needs_push or needs_push_on_close
845 needs_push = needs_push or needs_push_on_close
846
846
847 # Set a bookmark pointing to the merge commit. This bookmark
847 # Set a bookmark pointing to the merge commit. This bookmark
848 # may be used to easily identify the last successful merge
848 # may be used to easily identify the last successful merge
849 # commit in the shadow repository.
849 # commit in the shadow repository.
850 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
850 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
851 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
851 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
852 except SubrepoMergeError:
852 except SubrepoMergeError:
853 log.exception(
853 log.exception(
854 'Subrepo merge error during local merge on hg shadow repo.')
854 'Subrepo merge error during local merge on hg shadow repo.')
855 merge_possible = False
855 merge_possible = False
856 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
856 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
857 needs_push = False
857 needs_push = False
858 except RepositoryError as e:
858 except RepositoryError as e:
859 log.exception('Failure when doing local merge on hg shadow repo')
859 log.exception('Failure when doing local merge on hg shadow repo')
860 if isinstance(e, UnresolvedFilesInRepo):
860 if isinstance(e, UnresolvedFilesInRepo):
861 all_conflicts = list(e.args[0])
861 all_conflicts = list(e.args[0])
862 max_conflicts = 20
862 max_conflicts = 20
863 if len(all_conflicts) > max_conflicts:
863 if len(all_conflicts) > max_conflicts:
864 conflicts = all_conflicts[:max_conflicts] \
864 conflicts = all_conflicts[:max_conflicts] \
865 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
865 + [f'and {len(all_conflicts)-max_conflicts} more.']
866 else:
866 else:
867 conflicts = all_conflicts
867 conflicts = all_conflicts
868 metadata['unresolved_files'] = \
868 metadata['unresolved_files'] = \
869 '\n* conflict: ' + \
869 '\n* conflict: ' + \
870 ('\n * conflict: '.join(conflicts))
870 ('\n * conflict: '.join(conflicts))
871
871
872 merge_possible = False
872 merge_possible = False
873 merge_failure_reason = MergeFailureReason.MERGE_FAILED
873 merge_failure_reason = MergeFailureReason.MERGE_FAILED
874 needs_push = False
874 needs_push = False
875
875
876 if merge_possible and not dry_run:
876 if merge_possible and not dry_run:
877 if needs_push:
877 if needs_push:
878 # In case the target is a bookmark, update it, so after pushing
878 # In case the target is a bookmark, update it, so after pushing
879 # the bookmarks is also updated in the target.
879 # the bookmarks is also updated in the target.
880 if target_ref.type == 'book':
880 if target_ref.type == 'book':
881 shadow_repo.bookmark(
881 shadow_repo.bookmark(
882 target_ref.name, revision=merge_commit_id)
882 target_ref.name, revision=merge_commit_id)
883 try:
883 try:
884 shadow_repo_with_hooks = self.get_shadow_instance(
884 shadow_repo_with_hooks = self.get_shadow_instance(
885 shadow_repository_path,
885 shadow_repository_path,
886 enable_hooks=True)
886 enable_hooks=True)
887 # This is the actual merge action, we push from shadow
887 # This is the actual merge action, we push from shadow
888 # into origin.
888 # into origin.
889 # Note: the push_branches option will push any new branch
889 # Note: the push_branches option will push any new branch
890 # defined in the source repository to the target. This may
890 # defined in the source repository to the target. This may
891 # be dangerous as branches are permanent in Mercurial.
891 # be dangerous as branches are permanent in Mercurial.
892 # This feature was requested in issue #441.
892 # This feature was requested in issue #441.
893 shadow_repo_with_hooks._local_push(
893 shadow_repo_with_hooks._local_push(
894 merge_commit_id, self.path, push_branches=True,
894 merge_commit_id, self.path, push_branches=True,
895 enable_hooks=True)
895 enable_hooks=True)
896
896
897 # maybe we also need to push the close_commit_id
897 # maybe we also need to push the close_commit_id
898 if close_commit_id:
898 if close_commit_id:
899 shadow_repo_with_hooks._local_push(
899 shadow_repo_with_hooks._local_push(
900 close_commit_id, self.path, push_branches=True,
900 close_commit_id, self.path, push_branches=True,
901 enable_hooks=True)
901 enable_hooks=True)
902 merge_succeeded = True
902 merge_succeeded = True
903 except RepositoryError:
903 except RepositoryError:
904 log.exception(
904 log.exception(
905 'Failure when doing local push from the shadow '
905 'Failure when doing local push from the shadow '
906 'repository to the target repository at %s.', self.path)
906 'repository to the target repository at %s.', self.path)
907 merge_succeeded = False
907 merge_succeeded = False
908 merge_failure_reason = MergeFailureReason.PUSH_FAILED
908 merge_failure_reason = MergeFailureReason.PUSH_FAILED
909 metadata['target'] = 'hg shadow repo'
909 metadata['target'] = 'hg shadow repo'
910 metadata['merge_commit'] = merge_commit_id
910 metadata['merge_commit'] = merge_commit_id
911 else:
911 else:
912 merge_succeeded = True
912 merge_succeeded = True
913 else:
913 else:
914 merge_succeeded = False
914 merge_succeeded = False
915
915
916 return MergeResponse(
916 return MergeResponse(
917 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
917 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
918 metadata=metadata)
918 metadata=metadata)
919
919
920 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
920 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
921 config = self.config.copy()
921 config = self.config.copy()
922 if not enable_hooks:
922 if not enable_hooks:
923 config.clear_section('hooks')
923 config.clear_section('hooks')
924 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
924 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
925
925
926 def _validate_pull_reference(self, reference):
926 def _validate_pull_reference(self, reference):
927 if not (reference.name in self.bookmarks or
927 if not (reference.name in self.bookmarks or
928 reference.name in self.branches or
928 reference.name in self.branches or
929 self.get_commit(reference.commit_id)):
929 self.get_commit(reference.commit_id)):
930 raise CommitDoesNotExistError(
930 raise CommitDoesNotExistError(
931 'Unknown branch, bookmark or commit id')
931 'Unknown branch, bookmark or commit id')
932
932
933 def _local_pull(self, repository_path, reference):
933 def _local_pull(self, repository_path, reference):
934 """
934 """
935 Fetch a branch, bookmark or commit from a local repository.
935 Fetch a branch, bookmark or commit from a local repository.
936 """
936 """
937 repository_path = os.path.abspath(repository_path)
937 repository_path = os.path.abspath(repository_path)
938 if repository_path == self.path:
938 if repository_path == self.path:
939 raise ValueError('Cannot pull from the same repository')
939 raise ValueError('Cannot pull from the same repository')
940
940
941 reference_type_to_option_name = {
941 reference_type_to_option_name = {
942 'book': 'bookmark',
942 'book': 'bookmark',
943 'branch': 'branch',
943 'branch': 'branch',
944 }
944 }
945 option_name = reference_type_to_option_name.get(
945 option_name = reference_type_to_option_name.get(
946 reference.type, 'revision')
946 reference.type, 'revision')
947
947
948 if option_name == 'revision':
948 if option_name == 'revision':
949 ref = reference.commit_id
949 ref = reference.commit_id
950 else:
950 else:
951 ref = reference.name
951 ref = reference.name
952
952
953 options = {option_name: [ref]}
953 options = {option_name: [ref]}
954 self._remote.pull_cmd(repository_path, hooks=False, **options)
954 self._remote.pull_cmd(repository_path, hooks=False, **options)
955 self._remote.invalidate_vcs_cache()
955 self._remote.invalidate_vcs_cache()
956
956
957 def bookmark(self, bookmark, revision=None):
957 def bookmark(self, bookmark, revision=None):
958 if isinstance(bookmark, str):
958 if isinstance(bookmark, str):
959 bookmark = safe_str(bookmark)
959 bookmark = safe_str(bookmark)
960 self._remote.bookmark(bookmark, revision=revision)
960 self._remote.bookmark(bookmark, revision=revision)
961 self._remote.invalidate_vcs_cache()
961 self._remote.invalidate_vcs_cache()
962
962
963 def get_path_permissions(self, username):
963 def get_path_permissions(self, username):
964 hgacl_file = os.path.join(self.path, '.hg/hgacl')
964 hgacl_file = os.path.join(self.path, '.hg/hgacl')
965
965
966 def read_patterns(suffix):
966 def read_patterns(suffix):
967 svalue = None
967 svalue = None
968 for section, option in [
968 for section, option in [
969 ('narrowacl', username + suffix),
969 ('narrowacl', username + suffix),
970 ('narrowacl', 'default' + suffix),
970 ('narrowacl', 'default' + suffix),
971 ('narrowhgacl', username + suffix),
971 ('narrowhgacl', username + suffix),
972 ('narrowhgacl', 'default' + suffix)
972 ('narrowhgacl', 'default' + suffix)
973 ]:
973 ]:
974 try:
974 try:
975 svalue = hgacl.get(section, option)
975 svalue = hgacl.get(section, option)
976 break # stop at the first value we find
976 break # stop at the first value we find
977 except configparser.NoOptionError:
977 except configparser.NoOptionError:
978 pass
978 pass
979 if not svalue:
979 if not svalue:
980 return None
980 return None
981 result = ['/']
981 result = ['/']
982 for pattern in svalue.split():
982 for pattern in svalue.split():
983 result.append(pattern)
983 result.append(pattern)
984 if '*' not in pattern and '?' not in pattern:
984 if '*' not in pattern and '?' not in pattern:
985 result.append(pattern + '/*')
985 result.append(pattern + '/*')
986 return result
986 return result
987
987
988 if os.path.exists(hgacl_file):
988 if os.path.exists(hgacl_file):
989 try:
989 try:
990 hgacl = configparser.RawConfigParser()
990 hgacl = configparser.RawConfigParser()
991 hgacl.read(hgacl_file)
991 hgacl.read(hgacl_file)
992
992
993 includes = read_patterns('.includes')
993 includes = read_patterns('.includes')
994 excludes = read_patterns('.excludes')
994 excludes = read_patterns('.excludes')
995 return BasePathPermissionChecker.create_from_patterns(
995 return BasePathPermissionChecker.create_from_patterns(
996 includes, excludes)
996 includes, excludes)
997 except BaseException as e:
997 except BaseException as e:
998 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
998 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
999 hgacl_file, self.name, e)
999 hgacl_file, self.name, e)
1000 raise exceptions.RepositoryRequirementError(msg)
1000 raise exceptions.RepositoryRequirementError(msg)
1001 else:
1001 else:
1002 return None
1002 return None
1003
1003
1004
1004
1005 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1005 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1006
1006
1007 def _commit_factory(self, commit_id):
1007 def _commit_factory(self, commit_id):
1008 if isinstance(commit_id, int):
1008 if isinstance(commit_id, int):
1009 return self.repo.get_commit(
1009 return self.repo.get_commit(
1010 commit_idx=commit_id, pre_load=self.pre_load)
1010 commit_idx=commit_id, pre_load=self.pre_load)
1011 else:
1011 else:
1012 return self.repo.get_commit(
1012 return self.repo.get_commit(
1013 commit_id=commit_id, pre_load=self.pre_load)
1013 commit_id=commit_id, pre_load=self.pre_load)
@@ -1,254 +1,254 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 SVN commit module
20 SVN commit module
21 """
21 """
22
22
23
23
24 import dateutil.parser
24 import dateutil.parser
25 from zope.cachedescriptors.property import Lazy as LazyProperty
25 from zope.cachedescriptors.property import Lazy as LazyProperty
26
26
27 from rhodecode.lib.str_utils import safe_bytes, safe_str
27 from rhodecode.lib.str_utils import safe_bytes, safe_str
28 from rhodecode.lib.vcs import nodes, path as vcspath
28 from rhodecode.lib.vcs import nodes, path as vcspath
29 from rhodecode.lib.vcs.backends import base
29 from rhodecode.lib.vcs.backends import base
30 from rhodecode.lib.vcs.exceptions import CommitError
30 from rhodecode.lib.vcs.exceptions import CommitError
31
31
32
32
33 _SVN_PROP_TRUE = '*'
33 _SVN_PROP_TRUE = '*'
34
34
35
35
36 class SubversionCommit(base.BaseCommit):
36 class SubversionCommit(base.BaseCommit):
37 """
37 """
38 Subversion specific implementation of commits
38 Subversion specific implementation of commits
39
39
40 .. attribute:: branch
40 .. attribute:: branch
41
41
42 The Subversion backend does not support to assign branches to
42 The Subversion backend does not support to assign branches to
43 specific commits. This attribute has always the value `None`.
43 specific commits. This attribute has always the value `None`.
44
44
45 """
45 """
46
46
47 def __init__(self, repository, commit_id):
47 def __init__(self, repository, commit_id):
48 self.repository = repository
48 self.repository = repository
49 self.idx = self.repository._get_commit_idx(commit_id)
49 self.idx = self.repository._get_commit_idx(commit_id)
50 self._svn_rev = self.idx + 1
50 self._svn_rev = self.idx + 1
51 self._remote = repository._remote
51 self._remote = repository._remote
52 # TODO: handling of raw_id should be a method on repository itself,
52 # TODO: handling of raw_id should be a method on repository itself,
53 # which knows how to translate commit index and commit id
53 # which knows how to translate commit index and commit id
54 self.raw_id = commit_id
54 self.raw_id = commit_id
55 self.short_id = commit_id
55 self.short_id = commit_id
56 self.id = 'r{}'.format(commit_id)
56 self.id = f'r{commit_id}'
57
57
58 # TODO: Implement the following placeholder attributes
58 # TODO: Implement the following placeholder attributes
59 self.nodes = {}
59 self.nodes = {}
60 self.tags = []
60 self.tags = []
61
61
62 @property
62 @property
63 def author(self):
63 def author(self):
64 return safe_str(self._properties.get('svn:author'))
64 return safe_str(self._properties.get('svn:author'))
65
65
66 @property
66 @property
67 def date(self):
67 def date(self):
68 return _date_from_svn_properties(self._properties)
68 return _date_from_svn_properties(self._properties)
69
69
70 @property
70 @property
71 def message(self):
71 def message(self):
72 return safe_str(self._properties.get('svn:log'))
72 return safe_str(self._properties.get('svn:log'))
73
73
74 @LazyProperty
74 @LazyProperty
75 def _properties(self):
75 def _properties(self):
76 return self._remote.revision_properties(self._svn_rev)
76 return self._remote.revision_properties(self._svn_rev)
77
77
78 @LazyProperty
78 @LazyProperty
79 def parents(self):
79 def parents(self):
80 parent_idx = self.idx - 1
80 parent_idx = self.idx - 1
81 if parent_idx >= 0:
81 if parent_idx >= 0:
82 parent = self.repository.get_commit(commit_idx=parent_idx)
82 parent = self.repository.get_commit(commit_idx=parent_idx)
83 return [parent]
83 return [parent]
84 return []
84 return []
85
85
86 @LazyProperty
86 @LazyProperty
87 def children(self):
87 def children(self):
88 child_idx = self.idx + 1
88 child_idx = self.idx + 1
89 if child_idx < len(self.repository.commit_ids):
89 if child_idx < len(self.repository.commit_ids):
90 child = self.repository.get_commit(commit_idx=child_idx)
90 child = self.repository.get_commit(commit_idx=child_idx)
91 return [child]
91 return [child]
92 return []
92 return []
93
93
94 def get_file_mode(self, path: bytes):
94 def get_file_mode(self, path: bytes):
95 # Note: Subversion flags files which are executable with a special
95 # Note: Subversion flags files which are executable with a special
96 # property `svn:executable` which is set to the value ``"*"``.
96 # property `svn:executable` which is set to the value ``"*"``.
97 if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
97 if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
98 return base.FILEMODE_EXECUTABLE
98 return base.FILEMODE_EXECUTABLE
99 else:
99 else:
100 return base.FILEMODE_DEFAULT
100 return base.FILEMODE_DEFAULT
101
101
102 def is_link(self, path):
102 def is_link(self, path):
103 # Note: Subversion has a flag for special files, the content of the
103 # Note: Subversion has a flag for special files, the content of the
104 # file contains the type of that file.
104 # file contains the type of that file.
105 if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
105 if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
106 return self.get_file_content(path).startswith(b'link')
106 return self.get_file_content(path).startswith(b'link')
107 return False
107 return False
108
108
109 def is_node_binary(self, path):
109 def is_node_binary(self, path):
110 path = self._fix_path(path)
110 path = self._fix_path(path)
111 return self._remote.is_binary(self._svn_rev, safe_str(path))
111 return self._remote.is_binary(self._svn_rev, safe_str(path))
112
112
113 def node_md5_hash(self, path):
113 def node_md5_hash(self, path):
114 path = self._fix_path(path)
114 path = self._fix_path(path)
115 return self._remote.md5_hash(self._svn_rev, safe_str(path))
115 return self._remote.md5_hash(self._svn_rev, safe_str(path))
116
116
117 def _get_file_property(self, path, name):
117 def _get_file_property(self, path, name):
118 file_properties = self._remote.node_properties(
118 file_properties = self._remote.node_properties(
119 safe_str(path), self._svn_rev)
119 safe_str(path), self._svn_rev)
120 return file_properties.get(name)
120 return file_properties.get(name)
121
121
122 def get_file_content(self, path):
122 def get_file_content(self, path):
123 path = self._fix_path(path)
123 path = self._fix_path(path)
124 return self._remote.get_file_content(self._svn_rev, safe_str(path))
124 return self._remote.get_file_content(self._svn_rev, safe_str(path))
125
125
126 def get_file_content_streamed(self, path):
126 def get_file_content_streamed(self, path):
127 path = self._fix_path(path)
127 path = self._fix_path(path)
128
128
129 stream_method = getattr(self._remote, 'stream:get_file_content')
129 stream_method = getattr(self._remote, 'stream:get_file_content')
130 return stream_method(self._svn_rev, safe_str(path))
130 return stream_method(self._svn_rev, safe_str(path))
131
131
132 def get_file_size(self, path):
132 def get_file_size(self, path):
133 path = self._fix_path(path)
133 path = self._fix_path(path)
134 return self._remote.get_file_size(self._svn_rev, safe_str(path))
134 return self._remote.get_file_size(self._svn_rev, safe_str(path))
135
135
136 def get_path_history(self, path, limit=None, pre_load=None):
136 def get_path_history(self, path, limit=None, pre_load=None):
137 path = safe_str(self._fix_path(path))
137 path = safe_str(self._fix_path(path))
138 history = self._remote.node_history(path, self._svn_rev, limit)
138 history = self._remote.node_history(path, self._svn_rev, limit)
139 return [
139 return [
140 self.repository.get_commit(commit_id=str(svn_rev))
140 self.repository.get_commit(commit_id=str(svn_rev))
141 for svn_rev in history]
141 for svn_rev in history]
142
142
143 def get_file_annotate(self, path, pre_load=None):
143 def get_file_annotate(self, path, pre_load=None):
144 result = self._remote.file_annotate(safe_str(path), self._svn_rev)
144 result = self._remote.file_annotate(safe_str(path), self._svn_rev)
145
145
146 for zero_based_line_no, svn_rev, content in result:
146 for zero_based_line_no, svn_rev, content in result:
147 commit_id = str(svn_rev)
147 commit_id = str(svn_rev)
148 line_no = zero_based_line_no + 1
148 line_no = zero_based_line_no + 1
149 yield (
149 yield (
150 line_no,
150 line_no,
151 commit_id,
151 commit_id,
152 lambda: self.repository.get_commit(commit_id=commit_id),
152 lambda: self.repository.get_commit(commit_id=commit_id),
153 content)
153 content)
154
154
155 def get_node(self, path, pre_load=None):
155 def get_node(self, path, pre_load=None):
156 path = self._fix_path(path)
156 path = self._fix_path(path)
157 if path not in self.nodes:
157 if path not in self.nodes:
158
158
159 if path == '':
159 if path == '':
160 node = nodes.RootNode(commit=self)
160 node = nodes.RootNode(commit=self)
161 else:
161 else:
162 node_type = self._remote.get_node_type(self._svn_rev, safe_str(path))
162 node_type = self._remote.get_node_type(self._svn_rev, safe_str(path))
163 if node_type == 'dir':
163 if node_type == 'dir':
164 node = nodes.DirNode(safe_bytes(path), commit=self)
164 node = nodes.DirNode(safe_bytes(path), commit=self)
165 elif node_type == 'file':
165 elif node_type == 'file':
166 node = nodes.FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
166 node = nodes.FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
167 else:
167 else:
168 raise self.no_node_at_path(path)
168 raise self.no_node_at_path(path)
169
169
170 self.nodes[path] = node
170 self.nodes[path] = node
171 return self.nodes[path]
171 return self.nodes[path]
172
172
173 def get_nodes(self, path, pre_load=None):
173 def get_nodes(self, path, pre_load=None):
174 if self._get_kind(path) != nodes.NodeKind.DIR:
174 if self._get_kind(path) != nodes.NodeKind.DIR:
175 raise CommitError(
175 raise CommitError(
176 f"Directory does not exist for commit {self.raw_id} at '{path}'")
176 f"Directory does not exist for commit {self.raw_id} at '{path}'")
177 path = safe_str(self._fix_path(path))
177 path = safe_str(self._fix_path(path))
178
178
179 path_nodes = []
179 path_nodes = []
180 for name, kind in self._remote.get_nodes(self._svn_rev, path):
180 for name, kind in self._remote.get_nodes(self._svn_rev, path):
181 node_path = vcspath.join(path, name)
181 node_path = vcspath.join(path, name)
182 if kind == 'dir':
182 if kind == 'dir':
183 node = nodes.DirNode(safe_bytes(node_path), commit=self)
183 node = nodes.DirNode(safe_bytes(node_path), commit=self)
184 elif kind == 'file':
184 elif kind == 'file':
185 node = nodes.FileNode(safe_bytes(node_path), commit=self, pre_load=pre_load)
185 node = nodes.FileNode(safe_bytes(node_path), commit=self, pre_load=pre_load)
186 else:
186 else:
187 raise ValueError(f"Node kind {kind} not supported.")
187 raise ValueError(f"Node kind {kind} not supported.")
188 self.nodes[node_path] = node
188 self.nodes[node_path] = node
189 path_nodes.append(node)
189 path_nodes.append(node)
190
190
191 return path_nodes
191 return path_nodes
192
192
193 def _get_kind(self, path):
193 def _get_kind(self, path):
194 path = self._fix_path(path)
194 path = self._fix_path(path)
195 kind = self._remote.get_node_type(self._svn_rev, path)
195 kind = self._remote.get_node_type(self._svn_rev, path)
196 if kind == 'file':
196 if kind == 'file':
197 return nodes.NodeKind.FILE
197 return nodes.NodeKind.FILE
198 elif kind == 'dir':
198 elif kind == 'dir':
199 return nodes.NodeKind.DIR
199 return nodes.NodeKind.DIR
200 else:
200 else:
201 raise CommitError(
201 raise CommitError(
202 "Node does not exist at the given path '{}'".format(path))
202 f"Node does not exist at the given path '{path}'")
203
203
    @LazyProperty
    def _changes_cache(self):
        # Memoized mapping of change type ('added' / 'changed' / 'removed',
        # as consumed by the *_paths properties below) to the list of paths
        # affected in this revision; fetched from the remote exactly once.
        return self._remote.revision_changes(self._svn_rev)
207
207
208 @LazyProperty
208 @LazyProperty
209 def affected_files(self):
209 def affected_files(self):
210 changed_files = set()
210 changed_files = set()
211 for files in self._changes_cache.values():
211 for files in self._changes_cache.values():
212 changed_files.update(files)
212 changed_files.update(files)
213 return list(changed_files)
213 return list(changed_files)
214
214
    @LazyProperty
    def id(self):
        # The commit identifier is simply the raw SVN revision id (a string).
        return self.raw_id
218
218
    @property
    def added(self):
        """Lazy generator over the file nodes added in this commit."""
        return nodes.AddedFileNodesGenerator(self.added_paths, self)
222
222
223 @LazyProperty
223 @LazyProperty
224 def added_paths(self):
224 def added_paths(self):
225 return [n for n in self._changes_cache['added']]
225 return [n for n in self._changes_cache['added']]
226
226
    @property
    def changed(self):
        """Lazy generator over the file nodes changed in this commit."""
        return nodes.ChangedFileNodesGenerator(self.changed_paths, self)
230
230
231 @LazyProperty
231 @LazyProperty
232 def changed_paths(self):
232 def changed_paths(self):
233 return [n for n in self._changes_cache['changed']]
233 return [n for n in self._changes_cache['changed']]
234
234
    @property
    def removed(self):
        """Lazy generator over the file nodes removed in this commit."""
        return nodes.RemovedFileNodesGenerator(self.removed_paths, self)
238
238
239 @LazyProperty
239 @LazyProperty
240 def removed_paths(self):
240 def removed_paths(self):
241 return [n for n in self._changes_cache['removed']]
241 return [n for n in self._changes_cache['removed']]
242
242
243
243
def _date_from_svn_properties(properties):
    """
    Parses the date out of given svn properties.

    :param properties: mapping of SVN revision properties; the value under
        the ``svn:date`` key is parsed by ``dateutil.parser.parse``.
        NOTE(review): a missing ``svn:date`` key would make ``get`` return
        None and ``parse`` fail — assumed to always be present. TODO confirm.
    :return: :class:`datetime.datetime` instance. The object is naive.
    """

    aware_date = dateutil.parser.parse(properties.get('svn:date'))
    # Conversion to local time is deliberately left disabled (kept below for
    # reference); the timezone info is simply stripped instead.
    # final_date = aware_date.astimezone(dateutil.tz.tzlocal())
    final_date = aware_date
    return final_date.replace(tzinfo=None)
@@ -1,367 +1,367 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 SVN repository module
20 SVN repository module
21 """
21 """
22
22
23 import logging
23 import logging
24 import os
24 import os
25 import urllib.request
25 import urllib.request
26 import urllib.parse
26 import urllib.parse
27 import urllib.error
27 import urllib.error
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from collections import OrderedDict
31 from collections import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.str_utils import safe_str
33 from rhodecode.lib.str_utils import safe_str
34 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends.svn.commit import (
37 from rhodecode.lib.vcs.backends.svn.commit import (
38 SubversionCommit, _date_from_svn_properties)
38 SubversionCommit, _date_from_svn_properties)
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 VCSError, NodeDoesNotExistError)
44 VCSError, NodeDoesNotExistError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
class SubversionRepository(base.BaseRepository):
    """
    Subversion backend implementation

    .. important::

        It is very important to distinguish the commit index and the commit id
        which is assigned by Subversion. The first one is always handled as an
        `int` by this implementation. The commit id assigned by Subversion on
        the other side will always be a `str`.

        There is a specific trap since the first commit will have the index
        ``0`` but the svn id will be ``"1"``.

    """

    # Note: Subversion does not really have a default branch name.
    DEFAULT_BRANCH_NAME = None

    contact = base.BaseRepository.DEFAULT_CONTACT
    description = base.BaseRepository.DEFAULT_DESCRIPTION

    def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
                 bare=False, **kwargs):
        """
        :param repo_path: filesystem location of the repository.
        :param config: optional config object; falls back to the default
            config when not given.
        :param create: create a new repository at ``repo_path``.
        :param src_url: optional URL to import an existing repository from
            (only honoured together with ``create``).
        :param with_wire: options for the remote connection; caching is
            disabled by default.
        :param bare: accepted for API compatibility with other backends.
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url)

        # caches
        self._commit_ids = {}

    @LazyProperty
    def _remote(self):
        # Lazily created proxy to the vcsserver SVN backend.
        repo_id = self.path
        return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)

    def _init_repo(self, create, src_url):
        # Create the repository on demand (optionally importing from
        # ``src_url``) or validate an already existing one.
        if create and os.path.exists(self.path):
            raise RepositoryError(
                f"Cannot create repository at {self.path}, location already exist"
            )

        if create:
            self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
            if src_url:
                src_url = _sanitize_url(src_url)
                self._remote.import_remote_repository(src_url)
        else:
            self._check_path()

    @CachedProperty
    def commit_ids(self):
        # SVN revisions are consecutive integers starting at 1; they are
        # exposed as strings to match the commit-id convention of the other
        # backends.
        head = self._remote.lookup(None)
        return [str(r) for r in range(1, head + 1)]

    def _rebuild_cache(self, commit_ids):
        # Nothing to rebuild: commit ids are derived directly from the
        # revision counter.
        pass

    def run_svn_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as svn command and returns tuple
        (stdout, stderr).

        :param cmd: full svn command to be executed
        :param opts: env options to pass into Subprocess command
        """
        if not isinstance(cmd, list):
            raise ValueError(f'cmd must be a list, got {type(cmd)} instead')

        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_svn_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
        return out, err

    @LazyProperty
    def branches(self):
        return self._tags_or_branches('vcs_svn_branch')

    @LazyProperty
    def branches_closed(self):
        # SVN has no notion of closed branches.
        return {}

    @LazyProperty
    def bookmarks(self):
        # SVN has no bookmarks.
        return {}

    @LazyProperty
    def branches_all(self):
        # TODO: johbo: Implement proper branch support
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @LazyProperty
    def tags(self):
        return self._tags_or_branches('vcs_svn_tag')

    def _tags_or_branches(self, config_section):
        """
        Resolve the path patterns configured in ``config_section`` against
        the repository tip and return an ordered mapping of
        ``path -> tip commit id``.
        """
        found_items = {}

        if self.is_empty():
            return {}

        for pattern in self._patterns_from_section(config_section):
            pattern = vcspath.sanitize(pattern)
            tip = self.get_commit()
            try:
                if pattern.endswith('*'):
                    # Wildcard pattern: every sub-directory of the base dir
                    # counts as a branch/tag.
                    basedir = tip.get_node(vcspath.dirname(pattern))
                    directories = basedir.dirs
                else:
                    directories = (tip.get_node(pattern), )
            except NodeDoesNotExistError:
                continue
            found_items.update((safe_str(n.path), self.commit_ids[-1]) for n in directories)

        def get_name(item):
            return item[0]

        return OrderedDict(sorted(found_items.items(), key=get_name))

    def _patterns_from_section(self, section):
        # Yields the configured path patterns of the given config section.
        return (pattern for key, pattern in self.config.items(section))

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        if self != repo2:
            raise ValueError(
                "Subversion does not support getting common ancestor of"
                " different repositories.")

        # Revisions are strictly linear in SVN, so the common ancestor is
        # simply the older of the two.
        if int(commit_id1) < int(commit_id2):
            return commit_id1
        return commit_id2

    def verify(self):
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        # TODO: johbo: Implement better comparison, this is a very naive
        # version which does not allow to compare branches, tags or folders
        # at all.
        if repo2 != self:
            # fixed doubled word ("of of") in the original error message
            raise ValueError(
                "Subversion does not support comparison of different "
                "repositories.")

        if commit_id1 == commit_id2:
            return []

        commit_idx1 = self._get_commit_idx(commit_id1)
        commit_idx2 = self._get_commit_idx(commit_id2)

        commits = [
            self.get_commit(commit_idx=idx)
            for idx in range(commit_idx1 + 1, commit_idx2 + 1)]

        return commits

    def _get_commit_idx(self, commit_id):
        """Translate ``commit_id`` into the corresponding 0-based index.

        :raises CommitDoesNotExistError: when the index is out of range.
        """
        try:
            svn_rev = int(commit_id)
        except (TypeError, ValueError):
            # Narrowed from a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt. Non-numeric ids (e.g. HEAD) are
            # resolved remotely.
            # TODO: johbo: this might be only one case, HEAD, check this
            svn_rev = self._remote.lookup(commit_id)
        commit_idx = svn_rev - 1
        if commit_idx >= len(self.commit_ids):
            raise CommitDoesNotExistError(
                f"Commit at index {commit_idx} does not exist.")
        return commit_idx

    @staticmethod
    def check_url(url, config):
        """
        Check if `url` is a valid source to import a Subversion repository.
        """
        # convert to URL if it's a local directory
        if os.path.isdir(url):
            url = 'file://' + urllib.request.pathname2url(url)
        return connection.Svn.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        """Return True if ``path`` contains a valid SVN repository."""
        try:
            SubversionRepository(path)
            return True
        except VCSError:
            pass
        return False

    def _check_path(self):
        # Validates that self.path exists and is a real SVN repository.
        if not os.path.exists(self.path):
            raise VCSError(f'Path "{self.path}" does not exist!')
        if not self._remote.is_path_valid_repository(self.path):
            # converted from %-formatting for consistency with the rest of
            # the modernized file
            raise VCSError(
                f'Path "{self.path}" does not contain a Subversion repository')

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        # Subversion always has a first commit which has id "0" and contains
        # what we are looking for.
        last_id = len(self.commit_ids)
        properties = self._remote.revision_properties(last_id)
        return _date_from_svn_properties(properties)

    @LazyProperty
    def in_memory_commit(self):
        return SubversionInMemoryCommit(self)

    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        return os.path.join(self.path, 'hooks')

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Return a :class:`SubversionCommit` selected either by ``commit_id``
        (SVN revision as a string) or by 0-based ``commit_idx``.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                commit_id = self.commit_ids[commit_idx]
            except IndexError:
                raise CommitDoesNotExistError(f'No commit with idx: {commit_idx}')

        commit_id = self._sanitize_commit_id(commit_id)
        commit = SubversionCommit(repository=self, commit_id=commit_id)
        return commit

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Return a lazy collection of commits, optionally bounded by
        start/end commit ids, dates, or restricted to a branch path.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commit_ids yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        start_raw_id = self._sanitize_commit_id(start_id)
        start_pos = self.commit_ids.index(start_raw_id) if start_id else None
        end_raw_id = self._sanitize_commit_id(end_id)
        end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            # converted from %-formatting for consistency with the rest of
            # the modernized file
            raise RepositoryError(
                f"Start commit '{start_id}' cannot be after end commit '{end_id}'")
        if end_pos is not None:
            end_pos += 1

        # Date based filtering
        if start_date or end_date:
            start_raw_id, end_raw_id = self._remote.lookup_interval(
                date_astimestamp(start_date) if start_date else None,
                date_astimestamp(end_date) if end_date else None)
            start_pos = start_raw_id - 1
            end_pos = end_raw_id

        commit_ids = self.commit_ids

        # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
        if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
            svn_rev = int(self.commit_ids[-1])
            commit_ids = self._remote.node_history(
                path=branch_name, revision=svn_rev, limit=None)
            commit_ids = [str(i) for i in reversed(commit_ids)]

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos:end_pos]
        return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)

    def _sanitize_commit_id(self, commit_id):
        """
        Normalize ``commit_id`` (numeric string or one of the symbolic names
        HEAD/tip/None) to an existing SVN revision string.

        :raises CommitDoesNotExistError: for out-of-range or unknown ids.
        """
        if commit_id and commit_id.isdigit():
            if int(commit_id) <= len(self.commit_ids):
                return commit_id
            else:
                raise CommitDoesNotExistError(
                    f"Commit {commit_id} does not exist.")
        if commit_id not in [
                None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
            raise CommitDoesNotExistError(
                f"Commit id {commit_id} not understood.")
        svn_rev = self._remote.lookup('HEAD')
        return str(svn_rev)

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """Return a :class:`SubversionDiff` between the two given commits,
        optionally restricted to ``path``/``path1``."""
        self._validate_diff_commits(commit1, commit2)
        svn_rev1 = int(commit1.raw_id)
        svn_rev2 = int(commit2.raw_id)
        diff = self._remote.diff(
            svn_rev1, svn_rev2, path1=path1, path2=path,
            ignore_whitespace=ignore_whitespace, context=context)
        return SubversionDiff(diff)
362
362
363
363
364 def _sanitize_url(url):
364 def _sanitize_url(url):
365 if '://' not in url:
365 if '://' not in url:
366 url = 'file://' + urllib.request.pathname2url(url)
366 url = 'file://' + urllib.request.pathname2url(url)
367 return url
367 return url
@@ -1,159 +1,159 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Utilities aimed to help achieve mostly basic tasks.
20 Utilities aimed to help achieve mostly basic tasks.
21 """
21 """
22
22
23
23
24
24
25
25
26 import re
26 import re
27 import os
27 import os
28 import time
28 import time
29 import datetime
29 import datetime
30 import logging
30 import logging
31
31
32 from rhodecode.lib.vcs.conf import settings
32 from rhodecode.lib.vcs.conf import settings
33 from rhodecode.lib.vcs.exceptions import VCSError, VCSBackendNotSupportedError
33 from rhodecode.lib.vcs.exceptions import VCSError, VCSBackendNotSupportedError
34
34
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
39 def get_scm(path):
39 def get_scm(path):
40 """
40 """
41 Returns one of alias from ``ALIASES`` (in order of precedence same as
41 Returns one of alias from ``ALIASES`` (in order of precedence same as
42 shortcuts given in ``ALIASES``) and working dir path for the given
42 shortcuts given in ``ALIASES``) and working dir path for the given
43 argument. If no scm-specific directory is found or more than one scm is
43 argument. If no scm-specific directory is found or more than one scm is
44 found at that directory, ``VCSError`` is raised.
44 found at that directory, ``VCSError`` is raised.
45 """
45 """
46 if not os.path.isdir(path):
46 if not os.path.isdir(path):
47 raise VCSError("Given path %s is not a directory" % path)
47 raise VCSError("Given path %s is not a directory" % path)
48
48
49 found_scms = [(scm, path) for scm in get_scms_for_path(path)]
49 found_scms = [(scm, path) for scm in get_scms_for_path(path)]
50
50
51 if len(found_scms) > 1:
51 if len(found_scms) > 1:
52 found = ', '.join(x[0] for x in found_scms)
52 found = ', '.join(x[0] for x in found_scms)
53 raise VCSError(
53 raise VCSError(
54 'More than one [{}] scm found at given path {}'.format(found, path))
54 f'More than one [{found}] scm found at given path {path}')
55
55
56 if len(found_scms) == 0:
56 if len(found_scms) == 0:
57 raise VCSError('No scm found at given path %s' % path)
57 raise VCSError('No scm found at given path %s' % path)
58
58
59 return found_scms[0]
59 return found_scms[0]
60
60
61
61
62 def get_scm_backend(backend_type):
62 def get_scm_backend(backend_type):
63 from rhodecode.lib.vcs.backends import get_backend
63 from rhodecode.lib.vcs.backends import get_backend
64 return get_backend(backend_type)
64 return get_backend(backend_type)
65
65
66
66
67 def get_scms_for_path(path):
67 def get_scms_for_path(path):
68 """
68 """
69 Returns all scm's found at the given path. If no scm is recognized
69 Returns all scm's found at the given path. If no scm is recognized
70 - empty list is returned.
70 - empty list is returned.
71
71
72 :param path: path to directory which should be checked. May be callable.
72 :param path: path to directory which should be checked. May be callable.
73
73
74 :raises VCSError: if given ``path`` is not a directory
74 :raises VCSError: if given ``path`` is not a directory
75 """
75 """
76 from rhodecode.lib.vcs.backends import get_backend
76 from rhodecode.lib.vcs.backends import get_backend
77 if hasattr(path, '__call__'):
77 if hasattr(path, '__call__'):
78 path = path()
78 path = path()
79 if not os.path.isdir(path):
79 if not os.path.isdir(path):
80 raise VCSError("Given path %r is not a directory" % path)
80 raise VCSError("Given path %r is not a directory" % path)
81
81
82 result = []
82 result = []
83 for key in settings.available_aliases():
83 for key in settings.available_aliases():
84 try:
84 try:
85 backend = get_backend(key)
85 backend = get_backend(key)
86 except VCSBackendNotSupportedError:
86 except VCSBackendNotSupportedError:
87 log.warning('VCSBackendNotSupportedError: %s not supported', key)
87 log.warning('VCSBackendNotSupportedError: %s not supported', key)
88 continue
88 continue
89 if backend.is_valid_repository(path):
89 if backend.is_valid_repository(path):
90 result.append(key)
90 result.append(key)
91 return result
91 return result
92
92
93
93
94 def parse_datetime(text):
94 def parse_datetime(text):
95 """
95 """
96 Parses given text and returns ``datetime.datetime`` instance or raises
96 Parses given text and returns ``datetime.datetime`` instance or raises
97 ``ValueError``.
97 ``ValueError``.
98
98
99 :param text: string of desired date/datetime or something more verbose,
99 :param text: string of desired date/datetime or something more verbose,
100 like *yesterday*, *2weeks 3days*, etc.
100 like *yesterday*, *2weeks 3days*, etc.
101 """
101 """
102 if not text:
102 if not text:
103 raise ValueError('Wrong date: "%s"' % text)
103 raise ValueError('Wrong date: "%s"' % text)
104
104
105 if isinstance(text, datetime.datetime):
105 if isinstance(text, datetime.datetime):
106 return text
106 return text
107
107
108 # we limit a format to no include microseconds e.g 2017-10-17t17:48:23.XXXX
108 # we limit a format to no include microseconds e.g 2017-10-17t17:48:23.XXXX
109 text = text.strip().lower()[:19]
109 text = text.strip().lower()[:19]
110
110
111 input_formats = (
111 input_formats = (
112 '%Y-%m-%d %H:%M:%S',
112 '%Y-%m-%d %H:%M:%S',
113 '%Y-%m-%dt%H:%M:%S',
113 '%Y-%m-%dt%H:%M:%S',
114 '%Y-%m-%d %H:%M',
114 '%Y-%m-%d %H:%M',
115 '%Y-%m-%dt%H:%M',
115 '%Y-%m-%dt%H:%M',
116 '%Y-%m-%d',
116 '%Y-%m-%d',
117 '%m/%d/%Y %H:%M:%S',
117 '%m/%d/%Y %H:%M:%S',
118 '%m/%d/%Yt%H:%M:%S',
118 '%m/%d/%Yt%H:%M:%S',
119 '%m/%d/%Y %H:%M',
119 '%m/%d/%Y %H:%M',
120 '%m/%d/%Yt%H:%M',
120 '%m/%d/%Yt%H:%M',
121 '%m/%d/%Y',
121 '%m/%d/%Y',
122 '%m/%d/%y %H:%M:%S',
122 '%m/%d/%y %H:%M:%S',
123 '%m/%d/%yt%H:%M:%S',
123 '%m/%d/%yt%H:%M:%S',
124 '%m/%d/%y %H:%M',
124 '%m/%d/%y %H:%M',
125 '%m/%d/%yt%H:%M',
125 '%m/%d/%yt%H:%M',
126 '%m/%d/%y',
126 '%m/%d/%y',
127 )
127 )
128 for format_def in input_formats:
128 for format_def in input_formats:
129 try:
129 try:
130 return datetime.datetime(*time.strptime(text, format_def)[:6])
130 return datetime.datetime(*time.strptime(text, format_def)[:6])
131 except ValueError:
131 except ValueError:
132 pass
132 pass
133
133
134 # Try descriptive texts
134 # Try descriptive texts
135 if text == 'tomorrow':
135 if text == 'tomorrow':
136 future = datetime.datetime.now() + datetime.timedelta(days=1)
136 future = datetime.datetime.now() + datetime.timedelta(days=1)
137 args = future.timetuple()[:3] + (23, 59, 59)
137 args = future.timetuple()[:3] + (23, 59, 59)
138 return datetime.datetime(*args)
138 return datetime.datetime(*args)
139 elif text == 'today':
139 elif text == 'today':
140 return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
140 return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
141 elif text == 'now':
141 elif text == 'now':
142 return datetime.datetime.now()
142 return datetime.datetime.now()
143 elif text == 'yesterday':
143 elif text == 'yesterday':
144 past = datetime.datetime.now() - datetime.timedelta(days=1)
144 past = datetime.datetime.now() - datetime.timedelta(days=1)
145 return datetime.datetime(*past.timetuple()[:3])
145 return datetime.datetime(*past.timetuple()[:3])
146 else:
146 else:
147 days = 0
147 days = 0
148 matched = re.match(
148 matched = re.match(
149 r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
149 r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
150 if matched:
150 if matched:
151 groupdict = matched.groupdict()
151 groupdict = matched.groupdict()
152 if groupdict['days']:
152 if groupdict['days']:
153 days += int(matched.groupdict()['days'])
153 days += int(matched.groupdict()['days'])
154 if groupdict['weeks']:
154 if groupdict['weeks']:
155 days += int(matched.groupdict()['weeks']) * 7
155 days += int(matched.groupdict()['weeks']) * 7
156 past = datetime.datetime.now() - datetime.timedelta(days=days)
156 past = datetime.datetime.now() - datetime.timedelta(days=days)
157 return datetime.datetime(*past.timetuple()[:3])
157 return datetime.datetime(*past.timetuple()[:3])
158
158
159 raise ValueError('Wrong date: "%s"' % text)
159 raise ValueError('Wrong date: "%s"' % text)
@@ -1,88 +1,87 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 """
19 """
21 Utilities to be shared by multiple controllers.
20 Utilities to be shared by multiple controllers.
22
21
23 Should only contain utilities to be shared in the controller layer.
22 Should only contain utilities to be shared in the controller layer.
24 """
23 """
25
24
26 from rhodecode.lib import helpers as h
25 from rhodecode.lib import helpers as h
27 from rhodecode.lib.vcs.exceptions import RepositoryError
26 from rhodecode.lib.vcs.exceptions import RepositoryError
28
27
29
28
30 def parse_path_ref(ref, default_path=None):
29 def parse_path_ref(ref, default_path=None):
31 """
30 """
32 Parse out a path and reference combination and return both parts of it.
31 Parse out a path and reference combination and return both parts of it.
33
32
34 This is used to allow support of path based comparisons for Subversion
33 This is used to allow support of path based comparisons for Subversion
35 as an iterim solution in parameter handling.
34 as an iterim solution in parameter handling.
36 """
35 """
37 if '@' in ref:
36 if '@' in ref:
38 return ref.rsplit('@', 1)
37 return ref.rsplit('@', 1)
39 else:
38 else:
40 return default_path, ref
39 return default_path, ref
41
40
42
41
43 def get_format_ref_id(repo):
42 def get_format_ref_id(repo):
44 """Returns a `repo` specific reference formatter function"""
43 """Returns a `repo` specific reference formatter function"""
45 if h.is_svn(repo):
44 if h.is_svn(repo):
46 return _format_ref_id_svn
45 return _format_ref_id_svn
47 else:
46 else:
48 return _format_ref_id
47 return _format_ref_id
49
48
50
49
51 def _format_ref_id(name, raw_id):
50 def _format_ref_id(name, raw_id):
52 """Default formatting of a given reference `name`"""
51 """Default formatting of a given reference `name`"""
53 return name
52 return name
54
53
55
54
56 def _format_ref_id_svn(name, raw_id):
55 def _format_ref_id_svn(name, raw_id):
57 """Special way of formatting a reference for Subversion including path"""
56 """Special way of formatting a reference for Subversion including path"""
58 return '%s@%s' % (name, raw_id)
57 return '{}@{}'.format(name, raw_id)
59
58
60
59
61 def get_commit_from_ref_name(repo, ref_name, ref_type=None):
60 def get_commit_from_ref_name(repo, ref_name, ref_type=None):
62 """
61 """
63 Gets the commit for a `ref_name` taking into account `ref_type`.
62 Gets the commit for a `ref_name` taking into account `ref_type`.
64 Needed in case a bookmark / tag share the same name.
63 Needed in case a bookmark / tag share the same name.
65
64
66 :param repo: the repo instance
65 :param repo: the repo instance
67 :param ref_name: the name of the ref to get
66 :param ref_name: the name of the ref to get
68 :param ref_type: optional, used to disambiguate colliding refs
67 :param ref_type: optional, used to disambiguate colliding refs
69 """
68 """
70 repo_scm = repo.scm_instance()
69 repo_scm = repo.scm_instance()
71 ref_type_mapping = {
70 ref_type_mapping = {
72 'book': repo_scm.bookmarks,
71 'book': repo_scm.bookmarks,
73 'bookmark': repo_scm.bookmarks,
72 'bookmark': repo_scm.bookmarks,
74 'tag': repo_scm.tags,
73 'tag': repo_scm.tags,
75 'branch': repo_scm.branches,
74 'branch': repo_scm.branches,
76 }
75 }
77
76
78 commit_id = ref_name
77 commit_id = ref_name
79 if repo_scm.alias != 'svn': # pass svn refs straight to backend until
78 if repo_scm.alias != 'svn': # pass svn refs straight to backend until
80 # the branch issue with svn is fixed
79 # the branch issue with svn is fixed
81 if ref_type and ref_type in ref_type_mapping:
80 if ref_type and ref_type in ref_type_mapping:
82 try:
81 try:
83 commit_id = ref_type_mapping[ref_type][ref_name]
82 commit_id = ref_type_mapping[ref_type][ref_name]
84 except KeyError:
83 except KeyError:
85 raise RepositoryError(
84 raise RepositoryError(
86 '%s "%s" does not exist' % (ref_type, ref_name))
85 '{} "{}" does not exist'.format(ref_type, ref_name))
87
86
88 return repo_scm.get_commit(commit_id)
87 return repo_scm.get_commit(commit_id)
@@ -1,141 +1,140 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20
19
21 import logging
20 import logging
22
21
23 import rhodecode
22 import rhodecode
24 from rhodecode.model import meta, db
23 from rhodecode.model import meta, db
25 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
24 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
26
25
27 log = logging.getLogger(__name__)
26 log = logging.getLogger(__name__)
28
27
29
28
30 def init_model(engine, encryption_key: bytes = b''):
29 def init_model(engine, encryption_key: bytes = b''):
31 """
30 """
32 Initializes db session, bind the engine with the metadata,
31 Initializes db session, bind the engine with the metadata,
33 Call this before using any of the tables or classes in the model,
32 Call this before using any of the tables or classes in the model,
34 preferably once in application start
33 preferably once in application start
35
34
36 :param engine: engine to bind to
35 :param engine: engine to bind to
37 :param encryption_key: key used for encryption
36 :param encryption_key: key used for encryption
38 """
37 """
39
38
40 engine_str = obfuscate_url_pw(str(engine.url))
39 engine_str = obfuscate_url_pw(str(engine.url))
41 log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str)
40 log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str)
42
41
43 meta.bind_engine_to_session(engine)
42 meta.bind_engine_to_session(engine)
44 init_model_encryption(db, enc_key=encryption_key)
43 init_model_encryption(db, enc_key=encryption_key)
45
44
46
45
47 def init_model_encryption(*db_models, enc_key: bytes = b'', config=None):
46 def init_model_encryption(*db_models, enc_key: bytes = b'', config=None):
48 if not enc_key:
47 if not enc_key:
49 from pyramid.threadlocal import get_current_registry
48 from pyramid.threadlocal import get_current_registry
50 config = config or get_current_registry().settings
49 config = config or get_current_registry().settings
51 enc_key = get_encryption_key(config)
50 enc_key = get_encryption_key(config)
52
51
53 for db_model in db_models:
52 for db_model in db_models:
54 log.debug('setting encryption key for model %s', db_model)
53 log.debug('setting encryption key for model %s', db_model)
55 db_model.ENCRYPTION_KEY = enc_key
54 db_model.ENCRYPTION_KEY = enc_key
56
55
57
56
58 class BaseModel(object):
57 class BaseModel(object):
59 """
58 """
60 Base Model for all RhodeCode models, it adds sql alchemy session
59 Base Model for all RhodeCode models, it adds sql alchemy session
61 into instance of model
60 into instance of model
62
61
63 :param sa: If passed it reuses this session instead of creating a new one
62 :param sa: If passed it reuses this session instead of creating a new one
64 """
63 """
65
64
66 cls = None # override in child class
65 cls = None # override in child class
67
66
68 def __init__(self, sa=None):
67 def __init__(self, sa=None):
69 if sa is not None:
68 if sa is not None:
70 self.sa = sa
69 self.sa = sa
71 else:
70 else:
72 self.sa = meta.Session()
71 self.sa = meta.Session()
73
72
74 def _get_instance(self, cls, instance, callback=None):
73 def _get_instance(self, cls, instance, callback=None):
75 """
74 """
76 Gets instance of given cls using some simple lookup mechanism.
75 Gets instance of given cls using some simple lookup mechanism.
77
76
78 :param cls: classes to fetch
77 :param cls: classes to fetch
79 :param instance: int or Instance
78 :param instance: int or Instance
80 :param callback: callback to call if all lookups failed
79 :param callback: callback to call if all lookups failed
81 """
80 """
82
81
83 if isinstance(instance, cls):
82 if isinstance(instance, cls):
84 return instance
83 return instance
85 elif isinstance(instance, int):
84 elif isinstance(instance, int):
86 if isinstance(cls, tuple):
85 if isinstance(cls, tuple):
87 # if we pass multi instances we pick first to .get()
86 # if we pass multi instances we pick first to .get()
88 cls = cls[0]
87 cls = cls[0]
89 return cls.get(instance)
88 return cls.get(instance)
90 else:
89 else:
91 if instance:
90 if instance:
92 if callback is None:
91 if callback is None:
93 raise Exception(
92 raise Exception(
94 'given object must be int or Instance of %s '
93 'given object must be int or Instance of %s '
95 'got %s, no callback provided' % (cls, type(instance))
94 'got %s, no callback provided' % (cls, type(instance))
96 )
95 )
97 else:
96 else:
98 return callback(instance)
97 return callback(instance)
99
98
100 def _get_user(self, user):
99 def _get_user(self, user):
101 """
100 """
102 Helper method to get user by ID, or username fallback
101 Helper method to get user by ID, or username fallback
103
102
104 :param user: UserID, username, or User instance
103 :param user: UserID, username, or User instance
105 """
104 """
106 return self._get_instance(
105 return self._get_instance(
107 db.User, user, callback=db.User.get_by_username)
106 db.User, user, callback=db.User.get_by_username)
108
107
109 def _get_user_group(self, user_group):
108 def _get_user_group(self, user_group):
110 """
109 """
111 Helper method to get user by ID, or username fallback
110 Helper method to get user by ID, or username fallback
112
111
113 :param user_group: UserGroupID, user_group_name, or UserGroup instance
112 :param user_group: UserGroupID, user_group_name, or UserGroup instance
114 """
113 """
115 return self._get_instance(
114 return self._get_instance(
116 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
115 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
117
116
118 def _get_repo(self, repository):
117 def _get_repo(self, repository):
119 """
118 """
120 Helper method to get repository by ID, or repository name
119 Helper method to get repository by ID, or repository name
121
120
122 :param repository: RepoID, repository name or Repository Instance
121 :param repository: RepoID, repository name or Repository Instance
123 """
122 """
124 return self._get_instance(
123 return self._get_instance(
125 db.Repository, repository, callback=db.Repository.get_by_repo_name)
124 db.Repository, repository, callback=db.Repository.get_by_repo_name)
126
125
127 def _get_perm(self, permission):
126 def _get_perm(self, permission):
128 """
127 """
129 Helper method to get permission by ID, or permission name
128 Helper method to get permission by ID, or permission name
130
129
131 :param permission: PermissionID, permission_name or Permission instance
130 :param permission: PermissionID, permission_name or Permission instance
132 """
131 """
133 return self._get_instance(
132 return self._get_instance(
134 db.Permission, permission, callback=db.Permission.get_by_key)
133 db.Permission, permission, callback=db.Permission.get_by_key)
135
134
136 @classmethod
135 @classmethod
137 def get_all(cls):
136 def get_all(cls):
138 """
137 """
139 Returns all instances of what is defined in `cls` class variable
138 Returns all instances of what is defined in `cls` class variable
140 """
139 """
141 return cls.cls.getAll()
140 return cls.cls.getAll()
@@ -1,124 +1,122 b''
1
2
3 # Copyright (C) 2013-2023 RhodeCode GmbH
1 # Copyright (C) 2013-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 """
19 """
22 authentication tokens model for RhodeCode
20 authentication tokens model for RhodeCode
23 """
21 """
24
22
25 import time
23 import time
26 import logging
24 import logging
27 import traceback
25 import traceback
28 from sqlalchemy import or_
26 from sqlalchemy import or_
29
27
30 from rhodecode.model import BaseModel
28 from rhodecode.model import BaseModel
31 from rhodecode.model.db import UserApiKeys
29 from rhodecode.model.db import UserApiKeys
32 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
33
31
34 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
35
33
36
34
37 class AuthTokenModel(BaseModel):
35 class AuthTokenModel(BaseModel):
38 cls = UserApiKeys
36 cls = UserApiKeys
39
37
40 @classmethod
38 @classmethod
41 def get_lifetime_values(cls, translator):
39 def get_lifetime_values(cls, translator):
42 from rhodecode.lib import helpers as h
40 from rhodecode.lib import helpers as h
43 _ = translator
41 _ = translator
44
42
45 def date_after_min(mins):
43 def date_after_min(mins):
46 after = time.time() + (60 * mins)
44 after = time.time() + (60 * mins)
47 return h.format_date(h.time_to_datetime(after))
45 return h.format_date(h.time_to_datetime(after))
48
46
49 return [
47 return [
50 (str(-1),
48 (str(-1),
51 _('forever')),
49 _('forever')),
52 (str(5),
50 (str(5),
53 _('5 minutes {end_date}').format(end_date=date_after_min(5))),
51 _('5 minutes {end_date}').format(end_date=date_after_min(5))),
54 (str(60),
52 (str(60),
55 _('1 hour {end_date}').format(end_date=date_after_min(60))),
53 _('1 hour {end_date}').format(end_date=date_after_min(60))),
56 (str(60 * 24),
54 (str(60 * 24),
57 _('1 day {end_date}').format(end_date=date_after_min(60 * 24))),
55 _('1 day {end_date}').format(end_date=date_after_min(60 * 24))),
58 (str(60 * 24 * 30),
56 (str(60 * 24 * 30),
59 _('1 month {end_date}').format(end_date=date_after_min(60 * 24 * 30))),
57 _('1 month {end_date}').format(end_date=date_after_min(60 * 24 * 30))),
60 ]
58 ]
61
59
62 def create(self, user, description, lifetime=-1, role=UserApiKeys.ROLE_ALL):
60 def create(self, user, description, lifetime=-1, role=UserApiKeys.ROLE_ALL):
63 """
61 """
64 :param user: user or user_id
62 :param user: user or user_id
65 :param description: description of ApiKey
63 :param description: description of ApiKey
66 :param lifetime: expiration time in minutes
64 :param lifetime: expiration time in minutes
67 :param role: role for the apikey
65 :param role: role for the apikey
68 """
66 """
69 from rhodecode.lib.auth import generate_auth_token
67 from rhodecode.lib.auth import generate_auth_token
70
68
71 user = self._get_user(user)
69 user = self._get_user(user)
72
70
73 new_auth_token = UserApiKeys()
71 new_auth_token = UserApiKeys()
74 new_auth_token.api_key = generate_auth_token(user.username)
72 new_auth_token.api_key = generate_auth_token(user.username)
75 new_auth_token.user_id = user.user_id
73 new_auth_token.user_id = user.user_id
76 new_auth_token.description = description
74 new_auth_token.description = description
77 new_auth_token.role = role
75 new_auth_token.role = role
78 new_auth_token.expires = time.time() + (lifetime * 60) \
76 new_auth_token.expires = time.time() + (lifetime * 60) \
79 if lifetime != -1 else -1
77 if lifetime != -1 else -1
80 Session().add(new_auth_token)
78 Session().add(new_auth_token)
81
79
82 return new_auth_token
80 return new_auth_token
83
81
84 def delete(self, auth_token_id, user=None):
82 def delete(self, auth_token_id, user=None):
85 """
83 """
86 Deletes given api_key, if user is set it also filters the object for
84 Deletes given api_key, if user is set it also filters the object for
87 deletion by given user.
85 deletion by given user.
88 """
86 """
89 auth_token = UserApiKeys.query().filter(
87 auth_token = UserApiKeys.query().filter(
90 UserApiKeys.user_api_key_id == auth_token_id)
88 UserApiKeys.user_api_key_id == auth_token_id)
91
89
92 if user:
90 if user:
93 user = self._get_user(user)
91 user = self._get_user(user)
94 auth_token = auth_token.filter(UserApiKeys.user_id == user.user_id)
92 auth_token = auth_token.filter(UserApiKeys.user_id == user.user_id)
95 auth_token = auth_token.scalar()
93 auth_token = auth_token.scalar()
96
94
97 if auth_token:
95 if auth_token:
98 try:
96 try:
99 Session().delete(auth_token)
97 Session().delete(auth_token)
100 except Exception:
98 except Exception:
101 log.error(traceback.format_exc())
99 log.error(traceback.format_exc())
102 raise
100 raise
103
101
104 def get_auth_tokens(self, user, show_expired=True):
102 def get_auth_tokens(self, user, show_expired=True):
105 user = self._get_user(user)
103 user = self._get_user(user)
106 user_auth_tokens = UserApiKeys.query()\
104 user_auth_tokens = UserApiKeys.query()\
107 .filter(UserApiKeys.user_id == user.user_id)
105 .filter(UserApiKeys.user_id == user.user_id)
108 if not show_expired:
106 if not show_expired:
109 user_auth_tokens = user_auth_tokens\
107 user_auth_tokens = user_auth_tokens\
110 .filter(or_(UserApiKeys.expires == -1,
108 .filter(or_(UserApiKeys.expires == -1,
111 UserApiKeys.expires >= time.time()))
109 UserApiKeys.expires >= time.time()))
112 user_auth_tokens = user_auth_tokens.order_by(
110 user_auth_tokens = user_auth_tokens.order_by(
113 UserApiKeys.user_api_key_id)
111 UserApiKeys.user_api_key_id)
114 return user_auth_tokens
112 return user_auth_tokens
115
113
116 def get_auth_token(self, auth_token):
114 def get_auth_token(self, auth_token):
117 auth_token = UserApiKeys.query().filter(
115 auth_token = UserApiKeys.query().filter(
118 UserApiKeys.api_key == auth_token)
116 UserApiKeys.api_key == auth_token)
119 auth_token = auth_token \
117 auth_token = auth_token \
120 .filter(or_(UserApiKeys.expires == -1,
118 .filter(or_(UserApiKeys.expires == -1,
121 UserApiKeys.expires >= time.time()))\
119 UserApiKeys.expires >= time.time()))\
122 .first()
120 .first()
123
121
124 return auth_token
122 return auth_token
@@ -1,403 +1,402 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20
19
21 import itertools
20 import itertools
22 import logging
21 import logging
23 import collections
22 import collections
24
23
25 from rhodecode.model import BaseModel
24 from rhodecode.model import BaseModel
26 from rhodecode.model.db import (
25 from rhodecode.model.db import (
27 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
26 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
28 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
27 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
29 from rhodecode.lib.markup_renderer import (
28 from rhodecode.lib.markup_renderer import (
30 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
29 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
31
30
32 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
33
32
34
33
35 class ChangesetStatusModel(BaseModel):
34 class ChangesetStatusModel(BaseModel):
36
35
37 cls = ChangesetStatus
36 cls = ChangesetStatus
38
37
39 def __get_changeset_status(self, changeset_status):
38 def __get_changeset_status(self, changeset_status):
40 return self._get_instance(ChangesetStatus, changeset_status)
39 return self._get_instance(ChangesetStatus, changeset_status)
41
40
42 def __get_pull_request(self, pull_request):
41 def __get_pull_request(self, pull_request):
43 return self._get_instance(PullRequest, pull_request)
42 return self._get_instance(PullRequest, pull_request)
44
43
45 def _get_status_query(self, repo, revision, pull_request,
44 def _get_status_query(self, repo, revision, pull_request,
46 with_revisions=False):
45 with_revisions=False):
47 repo = self._get_repo(repo)
46 repo = self._get_repo(repo)
48
47
49 q = ChangesetStatus.query()\
48 q = ChangesetStatus.query()\
50 .filter(ChangesetStatus.repo == repo)
49 .filter(ChangesetStatus.repo == repo)
51 if not with_revisions:
50 if not with_revisions:
52 q = q.filter(ChangesetStatus.version == 0)
51 q = q.filter(ChangesetStatus.version == 0)
53
52
54 if revision:
53 if revision:
55 q = q.filter(ChangesetStatus.revision == revision)
54 q = q.filter(ChangesetStatus.revision == revision)
56 elif pull_request:
55 elif pull_request:
57 pull_request = self.__get_pull_request(pull_request)
56 pull_request = self.__get_pull_request(pull_request)
58 # TODO: johbo: Think about the impact of this join, there must
57 # TODO: johbo: Think about the impact of this join, there must
59 # be a reason why ChangesetStatus and ChanagesetComment is linked
58 # be a reason why ChangesetStatus and ChanagesetComment is linked
60 # to the pull request. Might be that we want to do the same for
59 # to the pull request. Might be that we want to do the same for
61 # the pull_request_version_id.
60 # the pull_request_version_id.
62 q = q.join(ChangesetComment).filter(
61 q = q.join(ChangesetComment).filter(
63 ChangesetStatus.pull_request == pull_request,
62 ChangesetStatus.pull_request == pull_request,
64 ChangesetComment.pull_request_version_id == None)
63 ChangesetComment.pull_request_version_id == None)
65 else:
64 else:
66 raise Exception('Please specify revision or pull_request')
65 raise Exception('Please specify revision or pull_request')
67 q = q.order_by(ChangesetStatus.version.asc())
66 q = q.order_by(ChangesetStatus.version.asc())
68 return q
67 return q
69
68
70 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
69 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
71 trim_votes=True):
70 trim_votes=True):
72 """
71 """
73 Calculate status based on given group members, and voting rule
72 Calculate status based on given group members, and voting rule
74
73
75
74
76 group1 - 4 members, 3 required for approval
75 group1 - 4 members, 3 required for approval
77 user1 - approved
76 user1 - approved
78 user2 - reject
77 user2 - reject
79 user3 - approved
78 user3 - approved
80 user4 - rejected
79 user4 - rejected
81
80
82 final_state: rejected, reasons not at least 3 votes
81 final_state: rejected, reasons not at least 3 votes
83
82
84
83
85 group1 - 4 members, 2 required for approval
84 group1 - 4 members, 2 required for approval
86 user1 - approved
85 user1 - approved
87 user2 - reject
86 user2 - reject
88 user3 - approved
87 user3 - approved
89 user4 - rejected
88 user4 - rejected
90
89
91 final_state: approved, reasons got at least 2 approvals
90 final_state: approved, reasons got at least 2 approvals
92
91
93 group1 - 4 members, ALL required for approval
92 group1 - 4 members, ALL required for approval
94 user1 - approved
93 user1 - approved
95 user2 - reject
94 user2 - reject
96 user3 - approved
95 user3 - approved
97 user4 - rejected
96 user4 - rejected
98
97
99 final_state: rejected, reasons not all approvals
98 final_state: rejected, reasons not all approvals
100
99
101
100
102 group1 - 4 members, ALL required for approval
101 group1 - 4 members, ALL required for approval
103 user1 - approved
102 user1 - approved
104 user2 - approved
103 user2 - approved
105 user3 - approved
104 user3 - approved
106 user4 - approved
105 user4 - approved
107
106
108 final_state: approved, reason all approvals received
107 final_state: approved, reason all approvals received
109
108
110 group1 - 4 members, 5 required for approval
109 group1 - 4 members, 5 required for approval
111 (approval should be shorted to number of actual members)
110 (approval should be shorted to number of actual members)
112
111
113 user1 - approved
112 user1 - approved
114 user2 - approved
113 user2 - approved
115 user3 - approved
114 user3 - approved
116 user4 - approved
115 user4 - approved
117
116
118 final_state: approved, reason all approvals received
117 final_state: approved, reason all approvals received
119
118
120 """
119 """
121 group_vote_data = {}
120 group_vote_data = {}
122 got_rule = False
121 got_rule = False
123 members = collections.OrderedDict()
122 members = collections.OrderedDict()
124 for review_obj, user, reasons, mandatory, statuses \
123 for review_obj, user, reasons, mandatory, statuses \
125 in group_statuses_by_reviewers:
124 in group_statuses_by_reviewers:
126
125
127 if not got_rule:
126 if not got_rule:
128 group_vote_data = review_obj.rule_user_group_data()
127 group_vote_data = review_obj.rule_user_group_data()
129 got_rule = bool(group_vote_data)
128 got_rule = bool(group_vote_data)
130
129
131 members[user.user_id] = statuses
130 members[user.user_id] = statuses
132
131
133 if not group_vote_data:
132 if not group_vote_data:
134 return []
133 return []
135
134
136 required_votes = group_vote_data['vote_rule']
135 required_votes = group_vote_data['vote_rule']
137 if required_votes == -1:
136 if required_votes == -1:
138 # -1 means all required, so we replace it with how many people
137 # -1 means all required, so we replace it with how many people
139 # are in the members
138 # are in the members
140 required_votes = len(members)
139 required_votes = len(members)
141
140
142 if trim_votes and required_votes > len(members):
141 if trim_votes and required_votes > len(members):
143 # we require more votes than we have members in the group
142 # we require more votes than we have members in the group
144 # in this case we trim the required votes to the number of members
143 # in this case we trim the required votes to the number of members
145 required_votes = len(members)
144 required_votes = len(members)
146
145
147 approvals = sum([
146 approvals = sum([
148 1 for statuses in members.values()
147 1 for statuses in members.values()
149 if statuses and
148 if statuses and
150 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
149 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
151
150
152 calculated_votes = []
151 calculated_votes = []
153 # we have all votes from users, now check if we have enough votes
152 # we have all votes from users, now check if we have enough votes
154 # to fill other
153 # to fill other
155 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
154 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
156 if approvals >= required_votes:
155 if approvals >= required_votes:
157 fill_in = ChangesetStatus.STATUS_APPROVED
156 fill_in = ChangesetStatus.STATUS_APPROVED
158
157
159 for member, statuses in members.items():
158 for member, statuses in members.items():
160 if statuses:
159 if statuses:
161 ver, latest = statuses[0]
160 ver, latest = statuses[0]
162 if fill_in == ChangesetStatus.STATUS_APPROVED:
161 if fill_in == ChangesetStatus.STATUS_APPROVED:
163 calculated_votes.append(fill_in)
162 calculated_votes.append(fill_in)
164 else:
163 else:
165 calculated_votes.append(latest.status)
164 calculated_votes.append(latest.status)
166 else:
165 else:
167 calculated_votes.append(fill_in)
166 calculated_votes.append(fill_in)
168
167
169 return calculated_votes
168 return calculated_votes
170
169
171 def calculate_status(self, statuses_by_reviewers):
170 def calculate_status(self, statuses_by_reviewers):
172 """
171 """
173 Given the approval statuses from reviewers, calculates final approval
172 Given the approval statuses from reviewers, calculates final approval
174 status. There can only be 3 results, all approved, all rejected. If
173 status. There can only be 3 results, all approved, all rejected. If
175 there is no consensus the PR is under review.
174 there is no consensus the PR is under review.
176
175
177 :param statuses_by_reviewers:
176 :param statuses_by_reviewers:
178 """
177 """
179
178
180 def group_rule(element):
179 def group_rule(element):
181 _review_obj = element[0]
180 _review_obj = element[0]
182 rule_data = _review_obj.rule_user_group_data()
181 rule_data = _review_obj.rule_user_group_data()
183 if rule_data and rule_data['id']:
182 if rule_data and rule_data['id']:
184 return rule_data['id']
183 return rule_data['id']
185 # don't return None, as we cant compare this
184 # don't return None, as we cant compare this
186 return 0
185 return 0
187
186
188 voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)
187 voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)
189
188
190 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
189 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
191
190
192 reviewers_number = len(statuses_by_reviewers)
191 reviewers_number = len(statuses_by_reviewers)
193 votes = collections.defaultdict(int)
192 votes = collections.defaultdict(int)
194 for group, group_statuses_by_reviewers in voting_by_groups:
193 for group, group_statuses_by_reviewers in voting_by_groups:
195 if group:
194 if group:
196 # calculate how the "group" voted
195 # calculate how the "group" voted
197 for vote_status in self.calculate_group_vote(
196 for vote_status in self.calculate_group_vote(
198 group, group_statuses_by_reviewers):
197 group, group_statuses_by_reviewers):
199 votes[vote_status] += 1
198 votes[vote_status] += 1
200 else:
199 else:
201
200
202 for review_obj, user, reasons, mandatory, statuses \
201 for review_obj, user, reasons, mandatory, statuses \
203 in group_statuses_by_reviewers:
202 in group_statuses_by_reviewers:
204 # individual vote
203 # individual vote
205 if statuses:
204 if statuses:
206 ver, latest = statuses[0]
205 ver, latest = statuses[0]
207 votes[latest.status] += 1
206 votes[latest.status] += 1
208
207
209 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
208 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
210 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
209 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
211
210
212 # TODO(marcink): with group voting, how does rejected work,
211 # TODO(marcink): with group voting, how does rejected work,
213 # do we ever get rejected state ?
212 # do we ever get rejected state ?
214
213
215 if approved_votes_count and (approved_votes_count == reviewers_number):
214 if approved_votes_count and (approved_votes_count == reviewers_number):
216 return ChangesetStatus.STATUS_APPROVED
215 return ChangesetStatus.STATUS_APPROVED
217
216
218 if rejected_votes_count and (rejected_votes_count == reviewers_number):
217 if rejected_votes_count and (rejected_votes_count == reviewers_number):
219 return ChangesetStatus.STATUS_REJECTED
218 return ChangesetStatus.STATUS_REJECTED
220
219
221 return ChangesetStatus.STATUS_UNDER_REVIEW
220 return ChangesetStatus.STATUS_UNDER_REVIEW
222
221
223 def get_statuses(self, repo, revision=None, pull_request=None,
222 def get_statuses(self, repo, revision=None, pull_request=None,
224 with_revisions=False):
223 with_revisions=False):
225 q = self._get_status_query(repo, revision, pull_request,
224 q = self._get_status_query(repo, revision, pull_request,
226 with_revisions)
225 with_revisions)
227 return q.all()
226 return q.all()
228
227
229 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
228 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
230 """
229 """
231 Returns latest status of changeset for given revision or for given
230 Returns latest status of changeset for given revision or for given
232 pull request. Statuses are versioned inside a table itself and
231 pull request. Statuses are versioned inside a table itself and
233 version == 0 is always the current one
232 version == 0 is always the current one
234
233
235 :param repo:
234 :param repo:
236 :param revision: 40char hash or None
235 :param revision: 40char hash or None
237 :param pull_request: pull_request reference
236 :param pull_request: pull_request reference
238 :param as_str: return status as string not object
237 :param as_str: return status as string not object
239 """
238 """
240 q = self._get_status_query(repo, revision, pull_request)
239 q = self._get_status_query(repo, revision, pull_request)
241
240
242 # need to use first here since there can be multiple statuses
241 # need to use first here since there can be multiple statuses
243 # returned from pull_request
242 # returned from pull_request
244 status = q.first()
243 status = q.first()
245 if as_str:
244 if as_str:
246 status = status.status if status else status
245 status = status.status if status else status
247 st = status or ChangesetStatus.DEFAULT
246 st = status or ChangesetStatus.DEFAULT
248 return str(st)
247 return str(st)
249 return status
248 return status
250
249
251 def _render_auto_status_message(
250 def _render_auto_status_message(
252 self, status, commit_id=None, pull_request=None):
251 self, status, commit_id=None, pull_request=None):
253 """
252 """
254 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
253 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
255 so it's always looking the same disregarding on which default
254 so it's always looking the same disregarding on which default
256 renderer system is using.
255 renderer system is using.
257
256
258 :param status: status text to change into
257 :param status: status text to change into
259 :param commit_id: the commit_id we change the status for
258 :param commit_id: the commit_id we change the status for
260 :param pull_request: the pull request we change the status for
259 :param pull_request: the pull request we change the status for
261 """
260 """
262
261
263 new_status = ChangesetStatus.get_status_lbl(status)
262 new_status = ChangesetStatus.get_status_lbl(status)
264
263
265 params = {
264 params = {
266 'new_status_label': new_status,
265 'new_status_label': new_status,
267 'pull_request': pull_request,
266 'pull_request': pull_request,
268 'commit_id': commit_id,
267 'commit_id': commit_id,
269 }
268 }
270 renderer = RstTemplateRenderer()
269 renderer = RstTemplateRenderer()
271 return renderer.render('auto_status_change.mako', **params)
270 return renderer.render('auto_status_change.mako', **params)
272
271
273 def set_status(self, repo, status, user, comment=None, revision=None,
272 def set_status(self, repo, status, user, comment=None, revision=None,
274 pull_request=None, dont_allow_on_closed_pull_request=False):
273 pull_request=None, dont_allow_on_closed_pull_request=False):
275 """
274 """
276 Creates new status for changeset or updates the old ones bumping their
275 Creates new status for changeset or updates the old ones bumping their
277 version, leaving the current status at
276 version, leaving the current status at
278
277
279 :param repo:
278 :param repo:
280 :param revision:
279 :param revision:
281 :param status:
280 :param status:
282 :param user:
281 :param user:
283 :param comment:
282 :param comment:
284 :param dont_allow_on_closed_pull_request: don't allow a status change
283 :param dont_allow_on_closed_pull_request: don't allow a status change
285 if last status was for pull request and it's closed. We shouldn't
284 if last status was for pull request and it's closed. We shouldn't
286 mess around this manually
285 mess around this manually
287 """
286 """
288 repo = self._get_repo(repo)
287 repo = self._get_repo(repo)
289
288
290 q = ChangesetStatus.query()
289 q = ChangesetStatus.query()
291
290
292 if revision:
291 if revision:
293 q = q.filter(ChangesetStatus.repo == repo)
292 q = q.filter(ChangesetStatus.repo == repo)
294 q = q.filter(ChangesetStatus.revision == revision)
293 q = q.filter(ChangesetStatus.revision == revision)
295 elif pull_request:
294 elif pull_request:
296 pull_request = self.__get_pull_request(pull_request)
295 pull_request = self.__get_pull_request(pull_request)
297 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
296 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
298 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
297 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
299 cur_statuses = q.all()
298 cur_statuses = q.all()
300
299
301 # if statuses exists and last is associated with a closed pull request
300 # if statuses exists and last is associated with a closed pull request
302 # we need to check if we can allow this status change
301 # we need to check if we can allow this status change
303 if (dont_allow_on_closed_pull_request and cur_statuses
302 if (dont_allow_on_closed_pull_request and cur_statuses
304 and getattr(cur_statuses[0].pull_request, 'status', '')
303 and getattr(cur_statuses[0].pull_request, 'status', '')
305 == PullRequest.STATUS_CLOSED):
304 == PullRequest.STATUS_CLOSED):
306 raise StatusChangeOnClosedPullRequestError(
305 raise StatusChangeOnClosedPullRequestError(
307 'Changing status on closed pull request is not allowed'
306 'Changing status on closed pull request is not allowed'
308 )
307 )
309
308
310 # update all current statuses with older version
309 # update all current statuses with older version
311 if cur_statuses:
310 if cur_statuses:
312 for st in cur_statuses:
311 for st in cur_statuses:
313 st.version += 1
312 st.version += 1
314 Session().add(st)
313 Session().add(st)
315 Session().flush()
314 Session().flush()
316
315
317 def _create_status(user, repo, status, comment, revision, pull_request):
316 def _create_status(user, repo, status, comment, revision, pull_request):
318 new_status = ChangesetStatus()
317 new_status = ChangesetStatus()
319 new_status.author = self._get_user(user)
318 new_status.author = self._get_user(user)
320 new_status.repo = self._get_repo(repo)
319 new_status.repo = self._get_repo(repo)
321 new_status.status = status
320 new_status.status = status
322 new_status.comment = comment
321 new_status.comment = comment
323 new_status.revision = revision
322 new_status.revision = revision
324 new_status.pull_request = pull_request
323 new_status.pull_request = pull_request
325 return new_status
324 return new_status
326
325
327 if not comment:
326 if not comment:
328 from rhodecode.model.comment import CommentsModel
327 from rhodecode.model.comment import CommentsModel
329 comment = CommentsModel().create(
328 comment = CommentsModel().create(
330 text=self._render_auto_status_message(
329 text=self._render_auto_status_message(
331 status, commit_id=revision, pull_request=pull_request),
330 status, commit_id=revision, pull_request=pull_request),
332 repo=repo,
331 repo=repo,
333 user=user,
332 user=user,
334 pull_request=pull_request,
333 pull_request=pull_request,
335 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
334 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
336 )
335 )
337
336
338 if revision:
337 if revision:
339 new_status = _create_status(
338 new_status = _create_status(
340 user=user, repo=repo, status=status, comment=comment,
339 user=user, repo=repo, status=status, comment=comment,
341 revision=revision, pull_request=pull_request)
340 revision=revision, pull_request=pull_request)
342 Session().add(new_status)
341 Session().add(new_status)
343 return new_status
342 return new_status
344 elif pull_request:
343 elif pull_request:
345 # pull request can have more than one revision associated to it
344 # pull request can have more than one revision associated to it
346 # we need to create new version for each one
345 # we need to create new version for each one
347 new_statuses = []
346 new_statuses = []
348 repo = pull_request.source_repo
347 repo = pull_request.source_repo
349 for rev in pull_request.revisions:
348 for rev in pull_request.revisions:
350 new_status = _create_status(
349 new_status = _create_status(
351 user=user, repo=repo, status=status, comment=comment,
350 user=user, repo=repo, status=status, comment=comment,
352 revision=rev, pull_request=pull_request)
351 revision=rev, pull_request=pull_request)
353 new_statuses.append(new_status)
352 new_statuses.append(new_status)
354 Session().add(new_status)
353 Session().add(new_status)
355 return new_statuses
354 return new_statuses
356
355
357 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
356 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
358
357
359 commit_statuses_map = collections.defaultdict(list)
358 commit_statuses_map = collections.defaultdict(list)
360 for st in commit_statuses:
359 for st in commit_statuses:
361 commit_statuses_map[st.author.username] += [st]
360 commit_statuses_map[st.author.username] += [st]
362
361
363 reviewers = []
362 reviewers = []
364
363
365 def version(commit_status):
364 def version(commit_status):
366 return commit_status.version
365 return commit_status.version
367
366
368 for obj in reviewers_data:
367 for obj in reviewers_data:
369 if not obj.user:
368 if not obj.user:
370 continue
369 continue
371 if user and obj.user.username != user.username:
370 if user and obj.user.username != user.username:
372 # single user filter
371 # single user filter
373 continue
372 continue
374
373
375 statuses = commit_statuses_map.get(obj.user.username, None)
374 statuses = commit_statuses_map.get(obj.user.username, None)
376 if statuses:
375 if statuses:
377 status_groups = itertools.groupby(
376 status_groups = itertools.groupby(
378 sorted(statuses, key=version), version)
377 sorted(statuses, key=version), version)
379 statuses = [(x, list(y)[0]) for x, y in status_groups]
378 statuses = [(x, list(y)[0]) for x, y in status_groups]
380
379
381 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
380 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
382
381
383 if user:
382 if user:
384 return reviewers[0] if reviewers else reviewers
383 return reviewers[0] if reviewers else reviewers
385 else:
384 else:
386 return reviewers
385 return reviewers
387
386
388 def reviewers_statuses(self, pull_request, user=None):
387 def reviewers_statuses(self, pull_request, user=None):
389 _commit_statuses = self.get_statuses(
388 _commit_statuses = self.get_statuses(
390 pull_request.source_repo,
389 pull_request.source_repo,
391 pull_request=pull_request,
390 pull_request=pull_request,
392 with_revisions=True)
391 with_revisions=True)
393 reviewers = pull_request.get_pull_request_reviewers(
392 reviewers = pull_request.get_pull_request_reviewers(
394 role=PullRequestReviewers.ROLE_REVIEWER)
393 role=PullRequestReviewers.ROLE_REVIEWER)
395 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
394 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
396
395
397 def calculated_review_status(self, pull_request):
396 def calculated_review_status(self, pull_request):
398 """
397 """
399 calculate pull request status based on reviewers, it should be a list
398 calculate pull request status based on reviewers, it should be a list
400 of two element lists.
399 of two element lists.
401 """
400 """
402 reviewers = self.reviewers_statuses(pull_request)
401 reviewers = self.reviewers_statuses(pull_request)
403 return self.calculate_status(reviewers)
402 return self.calculate_status(reviewers)
@@ -1,855 +1,852 b''
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 """
19 """
22 comments model for RhodeCode
20 comments model for RhodeCode
23 """
21 """
24 import datetime
22 import datetime
25
23
26 import logging
24 import logging
27 import traceback
25 import traceback
28 import collections
26 import collections
29
27
30 from pyramid.threadlocal import get_current_registry, get_current_request
28 from pyramid.threadlocal import get_current_registry, get_current_request
31 from sqlalchemy.sql.expression import null
29 from sqlalchemy.sql.expression import null
32 from sqlalchemy.sql.functions import coalesce
30 from sqlalchemy.sql.functions import coalesce
33
31
34 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
32 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
35 from rhodecode.lib import audit_logger
33 from rhodecode.lib import audit_logger
36 from rhodecode.lib.exceptions import CommentVersionMismatch
34 from rhodecode.lib.exceptions import CommentVersionMismatch
37 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
35 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
38 from rhodecode.model import BaseModel
36 from rhodecode.model import BaseModel
39 from rhodecode.model.db import (
37 from rhodecode.model.db import (
40 false, true,
38 false, true,
41 ChangesetComment,
39 ChangesetComment,
42 User,
40 User,
43 Notification,
41 Notification,
44 PullRequest,
42 PullRequest,
45 AttributeDict,
43 AttributeDict,
46 ChangesetCommentHistory,
44 ChangesetCommentHistory,
47 )
45 )
48 from rhodecode.model.notification import NotificationModel
46 from rhodecode.model.notification import NotificationModel
49 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
50 from rhodecode.model.settings import VcsSettingsModel
48 from rhodecode.model.settings import VcsSettingsModel
51 from rhodecode.model.notification import EmailNotificationModel
49 from rhodecode.model.notification import EmailNotificationModel
52 from rhodecode.model.validation_schema.schemas import comment_schema
50 from rhodecode.model.validation_schema.schemas import comment_schema
53
51
54
52
55 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
56
54
57
55
58 class CommentsModel(BaseModel):
56 class CommentsModel(BaseModel):
59
57
60 cls = ChangesetComment
58 cls = ChangesetComment
61
59
62 DIFF_CONTEXT_BEFORE = 3
60 DIFF_CONTEXT_BEFORE = 3
63 DIFF_CONTEXT_AFTER = 3
61 DIFF_CONTEXT_AFTER = 3
64
62
65 def __get_commit_comment(self, changeset_comment):
63 def __get_commit_comment(self, changeset_comment):
66 return self._get_instance(ChangesetComment, changeset_comment)
64 return self._get_instance(ChangesetComment, changeset_comment)
67
65
68 def __get_pull_request(self, pull_request):
66 def __get_pull_request(self, pull_request):
69 return self._get_instance(PullRequest, pull_request)
67 return self._get_instance(PullRequest, pull_request)
70
68
71 def _extract_mentions(self, s):
69 def _extract_mentions(self, s):
72 user_objects = []
70 user_objects = []
73 for username in extract_mentioned_users(s):
71 for username in extract_mentioned_users(s):
74 user_obj = User.get_by_username(username, case_insensitive=True)
72 user_obj = User.get_by_username(username, case_insensitive=True)
75 if user_obj:
73 if user_obj:
76 user_objects.append(user_obj)
74 user_objects.append(user_obj)
77 return user_objects
75 return user_objects
78
76
79 def _get_renderer(self, global_renderer='rst', request=None):
77 def _get_renderer(self, global_renderer='rst', request=None):
80 request = request or get_current_request()
78 request = request or get_current_request()
81
79
82 try:
80 try:
83 global_renderer = request.call_context.visual.default_renderer
81 global_renderer = request.call_context.visual.default_renderer
84 except AttributeError:
82 except AttributeError:
85 log.debug("Renderer not set, falling back "
83 log.debug("Renderer not set, falling back "
86 "to default renderer '%s'", global_renderer)
84 "to default renderer '%s'", global_renderer)
87 except Exception:
85 except Exception:
88 log.error(traceback.format_exc())
86 log.error(traceback.format_exc())
89 return global_renderer
87 return global_renderer
90
88
91 def aggregate_comments(self, comments, versions, show_version, inline=False):
89 def aggregate_comments(self, comments, versions, show_version, inline=False):
92 # group by versions, and count until, and display objects
90 # group by versions, and count until, and display objects
93
91
94 comment_groups = collections.defaultdict(list)
92 comment_groups = collections.defaultdict(list)
95 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
93 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
96
94
97 def yield_comments(pos):
95 def yield_comments(pos):
98 for co in comment_groups[pos]:
96 yield from comment_groups[pos]
99 yield co
100
97
101 comment_versions = collections.defaultdict(
98 comment_versions = collections.defaultdict(
102 lambda: collections.defaultdict(list))
99 lambda: collections.defaultdict(list))
103 prev_prvid = -1
100 prev_prvid = -1
104 # fake last entry with None, to aggregate on "latest" version which
101 # fake last entry with None, to aggregate on "latest" version which
105 # doesn't have an pull_request_version_id
102 # doesn't have an pull_request_version_id
106 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
103 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
107 prvid = ver.pull_request_version_id
104 prvid = ver.pull_request_version_id
108 if prev_prvid == -1:
105 if prev_prvid == -1:
109 prev_prvid = prvid
106 prev_prvid = prvid
110
107
111 for co in yield_comments(prvid):
108 for co in yield_comments(prvid):
112 comment_versions[prvid]['at'].append(co)
109 comment_versions[prvid]['at'].append(co)
113
110
114 # save until
111 # save until
115 current = comment_versions[prvid]['at']
112 current = comment_versions[prvid]['at']
116 prev_until = comment_versions[prev_prvid]['until']
113 prev_until = comment_versions[prev_prvid]['until']
117 cur_until = prev_until + current
114 cur_until = prev_until + current
118 comment_versions[prvid]['until'].extend(cur_until)
115 comment_versions[prvid]['until'].extend(cur_until)
119
116
120 # save outdated
117 # save outdated
121 if inline:
118 if inline:
122 outdated = [x for x in cur_until
119 outdated = [x for x in cur_until
123 if x.outdated_at_version(show_version)]
120 if x.outdated_at_version(show_version)]
124 else:
121 else:
125 outdated = [x for x in cur_until
122 outdated = [x for x in cur_until
126 if x.older_than_version(show_version)]
123 if x.older_than_version(show_version)]
127 display = [x for x in cur_until if x not in outdated]
124 display = [x for x in cur_until if x not in outdated]
128
125
129 comment_versions[prvid]['outdated'] = outdated
126 comment_versions[prvid]['outdated'] = outdated
130 comment_versions[prvid]['display'] = display
127 comment_versions[prvid]['display'] = display
131
128
132 prev_prvid = prvid
129 prev_prvid = prvid
133
130
134 return comment_versions
131 return comment_versions
135
132
136 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
133 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
137 qry = Session().query(ChangesetComment) \
134 qry = Session().query(ChangesetComment) \
138 .filter(ChangesetComment.repo == repo)
135 .filter(ChangesetComment.repo == repo)
139
136
140 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
137 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
141 qry = qry.filter(ChangesetComment.comment_type == comment_type)
138 qry = qry.filter(ChangesetComment.comment_type == comment_type)
142
139
143 if user:
140 if user:
144 user = self._get_user(user)
141 user = self._get_user(user)
145 if user:
142 if user:
146 qry = qry.filter(ChangesetComment.user_id == user.user_id)
143 qry = qry.filter(ChangesetComment.user_id == user.user_id)
147
144
148 if commit_id:
145 if commit_id:
149 qry = qry.filter(ChangesetComment.revision == commit_id)
146 qry = qry.filter(ChangesetComment.revision == commit_id)
150
147
151 qry = qry.order_by(ChangesetComment.created_on)
148 qry = qry.order_by(ChangesetComment.created_on)
152 return qry.all()
149 return qry.all()
153
150
154 def get_repository_unresolved_todos(self, repo):
151 def get_repository_unresolved_todos(self, repo):
155 todos = Session().query(ChangesetComment) \
152 todos = Session().query(ChangesetComment) \
156 .filter(ChangesetComment.repo == repo) \
153 .filter(ChangesetComment.repo == repo) \
157 .filter(ChangesetComment.resolved_by == None) \
154 .filter(ChangesetComment.resolved_by == None) \
158 .filter(ChangesetComment.comment_type
155 .filter(ChangesetComment.comment_type
159 == ChangesetComment.COMMENT_TYPE_TODO)
156 == ChangesetComment.COMMENT_TYPE_TODO)
160 todos = todos.all()
157 todos = todos.all()
161
158
162 return todos
159 return todos
163
160
164 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
161 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
165
162
166 todos = Session().query(ChangesetComment) \
163 todos = Session().query(ChangesetComment) \
167 .filter(ChangesetComment.pull_request == pull_request) \
164 .filter(ChangesetComment.pull_request == pull_request) \
168 .filter(ChangesetComment.resolved_by == None) \
165 .filter(ChangesetComment.resolved_by == None) \
169 .filter(ChangesetComment.comment_type
166 .filter(ChangesetComment.comment_type
170 == ChangesetComment.COMMENT_TYPE_TODO)
167 == ChangesetComment.COMMENT_TYPE_TODO)
171
168
172 if not include_drafts:
169 if not include_drafts:
173 todos = todos.filter(ChangesetComment.draft == false())
170 todos = todos.filter(ChangesetComment.draft == false())
174
171
175 if not show_outdated:
172 if not show_outdated:
176 todos = todos.filter(
173 todos = todos.filter(
177 coalesce(ChangesetComment.display_state, '') !=
174 coalesce(ChangesetComment.display_state, '') !=
178 ChangesetComment.COMMENT_OUTDATED)
175 ChangesetComment.COMMENT_OUTDATED)
179
176
180 todos = todos.all()
177 todos = todos.all()
181
178
182 return todos
179 return todos
183
180
184 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
181 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
185
182
186 todos = Session().query(ChangesetComment) \
183 todos = Session().query(ChangesetComment) \
187 .filter(ChangesetComment.pull_request == pull_request) \
184 .filter(ChangesetComment.pull_request == pull_request) \
188 .filter(ChangesetComment.resolved_by != None) \
185 .filter(ChangesetComment.resolved_by != None) \
189 .filter(ChangesetComment.comment_type
186 .filter(ChangesetComment.comment_type
190 == ChangesetComment.COMMENT_TYPE_TODO)
187 == ChangesetComment.COMMENT_TYPE_TODO)
191
188
192 if not include_drafts:
189 if not include_drafts:
193 todos = todos.filter(ChangesetComment.draft == false())
190 todos = todos.filter(ChangesetComment.draft == false())
194
191
195 if not show_outdated:
192 if not show_outdated:
196 todos = todos.filter(
193 todos = todos.filter(
197 coalesce(ChangesetComment.display_state, '') !=
194 coalesce(ChangesetComment.display_state, '') !=
198 ChangesetComment.COMMENT_OUTDATED)
195 ChangesetComment.COMMENT_OUTDATED)
199
196
200 todos = todos.all()
197 todos = todos.all()
201
198
202 return todos
199 return todos
203
200
204 def get_pull_request_drafts(self, user_id, pull_request):
201 def get_pull_request_drafts(self, user_id, pull_request):
205 drafts = Session().query(ChangesetComment) \
202 drafts = Session().query(ChangesetComment) \
206 .filter(ChangesetComment.pull_request == pull_request) \
203 .filter(ChangesetComment.pull_request == pull_request) \
207 .filter(ChangesetComment.user_id == user_id) \
204 .filter(ChangesetComment.user_id == user_id) \
208 .filter(ChangesetComment.draft == true())
205 .filter(ChangesetComment.draft == true())
209 return drafts.all()
206 return drafts.all()
210
207
211 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
208 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
212
209
213 todos = Session().query(ChangesetComment) \
210 todos = Session().query(ChangesetComment) \
214 .filter(ChangesetComment.revision == commit_id) \
211 .filter(ChangesetComment.revision == commit_id) \
215 .filter(ChangesetComment.resolved_by == None) \
212 .filter(ChangesetComment.resolved_by == None) \
216 .filter(ChangesetComment.comment_type
213 .filter(ChangesetComment.comment_type
217 == ChangesetComment.COMMENT_TYPE_TODO)
214 == ChangesetComment.COMMENT_TYPE_TODO)
218
215
219 if not include_drafts:
216 if not include_drafts:
220 todos = todos.filter(ChangesetComment.draft == false())
217 todos = todos.filter(ChangesetComment.draft == false())
221
218
222 if not show_outdated:
219 if not show_outdated:
223 todos = todos.filter(
220 todos = todos.filter(
224 coalesce(ChangesetComment.display_state, '') !=
221 coalesce(ChangesetComment.display_state, '') !=
225 ChangesetComment.COMMENT_OUTDATED)
222 ChangesetComment.COMMENT_OUTDATED)
226
223
227 todos = todos.all()
224 todos = todos.all()
228
225
229 return todos
226 return todos
230
227
231 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
228 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
232
229
233 todos = Session().query(ChangesetComment) \
230 todos = Session().query(ChangesetComment) \
234 .filter(ChangesetComment.revision == commit_id) \
231 .filter(ChangesetComment.revision == commit_id) \
235 .filter(ChangesetComment.resolved_by != None) \
232 .filter(ChangesetComment.resolved_by != None) \
236 .filter(ChangesetComment.comment_type
233 .filter(ChangesetComment.comment_type
237 == ChangesetComment.COMMENT_TYPE_TODO)
234 == ChangesetComment.COMMENT_TYPE_TODO)
238
235
239 if not include_drafts:
236 if not include_drafts:
240 todos = todos.filter(ChangesetComment.draft == false())
237 todos = todos.filter(ChangesetComment.draft == false())
241
238
242 if not show_outdated:
239 if not show_outdated:
243 todos = todos.filter(
240 todos = todos.filter(
244 coalesce(ChangesetComment.display_state, '') !=
241 coalesce(ChangesetComment.display_state, '') !=
245 ChangesetComment.COMMENT_OUTDATED)
242 ChangesetComment.COMMENT_OUTDATED)
246
243
247 todos = todos.all()
244 todos = todos.all()
248
245
249 return todos
246 return todos
250
247
251 def get_commit_inline_comments(self, commit_id, include_drafts=True):
248 def get_commit_inline_comments(self, commit_id, include_drafts=True):
252 inline_comments = Session().query(ChangesetComment) \
249 inline_comments = Session().query(ChangesetComment) \
253 .filter(ChangesetComment.line_no != None) \
250 .filter(ChangesetComment.line_no != None) \
254 .filter(ChangesetComment.f_path != None) \
251 .filter(ChangesetComment.f_path != None) \
255 .filter(ChangesetComment.revision == commit_id)
252 .filter(ChangesetComment.revision == commit_id)
256
253
257 if not include_drafts:
254 if not include_drafts:
258 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
255 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
259
256
260 inline_comments = inline_comments.all()
257 inline_comments = inline_comments.all()
261 return inline_comments
258 return inline_comments
262
259
263 def _log_audit_action(self, action, action_data, auth_user, comment):
260 def _log_audit_action(self, action, action_data, auth_user, comment):
264 audit_logger.store(
261 audit_logger.store(
265 action=action,
262 action=action,
266 action_data=action_data,
263 action_data=action_data,
267 user=auth_user,
264 user=auth_user,
268 repo=comment.repo)
265 repo=comment.repo)
269
266
270 def create(self, text, repo, user, commit_id=None, pull_request=None,
267 def create(self, text, repo, user, commit_id=None, pull_request=None,
271 f_path=None, line_no=None, status_change=None,
268 f_path=None, line_no=None, status_change=None,
272 status_change_type=None, comment_type=None, is_draft=False,
269 status_change_type=None, comment_type=None, is_draft=False,
273 resolves_comment_id=None, closing_pr=False, send_email=True,
270 resolves_comment_id=None, closing_pr=False, send_email=True,
274 renderer=None, auth_user=None, extra_recipients=None):
271 renderer=None, auth_user=None, extra_recipients=None):
275 """
272 """
276 Creates new comment for commit or pull request.
273 Creates new comment for commit or pull request.
277 IF status_change is not none this comment is associated with a
274 IF status_change is not none this comment is associated with a
278 status change of commit or commit associated with pull request
275 status change of commit or commit associated with pull request
279
276
280 :param text:
277 :param text:
281 :param repo:
278 :param repo:
282 :param user:
279 :param user:
283 :param commit_id:
280 :param commit_id:
284 :param pull_request:
281 :param pull_request:
285 :param f_path:
282 :param f_path:
286 :param line_no:
283 :param line_no:
287 :param status_change: Label for status change
284 :param status_change: Label for status change
288 :param comment_type: Type of comment
285 :param comment_type: Type of comment
289 :param is_draft: is comment a draft only
286 :param is_draft: is comment a draft only
290 :param resolves_comment_id: id of comment which this one will resolve
287 :param resolves_comment_id: id of comment which this one will resolve
291 :param status_change_type: type of status change
288 :param status_change_type: type of status change
292 :param closing_pr:
289 :param closing_pr:
293 :param send_email:
290 :param send_email:
294 :param renderer: pick renderer for this comment
291 :param renderer: pick renderer for this comment
295 :param auth_user: current authenticated user calling this method
292 :param auth_user: current authenticated user calling this method
296 :param extra_recipients: list of extra users to be added to recipients
293 :param extra_recipients: list of extra users to be added to recipients
297 """
294 """
298
295
299 request = get_current_request()
296 request = get_current_request()
300 _ = request.translate
297 _ = request.translate
301
298
302 if not renderer:
299 if not renderer:
303 renderer = self._get_renderer(request=request)
300 renderer = self._get_renderer(request=request)
304
301
305 repo = self._get_repo(repo)
302 repo = self._get_repo(repo)
306 user = self._get_user(user)
303 user = self._get_user(user)
307 auth_user = auth_user or user
304 auth_user = auth_user or user
308
305
309 schema = comment_schema.CommentSchema()
306 schema = comment_schema.CommentSchema()
310 validated_kwargs = schema.deserialize(dict(
307 validated_kwargs = schema.deserialize(dict(
311 comment_body=text,
308 comment_body=text,
312 comment_type=comment_type,
309 comment_type=comment_type,
313 is_draft=is_draft,
310 is_draft=is_draft,
314 comment_file=f_path,
311 comment_file=f_path,
315 comment_line=line_no,
312 comment_line=line_no,
316 renderer_type=renderer,
313 renderer_type=renderer,
317 status_change=status_change_type,
314 status_change=status_change_type,
318 resolves_comment_id=resolves_comment_id,
315 resolves_comment_id=resolves_comment_id,
319 repo=repo.repo_id,
316 repo=repo.repo_id,
320 user=user.user_id,
317 user=user.user_id,
321 ))
318 ))
322
319
323 is_draft = validated_kwargs['is_draft']
320 is_draft = validated_kwargs['is_draft']
324
321
325 comment = ChangesetComment()
322 comment = ChangesetComment()
326 comment.renderer = validated_kwargs['renderer_type']
323 comment.renderer = validated_kwargs['renderer_type']
327 comment.text = validated_kwargs['comment_body']
324 comment.text = validated_kwargs['comment_body']
328 comment.f_path = validated_kwargs['comment_file']
325 comment.f_path = validated_kwargs['comment_file']
329 comment.line_no = validated_kwargs['comment_line']
326 comment.line_no = validated_kwargs['comment_line']
330 comment.comment_type = validated_kwargs['comment_type']
327 comment.comment_type = validated_kwargs['comment_type']
331 comment.draft = is_draft
328 comment.draft = is_draft
332
329
333 comment.repo = repo
330 comment.repo = repo
334 comment.author = user
331 comment.author = user
335 resolved_comment = self.__get_commit_comment(
332 resolved_comment = self.__get_commit_comment(
336 validated_kwargs['resolves_comment_id'])
333 validated_kwargs['resolves_comment_id'])
337
334
338 # check if the comment actually belongs to this PR
335 # check if the comment actually belongs to this PR
339 if resolved_comment and resolved_comment.pull_request and \
336 if resolved_comment and resolved_comment.pull_request and \
340 resolved_comment.pull_request != pull_request:
337 resolved_comment.pull_request != pull_request:
341 log.warning('Comment tried to resolved unrelated todo comment: %s',
338 log.warning('Comment tried to resolved unrelated todo comment: %s',
342 resolved_comment)
339 resolved_comment)
343 # comment not bound to this pull request, forbid
340 # comment not bound to this pull request, forbid
344 resolved_comment = None
341 resolved_comment = None
345
342
346 elif resolved_comment and resolved_comment.repo and \
343 elif resolved_comment and resolved_comment.repo and \
347 resolved_comment.repo != repo:
344 resolved_comment.repo != repo:
348 log.warning('Comment tried to resolved unrelated todo comment: %s',
345 log.warning('Comment tried to resolved unrelated todo comment: %s',
349 resolved_comment)
346 resolved_comment)
350 # comment not bound to this repo, forbid
347 # comment not bound to this repo, forbid
351 resolved_comment = None
348 resolved_comment = None
352
349
353 if resolved_comment and resolved_comment.resolved_by:
350 if resolved_comment and resolved_comment.resolved_by:
354 # if this comment is already resolved, don't mark it again!
351 # if this comment is already resolved, don't mark it again!
355 resolved_comment = None
352 resolved_comment = None
356
353
357 comment.resolved_comment = resolved_comment
354 comment.resolved_comment = resolved_comment
358
355
359 pull_request_id = pull_request
356 pull_request_id = pull_request
360
357
361 commit_obj = None
358 commit_obj = None
362 pull_request_obj = None
359 pull_request_obj = None
363
360
364 if commit_id:
361 if commit_id:
365 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
362 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
366 # do a lookup, so we don't pass something bad here
363 # do a lookup, so we don't pass something bad here
367 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
364 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
368 comment.revision = commit_obj.raw_id
365 comment.revision = commit_obj.raw_id
369
366
370 elif pull_request_id:
367 elif pull_request_id:
371 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
368 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
372 pull_request_obj = self.__get_pull_request(pull_request_id)
369 pull_request_obj = self.__get_pull_request(pull_request_id)
373 comment.pull_request = pull_request_obj
370 comment.pull_request = pull_request_obj
374 else:
371 else:
375 raise Exception('Please specify commit or pull_request_id')
372 raise Exception('Please specify commit or pull_request_id')
376
373
377 Session().add(comment)
374 Session().add(comment)
378 Session().flush()
375 Session().flush()
379 kwargs = {
376 kwargs = {
380 'user': user,
377 'user': user,
381 'renderer_type': renderer,
378 'renderer_type': renderer,
382 'repo_name': repo.repo_name,
379 'repo_name': repo.repo_name,
383 'status_change': status_change,
380 'status_change': status_change,
384 'status_change_type': status_change_type,
381 'status_change_type': status_change_type,
385 'comment_body': text,
382 'comment_body': text,
386 'comment_file': f_path,
383 'comment_file': f_path,
387 'comment_line': line_no,
384 'comment_line': line_no,
388 'comment_type': comment_type or 'note',
385 'comment_type': comment_type or 'note',
389 'comment_id': comment.comment_id
386 'comment_id': comment.comment_id
390 }
387 }
391
388
392 if commit_obj:
389 if commit_obj:
393 recipients = ChangesetComment.get_users(
390 recipients = ChangesetComment.get_users(
394 revision=commit_obj.raw_id)
391 revision=commit_obj.raw_id)
395 # add commit author if it's in RhodeCode system
392 # add commit author if it's in RhodeCode system
396 cs_author = User.get_from_cs_author(commit_obj.author)
393 cs_author = User.get_from_cs_author(commit_obj.author)
397 if not cs_author:
394 if not cs_author:
398 # use repo owner if we cannot extract the author correctly
395 # use repo owner if we cannot extract the author correctly
399 cs_author = repo.user
396 cs_author = repo.user
400 recipients += [cs_author]
397 recipients += [cs_author]
401
398
402 commit_comment_url = self.get_url(comment, request=request)
399 commit_comment_url = self.get_url(comment, request=request)
403 commit_comment_reply_url = self.get_url(
400 commit_comment_reply_url = self.get_url(
404 comment, request=request,
401 comment, request=request,
405 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
402 anchor=f'comment-{comment.comment_id}/?/ReplyToComment')
406
403
407 target_repo_url = h.link_to(
404 target_repo_url = h.link_to(
408 repo.repo_name,
405 repo.repo_name,
409 h.route_url('repo_summary', repo_name=repo.repo_name))
406 h.route_url('repo_summary', repo_name=repo.repo_name))
410
407
411 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
408 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
412 commit_id=commit_id)
409 commit_id=commit_id)
413
410
414 # commit specifics
411 # commit specifics
415 kwargs.update({
412 kwargs.update({
416 'commit': commit_obj,
413 'commit': commit_obj,
417 'commit_message': commit_obj.message,
414 'commit_message': commit_obj.message,
418 'commit_target_repo_url': target_repo_url,
415 'commit_target_repo_url': target_repo_url,
419 'commit_comment_url': commit_comment_url,
416 'commit_comment_url': commit_comment_url,
420 'commit_comment_reply_url': commit_comment_reply_url,
417 'commit_comment_reply_url': commit_comment_reply_url,
421 'commit_url': commit_url,
418 'commit_url': commit_url,
422 'thread_ids': [commit_url, commit_comment_url],
419 'thread_ids': [commit_url, commit_comment_url],
423 })
420 })
424
421
425 elif pull_request_obj:
422 elif pull_request_obj:
426 # get the current participants of this pull request
423 # get the current participants of this pull request
427 recipients = ChangesetComment.get_users(
424 recipients = ChangesetComment.get_users(
428 pull_request_id=pull_request_obj.pull_request_id)
425 pull_request_id=pull_request_obj.pull_request_id)
429 # add pull request author
426 # add pull request author
430 recipients += [pull_request_obj.author]
427 recipients += [pull_request_obj.author]
431
428
432 # add the reviewers to notification
429 # add the reviewers to notification
433 recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]
430 recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]
434
431
435 pr_target_repo = pull_request_obj.target_repo
432 pr_target_repo = pull_request_obj.target_repo
436 pr_source_repo = pull_request_obj.source_repo
433 pr_source_repo = pull_request_obj.source_repo
437
434
438 pr_comment_url = self.get_url(comment, request=request)
435 pr_comment_url = self.get_url(comment, request=request)
439 pr_comment_reply_url = self.get_url(
436 pr_comment_reply_url = self.get_url(
440 comment, request=request,
437 comment, request=request,
441 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
438 anchor=f'comment-{comment.comment_id}/?/ReplyToComment')
442
439
443 pr_url = h.route_url(
440 pr_url = h.route_url(
444 'pullrequest_show',
441 'pullrequest_show',
445 repo_name=pr_target_repo.repo_name,
442 repo_name=pr_target_repo.repo_name,
446 pull_request_id=pull_request_obj.pull_request_id, )
443 pull_request_id=pull_request_obj.pull_request_id, )
447
444
448 # set some variables for email notification
445 # set some variables for email notification
449 pr_target_repo_url = h.route_url(
446 pr_target_repo_url = h.route_url(
450 'repo_summary', repo_name=pr_target_repo.repo_name)
447 'repo_summary', repo_name=pr_target_repo.repo_name)
451
448
452 pr_source_repo_url = h.route_url(
449 pr_source_repo_url = h.route_url(
453 'repo_summary', repo_name=pr_source_repo.repo_name)
450 'repo_summary', repo_name=pr_source_repo.repo_name)
454
451
455 # pull request specifics
452 # pull request specifics
456 kwargs.update({
453 kwargs.update({
457 'pull_request': pull_request_obj,
454 'pull_request': pull_request_obj,
458 'pr_id': pull_request_obj.pull_request_id,
455 'pr_id': pull_request_obj.pull_request_id,
459 'pull_request_url': pr_url,
456 'pull_request_url': pr_url,
460 'pull_request_target_repo': pr_target_repo,
457 'pull_request_target_repo': pr_target_repo,
461 'pull_request_target_repo_url': pr_target_repo_url,
458 'pull_request_target_repo_url': pr_target_repo_url,
462 'pull_request_source_repo': pr_source_repo,
459 'pull_request_source_repo': pr_source_repo,
463 'pull_request_source_repo_url': pr_source_repo_url,
460 'pull_request_source_repo_url': pr_source_repo_url,
464 'pr_comment_url': pr_comment_url,
461 'pr_comment_url': pr_comment_url,
465 'pr_comment_reply_url': pr_comment_reply_url,
462 'pr_comment_reply_url': pr_comment_reply_url,
466 'pr_closing': closing_pr,
463 'pr_closing': closing_pr,
467 'thread_ids': [pr_url, pr_comment_url],
464 'thread_ids': [pr_url, pr_comment_url],
468 })
465 })
469
466
470 if send_email:
467 if send_email:
471 recipients += [self._get_user(u) for u in (extra_recipients or [])]
468 recipients += [self._get_user(u) for u in (extra_recipients or [])]
472
469
473 mention_recipients = set(
470 mention_recipients = set(
474 self._extract_mentions(text)).difference(recipients)
471 self._extract_mentions(text)).difference(recipients)
475
472
476 # create notification objects, and emails
473 # create notification objects, and emails
477 NotificationModel().create(
474 NotificationModel().create(
478 created_by=user,
475 created_by=user,
479 notification_subject='', # Filled in based on the notification_type
476 notification_subject='', # Filled in based on the notification_type
480 notification_body='', # Filled in based on the notification_type
477 notification_body='', # Filled in based on the notification_type
481 notification_type=notification_type,
478 notification_type=notification_type,
482 recipients=recipients,
479 recipients=recipients,
483 mention_recipients=mention_recipients,
480 mention_recipients=mention_recipients,
484 email_kwargs=kwargs,
481 email_kwargs=kwargs,
485 )
482 )
486
483
487 Session().flush()
484 Session().flush()
488 if comment.pull_request:
485 if comment.pull_request:
489 action = 'repo.pull_request.comment.create'
486 action = 'repo.pull_request.comment.create'
490 else:
487 else:
491 action = 'repo.commit.comment.create'
488 action = 'repo.commit.comment.create'
492
489
493 if not is_draft:
490 if not is_draft:
494 comment_data = comment.get_api_data()
491 comment_data = comment.get_api_data()
495
492
496 self._log_audit_action(
493 self._log_audit_action(
497 action, {'data': comment_data}, auth_user, comment)
494 action, {'data': comment_data}, auth_user, comment)
498
495
499 return comment
496 return comment
500
497
501 def edit(self, comment_id, text, auth_user, version):
498 def edit(self, comment_id, text, auth_user, version):
502 """
499 """
503 Change existing comment for commit or pull request.
500 Change existing comment for commit or pull request.
504
501
505 :param comment_id:
502 :param comment_id:
506 :param text:
503 :param text:
507 :param auth_user: current authenticated user calling this method
504 :param auth_user: current authenticated user calling this method
508 :param version: last comment version
505 :param version: last comment version
509 """
506 """
510 if not text:
507 if not text:
511 log.warning('Missing text for comment, skipping...')
508 log.warning('Missing text for comment, skipping...')
512 return
509 return
513
510
514 comment = ChangesetComment.get(comment_id)
511 comment = ChangesetComment.get(comment_id)
515 old_comment_text = comment.text
512 old_comment_text = comment.text
516 comment.text = text
513 comment.text = text
517 comment.modified_at = datetime.datetime.now()
514 comment.modified_at = datetime.datetime.now()
518 version = safe_int(version)
515 version = safe_int(version)
519
516
520 # NOTE(marcink): this returns initial comment + edits, so v2 from ui
517 # NOTE(marcink): this returns initial comment + edits, so v2 from ui
521 # would return 3 here
518 # would return 3 here
522 comment_version = ChangesetCommentHistory.get_version(comment_id)
519 comment_version = ChangesetCommentHistory.get_version(comment_id)
523
520
524 if isinstance(version, int) and (comment_version - version) != 1:
521 if isinstance(version, int) and (comment_version - version) != 1:
525 log.warning(
522 log.warning(
526 'Version mismatch comment_version {} submitted {}, skipping'.format(
523 'Version mismatch comment_version {} submitted {}, skipping'.format(
527 comment_version-1, # -1 since note above
524 comment_version-1, # -1 since note above
528 version
525 version
529 )
526 )
530 )
527 )
531 raise CommentVersionMismatch()
528 raise CommentVersionMismatch()
532
529
533 comment_history = ChangesetCommentHistory()
530 comment_history = ChangesetCommentHistory()
534 comment_history.comment_id = comment_id
531 comment_history.comment_id = comment_id
535 comment_history.version = comment_version
532 comment_history.version = comment_version
536 comment_history.created_by_user_id = auth_user.user_id
533 comment_history.created_by_user_id = auth_user.user_id
537 comment_history.text = old_comment_text
534 comment_history.text = old_comment_text
538 # TODO add email notification
535 # TODO add email notification
539 Session().add(comment_history)
536 Session().add(comment_history)
540 Session().add(comment)
537 Session().add(comment)
541 Session().flush()
538 Session().flush()
542
539
543 if comment.pull_request:
540 if comment.pull_request:
544 action = 'repo.pull_request.comment.edit'
541 action = 'repo.pull_request.comment.edit'
545 else:
542 else:
546 action = 'repo.commit.comment.edit'
543 action = 'repo.commit.comment.edit'
547
544
548 comment_data = comment.get_api_data()
545 comment_data = comment.get_api_data()
549 comment_data['old_comment_text'] = old_comment_text
546 comment_data['old_comment_text'] = old_comment_text
550 self._log_audit_action(
547 self._log_audit_action(
551 action, {'data': comment_data}, auth_user, comment)
548 action, {'data': comment_data}, auth_user, comment)
552
549
553 return comment_history
550 return comment_history
554
551
555 def delete(self, comment, auth_user):
552 def delete(self, comment, auth_user):
556 """
553 """
557 Deletes given comment
554 Deletes given comment
558 """
555 """
559 comment = self.__get_commit_comment(comment)
556 comment = self.__get_commit_comment(comment)
560 old_data = comment.get_api_data()
557 old_data = comment.get_api_data()
561 Session().delete(comment)
558 Session().delete(comment)
562
559
563 if comment.pull_request:
560 if comment.pull_request:
564 action = 'repo.pull_request.comment.delete'
561 action = 'repo.pull_request.comment.delete'
565 else:
562 else:
566 action = 'repo.commit.comment.delete'
563 action = 'repo.commit.comment.delete'
567
564
568 self._log_audit_action(
565 self._log_audit_action(
569 action, {'old_data': old_data}, auth_user, comment)
566 action, {'old_data': old_data}, auth_user, comment)
570
567
571 return comment
568 return comment
572
569
573 def get_all_comments(self, repo_id, revision=None, pull_request=None,
570 def get_all_comments(self, repo_id, revision=None, pull_request=None,
574 include_drafts=True, count_only=False):
571 include_drafts=True, count_only=False):
575 q = ChangesetComment.query()\
572 q = ChangesetComment.query()\
576 .filter(ChangesetComment.repo_id == repo_id)
573 .filter(ChangesetComment.repo_id == repo_id)
577 if revision:
574 if revision:
578 q = q.filter(ChangesetComment.revision == revision)
575 q = q.filter(ChangesetComment.revision == revision)
579 elif pull_request:
576 elif pull_request:
580 pull_request = self.__get_pull_request(pull_request)
577 pull_request = self.__get_pull_request(pull_request)
581 q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
578 q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
582 else:
579 else:
583 raise Exception('Please specify commit or pull_request')
580 raise Exception('Please specify commit or pull_request')
584 if not include_drafts:
581 if not include_drafts:
585 q = q.filter(ChangesetComment.draft == false())
582 q = q.filter(ChangesetComment.draft == false())
586 q = q.order_by(ChangesetComment.created_on)
583 q = q.order_by(ChangesetComment.created_on)
587 if count_only:
584 if count_only:
588 return q.count()
585 return q.count()
589
586
590 return q.all()
587 return q.all()
591
588
592 def get_url(self, comment, request=None, permalink=False, anchor=None):
589 def get_url(self, comment, request=None, permalink=False, anchor=None):
593 if not request:
590 if not request:
594 request = get_current_request()
591 request = get_current_request()
595
592
596 comment = self.__get_commit_comment(comment)
593 comment = self.__get_commit_comment(comment)
597 if anchor is None:
594 if anchor is None:
598 anchor = 'comment-{}'.format(comment.comment_id)
595 anchor = f'comment-{comment.comment_id}'
599
596
600 if comment.pull_request:
597 if comment.pull_request:
601 pull_request = comment.pull_request
598 pull_request = comment.pull_request
602 if permalink:
599 if permalink:
603 return request.route_url(
600 return request.route_url(
604 'pull_requests_global',
601 'pull_requests_global',
605 pull_request_id=pull_request.pull_request_id,
602 pull_request_id=pull_request.pull_request_id,
606 _anchor=anchor)
603 _anchor=anchor)
607 else:
604 else:
608 return request.route_url(
605 return request.route_url(
609 'pullrequest_show',
606 'pullrequest_show',
610 repo_name=safe_str(pull_request.target_repo.repo_name),
607 repo_name=safe_str(pull_request.target_repo.repo_name),
611 pull_request_id=pull_request.pull_request_id,
608 pull_request_id=pull_request.pull_request_id,
612 _anchor=anchor)
609 _anchor=anchor)
613
610
614 else:
611 else:
615 repo = comment.repo
612 repo = comment.repo
616 commit_id = comment.revision
613 commit_id = comment.revision
617
614
618 if permalink:
615 if permalink:
619 return request.route_url(
616 return request.route_url(
620 'repo_commit', repo_name=safe_str(repo.repo_id),
617 'repo_commit', repo_name=safe_str(repo.repo_id),
621 commit_id=commit_id,
618 commit_id=commit_id,
622 _anchor=anchor)
619 _anchor=anchor)
623
620
624 else:
621 else:
625 return request.route_url(
622 return request.route_url(
626 'repo_commit', repo_name=safe_str(repo.repo_name),
623 'repo_commit', repo_name=safe_str(repo.repo_name),
627 commit_id=commit_id,
624 commit_id=commit_id,
628 _anchor=anchor)
625 _anchor=anchor)
629
626
630 def get_comments(self, repo_id, revision=None, pull_request=None):
627 def get_comments(self, repo_id, revision=None, pull_request=None):
631 """
628 """
632 Gets main comments based on revision or pull_request_id
629 Gets main comments based on revision or pull_request_id
633
630
634 :param repo_id:
631 :param repo_id:
635 :param revision:
632 :param revision:
636 :param pull_request:
633 :param pull_request:
637 """
634 """
638
635
639 q = ChangesetComment.query()\
636 q = ChangesetComment.query()\
640 .filter(ChangesetComment.repo_id == repo_id)\
637 .filter(ChangesetComment.repo_id == repo_id)\
641 .filter(ChangesetComment.line_no == None)\
638 .filter(ChangesetComment.line_no == None)\
642 .filter(ChangesetComment.f_path == None)
639 .filter(ChangesetComment.f_path == None)
643 if revision:
640 if revision:
644 q = q.filter(ChangesetComment.revision == revision)
641 q = q.filter(ChangesetComment.revision == revision)
645 elif pull_request:
642 elif pull_request:
646 pull_request = self.__get_pull_request(pull_request)
643 pull_request = self.__get_pull_request(pull_request)
647 q = q.filter(ChangesetComment.pull_request == pull_request)
644 q = q.filter(ChangesetComment.pull_request == pull_request)
648 else:
645 else:
649 raise Exception('Please specify commit or pull_request')
646 raise Exception('Please specify commit or pull_request')
650 q = q.order_by(ChangesetComment.created_on)
647 q = q.order_by(ChangesetComment.created_on)
651 return q.all()
648 return q.all()
652
649
653 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
650 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
654 q = self._get_inline_comments_query(repo_id, revision, pull_request)
651 q = self._get_inline_comments_query(repo_id, revision, pull_request)
655 return self._group_comments_by_path_and_line_number(q)
652 return self._group_comments_by_path_and_line_number(q)
656
653
657 def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
654 def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
658 version=None):
655 version=None):
659 inline_comms = []
656 inline_comms = []
660 for fname, per_line_comments in inline_comments.items():
657 for fname, per_line_comments in inline_comments.items():
661 for lno, comments in per_line_comments.items():
658 for lno, comments in per_line_comments.items():
662 for comm in comments:
659 for comm in comments:
663 if not comm.outdated_at_version(version) and skip_outdated:
660 if not comm.outdated_at_version(version) and skip_outdated:
664 inline_comms.append(comm)
661 inline_comms.append(comm)
665
662
666 return inline_comms
663 return inline_comms
667
664
668 def get_outdated_comments(self, repo_id, pull_request):
665 def get_outdated_comments(self, repo_id, pull_request):
669 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
666 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
670 # of a pull request.
667 # of a pull request.
671 q = self._all_inline_comments_of_pull_request(pull_request)
668 q = self._all_inline_comments_of_pull_request(pull_request)
672 q = q.filter(
669 q = q.filter(
673 ChangesetComment.display_state ==
670 ChangesetComment.display_state ==
674 ChangesetComment.COMMENT_OUTDATED
671 ChangesetComment.COMMENT_OUTDATED
675 ).order_by(ChangesetComment.comment_id.asc())
672 ).order_by(ChangesetComment.comment_id.asc())
676
673
677 return self._group_comments_by_path_and_line_number(q)
674 return self._group_comments_by_path_and_line_number(q)
678
675
679 def _get_inline_comments_query(self, repo_id, revision, pull_request):
676 def _get_inline_comments_query(self, repo_id, revision, pull_request):
680 # TODO: johbo: Split this into two methods: One for PR and one for
677 # TODO: johbo: Split this into two methods: One for PR and one for
681 # commit.
678 # commit.
682 if revision:
679 if revision:
683 q = Session().query(ChangesetComment).filter(
680 q = Session().query(ChangesetComment).filter(
684 ChangesetComment.repo_id == repo_id,
681 ChangesetComment.repo_id == repo_id,
685 ChangesetComment.line_no != null(),
682 ChangesetComment.line_no != null(),
686 ChangesetComment.f_path != null(),
683 ChangesetComment.f_path != null(),
687 ChangesetComment.revision == revision)
684 ChangesetComment.revision == revision)
688
685
689 elif pull_request:
686 elif pull_request:
690 pull_request = self.__get_pull_request(pull_request)
687 pull_request = self.__get_pull_request(pull_request)
691 if not CommentsModel.use_outdated_comments(pull_request):
688 if not CommentsModel.use_outdated_comments(pull_request):
692 q = self._visible_inline_comments_of_pull_request(pull_request)
689 q = self._visible_inline_comments_of_pull_request(pull_request)
693 else:
690 else:
694 q = self._all_inline_comments_of_pull_request(pull_request)
691 q = self._all_inline_comments_of_pull_request(pull_request)
695
692
696 else:
693 else:
697 raise Exception('Please specify commit or pull_request_id')
694 raise Exception('Please specify commit or pull_request_id')
698 q = q.order_by(ChangesetComment.comment_id.asc())
695 q = q.order_by(ChangesetComment.comment_id.asc())
699 return q
696 return q
700
697
701 def _group_comments_by_path_and_line_number(self, q):
698 def _group_comments_by_path_and_line_number(self, q):
702 comments = q.all()
699 comments = q.all()
703 paths = collections.defaultdict(lambda: collections.defaultdict(list))
700 paths = collections.defaultdict(lambda: collections.defaultdict(list))
704 for co in comments:
701 for co in comments:
705 paths[co.f_path][co.line_no].append(co)
702 paths[co.f_path][co.line_no].append(co)
706 return paths
703 return paths
707
704
708 @classmethod
705 @classmethod
709 def needed_extra_diff_context(cls):
706 def needed_extra_diff_context(cls):
710 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
707 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
711
708
712 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
709 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
713 if not CommentsModel.use_outdated_comments(pull_request):
710 if not CommentsModel.use_outdated_comments(pull_request):
714 return
711 return
715
712
716 comments = self._visible_inline_comments_of_pull_request(pull_request)
713 comments = self._visible_inline_comments_of_pull_request(pull_request)
717 comments_to_outdate = comments.all()
714 comments_to_outdate = comments.all()
718
715
719 for comment in comments_to_outdate:
716 for comment in comments_to_outdate:
720 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
717 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
721
718
722 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
719 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
723 diff_line = _parse_comment_line_number(comment.line_no)
720 diff_line = _parse_comment_line_number(comment.line_no)
724
721
725 try:
722 try:
726 old_context = old_diff_proc.get_context_of_line(
723 old_context = old_diff_proc.get_context_of_line(
727 path=comment.f_path, diff_line=diff_line)
724 path=comment.f_path, diff_line=diff_line)
728 new_context = new_diff_proc.get_context_of_line(
725 new_context = new_diff_proc.get_context_of_line(
729 path=comment.f_path, diff_line=diff_line)
726 path=comment.f_path, diff_line=diff_line)
730 except (diffs.LineNotInDiffException,
727 except (diffs.LineNotInDiffException,
731 diffs.FileNotInDiffException):
728 diffs.FileNotInDiffException):
732 if not comment.draft:
729 if not comment.draft:
733 comment.display_state = ChangesetComment.COMMENT_OUTDATED
730 comment.display_state = ChangesetComment.COMMENT_OUTDATED
734 return
731 return
735
732
736 if old_context == new_context:
733 if old_context == new_context:
737 return
734 return
738
735
739 if self._should_relocate_diff_line(diff_line):
736 if self._should_relocate_diff_line(diff_line):
740 new_diff_lines = new_diff_proc.find_context(
737 new_diff_lines = new_diff_proc.find_context(
741 path=comment.f_path, context=old_context,
738 path=comment.f_path, context=old_context,
742 offset=self.DIFF_CONTEXT_BEFORE)
739 offset=self.DIFF_CONTEXT_BEFORE)
743 if not new_diff_lines and not comment.draft:
740 if not new_diff_lines and not comment.draft:
744 comment.display_state = ChangesetComment.COMMENT_OUTDATED
741 comment.display_state = ChangesetComment.COMMENT_OUTDATED
745 else:
742 else:
746 new_diff_line = self._choose_closest_diff_line(
743 new_diff_line = self._choose_closest_diff_line(
747 diff_line, new_diff_lines)
744 diff_line, new_diff_lines)
748 comment.line_no = _diff_to_comment_line_number(new_diff_line)
745 comment.line_no = _diff_to_comment_line_number(new_diff_line)
749 else:
746 else:
750 if not comment.draft:
747 if not comment.draft:
751 comment.display_state = ChangesetComment.COMMENT_OUTDATED
748 comment.display_state = ChangesetComment.COMMENT_OUTDATED
752
749
753 def _should_relocate_diff_line(self, diff_line):
750 def _should_relocate_diff_line(self, diff_line):
754 """
751 """
755 Checks if relocation shall be tried for the given `diff_line`.
752 Checks if relocation shall be tried for the given `diff_line`.
756
753
757 If a comment points into the first lines, then we can have a situation
754 If a comment points into the first lines, then we can have a situation
758 that after an update another line has been added on top. In this case
755 that after an update another line has been added on top. In this case
759 we would find the context still and move the comment around. This
756 we would find the context still and move the comment around. This
760 would be wrong.
757 would be wrong.
761 """
758 """
762 should_relocate = (
759 should_relocate = (
763 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
760 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
764 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
761 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
765 return should_relocate
762 return should_relocate
766
763
767 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
764 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
768 candidate = new_diff_lines[0]
765 candidate = new_diff_lines[0]
769 best_delta = _diff_line_delta(diff_line, candidate)
766 best_delta = _diff_line_delta(diff_line, candidate)
770 for new_diff_line in new_diff_lines[1:]:
767 for new_diff_line in new_diff_lines[1:]:
771 delta = _diff_line_delta(diff_line, new_diff_line)
768 delta = _diff_line_delta(diff_line, new_diff_line)
772 if delta < best_delta:
769 if delta < best_delta:
773 candidate = new_diff_line
770 candidate = new_diff_line
774 best_delta = delta
771 best_delta = delta
775 return candidate
772 return candidate
776
773
777 def _visible_inline_comments_of_pull_request(self, pull_request):
774 def _visible_inline_comments_of_pull_request(self, pull_request):
778 comments = self._all_inline_comments_of_pull_request(pull_request)
775 comments = self._all_inline_comments_of_pull_request(pull_request)
779 comments = comments.filter(
776 comments = comments.filter(
780 coalesce(ChangesetComment.display_state, '') !=
777 coalesce(ChangesetComment.display_state, '') !=
781 ChangesetComment.COMMENT_OUTDATED)
778 ChangesetComment.COMMENT_OUTDATED)
782 return comments
779 return comments
783
780
784 def _all_inline_comments_of_pull_request(self, pull_request):
781 def _all_inline_comments_of_pull_request(self, pull_request):
785 comments = Session().query(ChangesetComment)\
782 comments = Session().query(ChangesetComment)\
786 .filter(ChangesetComment.line_no != None)\
783 .filter(ChangesetComment.line_no != None)\
787 .filter(ChangesetComment.f_path != None)\
784 .filter(ChangesetComment.f_path != None)\
788 .filter(ChangesetComment.pull_request == pull_request)
785 .filter(ChangesetComment.pull_request == pull_request)
789 return comments
786 return comments
790
787
791 def _all_general_comments_of_pull_request(self, pull_request):
788 def _all_general_comments_of_pull_request(self, pull_request):
792 comments = Session().query(ChangesetComment)\
789 comments = Session().query(ChangesetComment)\
793 .filter(ChangesetComment.line_no == None)\
790 .filter(ChangesetComment.line_no == None)\
794 .filter(ChangesetComment.f_path == None)\
791 .filter(ChangesetComment.f_path == None)\
795 .filter(ChangesetComment.pull_request == pull_request)
792 .filter(ChangesetComment.pull_request == pull_request)
796
793
797 return comments
794 return comments
798
795
799 @staticmethod
796 @staticmethod
800 def use_outdated_comments(pull_request):
797 def use_outdated_comments(pull_request):
801 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
798 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
802 settings = settings_model.get_general_settings()
799 settings = settings_model.get_general_settings()
803 return settings.get('rhodecode_use_outdated_comments', False)
800 return settings.get('rhodecode_use_outdated_comments', False)
804
801
805 def trigger_commit_comment_hook(self, repo, user, action, data=None):
802 def trigger_commit_comment_hook(self, repo, user, action, data=None):
806 repo = self._get_repo(repo)
803 repo = self._get_repo(repo)
807 target_scm = repo.scm_instance()
804 target_scm = repo.scm_instance()
808 if action == 'create':
805 if action == 'create':
809 trigger_hook = hooks_utils.trigger_comment_commit_hooks
806 trigger_hook = hooks_utils.trigger_comment_commit_hooks
810 elif action == 'edit':
807 elif action == 'edit':
811 trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
808 trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
812 else:
809 else:
813 return
810 return
814
811
815 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
812 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
816 repo, action, trigger_hook)
813 repo, action, trigger_hook)
817 trigger_hook(
814 trigger_hook(
818 username=user.username,
815 username=user.username,
819 repo_name=repo.repo_name,
816 repo_name=repo.repo_name,
820 repo_type=target_scm.alias,
817 repo_type=target_scm.alias,
821 repo=repo,
818 repo=repo,
822 data=data)
819 data=data)
823
820
824
821
825 def _parse_comment_line_number(line_no):
822 def _parse_comment_line_number(line_no):
826 """
823 r"""
827 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
824 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
828 """
825 """
829 old_line = None
826 old_line = None
830 new_line = None
827 new_line = None
831 if line_no.startswith('o'):
828 if line_no.startswith('o'):
832 old_line = int(line_no[1:])
829 old_line = int(line_no[1:])
833 elif line_no.startswith('n'):
830 elif line_no.startswith('n'):
834 new_line = int(line_no[1:])
831 new_line = int(line_no[1:])
835 else:
832 else:
836 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
833 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
837 return diffs.DiffLineNumber(old_line, new_line)
834 return diffs.DiffLineNumber(old_line, new_line)
838
835
839
836
840 def _diff_to_comment_line_number(diff_line):
837 def _diff_to_comment_line_number(diff_line):
841 if diff_line.new is not None:
838 if diff_line.new is not None:
842 return u'n{}'.format(diff_line.new)
839 return f'n{diff_line.new}'
843 elif diff_line.old is not None:
840 elif diff_line.old is not None:
844 return u'o{}'.format(diff_line.old)
841 return f'o{diff_line.old}'
845 return u''
842 return ''
846
843
847
844
848 def _diff_line_delta(a, b):
845 def _diff_line_delta(a, b):
849 if None not in (a.new, b.new):
846 if None not in (a.new, b.new):
850 return abs(a.new - b.new)
847 return abs(a.new - b.new)
851 elif None not in (a.old, b.old):
848 elif None not in (a.old, b.old):
852 return abs(a.old - b.old)
849 return abs(a.old - b.old)
853 else:
850 else:
854 raise ValueError(
851 raise ValueError(
855 "Cannot compute delta between {} and {}".format(a, b))
852 f"Cannot compute delta between {a} and {b}")
@@ -1,5861 +1,5860 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 """
19 """
21 Database Models for RhodeCode Enterprise
20 Database Models for RhodeCode Enterprise
22 """
21 """
23
22
24 import re
23 import re
25 import os
24 import os
26 import time
25 import time
27 import string
26 import string
28 import logging
27 import logging
29 import datetime
28 import datetime
30 import uuid
29 import uuid
31 import warnings
30 import warnings
32 import ipaddress
31 import ipaddress
33 import functools
32 import functools
34 import traceback
33 import traceback
35 import collections
34 import collections
36
35
37 from sqlalchemy import (
36 from sqlalchemy import (
38 or_, and_, not_, func, cast, TypeDecorator, event, select,
37 or_, and_, not_, func, cast, TypeDecorator, event, select,
39 true, false, null,
38 true, false, null,
40 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
41 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
42 Text, Float, PickleType, BigInteger)
41 Text, Float, PickleType, BigInteger)
43 from sqlalchemy.sql.expression import case
42 from sqlalchemy.sql.expression import case
44 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
45 from sqlalchemy.orm import (
44 from sqlalchemy.orm import (
46 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
45 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
47 from sqlalchemy.ext.declarative import declared_attr
46 from sqlalchemy.ext.declarative import declared_attr
48 from sqlalchemy.ext.hybrid import hybrid_property
47 from sqlalchemy.ext.hybrid import hybrid_property
49 from sqlalchemy.exc import IntegrityError # pragma: no cover
48 from sqlalchemy.exc import IntegrityError # pragma: no cover
50 from sqlalchemy.dialects.mysql import LONGTEXT
49 from sqlalchemy.dialects.mysql import LONGTEXT
51 from zope.cachedescriptors.property import Lazy as LazyProperty
50 from zope.cachedescriptors.property import Lazy as LazyProperty
52 from pyramid.threadlocal import get_current_request
51 from pyramid.threadlocal import get_current_request
53 from webhelpers2.text import remove_formatting
52 from webhelpers2.text import remove_formatting
54
53
55 from rhodecode.lib.str_utils import safe_bytes
54 from rhodecode.lib.str_utils import safe_bytes
56 from rhodecode.translation import _
55 from rhodecode.translation import _
57 from rhodecode.lib.vcs import get_vcs_instance, VCSError
56 from rhodecode.lib.vcs import get_vcs_instance, VCSError
58 from rhodecode.lib.vcs.backends.base import (
57 from rhodecode.lib.vcs.backends.base import (
59 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
58 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
60 from rhodecode.lib.utils2 import (
59 from rhodecode.lib.utils2 import (
61 str2bool, safe_str, get_commit_safe, sha1_safe,
60 str2bool, safe_str, get_commit_safe, sha1_safe,
62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
64 from rhodecode.lib.jsonalchemy import (
63 from rhodecode.lib.jsonalchemy import (
65 MutationObj, MutationList, JsonType, JsonRaw)
64 MutationObj, MutationList, JsonType, JsonRaw)
66 from rhodecode.lib.hash_utils import sha1
65 from rhodecode.lib.hash_utils import sha1
67 from rhodecode.lib import ext_json
66 from rhodecode.lib import ext_json
68 from rhodecode.lib import enc_utils
67 from rhodecode.lib import enc_utils
69 from rhodecode.lib.ext_json import json
68 from rhodecode.lib.ext_json import json
70 from rhodecode.lib.caching_query import FromCache
69 from rhodecode.lib.caching_query import FromCache
71 from rhodecode.lib.exceptions import (
70 from rhodecode.lib.exceptions import (
72 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
73 from rhodecode.model.meta import Base, Session
72 from rhodecode.model.meta import Base, Session
74
73
75 URL_SEP = '/'
74 URL_SEP = '/'
76 log = logging.getLogger(__name__)
75 log = logging.getLogger(__name__)
77
76
78 # =============================================================================
77 # =============================================================================
79 # BASE CLASSES
78 # BASE CLASSES
80 # =============================================================================
79 # =============================================================================
81
80
82 # this is propagated from .ini file rhodecode.encrypted_values.secret or
81 # this is propagated from .ini file rhodecode.encrypted_values.secret or
83 # beaker.session.secret if first is not set.
82 # beaker.session.secret if first is not set.
84 # and initialized at environment.py
83 # and initialized at environment.py
85 ENCRYPTION_KEY: bytes = b''
84 ENCRYPTION_KEY: bytes = b''
86
85
87 # used to sort permissions by types, '#' used here is not allowed to be in
86 # used to sort permissions by types, '#' used here is not allowed to be in
88 # usernames, and it's very early in sorted string.printable table.
87 # usernames, and it's very early in sorted string.printable table.
89 PERMISSION_TYPE_SORT = {
88 PERMISSION_TYPE_SORT = {
90 'admin': '####',
89 'admin': '####',
91 'write': '###',
90 'write': '###',
92 'read': '##',
91 'read': '##',
93 'none': '#',
92 'none': '#',
94 }
93 }
95
94
96
95
97 def display_user_sort(obj):
96 def display_user_sort(obj):
98 """
97 """
99 Sort function used to sort permissions in .permissions() function of
98 Sort function used to sort permissions in .permissions() function of
100 Repository, RepoGroup, UserGroup. Also it put the default user in front
99 Repository, RepoGroup, UserGroup. Also it put the default user in front
101 of all other resources
100 of all other resources
102 """
101 """
103
102
104 if obj.username == User.DEFAULT_USER:
103 if obj.username == User.DEFAULT_USER:
105 return '#####'
104 return '#####'
106 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
107 extra_sort_num = '1' # default
106 extra_sort_num = '1' # default
108
107
109 # NOTE(dan): inactive duplicates goes last
108 # NOTE(dan): inactive duplicates goes last
110 if getattr(obj, 'duplicate_perm', None):
109 if getattr(obj, 'duplicate_perm', None):
111 extra_sort_num = '9'
110 extra_sort_num = '9'
112 return prefix + extra_sort_num + obj.username
111 return prefix + extra_sort_num + obj.username
113
112
114
113
115 def display_user_group_sort(obj):
114 def display_user_group_sort(obj):
116 """
115 """
117 Sort function used to sort permissions in .permissions() function of
116 Sort function used to sort permissions in .permissions() function of
118 Repository, RepoGroup, UserGroup. Also it put the default user in front
117 Repository, RepoGroup, UserGroup. Also it put the default user in front
119 of all other resources
118 of all other resources
120 """
119 """
121
120
122 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
121 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
123 return prefix + obj.users_group_name
122 return prefix + obj.users_group_name
124
123
125
124
126 def _hash_key(k):
125 def _hash_key(k):
127 return sha1_safe(k)
126 return sha1_safe(k)
128
127
129
128
130 def in_filter_generator(qry, items, limit=500):
129 def in_filter_generator(qry, items, limit=500):
131 """
130 """
132 Splits IN() into multiple with OR
131 Splits IN() into multiple with OR
133 e.g.::
132 e.g.::
134 cnt = Repository.query().filter(
133 cnt = Repository.query().filter(
135 or_(
134 or_(
136 *in_filter_generator(Repository.repo_id, range(100000))
135 *in_filter_generator(Repository.repo_id, range(100000))
137 )).count()
136 )).count()
138 """
137 """
139 if not items:
138 if not items:
140 # empty list will cause empty query which might cause security issues
139 # empty list will cause empty query which might cause security issues
141 # this can lead to hidden unpleasant results
140 # this can lead to hidden unpleasant results
142 items = [-1]
141 items = [-1]
143
142
144 parts = []
143 parts = []
145 for chunk in range(0, len(items), limit):
144 for chunk in range(0, len(items), limit):
146 parts.append(
145 parts.append(
147 qry.in_(items[chunk: chunk + limit])
146 qry.in_(items[chunk: chunk + limit])
148 )
147 )
149
148
150 return parts
149 return parts
151
150
152
151
153 base_table_args = {
152 base_table_args = {
154 'extend_existing': True,
153 'extend_existing': True,
155 'mysql_engine': 'InnoDB',
154 'mysql_engine': 'InnoDB',
156 'mysql_charset': 'utf8',
155 'mysql_charset': 'utf8',
157 'sqlite_autoincrement': True
156 'sqlite_autoincrement': True
158 }
157 }
159
158
160
159
161 class EncryptedTextValue(TypeDecorator):
160 class EncryptedTextValue(TypeDecorator):
162 """
161 """
163 Special column for encrypted long text data, use like::
162 Special column for encrypted long text data, use like::
164
163
165 value = Column("encrypted_value", EncryptedValue(), nullable=False)
164 value = Column("encrypted_value", EncryptedValue(), nullable=False)
166
165
167 This column is intelligent so if value is in unencrypted form it return
166 This column is intelligent so if value is in unencrypted form it return
168 unencrypted form, but on save it always encrypts
167 unencrypted form, but on save it always encrypts
169 """
168 """
170 cache_ok = True
169 cache_ok = True
171 impl = Text
170 impl = Text
172
171
173 def process_bind_param(self, value, dialect):
172 def process_bind_param(self, value, dialect):
174 """
173 """
175 Setter for storing value
174 Setter for storing value
176 """
175 """
177 import rhodecode
176 import rhodecode
178 if not value:
177 if not value:
179 return value
178 return value
180
179
181 # protect against double encrypting if values is already encrypted
180 # protect against double encrypting if values is already encrypted
182 if value.startswith('enc$aes$') \
181 if value.startswith('enc$aes$') \
183 or value.startswith('enc$aes_hmac$') \
182 or value.startswith('enc$aes_hmac$') \
184 or value.startswith('enc2$'):
183 or value.startswith('enc2$'):
185 raise ValueError('value needs to be in unencrypted format, '
184 raise ValueError('value needs to be in unencrypted format, '
186 'ie. not starting with enc$ or enc2$')
185 'ie. not starting with enc$ or enc2$')
187
186
188 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
187 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
189 bytes_val = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
188 bytes_val = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
190 return safe_str(bytes_val)
189 return safe_str(bytes_val)
191
190
192 def process_result_value(self, value, dialect):
191 def process_result_value(self, value, dialect):
193 """
192 """
194 Getter for retrieving value
193 Getter for retrieving value
195 """
194 """
196
195
197 import rhodecode
196 import rhodecode
198 if not value:
197 if not value:
199 return value
198 return value
200
199
201 enc_strict_mode = rhodecode.ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True)
200 enc_strict_mode = rhodecode.ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True)
202
201
203 bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)
202 bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)
204
203
205 return safe_str(bytes_val)
204 return safe_str(bytes_val)
206
205
207
206
208 class BaseModel(object):
207 class BaseModel(object):
209 """
208 """
210 Base Model for all classes
209 Base Model for all classes
211 """
210 """
212
211
213 @classmethod
212 @classmethod
214 def _get_keys(cls):
213 def _get_keys(cls):
215 """return column names for this model """
214 """return column names for this model """
216 return class_mapper(cls).c.keys()
215 return class_mapper(cls).c.keys()
217
216
218 def get_dict(self):
217 def get_dict(self):
219 """
218 """
220 return dict with keys and values corresponding
219 return dict with keys and values corresponding
221 to this model data """
220 to this model data """
222
221
223 d = {}
222 d = {}
224 for k in self._get_keys():
223 for k in self._get_keys():
225 d[k] = getattr(self, k)
224 d[k] = getattr(self, k)
226
225
227 # also use __json__() if present to get additional fields
226 # also use __json__() if present to get additional fields
228 _json_attr = getattr(self, '__json__', None)
227 _json_attr = getattr(self, '__json__', None)
229 if _json_attr:
228 if _json_attr:
230 # update with attributes from __json__
229 # update with attributes from __json__
231 if callable(_json_attr):
230 if callable(_json_attr):
232 _json_attr = _json_attr()
231 _json_attr = _json_attr()
233 for k, val in _json_attr.items():
232 for k, val in _json_attr.items():
234 d[k] = val
233 d[k] = val
235 return d
234 return d
236
235
237 def get_appstruct(self):
236 def get_appstruct(self):
238 """return list with keys and values tuples corresponding
237 """return list with keys and values tuples corresponding
239 to this model data """
238 to this model data """
240
239
241 lst = []
240 lst = []
242 for k in self._get_keys():
241 for k in self._get_keys():
243 lst.append((k, getattr(self, k),))
242 lst.append((k, getattr(self, k),))
244 return lst
243 return lst
245
244
246 def populate_obj(self, populate_dict):
245 def populate_obj(self, populate_dict):
247 """populate model with data from given populate_dict"""
246 """populate model with data from given populate_dict"""
248
247
249 for k in self._get_keys():
248 for k in self._get_keys():
250 if k in populate_dict:
249 if k in populate_dict:
251 setattr(self, k, populate_dict[k])
250 setattr(self, k, populate_dict[k])
252
251
253 @classmethod
252 @classmethod
254 def query(cls):
253 def query(cls):
255 return Session().query(cls)
254 return Session().query(cls)
256
255
257 @classmethod
256 @classmethod
258 def select(cls, custom_cls=None):
257 def select(cls, custom_cls=None):
259 """
258 """
260 stmt = cls.select().where(cls.user_id==1)
259 stmt = cls.select().where(cls.user_id==1)
261 # optionally
260 # optionally
262 stmt = cls.select(User.user_id).where(cls.user_id==1)
261 stmt = cls.select(User.user_id).where(cls.user_id==1)
263 result = cls.execute(stmt) | cls.scalars(stmt)
262 result = cls.execute(stmt) | cls.scalars(stmt)
264 """
263 """
265
264
266 if custom_cls:
265 if custom_cls:
267 stmt = select(custom_cls)
266 stmt = select(custom_cls)
268 else:
267 else:
269 stmt = select(cls)
268 stmt = select(cls)
270 return stmt
269 return stmt
271
270
272 @classmethod
271 @classmethod
273 def execute(cls, stmt):
272 def execute(cls, stmt):
274 return Session().execute(stmt)
273 return Session().execute(stmt)
275
274
276 @classmethod
275 @classmethod
277 def scalars(cls, stmt):
276 def scalars(cls, stmt):
278 return Session().scalars(stmt)
277 return Session().scalars(stmt)
279
278
280 @classmethod
279 @classmethod
281 def get(cls, id_):
280 def get(cls, id_):
282 if id_:
281 if id_:
283 return cls.query().get(id_)
282 return cls.query().get(id_)
284
283
285 @classmethod
284 @classmethod
286 def get_or_404(cls, id_):
285 def get_or_404(cls, id_):
287 from pyramid.httpexceptions import HTTPNotFound
286 from pyramid.httpexceptions import HTTPNotFound
288
287
289 try:
288 try:
290 id_ = int(id_)
289 id_ = int(id_)
291 except (TypeError, ValueError):
290 except (TypeError, ValueError):
292 raise HTTPNotFound()
291 raise HTTPNotFound()
293
292
294 res = cls.query().get(id_)
293 res = cls.query().get(id_)
295 if not res:
294 if not res:
296 raise HTTPNotFound()
295 raise HTTPNotFound()
297 return res
296 return res
298
297
299 @classmethod
298 @classmethod
300 def getAll(cls):
299 def getAll(cls):
301 # deprecated and left for backward compatibility
300 # deprecated and left for backward compatibility
302 return cls.get_all()
301 return cls.get_all()
303
302
304 @classmethod
303 @classmethod
305 def get_all(cls):
304 def get_all(cls):
306 return cls.query().all()
305 return cls.query().all()
307
306
308 @classmethod
307 @classmethod
309 def delete(cls, id_):
308 def delete(cls, id_):
310 obj = cls.query().get(id_)
309 obj = cls.query().get(id_)
311 Session().delete(obj)
310 Session().delete(obj)
312
311
313 @classmethod
312 @classmethod
314 def identity_cache(cls, session, attr_name, value):
313 def identity_cache(cls, session, attr_name, value):
315 exist_in_session = []
314 exist_in_session = []
316 for (item_cls, pkey), instance in session.identity_map.items():
315 for (item_cls, pkey), instance in session.identity_map.items():
317 if cls == item_cls and getattr(instance, attr_name) == value:
316 if cls == item_cls and getattr(instance, attr_name) == value:
318 exist_in_session.append(instance)
317 exist_in_session.append(instance)
319 if exist_in_session:
318 if exist_in_session:
320 if len(exist_in_session) == 1:
319 if len(exist_in_session) == 1:
321 return exist_in_session[0]
320 return exist_in_session[0]
322 log.exception(
321 log.exception(
323 'multiple objects with attr %s and '
322 'multiple objects with attr %s and '
324 'value %s found with same name: %r',
323 'value %s found with same name: %r',
325 attr_name, value, exist_in_session)
324 attr_name, value, exist_in_session)
326
325
327 @property
326 @property
328 def cls_name(self):
327 def cls_name(self):
329 return self.__class__.__name__
328 return self.__class__.__name__
330
329
331 def __repr__(self):
330 def __repr__(self):
332 return f'<DB:{self.cls_name}>'
331 return f'<DB:{self.cls_name}>'
333
332
334
333
335 class RhodeCodeSetting(Base, BaseModel):
334 class RhodeCodeSetting(Base, BaseModel):
336 __tablename__ = 'rhodecode_settings'
335 __tablename__ = 'rhodecode_settings'
337 __table_args__ = (
336 __table_args__ = (
338 UniqueConstraint('app_settings_name'),
337 UniqueConstraint('app_settings_name'),
339 base_table_args
338 base_table_args
340 )
339 )
341
340
342 SETTINGS_TYPES = {
341 SETTINGS_TYPES = {
343 'str': safe_str,
342 'str': safe_str,
344 'int': safe_int,
343 'int': safe_int,
345 'unicode': safe_str,
344 'unicode': safe_str,
346 'bool': str2bool,
345 'bool': str2bool,
347 'list': functools.partial(aslist, sep=',')
346 'list': functools.partial(aslist, sep=',')
348 }
347 }
349 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
348 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
350 GLOBAL_CONF_KEY = 'app_settings'
349 GLOBAL_CONF_KEY = 'app_settings'
351
350
352 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
351 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
353 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
352 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
354 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
353 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
355 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
354 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
356
355
357 def __init__(self, key='', val='', type='unicode'):
356 def __init__(self, key='', val='', type='unicode'):
358 self.app_settings_name = key
357 self.app_settings_name = key
359 self.app_settings_type = type
358 self.app_settings_type = type
360 self.app_settings_value = val
359 self.app_settings_value = val
361
360
362 @validates('_app_settings_value')
361 @validates('_app_settings_value')
363 def validate_settings_value(self, key, val):
362 def validate_settings_value(self, key, val):
364 assert type(val) == str
363 assert type(val) == str
365 return val
364 return val
366
365
367 @hybrid_property
366 @hybrid_property
368 def app_settings_value(self):
367 def app_settings_value(self):
369 v = self._app_settings_value
368 v = self._app_settings_value
370 _type = self.app_settings_type
369 _type = self.app_settings_type
371 if _type:
370 if _type:
372 _type = self.app_settings_type.split('.')[0]
371 _type = self.app_settings_type.split('.')[0]
373 # decode the encrypted value
372 # decode the encrypted value
374 if 'encrypted' in self.app_settings_type:
373 if 'encrypted' in self.app_settings_type:
375 cipher = EncryptedTextValue()
374 cipher = EncryptedTextValue()
376 v = safe_str(cipher.process_result_value(v, None))
375 v = safe_str(cipher.process_result_value(v, None))
377
376
378 converter = self.SETTINGS_TYPES.get(_type) or \
377 converter = self.SETTINGS_TYPES.get(_type) or \
379 self.SETTINGS_TYPES['unicode']
378 self.SETTINGS_TYPES['unicode']
380 return converter(v)
379 return converter(v)
381
380
382 @app_settings_value.setter
381 @app_settings_value.setter
383 def app_settings_value(self, val):
382 def app_settings_value(self, val):
384 """
383 """
385 Setter that will always make sure we use unicode in app_settings_value
384 Setter that will always make sure we use unicode in app_settings_value
386
385
387 :param val:
386 :param val:
388 """
387 """
389 val = safe_str(val)
388 val = safe_str(val)
390 # encode the encrypted value
389 # encode the encrypted value
391 if 'encrypted' in self.app_settings_type:
390 if 'encrypted' in self.app_settings_type:
392 cipher = EncryptedTextValue()
391 cipher = EncryptedTextValue()
393 val = safe_str(cipher.process_bind_param(val, None))
392 val = safe_str(cipher.process_bind_param(val, None))
394 self._app_settings_value = val
393 self._app_settings_value = val
395
394
396 @hybrid_property
395 @hybrid_property
397 def app_settings_type(self):
396 def app_settings_type(self):
398 return self._app_settings_type
397 return self._app_settings_type
399
398
400 @app_settings_type.setter
399 @app_settings_type.setter
401 def app_settings_type(self, val):
400 def app_settings_type(self, val):
402 if val.split('.')[0] not in self.SETTINGS_TYPES:
401 if val.split('.')[0] not in self.SETTINGS_TYPES:
403 raise Exception('type must be one of %s got %s'
402 raise Exception('type must be one of %s got %s'
404 % (self.SETTINGS_TYPES.keys(), val))
403 % (self.SETTINGS_TYPES.keys(), val))
405 self._app_settings_type = val
404 self._app_settings_type = val
406
405
407 @classmethod
406 @classmethod
408 def get_by_prefix(cls, prefix):
407 def get_by_prefix(cls, prefix):
409 return RhodeCodeSetting.query()\
408 return RhodeCodeSetting.query()\
410 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
409 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
411 .all()
410 .all()
412
411
413 def __repr__(self):
412 def __repr__(self):
414 return "<%s('%s:%s[%s]')>" % (
413 return "<%s('%s:%s[%s]')>" % (
415 self.cls_name,
414 self.cls_name,
416 self.app_settings_name, self.app_settings_value,
415 self.app_settings_name, self.app_settings_value,
417 self.app_settings_type
416 self.app_settings_type
418 )
417 )
419
418
420
419
421 class RhodeCodeUi(Base, BaseModel):
420 class RhodeCodeUi(Base, BaseModel):
422 __tablename__ = 'rhodecode_ui'
421 __tablename__ = 'rhodecode_ui'
423 __table_args__ = (
422 __table_args__ = (
424 UniqueConstraint('ui_key'),
423 UniqueConstraint('ui_key'),
425 base_table_args
424 base_table_args
426 )
425 )
427 # Sync those values with vcsserver.config.hooks
426 # Sync those values with vcsserver.config.hooks
428
427
429 HOOK_REPO_SIZE = 'changegroup.repo_size'
428 HOOK_REPO_SIZE = 'changegroup.repo_size'
430 # HG
429 # HG
431 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
430 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
432 HOOK_PULL = 'outgoing.pull_logger'
431 HOOK_PULL = 'outgoing.pull_logger'
433 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
432 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
434 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
433 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
435 HOOK_PUSH = 'changegroup.push_logger'
434 HOOK_PUSH = 'changegroup.push_logger'
436 HOOK_PUSH_KEY = 'pushkey.key_push'
435 HOOK_PUSH_KEY = 'pushkey.key_push'
437
436
438 HOOKS_BUILTIN = [
437 HOOKS_BUILTIN = [
439 HOOK_PRE_PULL,
438 HOOK_PRE_PULL,
440 HOOK_PULL,
439 HOOK_PULL,
441 HOOK_PRE_PUSH,
440 HOOK_PRE_PUSH,
442 HOOK_PRETX_PUSH,
441 HOOK_PRETX_PUSH,
443 HOOK_PUSH,
442 HOOK_PUSH,
444 HOOK_PUSH_KEY,
443 HOOK_PUSH_KEY,
445 ]
444 ]
446
445
447 # TODO: johbo: Unify way how hooks are configured for git and hg,
446 # TODO: johbo: Unify way how hooks are configured for git and hg,
448 # git part is currently hardcoded.
447 # git part is currently hardcoded.
449
448
450 # SVN PATTERNS
449 # SVN PATTERNS
451 SVN_BRANCH_ID = 'vcs_svn_branch'
450 SVN_BRANCH_ID = 'vcs_svn_branch'
452 SVN_TAG_ID = 'vcs_svn_tag'
451 SVN_TAG_ID = 'vcs_svn_tag'
453
452
454 ui_id = Column(
453 ui_id = Column(
455 "ui_id", Integer(), nullable=False, unique=True, default=None,
454 "ui_id", Integer(), nullable=False, unique=True, default=None,
456 primary_key=True)
455 primary_key=True)
457 ui_section = Column(
456 ui_section = Column(
458 "ui_section", String(255), nullable=True, unique=None, default=None)
457 "ui_section", String(255), nullable=True, unique=None, default=None)
459 ui_key = Column(
458 ui_key = Column(
460 "ui_key", String(255), nullable=True, unique=None, default=None)
459 "ui_key", String(255), nullable=True, unique=None, default=None)
461 ui_value = Column(
460 ui_value = Column(
462 "ui_value", String(255), nullable=True, unique=None, default=None)
461 "ui_value", String(255), nullable=True, unique=None, default=None)
463 ui_active = Column(
462 ui_active = Column(
464 "ui_active", Boolean(), nullable=True, unique=None, default=True)
463 "ui_active", Boolean(), nullable=True, unique=None, default=True)
465
464
466 def __repr__(self):
465 def __repr__(self):
467 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
466 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
468 self.ui_key, self.ui_value)
467 self.ui_key, self.ui_value)
469
468
470
469
471 class RepoRhodeCodeSetting(Base, BaseModel):
470 class RepoRhodeCodeSetting(Base, BaseModel):
472 __tablename__ = 'repo_rhodecode_settings'
471 __tablename__ = 'repo_rhodecode_settings'
473 __table_args__ = (
472 __table_args__ = (
474 UniqueConstraint(
473 UniqueConstraint(
475 'app_settings_name', 'repository_id',
474 'app_settings_name', 'repository_id',
476 name='uq_repo_rhodecode_setting_name_repo_id'),
475 name='uq_repo_rhodecode_setting_name_repo_id'),
477 base_table_args
476 base_table_args
478 )
477 )
479
478
480 repository_id = Column(
479 repository_id = Column(
481 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
480 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
482 nullable=False)
481 nullable=False)
483 app_settings_id = Column(
482 app_settings_id = Column(
484 "app_settings_id", Integer(), nullable=False, unique=True,
483 "app_settings_id", Integer(), nullable=False, unique=True,
485 default=None, primary_key=True)
484 default=None, primary_key=True)
486 app_settings_name = Column(
485 app_settings_name = Column(
487 "app_settings_name", String(255), nullable=True, unique=None,
486 "app_settings_name", String(255), nullable=True, unique=None,
488 default=None)
487 default=None)
489 _app_settings_value = Column(
488 _app_settings_value = Column(
490 "app_settings_value", String(4096), nullable=True, unique=None,
489 "app_settings_value", String(4096), nullable=True, unique=None,
491 default=None)
490 default=None)
492 _app_settings_type = Column(
491 _app_settings_type = Column(
493 "app_settings_type", String(255), nullable=True, unique=None,
492 "app_settings_type", String(255), nullable=True, unique=None,
494 default=None)
493 default=None)
495
494
496 repository = relationship('Repository', viewonly=True)
495 repository = relationship('Repository', viewonly=True)
497
496
498 def __init__(self, repository_id, key='', val='', type='unicode'):
497 def __init__(self, repository_id, key='', val='', type='unicode'):
499 self.repository_id = repository_id
498 self.repository_id = repository_id
500 self.app_settings_name = key
499 self.app_settings_name = key
501 self.app_settings_type = type
500 self.app_settings_type = type
502 self.app_settings_value = val
501 self.app_settings_value = val
503
502
504 @validates('_app_settings_value')
503 @validates('_app_settings_value')
505 def validate_settings_value(self, key, val):
504 def validate_settings_value(self, key, val):
506 assert type(val) == str
505 assert type(val) == str
507 return val
506 return val
508
507
509 @hybrid_property
508 @hybrid_property
510 def app_settings_value(self):
509 def app_settings_value(self):
511 v = self._app_settings_value
510 v = self._app_settings_value
512 type_ = self.app_settings_type
511 type_ = self.app_settings_type
513 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
512 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
514 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
513 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
515 return converter(v)
514 return converter(v)
516
515
517 @app_settings_value.setter
516 @app_settings_value.setter
518 def app_settings_value(self, val):
517 def app_settings_value(self, val):
519 """
518 """
520 Setter that will always make sure we use unicode in app_settings_value
519 Setter that will always make sure we use unicode in app_settings_value
521
520
522 :param val:
521 :param val:
523 """
522 """
524 self._app_settings_value = safe_str(val)
523 self._app_settings_value = safe_str(val)
525
524
526 @hybrid_property
525 @hybrid_property
527 def app_settings_type(self):
526 def app_settings_type(self):
528 return self._app_settings_type
527 return self._app_settings_type
529
528
530 @app_settings_type.setter
529 @app_settings_type.setter
531 def app_settings_type(self, val):
530 def app_settings_type(self, val):
532 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
531 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
533 if val not in SETTINGS_TYPES:
532 if val not in SETTINGS_TYPES:
534 raise Exception('type must be one of %s got %s'
533 raise Exception('type must be one of %s got %s'
535 % (SETTINGS_TYPES.keys(), val))
534 % (SETTINGS_TYPES.keys(), val))
536 self._app_settings_type = val
535 self._app_settings_type = val
537
536
538 def __repr__(self):
537 def __repr__(self):
539 return "<%s('%s:%s:%s[%s]')>" % (
538 return "<%s('%s:%s:%s[%s]')>" % (
540 self.cls_name, self.repository.repo_name,
539 self.cls_name, self.repository.repo_name,
541 self.app_settings_name, self.app_settings_value,
540 self.app_settings_name, self.app_settings_value,
542 self.app_settings_type
541 self.app_settings_type
543 )
542 )
544
543
545
544
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository override of a single rhodecode-ui entry: a
    (section, key, value) triple with an active flag, unique per repository.
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository', viewonly=True)

    def __repr__(self):
        # NOTE: fixed the unbalanced format string; the original
        # '<%s[%s:%s]%s=>%s]>' emitted a stray trailing ']'
        return '<%s[%s:%s]%s=>%s>' % (
            self.cls_name, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
576
575
577
576
578 class User(Base, BaseModel):
577 class User(Base, BaseModel):
579 __tablename__ = 'users'
578 __tablename__ = 'users'
580 __table_args__ = (
579 __table_args__ = (
581 UniqueConstraint('username'), UniqueConstraint('email'),
580 UniqueConstraint('username'), UniqueConstraint('email'),
582 Index('u_username_idx', 'username'),
581 Index('u_username_idx', 'username'),
583 Index('u_email_idx', 'email'),
582 Index('u_email_idx', 'email'),
584 base_table_args
583 base_table_args
585 )
584 )
586
585
587 DEFAULT_USER = 'default'
586 DEFAULT_USER = 'default'
588 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
587 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
589 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
588 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
590
589
591 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
590 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
592 username = Column("username", String(255), nullable=True, unique=None, default=None)
591 username = Column("username", String(255), nullable=True, unique=None, default=None)
593 password = Column("password", String(255), nullable=True, unique=None, default=None)
592 password = Column("password", String(255), nullable=True, unique=None, default=None)
594 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
593 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
595 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
594 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
596 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
595 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
597 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
596 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
598 _email = Column("email", String(255), nullable=True, unique=None, default=None)
597 _email = Column("email", String(255), nullable=True, unique=None, default=None)
599 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
598 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
600 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
599 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
601 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
600 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
602
601
603 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
602 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
604 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
603 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
605 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
604 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
606 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
605 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
607 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
606 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
608 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
607 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
609
608
610 user_log = relationship('UserLog', back_populates='user')
609 user_log = relationship('UserLog', back_populates='user')
611 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
610 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
612
611
613 repositories = relationship('Repository', back_populates='user')
612 repositories = relationship('Repository', back_populates='user')
614 repository_groups = relationship('RepoGroup', back_populates='user')
613 repository_groups = relationship('RepoGroup', back_populates='user')
615 user_groups = relationship('UserGroup', back_populates='user')
614 user_groups = relationship('UserGroup', back_populates='user')
616
615
617 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user')
616 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user')
618 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user')
617 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user')
619
618
620 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
619 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
621 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
620 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
622 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
621 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
623
622
624 group_member = relationship('UserGroupMember', cascade='all', back_populates='user')
623 group_member = relationship('UserGroupMember', cascade='all', back_populates='user')
625
624
626 notifications = relationship('UserNotification', cascade='all', back_populates='user')
625 notifications = relationship('UserNotification', cascade='all', back_populates='user')
627 # notifications assigned to this user
626 # notifications assigned to this user
628 user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user')
627 user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user')
629 # comments created by this user
628 # comments created by this user
630 user_comments = relationship('ChangesetComment', cascade='all', back_populates='author')
629 user_comments = relationship('ChangesetComment', cascade='all', back_populates='author')
631 # user profile extra info
630 # user profile extra info
632 user_emails = relationship('UserEmailMap', cascade='all', back_populates='user')
631 user_emails = relationship('UserEmailMap', cascade='all', back_populates='user')
633 user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user')
632 user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user')
634 user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user')
633 user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user')
635 user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user')
634 user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user')
636
635
637 # gists
636 # gists
638 user_gists = relationship('Gist', cascade='all', back_populates='owner')
637 user_gists = relationship('Gist', cascade='all', back_populates='owner')
639 # user pull requests
638 # user pull requests
640 user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author')
639 user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author')
641
640
642 # external identities
641 # external identities
643 external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all')
642 external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all')
644 # review rules
643 # review rules
645 user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user')
644 user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user')
646
645
647 # artifacts owned
646 # artifacts owned
648 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user')
647 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user')
649
648
650 # no cascade, set NULL
649 # no cascade, set NULL
651 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user')
650 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user')
652
651
653 def __repr__(self):
652 def __repr__(self):
654 return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>"
653 return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>"
655
654
656 @hybrid_property
655 @hybrid_property
657 def email(self):
656 def email(self):
658 return self._email
657 return self._email
659
658
660 @email.setter
659 @email.setter
661 def email(self, val):
660 def email(self, val):
662 self._email = val.lower() if val else None
661 self._email = val.lower() if val else None
663
662
664 @hybrid_property
663 @hybrid_property
665 def first_name(self):
664 def first_name(self):
666 from rhodecode.lib import helpers as h
665 from rhodecode.lib import helpers as h
667 if self.name:
666 if self.name:
668 return h.escape(self.name)
667 return h.escape(self.name)
669 return self.name
668 return self.name
670
669
671 @hybrid_property
670 @hybrid_property
672 def last_name(self):
671 def last_name(self):
673 from rhodecode.lib import helpers as h
672 from rhodecode.lib import helpers as h
674 if self.lastname:
673 if self.lastname:
675 return h.escape(self.lastname)
674 return h.escape(self.lastname)
676 return self.lastname
675 return self.lastname
677
676
678 @hybrid_property
677 @hybrid_property
679 def api_key(self):
678 def api_key(self):
680 """
679 """
681 Fetch if exist an auth-token with role ALL connected to this user
680 Fetch if exist an auth-token with role ALL connected to this user
682 """
681 """
683 user_auth_token = UserApiKeys.query()\
682 user_auth_token = UserApiKeys.query()\
684 .filter(UserApiKeys.user_id == self.user_id)\
683 .filter(UserApiKeys.user_id == self.user_id)\
685 .filter(or_(UserApiKeys.expires == -1,
684 .filter(or_(UserApiKeys.expires == -1,
686 UserApiKeys.expires >= time.time()))\
685 UserApiKeys.expires >= time.time()))\
687 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
686 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
688 if user_auth_token:
687 if user_auth_token:
689 user_auth_token = user_auth_token.api_key
688 user_auth_token = user_auth_token.api_key
690
689
691 return user_auth_token
690 return user_auth_token
692
691
693 @api_key.setter
692 @api_key.setter
694 def api_key(self, val):
693 def api_key(self, val):
695 # don't allow to set API key this is deprecated for now
694 # don't allow to set API key this is deprecated for now
696 self._api_key = None
695 self._api_key = None
697
696
698 @property
697 @property
699 def reviewer_pull_requests(self):
698 def reviewer_pull_requests(self):
700 return PullRequestReviewers.query() \
699 return PullRequestReviewers.query() \
701 .options(joinedload(PullRequestReviewers.pull_request)) \
700 .options(joinedload(PullRequestReviewers.pull_request)) \
702 .filter(PullRequestReviewers.user_id == self.user_id) \
701 .filter(PullRequestReviewers.user_id == self.user_id) \
703 .all()
702 .all()
704
703
705 @property
704 @property
706 def firstname(self):
705 def firstname(self):
707 # alias for future
706 # alias for future
708 return self.name
707 return self.name
709
708
710 @property
709 @property
711 def emails(self):
710 def emails(self):
712 other = UserEmailMap.query()\
711 other = UserEmailMap.query()\
713 .filter(UserEmailMap.user == self) \
712 .filter(UserEmailMap.user == self) \
714 .order_by(UserEmailMap.email_id.asc()) \
713 .order_by(UserEmailMap.email_id.asc()) \
715 .all()
714 .all()
716 return [self.email] + [x.email for x in other]
715 return [self.email] + [x.email for x in other]
717
716
718 def emails_cached(self):
717 def emails_cached(self):
719 emails = []
718 emails = []
720 if self.user_id != self.get_default_user_id():
719 if self.user_id != self.get_default_user_id():
721 emails = UserEmailMap.query()\
720 emails = UserEmailMap.query()\
722 .filter(UserEmailMap.user == self) \
721 .filter(UserEmailMap.user == self) \
723 .order_by(UserEmailMap.email_id.asc())
722 .order_by(UserEmailMap.email_id.asc())
724
723
725 emails = emails.options(
724 emails = emails.options(
726 FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
725 FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
727 )
726 )
728
727
729 return [self.email] + [x.email for x in emails]
728 return [self.email] + [x.email for x in emails]
730
729
731 @property
730 @property
732 def auth_tokens(self):
731 def auth_tokens(self):
733 auth_tokens = self.get_auth_tokens()
732 auth_tokens = self.get_auth_tokens()
734 return [x.api_key for x in auth_tokens]
733 return [x.api_key for x in auth_tokens]
735
734
736 def get_auth_tokens(self):
735 def get_auth_tokens(self):
737 return UserApiKeys.query()\
736 return UserApiKeys.query()\
738 .filter(UserApiKeys.user == self)\
737 .filter(UserApiKeys.user == self)\
739 .order_by(UserApiKeys.user_api_key_id.asc())\
738 .order_by(UserApiKeys.user_api_key_id.asc())\
740 .all()
739 .all()
741
740
742 @LazyProperty
741 @LazyProperty
743 def feed_token(self):
742 def feed_token(self):
744 return self.get_feed_token()
743 return self.get_feed_token()
745
744
746 def get_feed_token(self, cache=True):
745 def get_feed_token(self, cache=True):
747 feed_tokens = UserApiKeys.query()\
746 feed_tokens = UserApiKeys.query()\
748 .filter(UserApiKeys.user == self)\
747 .filter(UserApiKeys.user == self)\
749 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
748 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
750 if cache:
749 if cache:
751 feed_tokens = feed_tokens.options(
750 feed_tokens = feed_tokens.options(
752 FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))
751 FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))
753
752
754 feed_tokens = feed_tokens.all()
753 feed_tokens = feed_tokens.all()
755 if feed_tokens:
754 if feed_tokens:
756 return feed_tokens[0].api_key
755 return feed_tokens[0].api_key
757 return 'NO_FEED_TOKEN_AVAILABLE'
756 return 'NO_FEED_TOKEN_AVAILABLE'
758
757
759 @LazyProperty
758 @LazyProperty
760 def artifact_token(self):
759 def artifact_token(self):
761 return self.get_artifact_token()
760 return self.get_artifact_token()
762
761
763 def get_artifact_token(self, cache=True):
762 def get_artifact_token(self, cache=True):
764 artifacts_tokens = UserApiKeys.query()\
763 artifacts_tokens = UserApiKeys.query()\
765 .filter(UserApiKeys.user == self) \
764 .filter(UserApiKeys.user == self) \
766 .filter(or_(UserApiKeys.expires == -1,
765 .filter(or_(UserApiKeys.expires == -1,
767 UserApiKeys.expires >= time.time())) \
766 UserApiKeys.expires >= time.time())) \
768 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
767 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
769
768
770 if cache:
769 if cache:
771 artifacts_tokens = artifacts_tokens.options(
770 artifacts_tokens = artifacts_tokens.options(
772 FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))
771 FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))
773
772
774 artifacts_tokens = artifacts_tokens.all()
773 artifacts_tokens = artifacts_tokens.all()
775 if artifacts_tokens:
774 if artifacts_tokens:
776 return artifacts_tokens[0].api_key
775 return artifacts_tokens[0].api_key
777 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
776 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
778
777
779 def get_or_create_artifact_token(self):
778 def get_or_create_artifact_token(self):
780 artifacts_tokens = UserApiKeys.query()\
779 artifacts_tokens = UserApiKeys.query()\
781 .filter(UserApiKeys.user == self) \
780 .filter(UserApiKeys.user == self) \
782 .filter(or_(UserApiKeys.expires == -1,
781 .filter(or_(UserApiKeys.expires == -1,
783 UserApiKeys.expires >= time.time())) \
782 UserApiKeys.expires >= time.time())) \
784 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
783 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
785
784
786 artifacts_tokens = artifacts_tokens.all()
785 artifacts_tokens = artifacts_tokens.all()
787 if artifacts_tokens:
786 if artifacts_tokens:
788 return artifacts_tokens[0].api_key
787 return artifacts_tokens[0].api_key
789 else:
788 else:
790 from rhodecode.model.auth_token import AuthTokenModel
789 from rhodecode.model.auth_token import AuthTokenModel
791 artifact_token = AuthTokenModel().create(
790 artifact_token = AuthTokenModel().create(
792 self, 'auto-generated-artifact-token',
791 self, 'auto-generated-artifact-token',
793 lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
792 lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
794 Session.commit()
793 Session.commit()
795 return artifact_token.api_key
794 return artifact_token.api_key
796
795
797 @classmethod
796 @classmethod
798 def get(cls, user_id, cache=False):
797 def get(cls, user_id, cache=False):
799 if not user_id:
798 if not user_id:
800 return
799 return
801
800
802 user = cls.query()
801 user = cls.query()
803 if cache:
802 if cache:
804 user = user.options(
803 user = user.options(
805 FromCache("sql_cache_short", f"get_users_{user_id}"))
804 FromCache("sql_cache_short", f"get_users_{user_id}"))
806 return user.get(user_id)
805 return user.get(user_id)
807
806
808 @classmethod
807 @classmethod
809 def extra_valid_auth_tokens(cls, user, role=None):
808 def extra_valid_auth_tokens(cls, user, role=None):
810 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
809 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
811 .filter(or_(UserApiKeys.expires == -1,
810 .filter(or_(UserApiKeys.expires == -1,
812 UserApiKeys.expires >= time.time()))
811 UserApiKeys.expires >= time.time()))
813 if role:
812 if role:
814 tokens = tokens.filter(or_(UserApiKeys.role == role,
813 tokens = tokens.filter(or_(UserApiKeys.role == role,
815 UserApiKeys.role == UserApiKeys.ROLE_ALL))
814 UserApiKeys.role == UserApiKeys.ROLE_ALL))
816 return tokens.all()
815 return tokens.all()
817
816
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check *auth_token* against this user's non-expired auth-tokens.

        :param auth_token: plaintext token presented by the caller
        :param roles: extra acceptable roles; ROLE_ALL is always accepted
        :param scope_repo_id: repository id of the calling scope; a token with
            a repo scope only matches when the ids are equal
        :return: True when a matching (and scope-valid) token exists
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        # ROLE_ALL tokens are always considered in addition to requested roles
        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plaintext and hash-encrypted buckets so the
        # cheap dict lookup can run before any expensive hash verification
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                # unscoped token: valid for any calling scope
                return True

        return False
874
873
875 @property
874 @property
876 def ip_addresses(self):
875 def ip_addresses(self):
877 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
876 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
878 return [x.ip_addr for x in ret]
877 return [x.ip_addr for x in ret]
879
878
880 @property
879 @property
881 def username_and_name(self):
880 def username_and_name(self):
882 return f'{self.username} ({self.first_name} {self.last_name})'
881 return f'{self.username} ({self.first_name} {self.last_name})'
883
882
884 @property
883 @property
885 def username_or_name_or_email(self):
884 def username_or_name_or_email(self):
886 full_name = self.full_name if self.full_name != ' ' else None
885 full_name = self.full_name if self.full_name != ' ' else None
887 return self.username or full_name or self.email
886 return self.username or full_name or self.email
888
887
889 @property
888 @property
890 def full_name(self):
889 def full_name(self):
891 return f'{self.first_name} {self.last_name}'
890 return f'{self.first_name} {self.last_name}'
892
891
893 @property
892 @property
894 def full_name_or_username(self):
893 def full_name_or_username(self):
895 return (f'{self.first_name} {self.last_name}'
894 return (f'{self.first_name} {self.last_name}'
896 if (self.first_name and self.last_name) else self.username)
895 if (self.first_name and self.last_name) else self.username)
897
896
898 @property
897 @property
899 def full_contact(self):
898 def full_contact(self):
900 return f'{self.first_name} {self.last_name} <{self.email}>'
899 return f'{self.first_name} {self.last_name} <{self.email}>'
901
900
902 @property
901 @property
903 def short_contact(self):
902 def short_contact(self):
904 return f'{self.first_name} {self.last_name}'
903 return f'{self.first_name} {self.last_name}'
905
904
906 @property
905 @property
907 def is_admin(self):
906 def is_admin(self):
908 return self.admin
907 return self.admin
909
908
910 @property
909 @property
911 def language(self):
910 def language(self):
912 return self.user_data.get('language')
911 return self.user_data.get('language')
913
912
914 def AuthUser(self, **kwargs):
913 def AuthUser(self, **kwargs):
915 """
914 """
916 Returns instance of AuthUser for this user
915 Returns instance of AuthUser for this user
917 """
916 """
918 from rhodecode.lib.auth import AuthUser
917 from rhodecode.lib.auth import AuthUser
919 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
918 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
920
919
921 @hybrid_property
920 @hybrid_property
922 def user_data(self):
921 def user_data(self):
923 if not self._user_data:
922 if not self._user_data:
924 return {}
923 return {}
925
924
926 try:
925 try:
927 return json.loads(self._user_data) or {}
926 return json.loads(self._user_data) or {}
928 except TypeError:
927 except TypeError:
929 return {}
928 return {}
930
929
931 @user_data.setter
930 @user_data.setter
932 def user_data(self, val):
931 def user_data(self, val):
933 if not isinstance(val, dict):
932 if not isinstance(val, dict):
934 raise Exception('user_data must be dict, got %s' % type(val))
933 raise Exception('user_data must be dict, got %s' % type(val))
935 try:
934 try:
936 self._user_data = safe_bytes(json.dumps(val))
935 self._user_data = safe_bytes(json.dumps(val))
937 except Exception:
936 except Exception:
938 log.error(traceback.format_exc())
937 log.error(traceback.format_exc())
939
938
940 @classmethod
939 @classmethod
941 def get_by_username(cls, username, case_insensitive=False,
940 def get_by_username(cls, username, case_insensitive=False,
942 cache=False):
941 cache=False):
943
942
944 if case_insensitive:
943 if case_insensitive:
945 q = cls.select().where(
944 q = cls.select().where(
946 func.lower(cls.username) == func.lower(username))
945 func.lower(cls.username) == func.lower(username))
947 else:
946 else:
948 q = cls.select().where(cls.username == username)
947 q = cls.select().where(cls.username == username)
949
948
950 if cache:
949 if cache:
951 hash_key = _hash_key(username)
950 hash_key = _hash_key(username)
952 q = q.options(
951 q = q.options(
953 FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))
952 FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))
954
953
955 return cls.execute(q).scalar_one_or_none()
954 return cls.execute(q).scalar_one_or_none()
956
955
957 @classmethod
956 @classmethod
958 def get_by_auth_token(cls, auth_token, cache=False):
957 def get_by_auth_token(cls, auth_token, cache=False):
959
958
960 q = cls.select(User)\
959 q = cls.select(User)\
961 .join(UserApiKeys)\
960 .join(UserApiKeys)\
962 .where(UserApiKeys.api_key == auth_token)\
961 .where(UserApiKeys.api_key == auth_token)\
963 .where(or_(UserApiKeys.expires == -1,
962 .where(or_(UserApiKeys.expires == -1,
964 UserApiKeys.expires >= time.time()))
963 UserApiKeys.expires >= time.time()))
965
964
966 if cache:
965 if cache:
967 q = q.options(
966 q = q.options(
968 FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))
967 FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))
969
968
970 matched_user = cls.execute(q).scalar_one_or_none()
969 matched_user = cls.execute(q).scalar_one_or_none()
971
970
972 return matched_user
971 return matched_user
973
972
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Fetch a user by email address, falling back to the UserEmailMap
        (extra/alternate addresses) when no user matches the primary column.

        :param email: address to look up
        :param case_insensitive: compare on lower-cased values
        :param cache: wrap both lookups in the short SQL cache
        :return: User instance or None
        """
        if case_insensitive:
            q = cls.select().where(func.lower(cls.email) == func.lower(email))
        else:
            q = cls.select().where(cls.email == email)

        if cache:
            email_key = _hash_key(email)
            q = q.options(
                FromCache("sql_cache_short", f"get_email_key_{email_key}"))

        ret = cls.execute(q).scalar_one_or_none()

        if ret is None:
            q = cls.select(UserEmailMap)
            # try fetching in alternate email map
            if case_insensitive:
                q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.where(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                # email_key is only bound when cache=True above; both branches
                # share the same cache flag so this reference is safe
                q = q.options(
                    FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))

            result = cls.execute(q).scalar_one_or_none()
            # UserEmailMap rows carry a .user relationship; None-safe access
            ret = getattr(result, 'user', None)

        return ret
1005
1004
1006 @classmethod
1005 @classmethod
1007 def get_from_cs_author(cls, author):
1006 def get_from_cs_author(cls, author):
1008 """
1007 """
1009 Tries to get User objects out of commit author string
1008 Tries to get User objects out of commit author string
1010
1009
1011 :param author:
1010 :param author:
1012 """
1011 """
1013 from rhodecode.lib.helpers import email, author_name
1012 from rhodecode.lib.helpers import email, author_name
1014 # Valid email in the attribute passed, see if they're in the system
1013 # Valid email in the attribute passed, see if they're in the system
1015 _email = email(author)
1014 _email = email(author)
1016 if _email:
1015 if _email:
1017 user = cls.get_by_email(_email, case_insensitive=True)
1016 user = cls.get_by_email(_email, case_insensitive=True)
1018 if user:
1017 if user:
1019 return user
1018 return user
1020 # Maybe we can match by username?
1019 # Maybe we can match by username?
1021 _author = author_name(author)
1020 _author = author_name(author)
1022 user = cls.get_by_username(_author, case_insensitive=True)
1021 user = cls.get_by_username(_author, case_insensitive=True)
1023 if user:
1022 if user:
1024 return user
1023 return user
1025
1024
1026 def update_userdata(self, **kwargs):
1025 def update_userdata(self, **kwargs):
1027 usr = self
1026 usr = self
1028 old = usr.user_data
1027 old = usr.user_data
1029 old.update(**kwargs)
1028 old.update(**kwargs)
1030 usr.user_data = old
1029 usr.user_data = old
1031 Session().add(usr)
1030 Session().add(usr)
1032 log.debug('updated userdata with %s', kwargs)
1031 log.debug('updated userdata with %s', kwargs)
1033
1032
1034 def update_lastlogin(self):
1033 def update_lastlogin(self):
1035 """Update user lastlogin"""
1034 """Update user lastlogin"""
1036 self.last_login = datetime.datetime.now()
1035 self.last_login = datetime.datetime.now()
1037 Session().add(self)
1036 Session().add(self)
1038 log.debug('updated user %s lastlogin', self.username)
1037 log.debug('updated user %s lastlogin', self.username)
1039
1038
1040 def update_password(self, new_password):
1039 def update_password(self, new_password):
1041 from rhodecode.lib.auth import get_crypt_password
1040 from rhodecode.lib.auth import get_crypt_password
1042
1041
1043 self.password = get_crypt_password(new_password)
1042 self.password = get_crypt_password(new_password)
1044 Session().add(self)
1043 Session().add(self)
1045
1044
1046 @classmethod
1045 @classmethod
1047 def get_first_super_admin(cls):
1046 def get_first_super_admin(cls):
1048 stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
1047 stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
1049 user = cls.scalars(stmt).first()
1048 user = cls.scalars(stmt).first()
1050
1049
1051 if user is None:
1050 if user is None:
1052 raise Exception('FATAL: Missing administrative account!')
1051 raise Exception('FATAL: Missing administrative account!')
1053 return user
1052 return user
1054
1053
1055 @classmethod
1054 @classmethod
1056 def get_all_super_admins(cls, only_active=False):
1055 def get_all_super_admins(cls, only_active=False):
1057 """
1056 """
1058 Returns all admin accounts sorted by username
1057 Returns all admin accounts sorted by username
1059 """
1058 """
1060 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1059 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1061 if only_active:
1060 if only_active:
1062 qry = qry.filter(User.active == true())
1061 qry = qry.filter(User.active == true())
1063 return qry.all()
1062 return qry.all()
1064
1063
1065 @classmethod
1064 @classmethod
1066 def get_all_user_ids(cls, only_active=True):
1065 def get_all_user_ids(cls, only_active=True):
1067 """
1066 """
1068 Returns all users IDs
1067 Returns all users IDs
1069 """
1068 """
1070 qry = Session().query(User.user_id)
1069 qry = Session().query(User.user_id)
1071
1070
1072 if only_active:
1071 if only_active:
1073 qry = qry.filter(User.active == true())
1072 qry = qry.filter(User.active == true())
1074 return [x.user_id for x in qry]
1073 return [x.user_id for x in qry]
1075
1074
1076 @classmethod
1075 @classmethod
1077 def get_default_user(cls, cache=False, refresh=False):
1076 def get_default_user(cls, cache=False, refresh=False):
1078 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1077 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1079 if user is None:
1078 if user is None:
1080 raise Exception('FATAL: Missing default account!')
1079 raise Exception('FATAL: Missing default account!')
1081 if refresh:
1080 if refresh:
1082 # The default user might be based on outdated state which
1081 # The default user might be based on outdated state which
1083 # has been loaded from the cache.
1082 # has been loaded from the cache.
1084 # A call to refresh() ensures that the
1083 # A call to refresh() ensures that the
1085 # latest state from the database is used.
1084 # latest state from the database is used.
1086 Session().refresh(user)
1085 Session().refresh(user)
1087
1086
1088 return user
1087 return user
1089
1088
1090 @classmethod
1089 @classmethod
1091 def get_default_user_id(cls):
1090 def get_default_user_id(cls):
1092 import rhodecode
1091 import rhodecode
1093 return rhodecode.CONFIG['default_user_id']
1092 return rhodecode.CONFIG['default_user_id']
1094
1093
1095 def _get_default_perms(self, user, suffix=''):
1094 def _get_default_perms(self, user, suffix=''):
1096 from rhodecode.model.permission import PermissionModel
1095 from rhodecode.model.permission import PermissionModel
1097 return PermissionModel().get_default_perms(user.user_perms, suffix)
1096 return PermissionModel().get_default_perms(user.user_perms, suffix)
1098
1097
1099 def get_default_perms(self, suffix=''):
1098 def get_default_perms(self, suffix=''):
1100 return self._get_default_perms(self, suffix)
1099 return self._get_default_perms(self, suffix)
1101
1100
1102 def get_api_data(self, include_secrets=False, details='full'):
1101 def get_api_data(self, include_secrets=False, details='full'):
1103 """
1102 """
1104 Common function for generating user related data for API
1103 Common function for generating user related data for API
1105
1104
1106 :param include_secrets: By default secrets in the API data will be replaced
1105 :param include_secrets: By default secrets in the API data will be replaced
1107 by a placeholder value to prevent exposing this data by accident. In case
1106 by a placeholder value to prevent exposing this data by accident. In case
1108 this data shall be exposed, set this flag to ``True``.
1107 this data shall be exposed, set this flag to ``True``.
1109
1108
1110 :param details: details can be 'basic|full' basic gives only a subset of
1109 :param details: details can be 'basic|full' basic gives only a subset of
1111 the available user information that includes user_id, name and emails.
1110 the available user information that includes user_id, name and emails.
1112 """
1111 """
1113 user = self
1112 user = self
1114 user_data = self.user_data
1113 user_data = self.user_data
1115 data = {
1114 data = {
1116 'user_id': user.user_id,
1115 'user_id': user.user_id,
1117 'username': user.username,
1116 'username': user.username,
1118 'firstname': user.name,
1117 'firstname': user.name,
1119 'lastname': user.lastname,
1118 'lastname': user.lastname,
1120 'description': user.description,
1119 'description': user.description,
1121 'email': user.email,
1120 'email': user.email,
1122 'emails': user.emails,
1121 'emails': user.emails,
1123 }
1122 }
1124 if details == 'basic':
1123 if details == 'basic':
1125 return data
1124 return data
1126
1125
1127 auth_token_length = 40
1126 auth_token_length = 40
1128 auth_token_replacement = '*' * auth_token_length
1127 auth_token_replacement = '*' * auth_token_length
1129
1128
1130 extras = {
1129 extras = {
1131 'auth_tokens': [auth_token_replacement],
1130 'auth_tokens': [auth_token_replacement],
1132 'active': user.active,
1131 'active': user.active,
1133 'admin': user.admin,
1132 'admin': user.admin,
1134 'extern_type': user.extern_type,
1133 'extern_type': user.extern_type,
1135 'extern_name': user.extern_name,
1134 'extern_name': user.extern_name,
1136 'last_login': user.last_login,
1135 'last_login': user.last_login,
1137 'last_activity': user.last_activity,
1136 'last_activity': user.last_activity,
1138 'ip_addresses': user.ip_addresses,
1137 'ip_addresses': user.ip_addresses,
1139 'language': user_data.get('language')
1138 'language': user_data.get('language')
1140 }
1139 }
1141 data.update(extras)
1140 data.update(extras)
1142
1141
1143 if include_secrets:
1142 if include_secrets:
1144 data['auth_tokens'] = user.auth_tokens
1143 data['auth_tokens'] = user.auth_tokens
1145 return data
1144 return data
1146
1145
1147 def __json__(self):
1146 def __json__(self):
1148 data = {
1147 data = {
1149 'full_name': self.full_name,
1148 'full_name': self.full_name,
1150 'full_name_or_username': self.full_name_or_username,
1149 'full_name_or_username': self.full_name_or_username,
1151 'short_contact': self.short_contact,
1150 'short_contact': self.short_contact,
1152 'full_contact': self.full_contact,
1151 'full_contact': self.full_contact,
1153 }
1152 }
1154 data.update(self.get_api_data())
1153 data.update(self.get_api_data())
1155 return data
1154 return data
1156
1155
1157
1156
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens for users.

    Each token carries a role (what it may be used for) and an optional
    scope restricting it to a single repository or repository group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_HTTP = 'token_role_http'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    # The last one is ignored in the list as we only
    # use it for one action, and cannot be created by users
    ROLE_PASSWORD_RESET = 'token_password_reset'

    ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined', back_populates='user_auth_tokens')

    def __repr__(self):
        return f"<{self.cls_name}('{self.role}')>"

    def __json__(self):
        return {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired,
        }

    def get_api_data(self, include_secrets=False):
        """API payload; the raw token is obfuscated unless secrets requested."""
        data = self.__json__()
        if not include_secrets:
            data['auth_token'] = self.token_obfuscated
        return data

    @hybrid_property
    def description_safe(self):
        # HTML-escape the user-supplied description
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        # expires == -1 marks a token that never expires
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        """Short human-readable name for a token role."""
        names = {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }
        return names.get(role, role)

    @classmethod
    def _get_role_description(cls, role):
        """Long human-readable description for a token role."""
        descriptions = {
            cls.ROLE_ALL: _('Token for all actions.'),
            cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
                             'login using `api_access_controllers_whitelist` functionality.'),
            cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
                            'Requires auth_token authentication plugin to be active. <br/>'
                            'Such Token should be used then instead of a password to '
                            'interact with a repository, and additionally can be '
                            'limited to single repository using repo scope.'),
            cls.ROLE_API: _('Token limited to api calls.'),
            cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
        }
        return descriptions.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # show only the first 4 characters; returns None when no token set
        if self.api_key:
            return self.api_key[:4] + "****"
1278
1277
1279
1278
class UserEmailMap(Base, BaseModel):
    """
    Additional (alternate) email addresses mapped to a user.

    Addresses are stored lower-cased and must not collide with any primary
    address in the ``users`` table.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        Index('uem_user_id_idx', 'user_id'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined', back_populates='user_emails')

    @validates('_email')
    def validate_email(self, key, email):
        """
        Reject emails already used as a primary address in the users table.

        :raises AttributeError: when the email is a primary user email
        """
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # fixed grammar of the original message ("is present is user table")
            raise AttributeError(f'email {email} is present in user table')
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize: lower-case, empty values become None
        self._email = val.lower() if val else None
1310
1309
1311
1310
class UserIpMap(Base, BaseModel):
    """Per-user IP address (or CIDR range) whitelist entries."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', back_populates='user_ip_map')

    @hybrid_property
    def description_safe(self):
        # HTML-escape the user-supplied description
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return ``[network_address, broadcast_address]`` for an IP/CIDR."""
        # strict=False accepts host addresses with a netmask set
        net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __repr__(self):
        return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
1345
1344
1346
1345
class UserSshKeys(Base, BaseModel):
    """SSH public keys registered for a user, identified by fingerprint."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined', back_populates='user_ssh_keys')

    def __json__(self):
        return {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on,
        }

    def get_api_data(self):
        """API payload; identical to the JSON representation."""
        return self.__json__()
1381
1380
1382
1381
class UserLog(Base, BaseModel):
    """Audit-log entries recording user actions, optionally tied to a repo."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # SET NULL keeps log rows alive after the user/repo is deleted
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    user = relationship('User', cascade='', back_populates='user_log')
    repository = relationship('Repository', cascade='', back_populates='logs')

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        return self.user_log_id

    @property
    def action_as_day(self):
        """Calendar day (``datetime.date``) on which the action happened."""
        return datetime.date(*self.action_date.timetuple()[:3])
1429
1428
1430
1429
class UserGroup(Base, BaseModel):
    """A named group of users, with its own permission entries and owner."""

    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')

    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')

    @classmethod
    def _load_group_data(cls, column):
        """Decode the raw ``group_data`` column into a dict, never failing.

        Returns ``{}`` for empty/NULL columns and for undecodable values.
        """
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        # HTML-escape the free-text description before rendering.
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL-side expression: query the raw column directly.
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            # Best-effort: keep the previous value and log the failure.
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # Extract the external-sync marker from decoded group_data, if any.
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        return self._load_sync(self.group_data)

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Fetch a group by name, optionally case-insensitively and cached."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))
        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            q = q.options(
                FromCache("sql_cache_short", f"get_group_{name_key}"))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a group by primary key; returns None for falsy ids."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """Return user-group permission rows targeting this group, sorted."""
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.

        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
class UserGroupMember(Base, BaseModel):
    """Association row linking a :class:`User` to a :class:`UserGroup`."""

    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined', back_populates='group_member')
    users_group = relationship('UserGroup', back_populates='members')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings, not None — callers
        # apparently always pass both ids; confirm before changing.
        self.users_group_id = gr_id
        self.user_id = u_id
class RepositoryField(Base, BaseModel):
    """A single custom key/value metadata field attached to a repository."""

    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository', back_populates='extra_fields')

    @property
    def field_key_prefixed(self):
        # Use the shared PREFIX constant instead of a duplicated literal so
        # prefixing and un-prefixing cannot drift apart (same value: 'ex_').
        return f'{self.PREFIX}{self.field_key}'

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form PREFIX from *key* if present; otherwise return as-is."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for (*repo*, *key*) or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
class Repository(Base, BaseModel):
    """Core repository model: identity, state, cloning URIs and relations."""

    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # Templates for generated clone URLs (HTTP by name, HTTP by id, SSH).
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # Lifecycle states stored in ``repo_state``.
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # Reasons a repository lock was taken.
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # Remote URIs are stored encrypted at rest.
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # Stored as '<rev_type>:<rev>' — see the landing_rev hybrid property.
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # Stored as '<user_id>:<timestamp>:<reason>' — see the locked property.
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined', back_populates='repositories')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
    extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')

    logs = relationship('UserLog', back_populates='repository')

    comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')

    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan",
        #back_populates="pr_source"
    )
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan",
        #back_populates="pr_target"
    )

    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')

    scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)

    review_rules = relationship('RepoReviewRule')
    user_branch_perms = relationship('UserToRepoBranchPermission')
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
1809 def __repr__(self):
1808 def __repr__(self):
1810 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1809 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1811
1810
1812 @hybrid_property
1811 @hybrid_property
1813 def description_safe(self):
1812 def description_safe(self):
1814 from rhodecode.lib import helpers as h
1813 from rhodecode.lib import helpers as h
1815 return h.escape(self.description)
1814 return h.escape(self.description)
1816
1815
1817 @hybrid_property
1816 @hybrid_property
1818 def landing_rev(self):
1817 def landing_rev(self):
1819 # always should return [rev_type, rev], e.g ['branch', 'master']
1818 # always should return [rev_type, rev], e.g ['branch', 'master']
1820 if self._landing_revision:
1819 if self._landing_revision:
1821 _rev_info = self._landing_revision.split(':')
1820 _rev_info = self._landing_revision.split(':')
1822 if len(_rev_info) < 2:
1821 if len(_rev_info) < 2:
1823 _rev_info.insert(0, 'rev')
1822 _rev_info.insert(0, 'rev')
1824 return [_rev_info[0], _rev_info[1]]
1823 return [_rev_info[0], _rev_info[1]]
1825 return [None, None]
1824 return [None, None]
1826
1825
1827 @property
1826 @property
1828 def landing_ref_type(self):
1827 def landing_ref_type(self):
1829 return self.landing_rev[0]
1828 return self.landing_rev[0]
1830
1829
1831 @property
1830 @property
1832 def landing_ref_name(self):
1831 def landing_ref_name(self):
1833 return self.landing_rev[1]
1832 return self.landing_rev[1]
1834
1833
1835 @landing_rev.setter
1834 @landing_rev.setter
1836 def landing_rev(self, val):
1835 def landing_rev(self, val):
1837 if ':' not in val:
1836 if ':' not in val:
1838 raise ValueError('value must be delimited with `:` and consist '
1837 raise ValueError('value must be delimited with `:` and consist '
1839 'of <rev_type>:<rev>, got %s instead' % val)
1838 'of <rev_type>:<rev>, got %s instead' % val)
1840 self._landing_revision = val
1839 self._landing_revision = val
1841
1840
1842 @hybrid_property
1841 @hybrid_property
1843 def locked(self):
1842 def locked(self):
1844 if self._locked:
1843 if self._locked:
1845 user_id, timelocked, reason = self._locked.split(':')
1844 user_id, timelocked, reason = self._locked.split(':')
1846 lock_values = int(user_id), timelocked, reason
1845 lock_values = int(user_id), timelocked, reason
1847 else:
1846 else:
1848 lock_values = [None, None, None]
1847 lock_values = [None, None, None]
1849 return lock_values
1848 return lock_values
1850
1849
1851 @locked.setter
1850 @locked.setter
1852 def locked(self, val):
1851 def locked(self, val):
1853 if val and isinstance(val, (list, tuple)):
1852 if val and isinstance(val, (list, tuple)):
1854 self._locked = ':'.join(map(str, val))
1853 self._locked = ':'.join(map(str, val))
1855 else:
1854 else:
1856 self._locked = None
1855 self._locked = None
1857
1856
1858 @classmethod
1857 @classmethod
1859 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1858 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1860 from rhodecode.lib.vcs.backends.base import EmptyCommit
1859 from rhodecode.lib.vcs.backends.base import EmptyCommit
1861 dummy = EmptyCommit().__json__()
1860 dummy = EmptyCommit().__json__()
1862 if not changeset_cache_raw:
1861 if not changeset_cache_raw:
1863 dummy['source_repo_id'] = repo_id
1862 dummy['source_repo_id'] = repo_id
1864 return json.loads(json.dumps(dummy))
1863 return json.loads(json.dumps(dummy))
1865
1864
1866 try:
1865 try:
1867 return json.loads(changeset_cache_raw)
1866 return json.loads(changeset_cache_raw)
1868 except TypeError:
1867 except TypeError:
1869 return dummy
1868 return dummy
1870 except Exception:
1869 except Exception:
1871 log.error(traceback.format_exc())
1870 log.error(traceback.format_exc())
1872 return dummy
1871 return dummy
1873
1872
1874 @hybrid_property
1873 @hybrid_property
1875 def changeset_cache(self):
1874 def changeset_cache(self):
1876 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1875 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1877
1876
1878 @changeset_cache.setter
1877 @changeset_cache.setter
1879 def changeset_cache(self, val):
1878 def changeset_cache(self, val):
1880 try:
1879 try:
1881 self._changeset_cache = json.dumps(val)
1880 self._changeset_cache = json.dumps(val)
1882 except Exception:
1881 except Exception:
1883 log.error(traceback.format_exc())
1882 log.error(traceback.format_exc())
1884
1883
1885 @hybrid_property
1884 @hybrid_property
1886 def repo_name(self):
1885 def repo_name(self):
1887 return self._repo_name
1886 return self._repo_name
1888
1887
1889 @repo_name.setter
1888 @repo_name.setter
1890 def repo_name(self, value):
1889 def repo_name(self, value):
1891 self._repo_name = value
1890 self._repo_name = value
1892 self.repo_name_hash = sha1(safe_bytes(value))
1891 self.repo_name_hash = sha1(safe_bytes(value))
1893
1892
1894 @classmethod
1893 @classmethod
1895 def normalize_repo_name(cls, repo_name):
1894 def normalize_repo_name(cls, repo_name):
1896 """
1895 """
1897 Normalizes os specific repo_name to the format internally stored inside
1896 Normalizes os specific repo_name to the format internally stored inside
1898 database using URL_SEP
1897 database using URL_SEP
1899
1898
1900 :param cls:
1899 :param cls:
1901 :param repo_name:
1900 :param repo_name:
1902 """
1901 """
1903 return cls.NAME_SEP.join(repo_name.split(os.sep))
1902 return cls.NAME_SEP.join(repo_name.split(os.sep))
1904
1903
1905 @classmethod
1904 @classmethod
1906 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1905 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1907 session = Session()
1906 session = Session()
1908 q = session.query(cls).filter(cls.repo_name == repo_name)
1907 q = session.query(cls).filter(cls.repo_name == repo_name)
1909
1908
1910 if cache:
1909 if cache:
1911 if identity_cache:
1910 if identity_cache:
1912 val = cls.identity_cache(session, 'repo_name', repo_name)
1911 val = cls.identity_cache(session, 'repo_name', repo_name)
1913 if val:
1912 if val:
1914 return val
1913 return val
1915 else:
1914 else:
1916 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1915 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1917 q = q.options(
1916 q = q.options(
1918 FromCache("sql_cache_short", cache_key))
1917 FromCache("sql_cache_short", cache_key))
1919
1918
1920 return q.scalar()
1919 return q.scalar()
1921
1920
1922 @classmethod
1921 @classmethod
1923 def get_by_id_or_repo_name(cls, repoid):
1922 def get_by_id_or_repo_name(cls, repoid):
1924 if isinstance(repoid, int):
1923 if isinstance(repoid, int):
1925 try:
1924 try:
1926 repo = cls.get(repoid)
1925 repo = cls.get(repoid)
1927 except ValueError:
1926 except ValueError:
1928 repo = None
1927 repo = None
1929 else:
1928 else:
1930 repo = cls.get_by_repo_name(repoid)
1929 repo = cls.get_by_repo_name(repoid)
1931 return repo
1930 return repo
1932
1931
1933 @classmethod
1932 @classmethod
1934 def get_by_full_path(cls, repo_full_path):
1933 def get_by_full_path(cls, repo_full_path):
1935 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1934 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1936 repo_name = cls.normalize_repo_name(repo_name)
1935 repo_name = cls.normalize_repo_name(repo_name)
1937 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1936 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1938
1937
1939 @classmethod
1938 @classmethod
1940 def get_repo_forks(cls, repo_id):
1939 def get_repo_forks(cls, repo_id):
1941 return cls.query().filter(Repository.fork_id == repo_id)
1940 return cls.query().filter(Repository.fork_id == repo_id)
1942
1941
1943 @classmethod
1942 @classmethod
1944 def base_path(cls):
1943 def base_path(cls):
1945 """
1944 """
1946 Returns base path when all repos are stored
1945 Returns base path when all repos are stored
1947
1946
1948 :param cls:
1947 :param cls:
1949 """
1948 """
1950 from rhodecode.lib.utils import get_rhodecode_base_path
1949 from rhodecode.lib.utils import get_rhodecode_base_path
1951 return get_rhodecode_base_path()
1950 return get_rhodecode_base_path()
1952
1951
1953 @classmethod
1952 @classmethod
1954 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1953 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1955 case_insensitive=True, archived=False):
1954 case_insensitive=True, archived=False):
1956 q = Repository.query()
1955 q = Repository.query()
1957
1956
1958 if not archived:
1957 if not archived:
1959 q = q.filter(Repository.archived.isnot(true()))
1958 q = q.filter(Repository.archived.isnot(true()))
1960
1959
1961 if not isinstance(user_id, Optional):
1960 if not isinstance(user_id, Optional):
1962 q = q.filter(Repository.user_id == user_id)
1961 q = q.filter(Repository.user_id == user_id)
1963
1962
1964 if not isinstance(group_id, Optional):
1963 if not isinstance(group_id, Optional):
1965 q = q.filter(Repository.group_id == group_id)
1964 q = q.filter(Repository.group_id == group_id)
1966
1965
1967 if case_insensitive:
1966 if case_insensitive:
1968 q = q.order_by(func.lower(Repository.repo_name))
1967 q = q.order_by(func.lower(Repository.repo_name))
1969 else:
1968 else:
1970 q = q.order_by(Repository.repo_name)
1969 q = q.order_by(Repository.repo_name)
1971
1970
1972 return q.all()
1971 return q.all()
1973
1972
1974 @property
1973 @property
1975 def repo_uid(self):
1974 def repo_uid(self):
1976 return '_{}'.format(self.repo_id)
1975 return '_{}'.format(self.repo_id)
1977
1976
1978 @property
1977 @property
1979 def forks(self):
1978 def forks(self):
1980 """
1979 """
1981 Return forks of this repo
1980 Return forks of this repo
1982 """
1981 """
1983 return Repository.get_repo_forks(self.repo_id)
1982 return Repository.get_repo_forks(self.repo_id)
1984
1983
1985 @property
1984 @property
1986 def parent(self):
1985 def parent(self):
1987 """
1986 """
1988 Returns fork parent
1987 Returns fork parent
1989 """
1988 """
1990 return self.fork
1989 return self.fork
1991
1990
1992 @property
1991 @property
1993 def just_name(self):
1992 def just_name(self):
1994 return self.repo_name.split(self.NAME_SEP)[-1]
1993 return self.repo_name.split(self.NAME_SEP)[-1]
1995
1994
1996 @property
1995 @property
1997 def groups_with_parents(self):
1996 def groups_with_parents(self):
1998 groups = []
1997 groups = []
1999 if self.group is None:
1998 if self.group is None:
2000 return groups
1999 return groups
2001
2000
2002 cur_gr = self.group
2001 cur_gr = self.group
2003 groups.insert(0, cur_gr)
2002 groups.insert(0, cur_gr)
2004 while 1:
2003 while 1:
2005 gr = getattr(cur_gr, 'parent_group', None)
2004 gr = getattr(cur_gr, 'parent_group', None)
2006 cur_gr = cur_gr.parent_group
2005 cur_gr = cur_gr.parent_group
2007 if gr is None:
2006 if gr is None:
2008 break
2007 break
2009 groups.insert(0, gr)
2008 groups.insert(0, gr)
2010
2009
2011 return groups
2010 return groups
2012
2011
2013 @property
2012 @property
2014 def groups_and_repo(self):
2013 def groups_and_repo(self):
2015 return self.groups_with_parents, self
2014 return self.groups_with_parents, self
2016
2015
2017 @LazyProperty
2016 @LazyProperty
2018 def repo_path(self):
2017 def repo_path(self):
2019 """
2018 """
2020 Returns base full path for that repository means where it actually
2019 Returns base full path for that repository means where it actually
2021 exists on a filesystem
2020 exists on a filesystem
2022 """
2021 """
2023 q = Session().query(RhodeCodeUi).filter(
2022 q = Session().query(RhodeCodeUi).filter(
2024 RhodeCodeUi.ui_key == self.NAME_SEP)
2023 RhodeCodeUi.ui_key == self.NAME_SEP)
2025 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2024 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2026 return q.one().ui_value
2025 return q.one().ui_value
2027
2026
2028 @property
2027 @property
2029 def repo_full_path(self):
2028 def repo_full_path(self):
2030 p = [self.repo_path]
2029 p = [self.repo_path]
2031 # we need to split the name by / since this is how we store the
2030 # we need to split the name by / since this is how we store the
2032 # names in the database, but that eventually needs to be converted
2031 # names in the database, but that eventually needs to be converted
2033 # into a valid system path
2032 # into a valid system path
2034 p += self.repo_name.split(self.NAME_SEP)
2033 p += self.repo_name.split(self.NAME_SEP)
2035 return os.path.join(*map(safe_str, p))
2034 return os.path.join(*map(safe_str, p))
2036
2035
2037 @property
2036 @property
2038 def cache_keys(self):
2037 def cache_keys(self):
2039 """
2038 """
2040 Returns associated cache keys for that repo
2039 Returns associated cache keys for that repo
2041 """
2040 """
2042 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2041 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2043 repo_id=self.repo_id)
2042 repo_id=self.repo_id)
2044 return CacheKey.query()\
2043 return CacheKey.query()\
2045 .filter(CacheKey.cache_args == invalidation_namespace)\
2044 .filter(CacheKey.cache_args == invalidation_namespace)\
2046 .order_by(CacheKey.cache_key)\
2045 .order_by(CacheKey.cache_key)\
2047 .all()
2046 .all()
2048
2047
2049 @property
2048 @property
2050 def cached_diffs_relative_dir(self):
2049 def cached_diffs_relative_dir(self):
2051 """
2050 """
2052 Return a relative to the repository store path of cached diffs
2051 Return a relative to the repository store path of cached diffs
2053 used for safe display for users, who shouldn't know the absolute store
2052 used for safe display for users, who shouldn't know the absolute store
2054 path
2053 path
2055 """
2054 """
2056 return os.path.join(
2055 return os.path.join(
2057 os.path.dirname(self.repo_name),
2056 os.path.dirname(self.repo_name),
2058 self.cached_diffs_dir.split(os.path.sep)[-1])
2057 self.cached_diffs_dir.split(os.path.sep)[-1])
2059
2058
2060 @property
2059 @property
2061 def cached_diffs_dir(self):
2060 def cached_diffs_dir(self):
2062 path = self.repo_full_path
2061 path = self.repo_full_path
2063 return os.path.join(
2062 return os.path.join(
2064 os.path.dirname(path),
2063 os.path.dirname(path),
2065 f'.__shadow_diff_cache_repo_{self.repo_id}')
2064 f'.__shadow_diff_cache_repo_{self.repo_id}')
2066
2065
2067 def cached_diffs(self):
2066 def cached_diffs(self):
2068 diff_cache_dir = self.cached_diffs_dir
2067 diff_cache_dir = self.cached_diffs_dir
2069 if os.path.isdir(diff_cache_dir):
2068 if os.path.isdir(diff_cache_dir):
2070 return os.listdir(diff_cache_dir)
2069 return os.listdir(diff_cache_dir)
2071 return []
2070 return []
2072
2071
2073 def shadow_repos(self):
2072 def shadow_repos(self):
2074 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2073 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2075 return [
2074 return [
2076 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2075 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2077 if x.startswith(shadow_repos_pattern)
2076 if x.startswith(shadow_repos_pattern)
2078 ]
2077 ]
2079
2078
2080 def get_new_name(self, repo_name):
2079 def get_new_name(self, repo_name):
2081 """
2080 """
2082 returns new full repository name based on assigned group and new new
2081 returns new full repository name based on assigned group and new new
2083
2082
2084 :param repo_name:
2083 :param repo_name:
2085 """
2084 """
2086 path_prefix = self.group.full_path_splitted if self.group else []
2085 path_prefix = self.group.full_path_splitted if self.group else []
2087 return self.NAME_SEP.join(path_prefix + [repo_name])
2086 return self.NAME_SEP.join(path_prefix + [repo_name])
2088
2087
2089 @property
2088 @property
2090 def _config(self):
2089 def _config(self):
2091 """
2090 """
2092 Returns db based config object.
2091 Returns db based config object.
2093 """
2092 """
2094 from rhodecode.lib.utils import make_db_config
2093 from rhodecode.lib.utils import make_db_config
2095 return make_db_config(clear_session=False, repo=self)
2094 return make_db_config(clear_session=False, repo=self)
2096
2095
2097 def permissions(self, with_admins=True, with_owner=True,
2096 def permissions(self, with_admins=True, with_owner=True,
2098 expand_from_user_groups=False):
2097 expand_from_user_groups=False):
2099 """
2098 """
2100 Permissions for repositories
2099 Permissions for repositories
2101 """
2100 """
2102 _admin_perm = 'repository.admin'
2101 _admin_perm = 'repository.admin'
2103
2102
2104 owner_row = []
2103 owner_row = []
2105 if with_owner:
2104 if with_owner:
2106 usr = AttributeDict(self.user.get_dict())
2105 usr = AttributeDict(self.user.get_dict())
2107 usr.owner_row = True
2106 usr.owner_row = True
2108 usr.permission = _admin_perm
2107 usr.permission = _admin_perm
2109 usr.permission_id = None
2108 usr.permission_id = None
2110 owner_row.append(usr)
2109 owner_row.append(usr)
2111
2110
2112 super_admin_ids = []
2111 super_admin_ids = []
2113 super_admin_rows = []
2112 super_admin_rows = []
2114 if with_admins:
2113 if with_admins:
2115 for usr in User.get_all_super_admins():
2114 for usr in User.get_all_super_admins():
2116 super_admin_ids.append(usr.user_id)
2115 super_admin_ids.append(usr.user_id)
2117 # if this admin is also owner, don't double the record
2116 # if this admin is also owner, don't double the record
2118 if usr.user_id == owner_row[0].user_id:
2117 if usr.user_id == owner_row[0].user_id:
2119 owner_row[0].admin_row = True
2118 owner_row[0].admin_row = True
2120 else:
2119 else:
2121 usr = AttributeDict(usr.get_dict())
2120 usr = AttributeDict(usr.get_dict())
2122 usr.admin_row = True
2121 usr.admin_row = True
2123 usr.permission = _admin_perm
2122 usr.permission = _admin_perm
2124 usr.permission_id = None
2123 usr.permission_id = None
2125 super_admin_rows.append(usr)
2124 super_admin_rows.append(usr)
2126
2125
2127 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2126 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2128 q = q.options(joinedload(UserRepoToPerm.repository),
2127 q = q.options(joinedload(UserRepoToPerm.repository),
2129 joinedload(UserRepoToPerm.user),
2128 joinedload(UserRepoToPerm.user),
2130 joinedload(UserRepoToPerm.permission),)
2129 joinedload(UserRepoToPerm.permission),)
2131
2130
2132 # get owners and admins and permissions. We do a trick of re-writing
2131 # get owners and admins and permissions. We do a trick of re-writing
2133 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2132 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2134 # has a global reference and changing one object propagates to all
2133 # has a global reference and changing one object propagates to all
2135 # others. This means if admin is also an owner admin_row that change
2134 # others. This means if admin is also an owner admin_row that change
2136 # would propagate to both objects
2135 # would propagate to both objects
2137 perm_rows = []
2136 perm_rows = []
2138 for _usr in q.all():
2137 for _usr in q.all():
2139 usr = AttributeDict(_usr.user.get_dict())
2138 usr = AttributeDict(_usr.user.get_dict())
2140 # if this user is also owner/admin, mark as duplicate record
2139 # if this user is also owner/admin, mark as duplicate record
2141 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2140 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2142 usr.duplicate_perm = True
2141 usr.duplicate_perm = True
2143 # also check if this permission is maybe used by branch_permissions
2142 # also check if this permission is maybe used by branch_permissions
2144 if _usr.branch_perm_entry:
2143 if _usr.branch_perm_entry:
2145 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2144 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2146
2145
2147 usr.permission = _usr.permission.permission_name
2146 usr.permission = _usr.permission.permission_name
2148 usr.permission_id = _usr.repo_to_perm_id
2147 usr.permission_id = _usr.repo_to_perm_id
2149 perm_rows.append(usr)
2148 perm_rows.append(usr)
2150
2149
2151 # filter the perm rows by 'default' first and then sort them by
2150 # filter the perm rows by 'default' first and then sort them by
2152 # admin,write,read,none permissions sorted again alphabetically in
2151 # admin,write,read,none permissions sorted again alphabetically in
2153 # each group
2152 # each group
2154 perm_rows = sorted(perm_rows, key=display_user_sort)
2153 perm_rows = sorted(perm_rows, key=display_user_sort)
2155
2154
2156 user_groups_rows = []
2155 user_groups_rows = []
2157 if expand_from_user_groups:
2156 if expand_from_user_groups:
2158 for ug in self.permission_user_groups(with_members=True):
2157 for ug in self.permission_user_groups(with_members=True):
2159 for user_data in ug.members:
2158 for user_data in ug.members:
2160 user_groups_rows.append(user_data)
2159 user_groups_rows.append(user_data)
2161
2160
2162 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2161 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2163
2162
2164 def permission_user_groups(self, with_members=True):
2163 def permission_user_groups(self, with_members=True):
2165 q = UserGroupRepoToPerm.query()\
2164 q = UserGroupRepoToPerm.query()\
2166 .filter(UserGroupRepoToPerm.repository == self)
2165 .filter(UserGroupRepoToPerm.repository == self)
2167 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2166 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2168 joinedload(UserGroupRepoToPerm.users_group),
2167 joinedload(UserGroupRepoToPerm.users_group),
2169 joinedload(UserGroupRepoToPerm.permission),)
2168 joinedload(UserGroupRepoToPerm.permission),)
2170
2169
2171 perm_rows = []
2170 perm_rows = []
2172 for _user_group in q.all():
2171 for _user_group in q.all():
2173 entry = AttributeDict(_user_group.users_group.get_dict())
2172 entry = AttributeDict(_user_group.users_group.get_dict())
2174 entry.permission = _user_group.permission.permission_name
2173 entry.permission = _user_group.permission.permission_name
2175 if with_members:
2174 if with_members:
2176 entry.members = [x.user.get_dict()
2175 entry.members = [x.user.get_dict()
2177 for x in _user_group.users_group.members]
2176 for x in _user_group.users_group.members]
2178 perm_rows.append(entry)
2177 perm_rows.append(entry)
2179
2178
2180 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2179 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2181 return perm_rows
2180 return perm_rows
2182
2181
2183 def get_api_data(self, include_secrets=False):
2182 def get_api_data(self, include_secrets=False):
2184 """
2183 """
2185 Common function for generating repo api data
2184 Common function for generating repo api data
2186
2185
2187 :param include_secrets: See :meth:`User.get_api_data`.
2186 :param include_secrets: See :meth:`User.get_api_data`.
2188
2187
2189 """
2188 """
2190 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2189 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2191 # move this methods on models level.
2190 # move this methods on models level.
2192 from rhodecode.model.settings import SettingsModel
2191 from rhodecode.model.settings import SettingsModel
2193 from rhodecode.model.repo import RepoModel
2192 from rhodecode.model.repo import RepoModel
2194
2193
2195 repo = self
2194 repo = self
2196 _user_id, _time, _reason = self.locked
2195 _user_id, _time, _reason = self.locked
2197
2196
2198 data = {
2197 data = {
2199 'repo_id': repo.repo_id,
2198 'repo_id': repo.repo_id,
2200 'repo_name': repo.repo_name,
2199 'repo_name': repo.repo_name,
2201 'repo_type': repo.repo_type,
2200 'repo_type': repo.repo_type,
2202 'clone_uri': repo.clone_uri or '',
2201 'clone_uri': repo.clone_uri or '',
2203 'push_uri': repo.push_uri or '',
2202 'push_uri': repo.push_uri or '',
2204 'url': RepoModel().get_url(self),
2203 'url': RepoModel().get_url(self),
2205 'private': repo.private,
2204 'private': repo.private,
2206 'created_on': repo.created_on,
2205 'created_on': repo.created_on,
2207 'description': repo.description_safe,
2206 'description': repo.description_safe,
2208 'landing_rev': repo.landing_rev,
2207 'landing_rev': repo.landing_rev,
2209 'owner': repo.user.username,
2208 'owner': repo.user.username,
2210 'fork_of': repo.fork.repo_name if repo.fork else None,
2209 'fork_of': repo.fork.repo_name if repo.fork else None,
2211 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2210 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2212 'enable_statistics': repo.enable_statistics,
2211 'enable_statistics': repo.enable_statistics,
2213 'enable_locking': repo.enable_locking,
2212 'enable_locking': repo.enable_locking,
2214 'enable_downloads': repo.enable_downloads,
2213 'enable_downloads': repo.enable_downloads,
2215 'last_changeset': repo.changeset_cache,
2214 'last_changeset': repo.changeset_cache,
2216 'locked_by': User.get(_user_id).get_api_data(
2215 'locked_by': User.get(_user_id).get_api_data(
2217 include_secrets=include_secrets) if _user_id else None,
2216 include_secrets=include_secrets) if _user_id else None,
2218 'locked_date': time_to_datetime(_time) if _time else None,
2217 'locked_date': time_to_datetime(_time) if _time else None,
2219 'lock_reason': _reason if _reason else None,
2218 'lock_reason': _reason if _reason else None,
2220 }
2219 }
2221
2220
2222 # TODO: mikhail: should be per-repo settings here
2221 # TODO: mikhail: should be per-repo settings here
2223 rc_config = SettingsModel().get_all_settings()
2222 rc_config = SettingsModel().get_all_settings()
2224 repository_fields = str2bool(
2223 repository_fields = str2bool(
2225 rc_config.get('rhodecode_repository_fields'))
2224 rc_config.get('rhodecode_repository_fields'))
2226 if repository_fields:
2225 if repository_fields:
2227 for f in self.extra_fields:
2226 for f in self.extra_fields:
2228 data[f.field_key_prefixed] = f.field_value
2227 data[f.field_key_prefixed] = f.field_value
2229
2228
2230 return data
2229 return data
2231
2230
2232 @classmethod
2231 @classmethod
2233 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2232 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2234 if not lock_time:
2233 if not lock_time:
2235 lock_time = time.time()
2234 lock_time = time.time()
2236 if not lock_reason:
2235 if not lock_reason:
2237 lock_reason = cls.LOCK_AUTOMATIC
2236 lock_reason = cls.LOCK_AUTOMATIC
2238 repo.locked = [user_id, lock_time, lock_reason]
2237 repo.locked = [user_id, lock_time, lock_reason]
2239 Session().add(repo)
2238 Session().add(repo)
2240 Session().commit()
2239 Session().commit()
2241
2240
2242 @classmethod
2241 @classmethod
2243 def unlock(cls, repo):
2242 def unlock(cls, repo):
2244 repo.locked = None
2243 repo.locked = None
2245 Session().add(repo)
2244 Session().add(repo)
2246 Session().commit()
2245 Session().commit()
2247
2246
2248 @classmethod
2247 @classmethod
2249 def getlock(cls, repo):
2248 def getlock(cls, repo):
2250 return repo.locked
2249 return repo.locked
2251
2250
2252 def get_locking_state(self, action, user_id, only_when_enabled=True):
2251 def get_locking_state(self, action, user_id, only_when_enabled=True):
2253 """
2252 """
2254 Checks locking on this repository, if locking is enabled and lock is
2253 Checks locking on this repository, if locking is enabled and lock is
2255 present returns a tuple of make_lock, locked, locked_by.
2254 present returns a tuple of make_lock, locked, locked_by.
2256 make_lock can have 3 states None (do nothing) True, make lock
2255 make_lock can have 3 states None (do nothing) True, make lock
2257 False release lock, This value is later propagated to hooks, which
2256 False release lock, This value is later propagated to hooks, which
2258 do the locking. Think about this as signals passed to hooks what to do.
2257 do the locking. Think about this as signals passed to hooks what to do.
2259
2258
2260 """
2259 """
2261 # TODO: johbo: This is part of the business logic and should be moved
2260 # TODO: johbo: This is part of the business logic and should be moved
2262 # into the RepositoryModel.
2261 # into the RepositoryModel.
2263
2262
2264 if action not in ('push', 'pull'):
2263 if action not in ('push', 'pull'):
2265 raise ValueError("Invalid action value: %s" % repr(action))
2264 raise ValueError("Invalid action value: %s" % repr(action))
2266
2265
2267 # defines if locked error should be thrown to user
2266 # defines if locked error should be thrown to user
2268 currently_locked = False
2267 currently_locked = False
2269 # defines if new lock should be made, tri-state
2268 # defines if new lock should be made, tri-state
2270 make_lock = None
2269 make_lock = None
2271 repo = self
2270 repo = self
2272 user = User.get(user_id)
2271 user = User.get(user_id)
2273
2272
2274 lock_info = repo.locked
2273 lock_info = repo.locked
2275
2274
2276 if repo and (repo.enable_locking or not only_when_enabled):
2275 if repo and (repo.enable_locking or not only_when_enabled):
2277 if action == 'push':
2276 if action == 'push':
2278 # check if it's already locked !, if it is compare users
2277 # check if it's already locked !, if it is compare users
2279 locked_by_user_id = lock_info[0]
2278 locked_by_user_id = lock_info[0]
2280 if user.user_id == locked_by_user_id:
2279 if user.user_id == locked_by_user_id:
2281 log.debug(
2280 log.debug(
2282 'Got `push` action from user %s, now unlocking', user)
2281 'Got `push` action from user %s, now unlocking', user)
2283 # unlock if we have push from user who locked
2282 # unlock if we have push from user who locked
2284 make_lock = False
2283 make_lock = False
2285 else:
2284 else:
2286 # we're not the same user who locked, ban with
2285 # we're not the same user who locked, ban with
2287 # code defined in settings (default is 423 HTTP Locked) !
2286 # code defined in settings (default is 423 HTTP Locked) !
2288 log.debug('Repo %s is currently locked by %s', repo, user)
2287 log.debug('Repo %s is currently locked by %s', repo, user)
2289 currently_locked = True
2288 currently_locked = True
2290 elif action == 'pull':
2289 elif action == 'pull':
2291 # [0] user [1] date
2290 # [0] user [1] date
2292 if lock_info[0] and lock_info[1]:
2291 if lock_info[0] and lock_info[1]:
2293 log.debug('Repo %s is currently locked by %s', repo, user)
2292 log.debug('Repo %s is currently locked by %s', repo, user)
2294 currently_locked = True
2293 currently_locked = True
2295 else:
2294 else:
2296 log.debug('Setting lock on repo %s by %s', repo, user)
2295 log.debug('Setting lock on repo %s by %s', repo, user)
2297 make_lock = True
2296 make_lock = True
2298
2297
2299 else:
2298 else:
2300 log.debug('Repository %s do not have locking enabled', repo)
2299 log.debug('Repository %s do not have locking enabled', repo)
2301
2300
2302 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2301 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2303 make_lock, currently_locked, lock_info)
2302 make_lock, currently_locked, lock_info)
2304
2303
2305 from rhodecode.lib.auth import HasRepoPermissionAny
2304 from rhodecode.lib.auth import HasRepoPermissionAny
2306 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2305 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2307 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2306 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2308 # if we don't have at least write permission we cannot make a lock
2307 # if we don't have at least write permission we cannot make a lock
2309 log.debug('lock state reset back to FALSE due to lack '
2308 log.debug('lock state reset back to FALSE due to lack '
2310 'of at least read permission')
2309 'of at least read permission')
2311 make_lock = False
2310 make_lock = False
2312
2311
2313 return make_lock, currently_locked, lock_info
2312 return make_lock, currently_locked, lock_info
2314
2313
2315 @property
2314 @property
2316 def last_commit_cache_update_diff(self):
2315 def last_commit_cache_update_diff(self):
2317 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2316 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2318
2317
2319 @classmethod
2318 @classmethod
2320 def _load_commit_change(cls, last_commit_cache):
2319 def _load_commit_change(cls, last_commit_cache):
2321 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2320 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2322 empty_date = datetime.datetime.fromtimestamp(0)
2321 empty_date = datetime.datetime.fromtimestamp(0)
2323 date_latest = last_commit_cache.get('date', empty_date)
2322 date_latest = last_commit_cache.get('date', empty_date)
2324 try:
2323 try:
2325 return parse_datetime(date_latest)
2324 return parse_datetime(date_latest)
2326 except Exception:
2325 except Exception:
2327 return empty_date
2326 return empty_date
2328
2327
2329 @property
2328 @property
2330 def last_commit_change(self):
2329 def last_commit_change(self):
2331 return self._load_commit_change(self.changeset_cache)
2330 return self._load_commit_change(self.changeset_cache)
2332
2331
2333 @property
2332 @property
2334 def last_db_change(self):
2333 def last_db_change(self):
2335 return self.updated_on
2334 return self.updated_on
2336
2335
2337 @property
2336 @property
2338 def clone_uri_hidden(self):
2337 def clone_uri_hidden(self):
2339 clone_uri = self.clone_uri
2338 clone_uri = self.clone_uri
2340 if clone_uri:
2339 if clone_uri:
2341 import urlobject
2340 import urlobject
2342 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2341 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2343 if url_obj.password:
2342 if url_obj.password:
2344 clone_uri = url_obj.with_password('*****')
2343 clone_uri = url_obj.with_password('*****')
2345 return clone_uri
2344 return clone_uri
2346
2345
2347 @property
2346 @property
2348 def push_uri_hidden(self):
2347 def push_uri_hidden(self):
2349 push_uri = self.push_uri
2348 push_uri = self.push_uri
2350 if push_uri:
2349 if push_uri:
2351 import urlobject
2350 import urlobject
2352 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2351 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2353 if url_obj.password:
2352 if url_obj.password:
2354 push_uri = url_obj.with_password('*****')
2353 push_uri = url_obj.with_password('*****')
2355 return push_uri
2354 return push_uri
2356
2355
2357 def clone_url(self, **override):
2356 def clone_url(self, **override):
2358 from rhodecode.model.settings import SettingsModel
2357 from rhodecode.model.settings import SettingsModel
2359
2358
2360 uri_tmpl = None
2359 uri_tmpl = None
2361 if 'with_id' in override:
2360 if 'with_id' in override:
2362 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2361 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2363 del override['with_id']
2362 del override['with_id']
2364
2363
2365 if 'uri_tmpl' in override:
2364 if 'uri_tmpl' in override:
2366 uri_tmpl = override['uri_tmpl']
2365 uri_tmpl = override['uri_tmpl']
2367 del override['uri_tmpl']
2366 del override['uri_tmpl']
2368
2367
2369 ssh = False
2368 ssh = False
2370 if 'ssh' in override:
2369 if 'ssh' in override:
2371 ssh = True
2370 ssh = True
2372 del override['ssh']
2371 del override['ssh']
2373
2372
2374 # we didn't override our tmpl from **overrides
2373 # we didn't override our tmpl from **overrides
2375 request = get_current_request()
2374 request = get_current_request()
2376 if not uri_tmpl:
2375 if not uri_tmpl:
2377 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2376 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2378 rc_config = request.call_context.rc_config
2377 rc_config = request.call_context.rc_config
2379 else:
2378 else:
2380 rc_config = SettingsModel().get_all_settings(cache=True)
2379 rc_config = SettingsModel().get_all_settings(cache=True)
2381
2380
2382 if ssh:
2381 if ssh:
2383 uri_tmpl = rc_config.get(
2382 uri_tmpl = rc_config.get(
2384 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2383 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2385
2384
2386 else:
2385 else:
2387 uri_tmpl = rc_config.get(
2386 uri_tmpl = rc_config.get(
2388 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2387 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2389
2388
2390 return get_clone_url(request=request,
2389 return get_clone_url(request=request,
2391 uri_tmpl=uri_tmpl,
2390 uri_tmpl=uri_tmpl,
2392 repo_name=self.repo_name,
2391 repo_name=self.repo_name,
2393 repo_id=self.repo_id,
2392 repo_id=self.repo_id,
2394 repo_type=self.repo_type,
2393 repo_type=self.repo_type,
2395 **override)
2394 **override)
2396
2395
2397 def set_state(self, state):
2396 def set_state(self, state):
2398 self.repo_state = state
2397 self.repo_state = state
2399 Session().add(self)
2398 Session().add(self)
2400 #==========================================================================
2399 #==========================================================================
2401 # SCM PROPERTIES
2400 # SCM PROPERTIES
2402 #==========================================================================
2401 #==========================================================================
2403
2402
2404 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2403 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2405 return get_commit_safe(
2404 return get_commit_safe(
2406 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2405 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2407 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2406 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2408
2407
2409 def get_changeset(self, rev=None, pre_load=None):
2408 def get_changeset(self, rev=None, pre_load=None):
2410 warnings.warn("Use get_commit", DeprecationWarning)
2409 warnings.warn("Use get_commit", DeprecationWarning)
2411 commit_id = None
2410 commit_id = None
2412 commit_idx = None
2411 commit_idx = None
2413 if isinstance(rev, str):
2412 if isinstance(rev, str):
2414 commit_id = rev
2413 commit_id = rev
2415 else:
2414 else:
2416 commit_idx = rev
2415 commit_idx = rev
2417 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2416 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2418 pre_load=pre_load)
2417 pre_load=pre_load)
2419
2418
2420 def get_landing_commit(self):
2419 def get_landing_commit(self):
2421 """
2420 """
2422 Returns landing commit, or if that doesn't exist returns the tip
2421 Returns landing commit, or if that doesn't exist returns the tip
2423 """
2422 """
2424 _rev_type, _rev = self.landing_rev
2423 _rev_type, _rev = self.landing_rev
2425 commit = self.get_commit(_rev)
2424 commit = self.get_commit(_rev)
2426 if isinstance(commit, EmptyCommit):
2425 if isinstance(commit, EmptyCommit):
2427 return self.get_commit()
2426 return self.get_commit()
2428 return commit
2427 return commit
2429
2428
2430 def flush_commit_cache(self):
2429 def flush_commit_cache(self):
2431 self.update_commit_cache(cs_cache={'raw_id':'0'})
2430 self.update_commit_cache(cs_cache={'raw_id':'0'})
2432 self.update_commit_cache()
2431 self.update_commit_cache()
2433
2432
2434 def update_commit_cache(self, cs_cache=None, config=None):
2433 def update_commit_cache(self, cs_cache=None, config=None):
2435 """
2434 """
2436 Update cache of last commit for repository
2435 Update cache of last commit for repository
2437 cache_keys should be::
2436 cache_keys should be::
2438
2437
2439 source_repo_id
2438 source_repo_id
2440 short_id
2439 short_id
2441 raw_id
2440 raw_id
2442 revision
2441 revision
2443 parents
2442 parents
2444 message
2443 message
2445 date
2444 date
2446 author
2445 author
2447 updated_on
2446 updated_on
2448
2447
2449 """
2448 """
2450 from rhodecode.lib.vcs.backends.base import BaseCommit
2449 from rhodecode.lib.vcs.backends.base import BaseCommit
2451 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2450 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2452 empty_date = datetime.datetime.fromtimestamp(0)
2451 empty_date = datetime.datetime.fromtimestamp(0)
2453 repo_commit_count = 0
2452 repo_commit_count = 0
2454
2453
2455 if cs_cache is None:
2454 if cs_cache is None:
2456 # use no-cache version here
2455 # use no-cache version here
2457 try:
2456 try:
2458 scm_repo = self.scm_instance(cache=False, config=config)
2457 scm_repo = self.scm_instance(cache=False, config=config)
2459 except VCSError:
2458 except VCSError:
2460 scm_repo = None
2459 scm_repo = None
2461 empty = scm_repo is None or scm_repo.is_empty()
2460 empty = scm_repo is None or scm_repo.is_empty()
2462
2461
2463 if not empty:
2462 if not empty:
2464 cs_cache = scm_repo.get_commit(
2463 cs_cache = scm_repo.get_commit(
2465 pre_load=["author", "date", "message", "parents", "branch"])
2464 pre_load=["author", "date", "message", "parents", "branch"])
2466 repo_commit_count = scm_repo.count()
2465 repo_commit_count = scm_repo.count()
2467 else:
2466 else:
2468 cs_cache = EmptyCommit()
2467 cs_cache = EmptyCommit()
2469
2468
2470 if isinstance(cs_cache, BaseCommit):
2469 if isinstance(cs_cache, BaseCommit):
2471 cs_cache = cs_cache.__json__()
2470 cs_cache = cs_cache.__json__()
2472
2471
2473 def is_outdated(new_cs_cache):
2472 def is_outdated(new_cs_cache):
2474 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2473 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2475 new_cs_cache['revision'] != self.changeset_cache['revision']):
2474 new_cs_cache['revision'] != self.changeset_cache['revision']):
2476 return True
2475 return True
2477 return False
2476 return False
2478
2477
2479 # check if we have maybe already latest cached revision
2478 # check if we have maybe already latest cached revision
2480 if is_outdated(cs_cache) or not self.changeset_cache:
2479 if is_outdated(cs_cache) or not self.changeset_cache:
2481 _current_datetime = datetime.datetime.utcnow()
2480 _current_datetime = datetime.datetime.utcnow()
2482 last_change = cs_cache.get('date') or _current_datetime
2481 last_change = cs_cache.get('date') or _current_datetime
2483 # we check if last update is newer than the new value
2482 # we check if last update is newer than the new value
2484 # if yes, we use the current timestamp instead. Imagine you get
2483 # if yes, we use the current timestamp instead. Imagine you get
2485 # old commit pushed 1y ago, we'd set last update 1y to ago.
2484 # old commit pushed 1y ago, we'd set last update 1y to ago.
2486 last_change_timestamp = datetime_to_time(last_change)
2485 last_change_timestamp = datetime_to_time(last_change)
2487 current_timestamp = datetime_to_time(last_change)
2486 current_timestamp = datetime_to_time(last_change)
2488 if last_change_timestamp > current_timestamp and not empty:
2487 if last_change_timestamp > current_timestamp and not empty:
2489 cs_cache['date'] = _current_datetime
2488 cs_cache['date'] = _current_datetime
2490
2489
2491 # also store size of repo
2490 # also store size of repo
2492 cs_cache['repo_commit_count'] = repo_commit_count
2491 cs_cache['repo_commit_count'] = repo_commit_count
2493
2492
2494 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2493 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2495 cs_cache['updated_on'] = time.time()
2494 cs_cache['updated_on'] = time.time()
2496 self.changeset_cache = cs_cache
2495 self.changeset_cache = cs_cache
2497 self.updated_on = last_change
2496 self.updated_on = last_change
2498 Session().add(self)
2497 Session().add(self)
2499 Session().commit()
2498 Session().commit()
2500
2499
2501 else:
2500 else:
2502 if empty:
2501 if empty:
2503 cs_cache = EmptyCommit().__json__()
2502 cs_cache = EmptyCommit().__json__()
2504 else:
2503 else:
2505 cs_cache = self.changeset_cache
2504 cs_cache = self.changeset_cache
2506
2505
2507 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2506 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2508
2507
2509 cs_cache['updated_on'] = time.time()
2508 cs_cache['updated_on'] = time.time()
2510 self.changeset_cache = cs_cache
2509 self.changeset_cache = cs_cache
2511 self.updated_on = _date_latest
2510 self.updated_on = _date_latest
2512 Session().add(self)
2511 Session().add(self)
2513 Session().commit()
2512 Session().commit()
2514
2513
2515 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2514 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2516 self.repo_name, cs_cache, _date_latest)
2515 self.repo_name, cs_cache, _date_latest)
2517
2516
2518 @property
2517 @property
2519 def tip(self):
2518 def tip(self):
2520 return self.get_commit('tip')
2519 return self.get_commit('tip')
2521
2520
2522 @property
2521 @property
2523 def author(self):
2522 def author(self):
2524 return self.tip.author
2523 return self.tip.author
2525
2524
2526 @property
2525 @property
2527 def last_change(self):
2526 def last_change(self):
2528 return self.scm_instance().last_change
2527 return self.scm_instance().last_change
2529
2528
2530 def get_comments(self, revisions=None):
2529 def get_comments(self, revisions=None):
2531 """
2530 """
2532 Returns comments for this repository grouped by revisions
2531 Returns comments for this repository grouped by revisions
2533
2532
2534 :param revisions: filter query by revisions only
2533 :param revisions: filter query by revisions only
2535 """
2534 """
2536 cmts = ChangesetComment.query()\
2535 cmts = ChangesetComment.query()\
2537 .filter(ChangesetComment.repo == self)
2536 .filter(ChangesetComment.repo == self)
2538 if revisions:
2537 if revisions:
2539 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2538 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2540 grouped = collections.defaultdict(list)
2539 grouped = collections.defaultdict(list)
2541 for cmt in cmts.all():
2540 for cmt in cmts.all():
2542 grouped[cmt.revision].append(cmt)
2541 grouped[cmt.revision].append(cmt)
2543 return grouped
2542 return grouped
2544
2543
2545 def statuses(self, revisions=None):
2544 def statuses(self, revisions=None):
2546 """
2545 """
2547 Returns statuses for this repository
2546 Returns statuses for this repository
2548
2547
2549 :param revisions: list of revisions to get statuses for
2548 :param revisions: list of revisions to get statuses for
2550 """
2549 """
2551 statuses = ChangesetStatus.query()\
2550 statuses = ChangesetStatus.query()\
2552 .filter(ChangesetStatus.repo == self)\
2551 .filter(ChangesetStatus.repo == self)\
2553 .filter(ChangesetStatus.version == 0)
2552 .filter(ChangesetStatus.version == 0)
2554
2553
2555 if revisions:
2554 if revisions:
2556 # Try doing the filtering in chunks to avoid hitting limits
2555 # Try doing the filtering in chunks to avoid hitting limits
2557 size = 500
2556 size = 500
2558 status_results = []
2557 status_results = []
2559 for chunk in range(0, len(revisions), size):
2558 for chunk in range(0, len(revisions), size):
2560 status_results += statuses.filter(
2559 status_results += statuses.filter(
2561 ChangesetStatus.revision.in_(
2560 ChangesetStatus.revision.in_(
2562 revisions[chunk: chunk+size])
2561 revisions[chunk: chunk+size])
2563 ).all()
2562 ).all()
2564 else:
2563 else:
2565 status_results = statuses.all()
2564 status_results = statuses.all()
2566
2565
2567 grouped = {}
2566 grouped = {}
2568
2567
2569 # maybe we have open new pullrequest without a status?
2568 # maybe we have open new pullrequest without a status?
2570 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2569 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2571 status_lbl = ChangesetStatus.get_status_lbl(stat)
2570 status_lbl = ChangesetStatus.get_status_lbl(stat)
2572 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2571 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2573 for rev in pr.revisions:
2572 for rev in pr.revisions:
2574 pr_id = pr.pull_request_id
2573 pr_id = pr.pull_request_id
2575 pr_repo = pr.target_repo.repo_name
2574 pr_repo = pr.target_repo.repo_name
2576 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2575 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2577
2576
2578 for stat in status_results:
2577 for stat in status_results:
2579 pr_id = pr_repo = None
2578 pr_id = pr_repo = None
2580 if stat.pull_request:
2579 if stat.pull_request:
2581 pr_id = stat.pull_request.pull_request_id
2580 pr_id = stat.pull_request.pull_request_id
2582 pr_repo = stat.pull_request.target_repo.repo_name
2581 pr_repo = stat.pull_request.target_repo.repo_name
2583 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2582 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2584 pr_id, pr_repo]
2583 pr_id, pr_repo]
2585 return grouped
2584 return grouped
2586
2585
2587 # ==========================================================================
2586 # ==========================================================================
2588 # SCM CACHE INSTANCE
2587 # SCM CACHE INSTANCE
2589 # ==========================================================================
2588 # ==========================================================================
2590
2589
2591 def scm_instance(self, **kwargs):
2590 def scm_instance(self, **kwargs):
2592 import rhodecode
2591 import rhodecode
2593
2592
2594 # Passing a config will not hit the cache currently only used
2593 # Passing a config will not hit the cache currently only used
2595 # for repo2dbmapper
2594 # for repo2dbmapper
2596 config = kwargs.pop('config', None)
2595 config = kwargs.pop('config', None)
2597 cache = kwargs.pop('cache', None)
2596 cache = kwargs.pop('cache', None)
2598 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2597 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2599 if vcs_full_cache is not None:
2598 if vcs_full_cache is not None:
2600 # allows override global config
2599 # allows override global config
2601 full_cache = vcs_full_cache
2600 full_cache = vcs_full_cache
2602 else:
2601 else:
2603 full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
2602 full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
2604 # if cache is NOT defined use default global, else we have a full
2603 # if cache is NOT defined use default global, else we have a full
2605 # control over cache behaviour
2604 # control over cache behaviour
2606 if cache is None and full_cache and not config:
2605 if cache is None and full_cache and not config:
2607 log.debug('Initializing pure cached instance for %s', self.repo_path)
2606 log.debug('Initializing pure cached instance for %s', self.repo_path)
2608 return self._get_instance_cached()
2607 return self._get_instance_cached()
2609
2608
2610 # cache here is sent to the "vcs server"
2609 # cache here is sent to the "vcs server"
2611 return self._get_instance(cache=bool(cache), config=config)
2610 return self._get_instance(cache=bool(cache), config=config)
2612
2611
    def _get_instance_cached(self):
        # Return a long-term cached vcs instance; it is re-created only when
        # the per-repo invalidation namespace signals a change.
        from rhodecode.lib import rc_cache

        cache_namespace_uid = f'repo_instance.{self.repo_id}'
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        # NOTE: the decorated function's name and argument values form the
        # dogpile cache key -- do not rename or reorder them.
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs it's own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
2643
2642
2644 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2643 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2645 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2644 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2646 self.repo_type, self.repo_path, cache)
2645 self.repo_type, self.repo_path, cache)
2647 config = config or self._config
2646 config = config or self._config
2648 custom_wire = {
2647 custom_wire = {
2649 'cache': cache, # controls the vcs.remote cache
2648 'cache': cache, # controls the vcs.remote cache
2650 'repo_state_uid': repo_state_uid
2649 'repo_state_uid': repo_state_uid
2651 }
2650 }
2652 repo = get_vcs_instance(
2651 repo = get_vcs_instance(
2653 repo_path=safe_str(self.repo_full_path),
2652 repo_path=safe_str(self.repo_full_path),
2654 config=config,
2653 config=config,
2655 with_wire=custom_wire,
2654 with_wire=custom_wire,
2656 create=False,
2655 create=False,
2657 _vcs_alias=self.repo_type)
2656 _vcs_alias=self.repo_type)
2658 if repo is not None:
2657 if repo is not None:
2659 repo.count() # cache rebuild
2658 repo.count() # cache rebuild
2660 return repo
2659 return repo
2661
2660
2662 def get_shadow_repository_path(self, workspace_id):
2661 def get_shadow_repository_path(self, workspace_id):
2663 from rhodecode.lib.vcs.backends.base import BaseRepository
2662 from rhodecode.lib.vcs.backends.base import BaseRepository
2664 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2663 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2665 self.repo_full_path, self.repo_id, workspace_id)
2664 self.repo_full_path, self.repo_id, workspace_id)
2666 return shadow_repo_path
2665 return shadow_repo_path
2667
2666
2668 def __json__(self):
2667 def __json__(self):
2669 return {'landing_rev': self.landing_rev}
2668 return {'landing_rev': self.landing_rev}
2670
2669
2671 def get_dict(self):
2670 def get_dict(self):
2672
2671
2673 # Since we transformed `repo_name` to a hybrid property, we need to
2672 # Since we transformed `repo_name` to a hybrid property, we need to
2674 # keep compatibility with the code which uses `repo_name` field.
2673 # keep compatibility with the code which uses `repo_name` field.
2675
2674
2676 result = super(Repository, self).get_dict()
2675 result = super(Repository, self).get_dict()
2677 result['repo_name'] = result.pop('_repo_name', None)
2676 result['repo_name'] = result.pop('_repo_name', None)
2678 result.pop('_changeset_cache', '')
2677 result.pop('_changeset_cache', '')
2679 return result
2678 return result
2680
2679
2681
2680
class RepoGroup(Base, BaseModel):
    """Nested grouping of repositories (a 'folder' of repos)."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {
        #TODO: this is now depracated ?!
        # 'order_by': 'group_name'
    }

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # raw column behind the `group_name` hybrid property
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    # self-referential FK; None means a top-level group
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    # raw column behind the `changeset_cache` hybrid property
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User', back_populates='repository_groups')
    integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)
2715
2714
2716 def __init__(self, group_name='', parent_group=None):
2715 def __init__(self, group_name='', parent_group=None):
2717 self.group_name = group_name
2716 self.group_name = group_name
2718 self.parent_group = parent_group
2717 self.parent_group = parent_group
2719
2718
2720 def __repr__(self):
2719 def __repr__(self):
2721 return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"
2720 return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"
2722
2721
2723 @hybrid_property
2722 @hybrid_property
2724 def group_name(self):
2723 def group_name(self):
2725 return self._group_name
2724 return self._group_name
2726
2725
2727 @group_name.setter
2726 @group_name.setter
2728 def group_name(self, value):
2727 def group_name(self, value):
2729 self._group_name = value
2728 self._group_name = value
2730 self.group_name_hash = self.hash_repo_group_name(value)
2729 self.group_name_hash = self.hash_repo_group_name(value)
2731
2730
2732 @classmethod
2731 @classmethod
2733 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2732 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2734 from rhodecode.lib.vcs.backends.base import EmptyCommit
2733 from rhodecode.lib.vcs.backends.base import EmptyCommit
2735 dummy = EmptyCommit().__json__()
2734 dummy = EmptyCommit().__json__()
2736 if not changeset_cache_raw:
2735 if not changeset_cache_raw:
2737 dummy['source_repo_id'] = repo_id
2736 dummy['source_repo_id'] = repo_id
2738 return json.loads(json.dumps(dummy))
2737 return json.loads(json.dumps(dummy))
2739
2738
2740 try:
2739 try:
2741 return json.loads(changeset_cache_raw)
2740 return json.loads(changeset_cache_raw)
2742 except TypeError:
2741 except TypeError:
2743 return dummy
2742 return dummy
2744 except Exception:
2743 except Exception:
2745 log.error(traceback.format_exc())
2744 log.error(traceback.format_exc())
2746 return dummy
2745 return dummy
2747
2746
2748 @hybrid_property
2747 @hybrid_property
2749 def changeset_cache(self):
2748 def changeset_cache(self):
2750 return self._load_changeset_cache('', self._changeset_cache)
2749 return self._load_changeset_cache('', self._changeset_cache)
2751
2750
2752 @changeset_cache.setter
2751 @changeset_cache.setter
2753 def changeset_cache(self, val):
2752 def changeset_cache(self, val):
2754 try:
2753 try:
2755 self._changeset_cache = json.dumps(val)
2754 self._changeset_cache = json.dumps(val)
2756 except Exception:
2755 except Exception:
2757 log.error(traceback.format_exc())
2756 log.error(traceback.format_exc())
2758
2757
2759 @validates('group_parent_id')
2758 @validates('group_parent_id')
2760 def validate_group_parent_id(self, key, val):
2759 def validate_group_parent_id(self, key, val):
2761 """
2760 """
2762 Check cycle references for a parent group to self
2761 Check cycle references for a parent group to self
2763 """
2762 """
2764 if self.group_id and val:
2763 if self.group_id and val:
2765 assert val != self.group_id
2764 assert val != self.group_id
2766
2765
2767 return val
2766 return val
2768
2767
2769 @hybrid_property
2768 @hybrid_property
2770 def description_safe(self):
2769 def description_safe(self):
2771 from rhodecode.lib import helpers as h
2770 from rhodecode.lib import helpers as h
2772 return h.escape(self.group_description)
2771 return h.escape(self.group_description)
2773
2772
2774 @classmethod
2773 @classmethod
2775 def hash_repo_group_name(cls, repo_group_name):
2774 def hash_repo_group_name(cls, repo_group_name):
2776 val = remove_formatting(repo_group_name)
2775 val = remove_formatting(repo_group_name)
2777 val = safe_str(val).lower()
2776 val = safe_str(val).lower()
2778 chars = []
2777 chars = []
2779 for c in val:
2778 for c in val:
2780 if c not in string.ascii_letters:
2779 if c not in string.ascii_letters:
2781 c = str(ord(c))
2780 c = str(ord(c))
2782 chars.append(c)
2781 chars.append(c)
2783
2782
2784 return ''.join(chars)
2783 return ''.join(chars)
2785
2784
2786 @classmethod
2785 @classmethod
2787 def _generate_choice(cls, repo_group):
2786 def _generate_choice(cls, repo_group):
2788 from webhelpers2.html import literal as _literal
2787 from webhelpers2.html import literal as _literal
2789
2788
2790 def _name(k):
2789 def _name(k):
2791 return _literal(cls.CHOICES_SEPARATOR.join(k))
2790 return _literal(cls.CHOICES_SEPARATOR.join(k))
2792
2791
2793 return repo_group.group_id, _name(repo_group.full_path_splitted)
2792 return repo_group.group_id, _name(repo_group.full_path_splitted)
2794
2793
2795 @classmethod
2794 @classmethod
2796 def groups_choices(cls, groups=None, show_empty_group=True):
2795 def groups_choices(cls, groups=None, show_empty_group=True):
2797 if not groups:
2796 if not groups:
2798 groups = cls.query().all()
2797 groups = cls.query().all()
2799
2798
2800 repo_groups = []
2799 repo_groups = []
2801 if show_empty_group:
2800 if show_empty_group:
2802 repo_groups = [(-1, '-- %s --' % _('No parent'))]
2801 repo_groups = [(-1, '-- %s --' % _('No parent'))]
2803
2802
2804 repo_groups.extend([cls._generate_choice(x) for x in groups])
2803 repo_groups.extend([cls._generate_choice(x) for x in groups])
2805
2804
2806 repo_groups = sorted(
2805 repo_groups = sorted(
2807 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2806 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2808 return repo_groups
2807 return repo_groups
2809
2808
2810 @classmethod
2809 @classmethod
2811 def url_sep(cls):
2810 def url_sep(cls):
2812 return URL_SEP
2811 return URL_SEP
2813
2812
2814 @classmethod
2813 @classmethod
2815 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2814 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2816 if case_insensitive:
2815 if case_insensitive:
2817 gr = cls.query().filter(func.lower(cls.group_name)
2816 gr = cls.query().filter(func.lower(cls.group_name)
2818 == func.lower(group_name))
2817 == func.lower(group_name))
2819 else:
2818 else:
2820 gr = cls.query().filter(cls.group_name == group_name)
2819 gr = cls.query().filter(cls.group_name == group_name)
2821 if cache:
2820 if cache:
2822 name_key = _hash_key(group_name)
2821 name_key = _hash_key(group_name)
2823 gr = gr.options(
2822 gr = gr.options(
2824 FromCache("sql_cache_short", f"get_group_{name_key}"))
2823 FromCache("sql_cache_short", f"get_group_{name_key}"))
2825 return gr.scalar()
2824 return gr.scalar()
2826
2825
2827 @classmethod
2826 @classmethod
2828 def get_user_personal_repo_group(cls, user_id):
2827 def get_user_personal_repo_group(cls, user_id):
2829 user = User.get(user_id)
2828 user = User.get(user_id)
2830 if user.username == User.DEFAULT_USER:
2829 if user.username == User.DEFAULT_USER:
2831 return None
2830 return None
2832
2831
2833 return cls.query()\
2832 return cls.query()\
2834 .filter(cls.personal == true()) \
2833 .filter(cls.personal == true()) \
2835 .filter(cls.user == user) \
2834 .filter(cls.user == user) \
2836 .order_by(cls.group_id.asc()) \
2835 .order_by(cls.group_id.asc()) \
2837 .first()
2836 .first()
2838
2837
2839 @classmethod
2838 @classmethod
2840 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2839 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2841 case_insensitive=True):
2840 case_insensitive=True):
2842 q = RepoGroup.query()
2841 q = RepoGroup.query()
2843
2842
2844 if not isinstance(user_id, Optional):
2843 if not isinstance(user_id, Optional):
2845 q = q.filter(RepoGroup.user_id == user_id)
2844 q = q.filter(RepoGroup.user_id == user_id)
2846
2845
2847 if not isinstance(group_id, Optional):
2846 if not isinstance(group_id, Optional):
2848 q = q.filter(RepoGroup.group_parent_id == group_id)
2847 q = q.filter(RepoGroup.group_parent_id == group_id)
2849
2848
2850 if case_insensitive:
2849 if case_insensitive:
2851 q = q.order_by(func.lower(RepoGroup.group_name))
2850 q = q.order_by(func.lower(RepoGroup.group_name))
2852 else:
2851 else:
2853 q = q.order_by(RepoGroup.group_name)
2852 q = q.order_by(RepoGroup.group_name)
2854 return q.all()
2853 return q.all()
2855
2854
2856 @property
2855 @property
2857 def parents(self, parents_recursion_limit=10):
2856 def parents(self, parents_recursion_limit=10):
2858 groups = []
2857 groups = []
2859 if self.parent_group is None:
2858 if self.parent_group is None:
2860 return groups
2859 return groups
2861 cur_gr = self.parent_group
2860 cur_gr = self.parent_group
2862 groups.insert(0, cur_gr)
2861 groups.insert(0, cur_gr)
2863 cnt = 0
2862 cnt = 0
2864 while 1:
2863 while 1:
2865 cnt += 1
2864 cnt += 1
2866 gr = getattr(cur_gr, 'parent_group', None)
2865 gr = getattr(cur_gr, 'parent_group', None)
2867 cur_gr = cur_gr.parent_group
2866 cur_gr = cur_gr.parent_group
2868 if gr is None:
2867 if gr is None:
2869 break
2868 break
2870 if cnt == parents_recursion_limit:
2869 if cnt == parents_recursion_limit:
2871 # this will prevent accidental infinit loops
2870 # this will prevent accidental infinit loops
2872 log.error('more than %s parents found for group %s, stopping '
2871 log.error('more than %s parents found for group %s, stopping '
2873 'recursive parent fetching', parents_recursion_limit, self)
2872 'recursive parent fetching', parents_recursion_limit, self)
2874 break
2873 break
2875
2874
2876 groups.insert(0, gr)
2875 groups.insert(0, gr)
2877 return groups
2876 return groups
2878
2877
2879 @property
2878 @property
2880 def last_commit_cache_update_diff(self):
2879 def last_commit_cache_update_diff(self):
2881 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2880 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2882
2881
2883 @classmethod
2882 @classmethod
2884 def _load_commit_change(cls, last_commit_cache):
2883 def _load_commit_change(cls, last_commit_cache):
2885 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2884 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2886 empty_date = datetime.datetime.fromtimestamp(0)
2885 empty_date = datetime.datetime.fromtimestamp(0)
2887 date_latest = last_commit_cache.get('date', empty_date)
2886 date_latest = last_commit_cache.get('date', empty_date)
2888 try:
2887 try:
2889 return parse_datetime(date_latest)
2888 return parse_datetime(date_latest)
2890 except Exception:
2889 except Exception:
2891 return empty_date
2890 return empty_date
2892
2891
2893 @property
2892 @property
2894 def last_commit_change(self):
2893 def last_commit_change(self):
2895 return self._load_commit_change(self.changeset_cache)
2894 return self._load_commit_change(self.changeset_cache)
2896
2895
2897 @property
2896 @property
2898 def last_db_change(self):
2897 def last_db_change(self):
2899 return self.updated_on
2898 return self.updated_on
2900
2899
2901 @property
2900 @property
2902 def children(self):
2901 def children(self):
2903 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2902 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2904
2903
2905 @property
2904 @property
2906 def name(self):
2905 def name(self):
2907 return self.group_name.split(RepoGroup.url_sep())[-1]
2906 return self.group_name.split(RepoGroup.url_sep())[-1]
2908
2907
2909 @property
2908 @property
2910 def full_path(self):
2909 def full_path(self):
2911 return self.group_name
2910 return self.group_name
2912
2911
2913 @property
2912 @property
2914 def full_path_splitted(self):
2913 def full_path_splitted(self):
2915 return self.group_name.split(RepoGroup.url_sep())
2914 return self.group_name.split(RepoGroup.url_sep())
2916
2915
2917 @property
2916 @property
2918 def repositories(self):
2917 def repositories(self):
2919 return Repository.query()\
2918 return Repository.query()\
2920 .filter(Repository.group == self)\
2919 .filter(Repository.group == self)\
2921 .order_by(Repository.repo_name)
2920 .order_by(Repository.repo_name)
2922
2921
2923 @property
2922 @property
2924 def repositories_recursive_count(self):
2923 def repositories_recursive_count(self):
2925 cnt = self.repositories.count()
2924 cnt = self.repositories.count()
2926
2925
2927 def children_count(group):
2926 def children_count(group):
2928 cnt = 0
2927 cnt = 0
2929 for child in group.children:
2928 for child in group.children:
2930 cnt += child.repositories.count()
2929 cnt += child.repositories.count()
2931 cnt += children_count(child)
2930 cnt += children_count(child)
2932 return cnt
2931 return cnt
2933
2932
2934 return cnt + children_count(self)
2933 return cnt + children_count(self)
2935
2934
2936 def _recursive_objects(self, include_repos=True, include_groups=True):
2935 def _recursive_objects(self, include_repos=True, include_groups=True):
2937 all_ = []
2936 all_ = []
2938
2937
2939 def _get_members(root_gr):
2938 def _get_members(root_gr):
2940 if include_repos:
2939 if include_repos:
2941 for r in root_gr.repositories:
2940 for r in root_gr.repositories:
2942 all_.append(r)
2941 all_.append(r)
2943 childs = root_gr.children.all()
2942 childs = root_gr.children.all()
2944 if childs:
2943 if childs:
2945 for gr in childs:
2944 for gr in childs:
2946 if include_groups:
2945 if include_groups:
2947 all_.append(gr)
2946 all_.append(gr)
2948 _get_members(gr)
2947 _get_members(gr)
2949
2948
2950 root_group = []
2949 root_group = []
2951 if include_groups:
2950 if include_groups:
2952 root_group = [self]
2951 root_group = [self]
2953
2952
2954 _get_members(self)
2953 _get_members(self)
2955 return root_group + all_
2954 return root_group + all_
2956
2955
2957 def recursive_groups_and_repos(self):
2956 def recursive_groups_and_repos(self):
2958 """
2957 """
2959 Recursive return all groups, with repositories in those groups
2958 Recursive return all groups, with repositories in those groups
2960 """
2959 """
2961 return self._recursive_objects()
2960 return self._recursive_objects()
2962
2961
2963 def recursive_groups(self):
2962 def recursive_groups(self):
2964 """
2963 """
2965 Returns all children groups for this group including children of children
2964 Returns all children groups for this group including children of children
2966 """
2965 """
2967 return self._recursive_objects(include_repos=False)
2966 return self._recursive_objects(include_repos=False)
2968
2967
2969 def recursive_repos(self):
2968 def recursive_repos(self):
2970 """
2969 """
2971 Returns all children repositories for this group
2970 Returns all children repositories for this group
2972 """
2971 """
2973 return self._recursive_objects(include_groups=False)
2972 return self._recursive_objects(include_groups=False)
2974
2973
2975 def get_new_name(self, group_name):
2974 def get_new_name(self, group_name):
2976 """
2975 """
2977 returns new full group name based on parent and new name
2976 returns new full group name based on parent and new name
2978
2977
2979 :param group_name:
2978 :param group_name:
2980 """
2979 """
2981 path_prefix = (self.parent_group.full_path_splitted if
2980 path_prefix = (self.parent_group.full_path_splitted if
2982 self.parent_group else [])
2981 self.parent_group else [])
2983 return RepoGroup.url_sep().join(path_prefix + [group_name])
2982 return RepoGroup.url_sep().join(path_prefix + [group_name])
2984
2983
2985 def update_commit_cache(self, config=None):
2984 def update_commit_cache(self, config=None):
2986 """
2985 """
2987 Update cache of last commit for newest repository inside this repository group.
2986 Update cache of last commit for newest repository inside this repository group.
2988 cache_keys should be::
2987 cache_keys should be::
2989
2988
2990 source_repo_id
2989 source_repo_id
2991 short_id
2990 short_id
2992 raw_id
2991 raw_id
2993 revision
2992 revision
2994 parents
2993 parents
2995 message
2994 message
2996 date
2995 date
2997 author
2996 author
2998
2997
2999 """
2998 """
3000 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2999 from rhodecode.lib.vcs.utils.helpers import parse_datetime
3001 empty_date = datetime.datetime.fromtimestamp(0)
3000 empty_date = datetime.datetime.fromtimestamp(0)
3002
3001
3003 def repo_groups_and_repos(root_gr):
3002 def repo_groups_and_repos(root_gr):
3004 for _repo in root_gr.repositories:
3003 for _repo in root_gr.repositories:
3005 yield _repo
3004 yield _repo
3006 for child_group in root_gr.children.all():
3005 for child_group in root_gr.children.all():
3007 yield child_group
3006 yield child_group
3008
3007
3009 latest_repo_cs_cache = {}
3008 latest_repo_cs_cache = {}
3010 for obj in repo_groups_and_repos(self):
3009 for obj in repo_groups_and_repos(self):
3011 repo_cs_cache = obj.changeset_cache
3010 repo_cs_cache = obj.changeset_cache
3012 date_latest = latest_repo_cs_cache.get('date', empty_date)
3011 date_latest = latest_repo_cs_cache.get('date', empty_date)
3013 date_current = repo_cs_cache.get('date', empty_date)
3012 date_current = repo_cs_cache.get('date', empty_date)
3014 current_timestamp = datetime_to_time(parse_datetime(date_latest))
3013 current_timestamp = datetime_to_time(parse_datetime(date_latest))
3015 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
3014 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
3016 latest_repo_cs_cache = repo_cs_cache
3015 latest_repo_cs_cache = repo_cs_cache
3017 if hasattr(obj, 'repo_id'):
3016 if hasattr(obj, 'repo_id'):
3018 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
3017 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
3019 else:
3018 else:
3020 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
3019 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
3021
3020
3022 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
3021 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
3023
3022
3024 latest_repo_cs_cache['updated_on'] = time.time()
3023 latest_repo_cs_cache['updated_on'] = time.time()
3025 self.changeset_cache = latest_repo_cs_cache
3024 self.changeset_cache = latest_repo_cs_cache
3026 self.updated_on = _date_latest
3025 self.updated_on = _date_latest
3027 Session().add(self)
3026 Session().add(self)
3028 Session().commit()
3027 Session().commit()
3029
3028
3030 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
3029 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
3031 self.group_name, latest_repo_cs_cache, _date_latest)
3030 self.group_name, latest_repo_cs_cache, _date_latest)
3032
3031
3033 def permissions(self, with_admins=True, with_owner=True,
3032 def permissions(self, with_admins=True, with_owner=True,
3034 expand_from_user_groups=False):
3033 expand_from_user_groups=False):
3035 """
3034 """
3036 Permissions for repository groups
3035 Permissions for repository groups
3037 """
3036 """
3038 _admin_perm = 'group.admin'
3037 _admin_perm = 'group.admin'
3039
3038
3040 owner_row = []
3039 owner_row = []
3041 if with_owner:
3040 if with_owner:
3042 usr = AttributeDict(self.user.get_dict())
3041 usr = AttributeDict(self.user.get_dict())
3043 usr.owner_row = True
3042 usr.owner_row = True
3044 usr.permission = _admin_perm
3043 usr.permission = _admin_perm
3045 owner_row.append(usr)
3044 owner_row.append(usr)
3046
3045
3047 super_admin_ids = []
3046 super_admin_ids = []
3048 super_admin_rows = []
3047 super_admin_rows = []
3049 if with_admins:
3048 if with_admins:
3050 for usr in User.get_all_super_admins():
3049 for usr in User.get_all_super_admins():
3051 super_admin_ids.append(usr.user_id)
3050 super_admin_ids.append(usr.user_id)
3052 # if this admin is also owner, don't double the record
3051 # if this admin is also owner, don't double the record
3053 if usr.user_id == owner_row[0].user_id:
3052 if usr.user_id == owner_row[0].user_id:
3054 owner_row[0].admin_row = True
3053 owner_row[0].admin_row = True
3055 else:
3054 else:
3056 usr = AttributeDict(usr.get_dict())
3055 usr = AttributeDict(usr.get_dict())
3057 usr.admin_row = True
3056 usr.admin_row = True
3058 usr.permission = _admin_perm
3057 usr.permission = _admin_perm
3059 super_admin_rows.append(usr)
3058 super_admin_rows.append(usr)
3060
3059
3061 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3060 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3062 q = q.options(joinedload(UserRepoGroupToPerm.group),
3061 q = q.options(joinedload(UserRepoGroupToPerm.group),
3063 joinedload(UserRepoGroupToPerm.user),
3062 joinedload(UserRepoGroupToPerm.user),
3064 joinedload(UserRepoGroupToPerm.permission),)
3063 joinedload(UserRepoGroupToPerm.permission),)
3065
3064
3066 # get owners and admins and permissions. We do a trick of re-writing
3065 # get owners and admins and permissions. We do a trick of re-writing
3067 # objects from sqlalchemy to named-tuples due to sqlalchemy session
3066 # objects from sqlalchemy to named-tuples due to sqlalchemy session
3068 # has a global reference and changing one object propagates to all
3067 # has a global reference and changing one object propagates to all
3069 # others. This means if admin is also an owner admin_row that change
3068 # others. This means if admin is also an owner admin_row that change
3070 # would propagate to both objects
3069 # would propagate to both objects
3071 perm_rows = []
3070 perm_rows = []
3072 for _usr in q.all():
3071 for _usr in q.all():
3073 usr = AttributeDict(_usr.user.get_dict())
3072 usr = AttributeDict(_usr.user.get_dict())
3074 # if this user is also owner/admin, mark as duplicate record
3073 # if this user is also owner/admin, mark as duplicate record
3075 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3074 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3076 usr.duplicate_perm = True
3075 usr.duplicate_perm = True
3077 usr.permission = _usr.permission.permission_name
3076 usr.permission = _usr.permission.permission_name
3078 perm_rows.append(usr)
3077 perm_rows.append(usr)
3079
3078
3080 # filter the perm rows by 'default' first and then sort them by
3079 # filter the perm rows by 'default' first and then sort them by
3081 # admin,write,read,none permissions sorted again alphabetically in
3080 # admin,write,read,none permissions sorted again alphabetically in
3082 # each group
3081 # each group
3083 perm_rows = sorted(perm_rows, key=display_user_sort)
3082 perm_rows = sorted(perm_rows, key=display_user_sort)
3084
3083
3085 user_groups_rows = []
3084 user_groups_rows = []
3086 if expand_from_user_groups:
3085 if expand_from_user_groups:
3087 for ug in self.permission_user_groups(with_members=True):
3086 for ug in self.permission_user_groups(with_members=True):
3088 for user_data in ug.members:
3087 for user_data in ug.members:
3089 user_groups_rows.append(user_data)
3088 user_groups_rows.append(user_data)
3090
3089
3091 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3090 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3092
3091
3093 def permission_user_groups(self, with_members=False):
3092 def permission_user_groups(self, with_members=False):
3094 q = UserGroupRepoGroupToPerm.query()\
3093 q = UserGroupRepoGroupToPerm.query()\
3095 .filter(UserGroupRepoGroupToPerm.group == self)
3094 .filter(UserGroupRepoGroupToPerm.group == self)
3096 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3095 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3097 joinedload(UserGroupRepoGroupToPerm.users_group),
3096 joinedload(UserGroupRepoGroupToPerm.users_group),
3098 joinedload(UserGroupRepoGroupToPerm.permission),)
3097 joinedload(UserGroupRepoGroupToPerm.permission),)
3099
3098
3100 perm_rows = []
3099 perm_rows = []
3101 for _user_group in q.all():
3100 for _user_group in q.all():
3102 entry = AttributeDict(_user_group.users_group.get_dict())
3101 entry = AttributeDict(_user_group.users_group.get_dict())
3103 entry.permission = _user_group.permission.permission_name
3102 entry.permission = _user_group.permission.permission_name
3104 if with_members:
3103 if with_members:
3105 entry.members = [x.user.get_dict()
3104 entry.members = [x.user.get_dict()
3106 for x in _user_group.users_group.members]
3105 for x in _user_group.users_group.members]
3107 perm_rows.append(entry)
3106 perm_rows.append(entry)
3108
3107
3109 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3108 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3110 return perm_rows
3109 return perm_rows
3111
3110
3112 def get_api_data(self):
3111 def get_api_data(self):
3113 """
3112 """
3114 Common function for generating api data
3113 Common function for generating api data
3115
3114
3116 """
3115 """
3117 group = self
3116 group = self
3118 data = {
3117 data = {
3119 'group_id': group.group_id,
3118 'group_id': group.group_id,
3120 'group_name': group.group_name,
3119 'group_name': group.group_name,
3121 'group_description': group.description_safe,
3120 'group_description': group.description_safe,
3122 'parent_group': group.parent_group.group_name if group.parent_group else None,
3121 'parent_group': group.parent_group.group_name if group.parent_group else None,
3123 'repositories': [x.repo_name for x in group.repositories],
3122 'repositories': [x.repo_name for x in group.repositories],
3124 'owner': group.user.username,
3123 'owner': group.user.username,
3125 }
3124 }
3126 return data
3125 return data
3127
3126
3128 def get_dict(self):
3127 def get_dict(self):
3129 # Since we transformed `group_name` to a hybrid property, we need to
3128 # Since we transformed `group_name` to a hybrid property, we need to
3130 # keep compatibility with the code which uses `group_name` field.
3129 # keep compatibility with the code which uses `group_name` field.
3131 result = super(RepoGroup, self).get_dict()
3130 result = super(RepoGroup, self).get_dict()
3132 result['group_name'] = result.pop('_group_name', None)
3131 result['group_name'] = result.pop('_group_name', None)
3133 result.pop('_changeset_cache', '')
3132 result.pop('_changeset_cache', '')
3134 return result
3133 return result
3135
3134
3136
3135
3137 class Permission(Base, BaseModel):
3136 class Permission(Base, BaseModel):
3138 __tablename__ = 'permissions'
3137 __tablename__ = 'permissions'
3139 __table_args__ = (
3138 __table_args__ = (
3140 Index('p_perm_name_idx', 'permission_name'),
3139 Index('p_perm_name_idx', 'permission_name'),
3141 base_table_args,
3140 base_table_args,
3142 )
3141 )
3143
3142
3144 PERMS = [
3143 PERMS = [
3145 ('hg.admin', _('RhodeCode Super Administrator')),
3144 ('hg.admin', _('RhodeCode Super Administrator')),
3146
3145
3147 ('repository.none', _('Repository no access')),
3146 ('repository.none', _('Repository no access')),
3148 ('repository.read', _('Repository read access')),
3147 ('repository.read', _('Repository read access')),
3149 ('repository.write', _('Repository write access')),
3148 ('repository.write', _('Repository write access')),
3150 ('repository.admin', _('Repository admin access')),
3149 ('repository.admin', _('Repository admin access')),
3151
3150
3152 ('group.none', _('Repository group no access')),
3151 ('group.none', _('Repository group no access')),
3153 ('group.read', _('Repository group read access')),
3152 ('group.read', _('Repository group read access')),
3154 ('group.write', _('Repository group write access')),
3153 ('group.write', _('Repository group write access')),
3155 ('group.admin', _('Repository group admin access')),
3154 ('group.admin', _('Repository group admin access')),
3156
3155
3157 ('usergroup.none', _('User group no access')),
3156 ('usergroup.none', _('User group no access')),
3158 ('usergroup.read', _('User group read access')),
3157 ('usergroup.read', _('User group read access')),
3159 ('usergroup.write', _('User group write access')),
3158 ('usergroup.write', _('User group write access')),
3160 ('usergroup.admin', _('User group admin access')),
3159 ('usergroup.admin', _('User group admin access')),
3161
3160
3162 ('branch.none', _('Branch no permissions')),
3161 ('branch.none', _('Branch no permissions')),
3163 ('branch.merge', _('Branch access by web merge')),
3162 ('branch.merge', _('Branch access by web merge')),
3164 ('branch.push', _('Branch access by push')),
3163 ('branch.push', _('Branch access by push')),
3165 ('branch.push_force', _('Branch access by push with force')),
3164 ('branch.push_force', _('Branch access by push with force')),
3166
3165
3167 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3166 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3168 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3167 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3169
3168
3170 ('hg.usergroup.create.false', _('User Group creation disabled')),
3169 ('hg.usergroup.create.false', _('User Group creation disabled')),
3171 ('hg.usergroup.create.true', _('User Group creation enabled')),
3170 ('hg.usergroup.create.true', _('User Group creation enabled')),
3172
3171
3173 ('hg.create.none', _('Repository creation disabled')),
3172 ('hg.create.none', _('Repository creation disabled')),
3174 ('hg.create.repository', _('Repository creation enabled')),
3173 ('hg.create.repository', _('Repository creation enabled')),
3175 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3174 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3176 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3175 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3177
3176
3178 ('hg.fork.none', _('Repository forking disabled')),
3177 ('hg.fork.none', _('Repository forking disabled')),
3179 ('hg.fork.repository', _('Repository forking enabled')),
3178 ('hg.fork.repository', _('Repository forking enabled')),
3180
3179
3181 ('hg.register.none', _('Registration disabled')),
3180 ('hg.register.none', _('Registration disabled')),
3182 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3181 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3183 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3182 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3184
3183
3185 ('hg.password_reset.enabled', _('Password reset enabled')),
3184 ('hg.password_reset.enabled', _('Password reset enabled')),
3186 ('hg.password_reset.hidden', _('Password reset hidden')),
3185 ('hg.password_reset.hidden', _('Password reset hidden')),
3187 ('hg.password_reset.disabled', _('Password reset disabled')),
3186 ('hg.password_reset.disabled', _('Password reset disabled')),
3188
3187
3189 ('hg.extern_activate.manual', _('Manual activation of external account')),
3188 ('hg.extern_activate.manual', _('Manual activation of external account')),
3190 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3189 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3191
3190
3192 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3191 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3193 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3192 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3194 ]
3193 ]
3195
3194
3196 # definition of system default permissions for DEFAULT user, created on
3195 # definition of system default permissions for DEFAULT user, created on
3197 # system setup
3196 # system setup
3198 DEFAULT_USER_PERMISSIONS = [
3197 DEFAULT_USER_PERMISSIONS = [
3199 # object perms
3198 # object perms
3200 'repository.read',
3199 'repository.read',
3201 'group.read',
3200 'group.read',
3202 'usergroup.read',
3201 'usergroup.read',
3203 # branch, for backward compat we need same value as before so forced pushed
3202 # branch, for backward compat we need same value as before so forced pushed
3204 'branch.push_force',
3203 'branch.push_force',
3205 # global
3204 # global
3206 'hg.create.repository',
3205 'hg.create.repository',
3207 'hg.repogroup.create.false',
3206 'hg.repogroup.create.false',
3208 'hg.usergroup.create.false',
3207 'hg.usergroup.create.false',
3209 'hg.create.write_on_repogroup.true',
3208 'hg.create.write_on_repogroup.true',
3210 'hg.fork.repository',
3209 'hg.fork.repository',
3211 'hg.register.manual_activate',
3210 'hg.register.manual_activate',
3212 'hg.password_reset.enabled',
3211 'hg.password_reset.enabled',
3213 'hg.extern_activate.auto',
3212 'hg.extern_activate.auto',
3214 'hg.inherit_default_perms.true',
3213 'hg.inherit_default_perms.true',
3215 ]
3214 ]
3216
3215
3217 # defines which permissions are more important higher the more important
3216 # defines which permissions are more important higher the more important
3218 # Weight defines which permissions are more important.
3217 # Weight defines which permissions are more important.
3219 # The higher number the more important.
3218 # The higher number the more important.
3220 PERM_WEIGHTS = {
3219 PERM_WEIGHTS = {
3221 'repository.none': 0,
3220 'repository.none': 0,
3222 'repository.read': 1,
3221 'repository.read': 1,
3223 'repository.write': 3,
3222 'repository.write': 3,
3224 'repository.admin': 4,
3223 'repository.admin': 4,
3225
3224
3226 'group.none': 0,
3225 'group.none': 0,
3227 'group.read': 1,
3226 'group.read': 1,
3228 'group.write': 3,
3227 'group.write': 3,
3229 'group.admin': 4,
3228 'group.admin': 4,
3230
3229
3231 'usergroup.none': 0,
3230 'usergroup.none': 0,
3232 'usergroup.read': 1,
3231 'usergroup.read': 1,
3233 'usergroup.write': 3,
3232 'usergroup.write': 3,
3234 'usergroup.admin': 4,
3233 'usergroup.admin': 4,
3235
3234
3236 'branch.none': 0,
3235 'branch.none': 0,
3237 'branch.merge': 1,
3236 'branch.merge': 1,
3238 'branch.push': 3,
3237 'branch.push': 3,
3239 'branch.push_force': 4,
3238 'branch.push_force': 4,
3240
3239
3241 'hg.repogroup.create.false': 0,
3240 'hg.repogroup.create.false': 0,
3242 'hg.repogroup.create.true': 1,
3241 'hg.repogroup.create.true': 1,
3243
3242
3244 'hg.usergroup.create.false': 0,
3243 'hg.usergroup.create.false': 0,
3245 'hg.usergroup.create.true': 1,
3244 'hg.usergroup.create.true': 1,
3246
3245
3247 'hg.fork.none': 0,
3246 'hg.fork.none': 0,
3248 'hg.fork.repository': 1,
3247 'hg.fork.repository': 1,
3249 'hg.create.none': 0,
3248 'hg.create.none': 0,
3250 'hg.create.repository': 1
3249 'hg.create.repository': 1
3251 }
3250 }
3252
3251
3253 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3252 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3254 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3253 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3255 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3254 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3256
3255
3257 def __repr__(self):
3256 def __repr__(self):
3258 return "<%s('%s:%s')>" % (
3257 return "<%s('%s:%s')>" % (
3259 self.cls_name, self.permission_id, self.permission_name
3258 self.cls_name, self.permission_id, self.permission_name
3260 )
3259 )
3261
3260
3262 @classmethod
3261 @classmethod
3263 def get_by_key(cls, key):
3262 def get_by_key(cls, key):
3264 return cls.query().filter(cls.permission_name == key).scalar()
3263 return cls.query().filter(cls.permission_name == key).scalar()
3265
3264
3266 @classmethod
3265 @classmethod
3267 def get_default_repo_perms(cls, user_id, repo_id=None):
3266 def get_default_repo_perms(cls, user_id, repo_id=None):
3268 q = Session().query(UserRepoToPerm, Repository, Permission)\
3267 q = Session().query(UserRepoToPerm, Repository, Permission)\
3269 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3268 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3270 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3269 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3271 .filter(UserRepoToPerm.user_id == user_id)
3270 .filter(UserRepoToPerm.user_id == user_id)
3272 if repo_id:
3271 if repo_id:
3273 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3272 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3274 return q.all()
3273 return q.all()
3275
3274
3276 @classmethod
3275 @classmethod
3277 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3276 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3278 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3277 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3279 .join(
3278 .join(
3280 Permission,
3279 Permission,
3281 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3280 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3282 .join(
3281 .join(
3283 UserRepoToPerm,
3282 UserRepoToPerm,
3284 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3283 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3285 .filter(UserRepoToPerm.user_id == user_id)
3284 .filter(UserRepoToPerm.user_id == user_id)
3286
3285
3287 if repo_id:
3286 if repo_id:
3288 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3287 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3289 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3288 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3290
3289
3291 @classmethod
3290 @classmethod
3292 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3291 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3293 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3292 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3294 .join(
3293 .join(
3295 Permission,
3294 Permission,
3296 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3295 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3297 .join(
3296 .join(
3298 Repository,
3297 Repository,
3299 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3298 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3300 .join(
3299 .join(
3301 UserGroup,
3300 UserGroup,
3302 UserGroupRepoToPerm.users_group_id ==
3301 UserGroupRepoToPerm.users_group_id ==
3303 UserGroup.users_group_id)\
3302 UserGroup.users_group_id)\
3304 .join(
3303 .join(
3305 UserGroupMember,
3304 UserGroupMember,
3306 UserGroupRepoToPerm.users_group_id ==
3305 UserGroupRepoToPerm.users_group_id ==
3307 UserGroupMember.users_group_id)\
3306 UserGroupMember.users_group_id)\
3308 .filter(
3307 .filter(
3309 UserGroupMember.user_id == user_id,
3308 UserGroupMember.user_id == user_id,
3310 UserGroup.users_group_active == true())
3309 UserGroup.users_group_active == true())
3311 if repo_id:
3310 if repo_id:
3312 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3311 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3313 return q.all()
3312 return q.all()
3314
3313
3315 @classmethod
3314 @classmethod
3316 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3315 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3317 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3316 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3318 .join(
3317 .join(
3319 Permission,
3318 Permission,
3320 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3319 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3321 .join(
3320 .join(
3322 UserGroupRepoToPerm,
3321 UserGroupRepoToPerm,
3323 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3322 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3324 .join(
3323 .join(
3325 UserGroup,
3324 UserGroup,
3326 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3325 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3327 .join(
3326 .join(
3328 UserGroupMember,
3327 UserGroupMember,
3329 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3328 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3330 .filter(
3329 .filter(
3331 UserGroupMember.user_id == user_id,
3330 UserGroupMember.user_id == user_id,
3332 UserGroup.users_group_active == true())
3331 UserGroup.users_group_active == true())
3333
3332
3334 if repo_id:
3333 if repo_id:
3335 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3334 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3336 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3335 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3337
3336
3338 @classmethod
3337 @classmethod
3339 def get_default_group_perms(cls, user_id, repo_group_id=None):
3338 def get_default_group_perms(cls, user_id, repo_group_id=None):
3340 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3339 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3341 .join(
3340 .join(
3342 Permission,
3341 Permission,
3343 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3342 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3344 .join(
3343 .join(
3345 RepoGroup,
3344 RepoGroup,
3346 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3345 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3347 .filter(UserRepoGroupToPerm.user_id == user_id)
3346 .filter(UserRepoGroupToPerm.user_id == user_id)
3348 if repo_group_id:
3347 if repo_group_id:
3349 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3348 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3350 return q.all()
3349 return q.all()
3351
3350
3352 @classmethod
3351 @classmethod
3353 def get_default_group_perms_from_user_group(
3352 def get_default_group_perms_from_user_group(
3354 cls, user_id, repo_group_id=None):
3353 cls, user_id, repo_group_id=None):
3355 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3354 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3356 .join(
3355 .join(
3357 Permission,
3356 Permission,
3358 UserGroupRepoGroupToPerm.permission_id ==
3357 UserGroupRepoGroupToPerm.permission_id ==
3359 Permission.permission_id)\
3358 Permission.permission_id)\
3360 .join(
3359 .join(
3361 RepoGroup,
3360 RepoGroup,
3362 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3361 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3363 .join(
3362 .join(
3364 UserGroup,
3363 UserGroup,
3365 UserGroupRepoGroupToPerm.users_group_id ==
3364 UserGroupRepoGroupToPerm.users_group_id ==
3366 UserGroup.users_group_id)\
3365 UserGroup.users_group_id)\
3367 .join(
3366 .join(
3368 UserGroupMember,
3367 UserGroupMember,
3369 UserGroupRepoGroupToPerm.users_group_id ==
3368 UserGroupRepoGroupToPerm.users_group_id ==
3370 UserGroupMember.users_group_id)\
3369 UserGroupMember.users_group_id)\
3371 .filter(
3370 .filter(
3372 UserGroupMember.user_id == user_id,
3371 UserGroupMember.user_id == user_id,
3373 UserGroup.users_group_active == true())
3372 UserGroup.users_group_active == true())
3374 if repo_group_id:
3373 if repo_group_id:
3375 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3374 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3376 return q.all()
3375 return q.all()
3377
3376
3378 @classmethod
3377 @classmethod
3379 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3378 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3380 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3379 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3381 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3380 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3382 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3381 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3383 .filter(UserUserGroupToPerm.user_id == user_id)
3382 .filter(UserUserGroupToPerm.user_id == user_id)
3384 if user_group_id:
3383 if user_group_id:
3385 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3384 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3386 return q.all()
3385 return q.all()
3387
3386
3388 @classmethod
3387 @classmethod
3389 def get_default_user_group_perms_from_user_group(
3388 def get_default_user_group_perms_from_user_group(
3390 cls, user_id, user_group_id=None):
3389 cls, user_id, user_group_id=None):
3391 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3390 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3392 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3391 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3393 .join(
3392 .join(
3394 Permission,
3393 Permission,
3395 UserGroupUserGroupToPerm.permission_id ==
3394 UserGroupUserGroupToPerm.permission_id ==
3396 Permission.permission_id)\
3395 Permission.permission_id)\
3397 .join(
3396 .join(
3398 TargetUserGroup,
3397 TargetUserGroup,
3399 UserGroupUserGroupToPerm.target_user_group_id ==
3398 UserGroupUserGroupToPerm.target_user_group_id ==
3400 TargetUserGroup.users_group_id)\
3399 TargetUserGroup.users_group_id)\
3401 .join(
3400 .join(
3402 UserGroup,
3401 UserGroup,
3403 UserGroupUserGroupToPerm.user_group_id ==
3402 UserGroupUserGroupToPerm.user_group_id ==
3404 UserGroup.users_group_id)\
3403 UserGroup.users_group_id)\
3405 .join(
3404 .join(
3406 UserGroupMember,
3405 UserGroupMember,
3407 UserGroupUserGroupToPerm.user_group_id ==
3406 UserGroupUserGroupToPerm.user_group_id ==
3408 UserGroupMember.users_group_id)\
3407 UserGroupMember.users_group_id)\
3409 .filter(
3408 .filter(
3410 UserGroupMember.user_id == user_id,
3409 UserGroupMember.user_id == user_id,
3411 UserGroup.users_group_active == true())
3410 UserGroup.users_group_active == true())
3412 if user_group_id:
3411 if user_group_id:
3413 q = q.filter(
3412 q = q.filter(
3414 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3413 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3415
3414
3416 return q.all()
3415 return q.all()
3417
3416
3418
3417
3419 class UserRepoToPerm(Base, BaseModel):
3418 class UserRepoToPerm(Base, BaseModel):
3420 __tablename__ = 'repo_to_perm'
3419 __tablename__ = 'repo_to_perm'
3421 __table_args__ = (
3420 __table_args__ = (
3422 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3421 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3423 base_table_args
3422 base_table_args
3424 )
3423 )
3425
3424
3426 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3425 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3427 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3426 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3428 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3427 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3429 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3428 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3430
3429
3431 user = relationship('User', back_populates="repo_to_perm")
3430 user = relationship('User', back_populates="repo_to_perm")
3432 repository = relationship('Repository', back_populates="repo_to_perm")
3431 repository = relationship('Repository', back_populates="repo_to_perm")
3433 permission = relationship('Permission')
3432 permission = relationship('Permission')
3434
3433
3435 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')
3434 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')
3436
3435
3437 @classmethod
3436 @classmethod
3438 def create(cls, user, repository, permission):
3437 def create(cls, user, repository, permission):
3439 n = cls()
3438 n = cls()
3440 n.user = user
3439 n.user = user
3441 n.repository = repository
3440 n.repository = repository
3442 n.permission = permission
3441 n.permission = permission
3443 Session().add(n)
3442 Session().add(n)
3444 return n
3443 return n
3445
3444
3446 def __repr__(self):
3445 def __repr__(self):
3447 return f'<{self.user} => {self.repository} >'
3446 return f'<{self.user} => {self.repository} >'
3448
3447
3449
3448
3450 class UserUserGroupToPerm(Base, BaseModel):
3449 class UserUserGroupToPerm(Base, BaseModel):
3451 __tablename__ = 'user_user_group_to_perm'
3450 __tablename__ = 'user_user_group_to_perm'
3452 __table_args__ = (
3451 __table_args__ = (
3453 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3452 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3454 base_table_args
3453 base_table_args
3455 )
3454 )
3456
3455
3457 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3456 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3458 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3457 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3459 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3458 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3460 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3459 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3461
3460
3462 user = relationship('User', back_populates='user_group_to_perm')
3461 user = relationship('User', back_populates='user_group_to_perm')
3463 user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
3462 user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
3464 permission = relationship('Permission')
3463 permission = relationship('Permission')
3465
3464
3466 @classmethod
3465 @classmethod
3467 def create(cls, user, user_group, permission):
3466 def create(cls, user, user_group, permission):
3468 n = cls()
3467 n = cls()
3469 n.user = user
3468 n.user = user
3470 n.user_group = user_group
3469 n.user_group = user_group
3471 n.permission = permission
3470 n.permission = permission
3472 Session().add(n)
3471 Session().add(n)
3473 return n
3472 return n
3474
3473
3475 def __repr__(self):
3474 def __repr__(self):
3476 return f'<{self.user} => {self.user_group} >'
3475 return f'<{self.user} => {self.user_group} >'
3477
3476
3478
3477
3479 class UserToPerm(Base, BaseModel):
3478 class UserToPerm(Base, BaseModel):
3480 __tablename__ = 'user_to_perm'
3479 __tablename__ = 'user_to_perm'
3481 __table_args__ = (
3480 __table_args__ = (
3482 UniqueConstraint('user_id', 'permission_id'),
3481 UniqueConstraint('user_id', 'permission_id'),
3483 base_table_args
3482 base_table_args
3484 )
3483 )
3485
3484
3486 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3485 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3487 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3486 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3488 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3487 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3489
3488
3490 user = relationship('User', back_populates='user_perms')
3489 user = relationship('User', back_populates='user_perms')
3491 permission = relationship('Permission', lazy='joined')
3490 permission = relationship('Permission', lazy='joined')
3492
3491
3493 def __repr__(self):
3492 def __repr__(self):
3494 return f'<{self.user} => {self.permission} >'
3493 return f'<{self.user} => {self.permission} >'
3495
3494
3496
3495
3497 class UserGroupRepoToPerm(Base, BaseModel):
3496 class UserGroupRepoToPerm(Base, BaseModel):
3498 __tablename__ = 'users_group_repo_to_perm'
3497 __tablename__ = 'users_group_repo_to_perm'
3499 __table_args__ = (
3498 __table_args__ = (
3500 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3499 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3501 base_table_args
3500 base_table_args
3502 )
3501 )
3503
3502
3504 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3503 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3505 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3504 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3506 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3505 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3507 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3506 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3508
3507
3509 users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
3508 users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
3510 permission = relationship('Permission')
3509 permission = relationship('Permission')
3511 repository = relationship('Repository', back_populates='users_group_to_perm')
3510 repository = relationship('Repository', back_populates='users_group_to_perm')
3512 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')
3511 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')
3513
3512
3514 @classmethod
3513 @classmethod
3515 def create(cls, users_group, repository, permission):
3514 def create(cls, users_group, repository, permission):
3516 n = cls()
3515 n = cls()
3517 n.users_group = users_group
3516 n.users_group = users_group
3518 n.repository = repository
3517 n.repository = repository
3519 n.permission = permission
3518 n.permission = permission
3520 Session().add(n)
3519 Session().add(n)
3521 return n
3520 return n
3522
3521
3523 def __repr__(self):
3522 def __repr__(self):
3524 return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'
3523 return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'
3525
3524
3526
3525
3527 class UserGroupUserGroupToPerm(Base, BaseModel):
3526 class UserGroupUserGroupToPerm(Base, BaseModel):
3528 __tablename__ = 'user_group_user_group_to_perm'
3527 __tablename__ = 'user_group_user_group_to_perm'
3529 __table_args__ = (
3528 __table_args__ = (
3530 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3529 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3531 CheckConstraint('target_user_group_id != user_group_id'),
3530 CheckConstraint('target_user_group_id != user_group_id'),
3532 base_table_args
3531 base_table_args
3533 )
3532 )
3534
3533
3535 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3534 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3536 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3535 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3537 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3536 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3538 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3537 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3539
3538
3540 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
3539 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
3541 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3540 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3542 permission = relationship('Permission')
3541 permission = relationship('Permission')
3543
3542
3544 @classmethod
3543 @classmethod
3545 def create(cls, target_user_group, user_group, permission):
3544 def create(cls, target_user_group, user_group, permission):
3546 n = cls()
3545 n = cls()
3547 n.target_user_group = target_user_group
3546 n.target_user_group = target_user_group
3548 n.user_group = user_group
3547 n.user_group = user_group
3549 n.permission = permission
3548 n.permission = permission
3550 Session().add(n)
3549 Session().add(n)
3551 return n
3550 return n
3552
3551
3553 def __repr__(self):
3552 def __repr__(self):
3554 return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'
3553 return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'
3555
3554
3556
3555
3557 class UserGroupToPerm(Base, BaseModel):
3556 class UserGroupToPerm(Base, BaseModel):
3558 __tablename__ = 'users_group_to_perm'
3557 __tablename__ = 'users_group_to_perm'
3559 __table_args__ = (
3558 __table_args__ = (
3560 UniqueConstraint('users_group_id', 'permission_id',),
3559 UniqueConstraint('users_group_id', 'permission_id',),
3561 base_table_args
3560 base_table_args
3562 )
3561 )
3563
3562
3564 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3563 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3565 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3564 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3566 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3565 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3567
3566
3568 users_group = relationship('UserGroup', back_populates='users_group_to_perm')
3567 users_group = relationship('UserGroup', back_populates='users_group_to_perm')
3569 permission = relationship('Permission')
3568 permission = relationship('Permission')
3570
3569
3571
3570
3572 class UserRepoGroupToPerm(Base, BaseModel):
3571 class UserRepoGroupToPerm(Base, BaseModel):
3573 __tablename__ = 'user_repo_group_to_perm'
3572 __tablename__ = 'user_repo_group_to_perm'
3574 __table_args__ = (
3573 __table_args__ = (
3575 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3574 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3576 base_table_args
3575 base_table_args
3577 )
3576 )
3578
3577
3579 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3578 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3580 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3579 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3581 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3580 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3582 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3581 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3583
3582
3584 user = relationship('User', back_populates='repo_group_to_perm')
3583 user = relationship('User', back_populates='repo_group_to_perm')
3585 group = relationship('RepoGroup', back_populates='repo_group_to_perm')
3584 group = relationship('RepoGroup', back_populates='repo_group_to_perm')
3586 permission = relationship('Permission')
3585 permission = relationship('Permission')
3587
3586
3588 @classmethod
3587 @classmethod
3589 def create(cls, user, repository_group, permission):
3588 def create(cls, user, repository_group, permission):
3590 n = cls()
3589 n = cls()
3591 n.user = user
3590 n.user = user
3592 n.group = repository_group
3591 n.group = repository_group
3593 n.permission = permission
3592 n.permission = permission
3594 Session().add(n)
3593 Session().add(n)
3595 return n
3594 return n
3596
3595
3597
3596
3598 class UserGroupRepoGroupToPerm(Base, BaseModel):
3597 class UserGroupRepoGroupToPerm(Base, BaseModel):
3599 __tablename__ = 'users_group_repo_group_to_perm'
3598 __tablename__ = 'users_group_repo_group_to_perm'
3600 __table_args__ = (
3599 __table_args__ = (
3601 UniqueConstraint('users_group_id', 'group_id'),
3600 UniqueConstraint('users_group_id', 'group_id'),
3602 base_table_args
3601 base_table_args
3603 )
3602 )
3604
3603
3605 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3604 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3606 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3605 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3607 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3606 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3608 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3607 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3609
3608
3610 users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
3609 users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
3611 permission = relationship('Permission')
3610 permission = relationship('Permission')
3612 group = relationship('RepoGroup', back_populates='users_group_to_perm')
3611 group = relationship('RepoGroup', back_populates='users_group_to_perm')
3613
3612
3614 @classmethod
3613 @classmethod
3615 def create(cls, user_group, repository_group, permission):
3614 def create(cls, user_group, repository_group, permission):
3616 n = cls()
3615 n = cls()
3617 n.users_group = user_group
3616 n.users_group = user_group
3618 n.group = repository_group
3617 n.group = repository_group
3619 n.permission = permission
3618 n.permission = permission
3620 Session().add(n)
3619 Session().add(n)
3621 return n
3620 return n
3622
3621
3623 def __repr__(self):
3622 def __repr__(self):
3624 return '<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3623 return '<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3625
3624
3626
3625
class Statistics(Base, BaseModel):
    """Pre-computed commit statistics for a single repository.

    Activity and language breakdowns are stored as serialized blobs
    (the columns are commented as JSON data); one row per repository,
    enforced by the unique ``repository_id`` foreign key.
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    # surrogate primary key
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # unique FK: at most one statistics row per repository
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # revision the statistics were last computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    # read-only link back to the owning repository
    repository = relationship('Repository', single_parent=True, viewonly=True)
3641
3640
3642
3641
class UserFollowing(Base, BaseModel):
    """A "follow" entry: a user following a repository or another user.

    Both target columns are nullable; presumably exactly one of
    ``follows_repo_id`` / ``follows_user_id`` is set per row — TODO
    confirm against the model layer that writes these entries. The
    unique constraints prevent duplicate follows of the same target.
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # the follower
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    # followed repository (nullable; see class docstring)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    # followed user (nullable; see class docstring)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query yielding the follow entries for the given repo id."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3665
3664
3666
3665
class CacheKey(Base, BaseModel):
    """Cache validity bookkeeping for repository-scoped caches.

    Each row binds a concrete ``cache_key`` to a namespace (``cache_args``),
    together with an active flag and a ``cache_state_uid`` that is shared by
    all entries of a namespace so every worker invalidates consistently.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        Index('cache_args_idx', 'cache_args'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    # namespace the key belongs to, used for bulk invalidation
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        """
        :param cache_key: full cache key, unique per row
        :param cache_args: namespace this key is registered under
        :param cache_state_uid: shared state uid; generated when not given
        """
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __repr__(self):
        # f-string instead of %-formatting, consistent with the other models'
        # __repr__ implementations in this module (python3 modernization)
        return f"<{self.cls_name}('{self.cache_id}:{self.cache_key}[{self.cache_active}]')>"

    def _cache_key_partition(self):
        """Split ``cache_key`` around ``cache_args`` into (prefix, repo_name, suffix)."""
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """Return a new state uid: deterministic uuid5 when `based_on` is given,
        random uuid4 otherwise."""
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param cache_uid: namespace (``cache_args``) whose entries to touch
        :param delete: when True remove the rows entirely instead of just
            deactivating them and rotating the state uid
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            # best-effort: log and roll back, never propagate to the caller
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the row for `cache_key`, or None when it does not exist."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return a dict mapping cache_key -> CacheKey row for a namespace."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
3774
3773
3775
3774
class ChangesetComment(Base, BaseModel):
    """A comment on a commit (``revision``) or a pull request.

    Comments can be general or inline (bound to ``f_path`` + ``line_no``),
    of type note/todo, draft or published, and mutable or immutable.
    Inline pull-request comments carry the PR version they were made on so
    they can be flagged as outdated by later versions.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state value marking an inline comment superseded by a newer PR version
    COMMENT_OUTDATED = 'comment_outdated'
    COMMENT_TYPE_NOTE = 'note'
    COMMENT_TYPE_TODO = 'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    # immutable_state values: immutable comments cannot be edited/deleted
    OP_IMMUTABLE = 'immutable'
    OP_CHANGEABLE = 'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    # commit hash the comment targets; None for pull-request-level comments
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    # PR version the comment was made against; None for the latest/general case
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    # inline-comment anchor: set together with f_path (see is_inline)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
    draft = Column('draft', Boolean(), nullable=True, default=False)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    # self-referential FK: the TODO comment this comment resolves
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='select', back_populates='user_comments')
    repo = relationship('Repository', back_populates='comments')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
    pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
    pull_request_version = relationship('PullRequestVersion', lazy='select')
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Return the User objects associated with comments, i.e. the users
        who actually commented on the given revision or pull request.

        :param revision: commit hash to filter comments by
        :param pull_request_id: used only when ``revision`` is not given
        """
        q = Session().query(User).join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions=None, num_versions=None):
        """
        Return the 1-based index of ``pr_version`` within the known versions,
        0 when ``pr_version`` is None, and None when it cannot be found.

        :param versions: PullRequestVersion objects; takes precedence over
            ``num_versions`` when given
        :param num_versions: plain list of version ids
        """
        if pr_version is None:
            return 0

        if versions is not None:
            num_versions = [x.pull_request_version_id for x in versions]

        num_versions = num_versions or []
        try:
            return num_versions.index(pr_version) + 1
        except (IndexError, ValueError):
            # version not present in the list -> implicit None
            return

    @property
    def outdated(self):
        """True when the comment was marked outdated by a newer PR version."""
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def outdated_js(self):
        """JSON-encoded :attr:`outdated`, for embedding in templates/JS."""
        return json.dumps(self.display_state == self.COMMENT_OUTDATED)

    @property
    def immutable(self):
        """True when the comment may no longer be edited or deleted."""
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        def version_check():
            # made on some version, and not the version being asked about
            return self.pull_request_version_id and self.pull_request_version_id != version

        if self.is_inline:
            return self.outdated and version_check()
        else:
            # general comments don't have .outdated set, also latest don't have a version
            return version_check()

    def outdated_at_version_js(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return json.dumps(self.outdated_at_version(version))

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        cur_ver = 0
        if self.pull_request_version:
            cur_ver = self.pull_request_version.pull_request_version_id or cur_ver

        if version is None:
            # no version given: only comments with a version are "older"
            return cur_ver != version

        return cur_ver < version

    def older_than_version_js(self, version):
        """
        Checks if comment is made from previous version than given
        """
        return json.dumps(self.older_than_version(version))

    @property
    def commit_id(self):
        """New style naming to stop using .revision"""
        return self.revision

    @property
    def resolved(self):
        """The comment that resolved this TODO, or None."""
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        """True when this comment is of the TODO type."""
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        """True when the comment is anchored to a file line (inline comment)."""
        if self.line_no and self.f_path:
            return True
        return False

    @property
    def last_version(self):
        """Highest edit-history version of this comment, 0 when never edited."""
        version = 0
        if self.history:
            # history is ordered by version ascending (see relationship above)
            version = self.history[-1].version
        return version

    def get_index_version(self, versions):
        """Index of this comment's PR version within ``versions`` (see
        :meth:`get_index_from_version`)."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    @property
    def review_status(self):
        """Status string of the attached status change, if any."""
        if self.status_change:
            return self.status_change[0].status

    @property
    def review_status_lbl(self):
        """Human-readable label of the attached status change, if any."""
        if self.status_change:
            return self.status_change[0].status_lbl

    def __repr__(self):
        if self.comment_id:
            return f'<DB:Comment #{self.comment_id}>'
        else:
            # not yet persisted: fall back to the object identity
            return f'<DB:Comment at {id(self)!r}>'

    def get_api_data(self):
        """Return a dict of this comment's fields for the JSON API."""
        comment = self

        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
            'comment_last_version': self.last_version
        }
        return data

    def __json__(self):
        """JSON serialization hook: same payload as :meth:`get_api_data`."""
        data = dict()
        data.update(self.get_api_data())
        return data
3970
3969
3971
3970
class ChangesetCommentHistory(Base, BaseModel):
    """Edit history of a :class:`ChangesetComment`: one row per saved revision
    of the comment text, with a monotonically increasing ``version``."""
    __tablename__ = 'changeset_comments_history'
    __table_args__ = (
        Index('cch_comment_id_idx', 'comment_id'),
        base_table_args,
    )

    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    version = Column("version", Integer(), nullable=False, default=0)
    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    deleted = Column('deleted', Boolean(), default=False)

    author = relationship('User', lazy='joined')
    comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history")

    @classmethod
    def get_version(cls, comment_id):
        """
        Return the next (1-based) version number to assign for the given
        comment: one greater than the larger of the number of existing
        history rows and the highest stored version.
        """
        q = Session().query(ChangesetCommentHistory).filter(
            ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
        # evaluate the COUNT query and the top row only once each, instead of
        # re-issuing them for every comparison (each is a DB round-trip)
        entry_count = q.count()
        if entry_count == 0:
            return 1
        latest_version = q[0].version
        if entry_count >= latest_version:
            return entry_count + 1
        return latest_version + 1
4000
3999
4001
4000
class ChangesetStatus(Base, BaseModel):
    """Review status (approved/rejected/...) set on a commit or pull request.

    Statuses are versioned per (repo, revision): each new status bumps
    ``version`` instead of overwriting, and may be linked to the comment
    that carried the status change.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs; order is the display order
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    # comment that introduced this status change, if any
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    # incremented for each new status on the same (repo, revision)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='select')
    repo = relationship('Repository', lazy='select')
    comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
    pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')

    def __repr__(self):
        return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"

    @classmethod
    def get_status_lbl(cls, value):
        """Return the translated label for a status value, or None if unknown."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        """Translated label of this instance's status."""
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return a dict of this status's fields for the JSON API."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        """JSON serialization hook: same payload as :meth:`get_api_data`."""
        data = dict()
        data.update(self.get_api_data())
        return data
4061
4060
4062
4061
class _SetState:
    """
    Context processor allowing changing state for sensitive operation such as
    pull request update or merge
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        """
        :param pull_request: pull request object whose state is managed
        :param pr_state: state to hold while inside the context
        :param back_state: state restored on clean exit; defaults to the
            pull request's state at construction time
        """
        self._pr = pull_request
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
                  self._pr, self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # On error: deliberately do NOT restore the original state, so the
        # pull request is not marked settled after a failed operation.
        if exc_val is not None or exc_type is not None:
            # NOTE(review): format_tb returns a list of strings; logged as-is.
            log.error(traceback.format_tb(exc_tb))
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
                  self._pr, self._org_state)

    @property
    def state(self):
        # State most recently persisted via set_pr_state (None before enter).
        return self._current_state

    def set_pr_state(self, pr_state):
        """Persist *pr_state* on the pull request; logs and re-raises on failure."""
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
            self._current_state = pr_state
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
4103
4102
4104
4103
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Shared declarative mixin for ``PullRequest`` and ``PullRequestVersion``;
    relationship/column definitions use ``declared_attr`` so each concrete
    subclass gets its own copy.
    """

    # .status values
    STATUS_NEW = 'new'
    STATUS_OPEN = 'open'
    STATUS_CLOSED = 'closed'

    # available states
    STATE_CREATING = 'creating'
    STATE_UPDATING = 'updating'
    STATE_MERGING = 'merging'
    STATE_CREATED = 'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        # author of the pull request
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    @declared_attr
    def pr_source(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
            overlaps="pull_requests_source"
        )

    # stored as 'type:name:commit_id' string, see source_ref setter
    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        # validate the X:Y:Z (type:name:commit_id) format before storing
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_str(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        # validate the X:Y:Z (type:name:commit_id) format before storing
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_str(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    @declared_attr
    def pr_target(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
            overlaps="pull_requests_target"
        )

    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        """Reviewer rules serialized as a JSON string."""
        return json.dumps(self.reviewer_data)

    @property
    def last_merge_metadata_parsed(self):
        """
        Return last_merge_metadata de-coerced into plain values; ref entries
        ('target_ref'/'source_ref') are rebuilt as Reference tuples.
        Empty dict when no metadata is stored.
        """
        metadata = {}
        if not self.last_merge_metadata:
            return metadata

        if hasattr(self.last_merge_metadata, 'de_coerce'):
            for k, v in self.last_merge_metadata.de_coerce().items():
                if k in ['target_ref', 'source_ref']:
                    metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
                else:
                    if hasattr(v, 'de_coerce'):
                        metadata[k] = v.de_coerce()
                    else:
                        metadata[k] = v
        return metadata

    @property
    def work_in_progress(self):
        """checks if pull request is work in progress by checking the title"""
        title = self.title.upper()
        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
            return True
        return False

    @property
    def title_safe(self):
        """Title with braces escaped so it is safe for str.format templates."""
        return self.title\
            .replace('{', '{{')\
            .replace('}', '}}')

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # stored as a ':'-joined string, exposed as a list of commit ids
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship(
            'User', lazy='joined',
            #TODO, problem that is somehow :?
            #back_populates='user_pull_requests'
        )

    @declared_attr
    def source_repo(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
            #back_populates=''
        )

    @property
    def source_ref_parts(self):
        """source_ref parsed into a Reference(type, name, commit_id)."""
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id'
        )

    @property
    def target_ref_parts(self):
        """target_ref parsed into a Reference(type, name, commit_id)."""
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        # thin wrapper over the module-level helper
        return unicode_to_reference(raw)

    @staticmethod
    def reference_to_unicode(ref):
        # thin wrapper over the module-level helper
        return reference_to_unicode(ref)

    def get_api_data(self, with_merge_state=True):
        """
        Build the full API payload for this pull request.

        :param with_merge_state: when True, compute the live merge status via
            PullRequestModel (may be expensive); otherwise report
            'not_available' for the mergeable section.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_response, merge_status, msg = \
                PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status,
                'message': safe_str(msg),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref.asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        Return a context manager that holds `pull_request_state` inside the
        block and restores the prior state (or `final_state`) on clean exit::

            # goes from initial state to updating to initial state.
            # initial state can be changed by specifying back_state=
            with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
                pull_request.merge()

        :param pull_request_state: state to hold while inside the context
        :param final_state: state restored on exit instead of the current one

        """

        return _SetState(self, pull_request_state, back_state=final_state)
4412
4411
4413
4412
4414 class PullRequest(Base, _PullRequestBase):
4413 class PullRequest(Base, _PullRequestBase):
4415 __tablename__ = 'pull_requests'
4414 __tablename__ = 'pull_requests'
4416 __table_args__ = (
4415 __table_args__ = (
4417 base_table_args,
4416 base_table_args,
4418 )
4417 )
4419 LATEST_VER = 'latest'
4418 LATEST_VER = 'latest'
4420
4419
4421 pull_request_id = Column(
4420 pull_request_id = Column(
4422 'pull_request_id', Integer(), nullable=False, primary_key=True)
4421 'pull_request_id', Integer(), nullable=False, primary_key=True)
4423
4422
4424 def __repr__(self):
4423 def __repr__(self):
4425 if self.pull_request_id:
4424 if self.pull_request_id:
4426 return f'<DB:PullRequest #{self.pull_request_id}>'
4425 return f'<DB:PullRequest #{self.pull_request_id}>'
4427 else:
4426 else:
4428 return f'<DB:PullRequest at {id(self)!r}>'
4427 return f'<DB:PullRequest at {id(self)!r}>'
4429
4428
4430 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
4429 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
4431 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
4430 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
4432 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
4431 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
4433 versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
4432 versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
4434
4433
    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper from a (possibly versioned) pull
        request, pulling shadow/reviewer fields from the original PR object.

        :param pull_request_obj: PR (or PR version) whose API data feeds the
            wrapper attributes
        :param org_pull_request_obj: the original PR; source of shadow merge
            ref and reviewer data
        :param internal_methods: names resolved on the wrapper itself instead
            of the attrs dict; defaults to ['versions']
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # 'internal' names dispatch to real wrapper methods/properties;
                # everything else is looked up in the attrs dict
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                pr_id = self.attrs.get('pull_request_id')
                return f'<DB:PullRequestDisplay #{pr_id}>'

            def versions(self):
                # delegates to the wrapped PR's versions query (closure)
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                # only set when wrapping a PullRequestVersion, else None
                return getattr(pull_request_obj, 'pull_request_version_id', None)

            @property
            def pull_request_last_version(self):
                return pull_request_obj.pull_request_last_version

        # merge state is skipped here - this object is for display only
        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        # shadow/reviewer fields always come from the original PR object
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)
4505
4504
4506 def is_closed(self):
4505 def is_closed(self):
4507 return self.status == self.STATUS_CLOSED
4506 return self.status == self.STATUS_CLOSED
4508
4507
    def is_state_changing(self):
        """True while the PR is in any transient state (not yet 'created')."""
        return self.pull_request_state != PullRequest.STATE_CREATED
4511
4510
4512 def __json__(self):
4511 def __json__(self):
4513 return {
4512 return {
4514 'revisions': self.revisions,
4513 'revisions': self.revisions,
4515 'versions': self.versions_count
4514 'versions': self.versions_count
4516 }
4515 }
4517
4516
    def calculated_review_status(self):
        """Aggregate review status of this PR, delegated to ChangesetStatusModel."""
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)
4521
4520
    def reviewers_statuses(self, user=None):
        """Per-reviewer status entries for this PR, optionally limited to *user*."""
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self, user=user)
4525
4524
    def get_pull_request_reviewers(self, role=None):
        """
        Return PullRequestReviewers rows attached to this PR.

        :param role: optional role filter (e.g. reviewer/observer); all roles
            when None
        """
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()
4533
4532
    @property
    def reviewers_count(self):
        """Number of entries with the reviewer role attached to this PR."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
        return qry.count()
4540
4539
    @property
    def observers_count(self):
        """Number of entries with the observer role attached to this PR."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
        return qry.count()
4547
4546
    def observers(self):
        """Yield (PullRequestReviewers entry, User) pairs for observer-role entries."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
            .all()

        for entry in qry:
            yield entry, entry.user
4556
4555
4557 @property
4556 @property
4558 def workspace_id(self):
4557 def workspace_id(self):
4559 from rhodecode.model.pull_request import PullRequestModel
4558 from rhodecode.model.pull_request import PullRequestModel
4560 return PullRequestModel()._workspace_id(self)
4559 return PullRequestModel()._workspace_id(self)
4561
4560
4562 def get_shadow_repo(self):
4561 def get_shadow_repo(self):
4563 workspace_id = self.workspace_id
4562 workspace_id = self.workspace_id
4564 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4563 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4565 if os.path.isdir(shadow_repository_path):
4564 if os.path.isdir(shadow_repository_path):
4566 vcs_obj = self.target_repo.scm_instance()
4565 vcs_obj = self.target_repo.scm_instance()
4567 return vcs_obj.get_shadow_instance(shadow_repository_path)
4566 return vcs_obj.get_shadow_instance(shadow_repository_path)
4568
4567
4569 @property
4568 @property
4570 def versions_count(self):
4569 def versions_count(self):
4571 """
4570 """
4572 return number of versions this PR have, e.g a PR that once been
4571 return number of versions this PR have, e.g a PR that once been
4573 updated will have 2 versions
4572 updated will have 2 versions
4574 """
4573 """
4575 return self.versions.count() + 1
4574 return self.versions.count() + 1
4576
4575
4577 @property
4576 @property
4578 def pull_request_last_version(self):
4577 def pull_request_last_version(self):
4579 return self.versions_count
4578 return self.versions_count
4580
4579
4581
4580
class PullRequestVersion(Base, _PullRequestBase):
    """
    Snapshot of a pull request, stored each time the pull request is
    updated.  Most read accessors proxy to the live parent
    :class:`PullRequest` row via the ``pull_request`` relationship.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    # surrogate primary key of the version row
    pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
    # FK to the live pull request this version belongs to
    pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest', back_populates='versions')

    def __repr__(self):
        if self.pull_request_version_id:
            return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
        else:
            # row not flushed to the DB yet -- fall back to object identity
            return f'<DB:PullRequestVersion at {id(self)!r}>'

    @property
    def reviewers(self):
        # reviewers are tracked on the live pull request, not per version
        return self.pull_request.reviewers

    @property
    def versions(self):
        # all versions of the parent pull request
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # True while the parent PR is in a transient (non-created) state
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        # delegated to the live pull request
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        # delegated to the live pull request
        return self.pull_request.reviewers_statuses()

    def observers(self):
        # delegated to the live pull request
        return self.pull_request.observers()
4621
4620
4622
4621
class PullRequestReviewers(Base, BaseModel):
    """
    Association table linking users to a pull request as reviewers or
    observers, together with the review-rule metadata (reasons, mandatory
    flag, voting rule data) attached to each entry.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )
    # possible values of the ``role`` column
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    @hybrid_property
    def reasons(self):
        # normalize a NULL/empty stored value to an empty list
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        # only lists of plain strings are accepted as reasons
        val = val or []
        if any(not isinstance(x, str) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # textual reasons why this reviewer was added; stored as a JSON list,
    # exposed through the ``reasons`` hybrid property above
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)

    user = relationship('User')
    pull_request = relationship('PullRequest', back_populates='reviewers')

    # JSON blob describing the review rule that generated this entry
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    @classmethod
    def get_pull_request_reviewers(cls, pull_request_id, role=None):
        """
        Fetch all reviewer entries of a pull request, optionally narrowed
        to a single role.
        """
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
4693
4692
4694
4693
class Notification(Base, BaseModel):
    """
    In-application notification, delivered to one or more users through
    the :class:`UserNotification` association table.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    # known notification type values stored in the ``type`` column
    TYPE_CHANGESET_COMMENT = 'cs_comment'
    TYPE_MESSAGE = 'message'
    TYPE_MENTION = 'mention'
    TYPE_REGISTRATION = 'registration'
    TYPE_PULL_REQUEST = 'pull_request'
    TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User', back_populates='user_created_notifications')
    notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')

    @property
    def recipients(self):
        # users that received this notification, ordered by user id
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and stage UserNotification links for every
        recipient in the session.  Does not commit.

        :param created_by: User object that authored the notification
        :param recipients: iterable of User objects to deliver to
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        :return: the new (un-committed) Notification instance
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4753
4752
4754
4753
class UserNotification(Base, BaseModel):
    """
    Association row between a User and a Notification, carrying the
    per-user read flag and sent timestamp.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    # composite primary key: one row per (user, notification) pair
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    # per-user read marker
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined", back_populates='notifications')
    notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')

    def mark_as_read(self):
        """Flag this notification as read and stage the change in the session."""
        self.read = True
        Session().add(self)
4773
4772
4774
4773
class UserNotice(Base, BaseModel):
    """
    One-off notice (message or alert) targeted at a single user, with a
    severity level and a read flag.
    """
    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    # NOTE(review): the underlying DB column is named 'gist_id' -- looks
    # like a historic copy/paste; renaming it would need a schema migration
    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
        """
        Create and commit a notice for the given user.

        Silently returns without creating anything when ``notice_level``
        is not one of the known levels, or when an identical unread notice
        already exists and ``allow_duplicate`` is False.
        """
        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)

        new_notice = UserNotice()
        if not allow_duplicate:
            # an unread notice with the exact same body for this user
            # counts as a duplicate
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        Session().add(new_notice)
        Session().commit()
4832
4831
4833
4832
class Gist(Base, BaseModel):
    """
    Code snippet (gist) backed by a small VCS repository on disk, with
    public/private visibility, an expiry timestamp and an ACL level.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    # visibility values stored in ``gist_type``
    GIST_PUBLIC = 'public'
    GIST_PRIVATE = 'private'
    DEFAULT_FILENAME = 'gistfile1.txt'

    # access-control levels stored in ``acl_level``
    ACL_LEVEL_PUBLIC = 'acl_public'
    ACL_LEVEL_PRIVATE = 'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    # short identifier used in gist URLs and as the on-disk repo name
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    # expiry stored as a float timestamp
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User', back_populates='user_gists')

    def __repr__(self):
        return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe rendering in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch a gist by its access id or raise HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            log.debug('WARN: No DB entry with id %s', id_)
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        # returns None when no gist matches the access id
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        """Full URL of this gist, generated by the GistModel."""
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        # root repo path comes from the RhodeCodeUi settings row keyed by '/'
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,  # content is loaded separately, not via API data
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        # gists live under base_path/<gist_access_id> on disk
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4937
4936
4938
4937
class ExternalIdentity(Base, BaseModel):
    """
    Mapping between a local user and an identity at an external
    authentication provider, including the provider tokens.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    # composite primary key: (external_id, local_user_id, provider_name)
    external_id = Column('external_id', Unicode(255), default='', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default='')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
    access_token = Column('access_token', String(1024), default='')
    alt_token = Column('alt_token', String(1024), default='')
    token_secret = Column('token_secret', String(1024), default='')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        # implicit join via the FK equality condition below
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Resolve and return the auth plugin for the given provider id."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
5004
5003
5005
5004
class Integration(Base, BaseModel):
    """
    An integration (e.g. webhook, CI trigger) attached either globally,
    to a repository, or to a repository group.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # when scoped to a repo group: apply only to direct child repos, not recursively
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)

    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined', back_populates='integrations')

    repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')

    @property
    def scope(self):
        """
        Human-readable scope label: repo repr, repo group repr (with
        recursion hint), 'root_repos' or 'global'.
        """
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        # modernized: f-string instead of %-formatting, consistent with
        # __repr__ of the other models in this file
        return f'<Integration({self.integration_type!r}, {self.scope!r})>'
5042
5041
5043
5042
class RepoReviewRuleUser(Base, BaseModel):
    """
    Binds a single user to a repository review rule, with a role
    (reviewer/observer) and a mandatory flag.
    """
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    user = relationship('User', back_populates='user_review_rules')

    def rule_data(self):
        """Return the per-user rule attributes as a plain dict."""
        return {
            'mandatory': self.mandatory,
            'role': self.role,
        }
5065
5064
5066
5065
class RepoReviewRuleUserGroup(Base, BaseModel):
    """
    Binds a user group to a repository review rule, with a role, a
    mandatory flag, and a minimum-vote rule.
    """
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel meaning "every member of the group must vote"
    VOTE_RULE_ALL = -1
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        """Return the per-group rule attributes as a plain dict."""
        return {
            'mandatory': self.mandatory,
            'role': self.role,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        """Human-readable label for the vote rule."""
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            # modernized: f-string instead of str.format(), matching file style
            return f'min. vote {self.vote_rule}'
5099
5098
5100
5099
class RepoReviewRule(Base, BaseModel):
    """
    A per-repository review rule: selects reviewers/observers for a pull
    request based on source/target branch globs and changed-file globs.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', back_populates='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)

    # Legacy fields, just for backward compat
    _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)

    pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
    commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)

    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error when the glob does not translate into a valid regex
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    @hybrid_property
    def forbid_pr_author_to_review(self):
        return self.pr_author == 'forbid_pr_author'

    @hybrid_property
    def include_pr_author_to_review(self):
        return self.pr_author == 'include_pr_author'

    @hybrid_property
    def forbid_commit_author_to_review(self):
        return self.commit_author == 'forbid_commit_author'

    @hybrid_property
    def include_commit_author_to_review(self):
        return self.commit_author == 'include_commit_author'

    @staticmethod
    def _branch_matches_pattern(pattern, branch):
        """
        Return True if `branch` matches `pattern`.

        `pattern` is either '*' (matches everything), a 're:'-prefixed raw
        regex, or a glob translated to an anchored regex via glob2re.
        """
        if pattern == '*':
            return True
        if pattern.startswith('re:'):
            regex_pattern = pattern[3:]
        else:
            regex_pattern = '^' + glob2re(pattern) + '$'
        return bool(re.compile(regex_pattern).search(branch))

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            # refactored: source/target matching shared one duplicated block,
            # now both use _branch_matches_pattern (behavior unchanged)
            branch_matches = (
                self._branch_matches_pattern(self.source_branch_pattern, source_branch)
                and self._branch_matches_pattern(self.target_branch_pattern, target_branch))

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                # NOTE: file pattern is deliberately NOT anchored with ^...$,
                # preserving the original (substring-search) semantics
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for file_data in files_changed:
                filename = file_data.get('filename')

                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # BUGFIX: previously tested `rule_user.user not in users`,
                # comparing a User object against username keys, so the
                # dedup guard never fired; first matched entry now wins,
                # consistent with the user-group handling below.
                if rule_user.user.username not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """
        Return the user-group rule entries of this rule that `user_id`
        is a member of.
        """
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
5285
5284
5286
5285
class ScheduleEntry(Base, BaseModel):
    """
    Persisted definition of a periodic (celery-beat style) task schedule.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # accepted values for schedule_type
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # BUGFIX: previous message had the arguments swapped (showed the
            # rejected value as the allowed set, and the *old* type as the
            # rejected value) plus an "on of" typo
            raise ValueError(
                f'Value must be one of `{self.schedule_types}` and got `{val}`')

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Compute a stable sha1 uid from the task's dot notation, args
        and kwargs.
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE: sorted(dot_notation) sorts the *characters* of the dotted-path
        # string; preserved as-is because changing it would change every
        # stored task_uid.
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return sha1(safe_bytes(val))

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        """Return the entry with the given unique schedule_name, or None."""
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        """Return the entry with the given primary key, or None."""
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        """Task positional args as a list; empty list on decode problems."""
        try:
            return list(self.task_args or [])
        except ValueError:
            return []

    @property
    def kwargs(self):
        """Task keyword args as a dict; empty dict on decode problems."""
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return {}

    def _as_raw(self, val, indent=None):
        """Serialize a (possibly mutation-coerced) value to a JSON string."""
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            if indent:
                # BUGFIX: the formatted result was computed and discarded,
                # silently returning the unserialized value when indent was set
                val = ext_json.formatted_json(val)
            else:
                val = ext_json.json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    def args_raw(self, indent=None):
        return self._as_raw(self.task_args, indent)

    def kwargs_raw(self, indent=None):
        return self._as_raw(self.task_kwargs, indent)

    def __repr__(self):
        return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
5405
5404
5406
5405
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    """Keep task_uid in sync with task definition on every UPDATE flush."""
    target.task_uid = ScheduleEntry.get_uid(target)
5410
5409
5411
5410
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    """Compute task_uid from the task definition on INSERT flush."""
    target.task_uid = ScheduleEntry.get_uid(target)
5415
5414
5416
5415
class _BaseBranchPerms(BaseModel):
    """
    Mixin with shared glob-pattern handling for branch permission models:
    pattern storage, validation, hashing, and branch matching.
    """

    @classmethod
    def compute_hash(cls, value):
        """Return the sha1 hash used to index a branch pattern."""
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # empty/NULL pattern means "match everything"
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises re.error when the glob does not translate into a valid regex
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # keep the hash column in sync whenever the pattern changes
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check whether the given branch name matches this entry's pattern.

        :param branch: branch name for the commit; empty/None matches
        """
        if not branch:
            return True
        pattern_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
        return bool(pattern_regex.search(branch))
5455
5454
5456
5455
5457 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5456 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5458 __tablename__ = 'user_to_repo_branch_permissions'
5457 __tablename__ = 'user_to_repo_branch_permissions'
5459 __table_args__ = (
5458 __table_args__ = (
5460 base_table_args
5459 base_table_args
5461 )
5460 )
5462
5461
5463 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5462 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5464
5463
5465 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5464 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5466 repo = relationship('Repository', back_populates='user_branch_perms')
5465 repo = relationship('Repository', back_populates='user_branch_perms')
5467
5466
5468 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5467 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5469 permission = relationship('Permission')
5468 permission = relationship('Permission')
5470
5469
5471 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5470 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5472 user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')
5471 user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')
5473
5472
5474 rule_order = Column('rule_order', Integer(), nullable=False)
5473 rule_order = Column('rule_order', Integer(), nullable=False)
5475 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5474 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5476 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5475 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5477
5476
5478 def __repr__(self):
5477 def __repr__(self):
5479 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5478 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5480
5479
5481
5480
5482 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5481 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5483 __tablename__ = 'user_group_to_repo_branch_permissions'
5482 __tablename__ = 'user_group_to_repo_branch_permissions'
5484 __table_args__ = (
5483 __table_args__ = (
5485 base_table_args
5484 base_table_args
5486 )
5485 )
5487
5486
5488 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5487 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5489
5488
5490 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5489 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5491 repo = relationship('Repository', back_populates='user_group_branch_perms')
5490 repo = relationship('Repository', back_populates='user_group_branch_perms')
5492
5491
5493 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5492 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5494 permission = relationship('Permission')
5493 permission = relationship('Permission')
5495
5494
5496 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5495 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5497 user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')
5496 user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')
5498
5497
5499 rule_order = Column('rule_order', Integer(), nullable=False)
5498 rule_order = Column('rule_order', Integer(), nullable=False)
5500 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5499 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5501 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5500 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5502
5501
5503 def __repr__(self):
5502 def __repr__(self):
5504 return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5503 return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5505
5504
5506
5505
5507 class UserBookmark(Base, BaseModel):
5506 class UserBookmark(Base, BaseModel):
5508 __tablename__ = 'user_bookmarks'
5507 __tablename__ = 'user_bookmarks'
5509 __table_args__ = (
5508 __table_args__ = (
5510 UniqueConstraint('user_id', 'bookmark_repo_id'),
5509 UniqueConstraint('user_id', 'bookmark_repo_id'),
5511 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5510 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5512 UniqueConstraint('user_id', 'bookmark_position'),
5511 UniqueConstraint('user_id', 'bookmark_position'),
5513 base_table_args
5512 base_table_args
5514 )
5513 )
5515
5514
5516 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5515 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5517 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5516 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5518 position = Column("bookmark_position", Integer(), nullable=False)
5517 position = Column("bookmark_position", Integer(), nullable=False)
5519 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5518 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5520 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5519 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5521 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5520 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5522
5521
5523 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5522 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5524 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5523 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5525
5524
5526 user = relationship("User")
5525 user = relationship("User")
5527
5526
5528 repository = relationship("Repository")
5527 repository = relationship("Repository")
5529 repository_group = relationship("RepoGroup")
5528 repository_group = relationship("RepoGroup")
5530
5529
5531 @classmethod
5530 @classmethod
5532 def get_by_position_for_user(cls, position, user_id):
5531 def get_by_position_for_user(cls, position, user_id):
5533 return cls.query() \
5532 return cls.query() \
5534 .filter(UserBookmark.user_id == user_id) \
5533 .filter(UserBookmark.user_id == user_id) \
5535 .filter(UserBookmark.position == position).scalar()
5534 .filter(UserBookmark.position == position).scalar()
5536
5535
5537 @classmethod
5536 @classmethod
5538 def get_bookmarks_for_user(cls, user_id, cache=True):
5537 def get_bookmarks_for_user(cls, user_id, cache=True):
5539 bookmarks = cls.query() \
5538 bookmarks = cls.query() \
5540 .filter(UserBookmark.user_id == user_id) \
5539 .filter(UserBookmark.user_id == user_id) \
5541 .options(joinedload(UserBookmark.repository)) \
5540 .options(joinedload(UserBookmark.repository)) \
5542 .options(joinedload(UserBookmark.repository_group)) \
5541 .options(joinedload(UserBookmark.repository_group)) \
5543 .order_by(UserBookmark.position.asc())
5542 .order_by(UserBookmark.position.asc())
5544
5543
5545 if cache:
5544 if cache:
5546 bookmarks = bookmarks.options(
5545 bookmarks = bookmarks.options(
5547 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5546 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5548 )
5547 )
5549
5548
5550 return bookmarks.all()
5549 return bookmarks.all()
5551
5550
5552 def __repr__(self):
5551 def __repr__(self):
5553 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5552 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5554
5553
5555
5554
5556 class FileStore(Base, BaseModel):
5555 class FileStore(Base, BaseModel):
5557 __tablename__ = 'file_store'
5556 __tablename__ = 'file_store'
5558 __table_args__ = (
5557 __table_args__ = (
5559 base_table_args
5558 base_table_args
5560 )
5559 )
5561
5560
5562 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5561 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5563 file_uid = Column('file_uid', String(1024), nullable=False)
5562 file_uid = Column('file_uid', String(1024), nullable=False)
5564 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5563 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5565 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5564 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5566 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5565 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5567
5566
5568 # sha256 hash
5567 # sha256 hash
5569 file_hash = Column('file_hash', String(512), nullable=False)
5568 file_hash = Column('file_hash', String(512), nullable=False)
5570 file_size = Column('file_size', BigInteger(), nullable=False)
5569 file_size = Column('file_size', BigInteger(), nullable=False)
5571
5570
5572 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5571 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5573 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5572 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5574 accessed_count = Column('accessed_count', Integer(), default=0)
5573 accessed_count = Column('accessed_count', Integer(), default=0)
5575
5574
5576 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5575 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5577
5576
5578 # if repo/repo_group reference is set, check for permissions
5577 # if repo/repo_group reference is set, check for permissions
5579 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5578 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5580
5579
5581 # hidden defines an attachment that should be hidden from showing in artifact listing
5580 # hidden defines an attachment that should be hidden from showing in artifact listing
5582 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5581 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5583
5582
5584 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5583 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5585 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')
5584 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')
5586
5585
5587 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5586 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5588
5587
5589 # scope limited to user, which requester have access to
5588 # scope limited to user, which requester have access to
5590 scope_user_id = Column(
5589 scope_user_id = Column(
5591 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5590 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5592 nullable=True, unique=None, default=None)
5591 nullable=True, unique=None, default=None)
5593 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')
5592 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')
5594
5593
5595 # scope limited to user group, which requester have access to
5594 # scope limited to user group, which requester have access to
5596 scope_user_group_id = Column(
5595 scope_user_group_id = Column(
5597 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5596 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5598 nullable=True, unique=None, default=None)
5597 nullable=True, unique=None, default=None)
5599 user_group = relationship('UserGroup', lazy='joined')
5598 user_group = relationship('UserGroup', lazy='joined')
5600
5599
5601 # scope limited to repo, which requester have access to
5600 # scope limited to repo, which requester have access to
5602 scope_repo_id = Column(
5601 scope_repo_id = Column(
5603 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5602 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5604 nullable=True, unique=None, default=None)
5603 nullable=True, unique=None, default=None)
5605 repo = relationship('Repository', lazy='joined')
5604 repo = relationship('Repository', lazy='joined')
5606
5605
5607 # scope limited to repo group, which requester have access to
5606 # scope limited to repo group, which requester have access to
5608 scope_repo_group_id = Column(
5607 scope_repo_group_id = Column(
5609 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5608 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5610 nullable=True, unique=None, default=None)
5609 nullable=True, unique=None, default=None)
5611 repo_group = relationship('RepoGroup', lazy='joined')
5610 repo_group = relationship('RepoGroup', lazy='joined')
5612
5611
5613 @classmethod
5612 @classmethod
5614 def get_by_store_uid(cls, file_store_uid, safe=False):
5613 def get_by_store_uid(cls, file_store_uid, safe=False):
5615 if safe:
5614 if safe:
5616 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5615 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5617 else:
5616 else:
5618 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5617 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5619
5618
5620 @classmethod
5619 @classmethod
5621 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5620 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5622 file_description='', enabled=True, hidden=False, check_acl=True,
5621 file_description='', enabled=True, hidden=False, check_acl=True,
5623 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5622 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5624
5623
5625 store_entry = FileStore()
5624 store_entry = FileStore()
5626 store_entry.file_uid = file_uid
5625 store_entry.file_uid = file_uid
5627 store_entry.file_display_name = file_display_name
5626 store_entry.file_display_name = file_display_name
5628 store_entry.file_org_name = filename
5627 store_entry.file_org_name = filename
5629 store_entry.file_size = file_size
5628 store_entry.file_size = file_size
5630 store_entry.file_hash = file_hash
5629 store_entry.file_hash = file_hash
5631 store_entry.file_description = file_description
5630 store_entry.file_description = file_description
5632
5631
5633 store_entry.check_acl = check_acl
5632 store_entry.check_acl = check_acl
5634 store_entry.enabled = enabled
5633 store_entry.enabled = enabled
5635 store_entry.hidden = hidden
5634 store_entry.hidden = hidden
5636
5635
5637 store_entry.user_id = user_id
5636 store_entry.user_id = user_id
5638 store_entry.scope_user_id = scope_user_id
5637 store_entry.scope_user_id = scope_user_id
5639 store_entry.scope_repo_id = scope_repo_id
5638 store_entry.scope_repo_id = scope_repo_id
5640 store_entry.scope_repo_group_id = scope_repo_group_id
5639 store_entry.scope_repo_group_id = scope_repo_group_id
5641
5640
5642 return store_entry
5641 return store_entry
5643
5642
5644 @classmethod
5643 @classmethod
5645 def store_metadata(cls, file_store_id, args, commit=True):
5644 def store_metadata(cls, file_store_id, args, commit=True):
5646 file_store = FileStore.get(file_store_id)
5645 file_store = FileStore.get(file_store_id)
5647 if file_store is None:
5646 if file_store is None:
5648 return
5647 return
5649
5648
5650 for section, key, value, value_type in args:
5649 for section, key, value, value_type in args:
5651 has_key = FileStoreMetadata().query() \
5650 has_key = FileStoreMetadata().query() \
5652 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5651 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5653 .filter(FileStoreMetadata.file_store_meta_section == section) \
5652 .filter(FileStoreMetadata.file_store_meta_section == section) \
5654 .filter(FileStoreMetadata.file_store_meta_key == key) \
5653 .filter(FileStoreMetadata.file_store_meta_key == key) \
5655 .scalar()
5654 .scalar()
5656 if has_key:
5655 if has_key:
5657 msg = 'key `{}` already defined under section `{}` for this file.'\
5656 msg = 'key `{}` already defined under section `{}` for this file.'\
5658 .format(key, section)
5657 .format(key, section)
5659 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5658 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5660
5659
5661 # NOTE(marcink): raises ArtifactMetadataBadValueType
5660 # NOTE(marcink): raises ArtifactMetadataBadValueType
5662 FileStoreMetadata.valid_value_type(value_type)
5661 FileStoreMetadata.valid_value_type(value_type)
5663
5662
5664 meta_entry = FileStoreMetadata()
5663 meta_entry = FileStoreMetadata()
5665 meta_entry.file_store = file_store
5664 meta_entry.file_store = file_store
5666 meta_entry.file_store_meta_section = section
5665 meta_entry.file_store_meta_section = section
5667 meta_entry.file_store_meta_key = key
5666 meta_entry.file_store_meta_key = key
5668 meta_entry.file_store_meta_value_type = value_type
5667 meta_entry.file_store_meta_value_type = value_type
5669 meta_entry.file_store_meta_value = value
5668 meta_entry.file_store_meta_value = value
5670
5669
5671 Session().add(meta_entry)
5670 Session().add(meta_entry)
5672
5671
5673 try:
5672 try:
5674 if commit:
5673 if commit:
5675 Session().commit()
5674 Session().commit()
5676 except IntegrityError:
5675 except IntegrityError:
5677 Session().rollback()
5676 Session().rollback()
5678 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5677 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5679
5678
5680 @classmethod
5679 @classmethod
5681 def bump_access_counter(cls, file_uid, commit=True):
5680 def bump_access_counter(cls, file_uid, commit=True):
5682 FileStore().query()\
5681 FileStore().query()\
5683 .filter(FileStore.file_uid == file_uid)\
5682 .filter(FileStore.file_uid == file_uid)\
5684 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5683 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5685 FileStore.accessed_on: datetime.datetime.now()})
5684 FileStore.accessed_on: datetime.datetime.now()})
5686 if commit:
5685 if commit:
5687 Session().commit()
5686 Session().commit()
5688
5687
5689 def __json__(self):
5688 def __json__(self):
5690 data = {
5689 data = {
5691 'filename': self.file_display_name,
5690 'filename': self.file_display_name,
5692 'filename_org': self.file_org_name,
5691 'filename_org': self.file_org_name,
5693 'file_uid': self.file_uid,
5692 'file_uid': self.file_uid,
5694 'description': self.file_description,
5693 'description': self.file_description,
5695 'hidden': self.hidden,
5694 'hidden': self.hidden,
5696 'size': self.file_size,
5695 'size': self.file_size,
5697 'created_on': self.created_on,
5696 'created_on': self.created_on,
5698 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5697 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5699 'downloaded_times': self.accessed_count,
5698 'downloaded_times': self.accessed_count,
5700 'sha256': self.file_hash,
5699 'sha256': self.file_hash,
5701 'metadata': self.file_metadata,
5700 'metadata': self.file_metadata,
5702 }
5701 }
5703
5702
5704 return data
5703 return data
5705
5704
5706 def __repr__(self):
5705 def __repr__(self):
5707 return f'<FileStore({self.file_store_id})>'
5706 return f'<FileStore({self.file_store_id})>'
5708
5707
5709
5708
5710 class FileStoreMetadata(Base, BaseModel):
5709 class FileStoreMetadata(Base, BaseModel):
5711 __tablename__ = 'file_store_metadata'
5710 __tablename__ = 'file_store_metadata'
5712 __table_args__ = (
5711 __table_args__ = (
5713 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5712 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5714 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5713 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5715 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5714 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5716 base_table_args
5715 base_table_args
5717 )
5716 )
5718 SETTINGS_TYPES = {
5717 SETTINGS_TYPES = {
5719 'str': safe_str,
5718 'str': safe_str,
5720 'int': safe_int,
5719 'int': safe_int,
5721 'unicode': safe_str,
5720 'unicode': safe_str,
5722 'bool': str2bool,
5721 'bool': str2bool,
5723 'list': functools.partial(aslist, sep=',')
5722 'list': functools.partial(aslist, sep=',')
5724 }
5723 }
5725
5724
5726 file_store_meta_id = Column(
5725 file_store_meta_id = Column(
5727 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5726 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5728 primary_key=True)
5727 primary_key=True)
5729 _file_store_meta_section = Column(
5728 _file_store_meta_section = Column(
5730 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5729 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5731 nullable=True, unique=None, default=None)
5730 nullable=True, unique=None, default=None)
5732 _file_store_meta_section_hash = Column(
5731 _file_store_meta_section_hash = Column(
5733 "file_store_meta_section_hash", String(255),
5732 "file_store_meta_section_hash", String(255),
5734 nullable=True, unique=None, default=None)
5733 nullable=True, unique=None, default=None)
5735 _file_store_meta_key = Column(
5734 _file_store_meta_key = Column(
5736 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5735 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5737 nullable=True, unique=None, default=None)
5736 nullable=True, unique=None, default=None)
5738 _file_store_meta_key_hash = Column(
5737 _file_store_meta_key_hash = Column(
5739 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5738 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5740 _file_store_meta_value = Column(
5739 _file_store_meta_value = Column(
5741 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5740 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5742 nullable=True, unique=None, default=None)
5741 nullable=True, unique=None, default=None)
5743 _file_store_meta_value_type = Column(
5742 _file_store_meta_value_type = Column(
5744 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5743 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5745 default='unicode')
5744 default='unicode')
5746
5745
5747 file_store_id = Column(
5746 file_store_id = Column(
5748 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5747 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5749 nullable=True, unique=None, default=None)
5748 nullable=True, unique=None, default=None)
5750
5749
5751 file_store = relationship('FileStore', lazy='joined', viewonly=True)
5750 file_store = relationship('FileStore', lazy='joined', viewonly=True)
5752
5751
5753 @classmethod
5752 @classmethod
5754 def valid_value_type(cls, value):
5753 def valid_value_type(cls, value):
5755 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5754 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5756 raise ArtifactMetadataBadValueType(
5755 raise ArtifactMetadataBadValueType(
5757 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5756 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5758
5757
5759 @hybrid_property
5758 @hybrid_property
5760 def file_store_meta_section(self):
5759 def file_store_meta_section(self):
5761 return self._file_store_meta_section
5760 return self._file_store_meta_section
5762
5761
5763 @file_store_meta_section.setter
5762 @file_store_meta_section.setter
5764 def file_store_meta_section(self, value):
5763 def file_store_meta_section(self, value):
5765 self._file_store_meta_section = value
5764 self._file_store_meta_section = value
5766 self._file_store_meta_section_hash = _hash_key(value)
5765 self._file_store_meta_section_hash = _hash_key(value)
5767
5766
5768 @hybrid_property
5767 @hybrid_property
5769 def file_store_meta_key(self):
5768 def file_store_meta_key(self):
5770 return self._file_store_meta_key
5769 return self._file_store_meta_key
5771
5770
5772 @file_store_meta_key.setter
5771 @file_store_meta_key.setter
5773 def file_store_meta_key(self, value):
5772 def file_store_meta_key(self, value):
5774 self._file_store_meta_key = value
5773 self._file_store_meta_key = value
5775 self._file_store_meta_key_hash = _hash_key(value)
5774 self._file_store_meta_key_hash = _hash_key(value)
5776
5775
5777 @hybrid_property
5776 @hybrid_property
5778 def file_store_meta_value(self):
5777 def file_store_meta_value(self):
5779 val = self._file_store_meta_value
5778 val = self._file_store_meta_value
5780
5779
5781 if self._file_store_meta_value_type:
5780 if self._file_store_meta_value_type:
5782 # e.g unicode.encrypted == unicode
5781 # e.g unicode.encrypted == unicode
5783 _type = self._file_store_meta_value_type.split('.')[0]
5782 _type = self._file_store_meta_value_type.split('.')[0]
5784 # decode the encrypted value if it's encrypted field type
5783 # decode the encrypted value if it's encrypted field type
5785 if '.encrypted' in self._file_store_meta_value_type:
5784 if '.encrypted' in self._file_store_meta_value_type:
5786 cipher = EncryptedTextValue()
5785 cipher = EncryptedTextValue()
5787 val = safe_str(cipher.process_result_value(val, None))
5786 val = safe_str(cipher.process_result_value(val, None))
5788 # do final type conversion
5787 # do final type conversion
5789 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5788 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5790 val = converter(val)
5789 val = converter(val)
5791
5790
5792 return val
5791 return val
5793
5792
5794 @file_store_meta_value.setter
5793 @file_store_meta_value.setter
5795 def file_store_meta_value(self, val):
5794 def file_store_meta_value(self, val):
5796 val = safe_str(val)
5795 val = safe_str(val)
5797 # encode the encrypted value
5796 # encode the encrypted value
5798 if '.encrypted' in self.file_store_meta_value_type:
5797 if '.encrypted' in self.file_store_meta_value_type:
5799 cipher = EncryptedTextValue()
5798 cipher = EncryptedTextValue()
5800 val = safe_str(cipher.process_bind_param(val, None))
5799 val = safe_str(cipher.process_bind_param(val, None))
5801 self._file_store_meta_value = val
5800 self._file_store_meta_value = val
5802
5801
5803 @hybrid_property
5802 @hybrid_property
5804 def file_store_meta_value_type(self):
5803 def file_store_meta_value_type(self):
5805 return self._file_store_meta_value_type
5804 return self._file_store_meta_value_type
5806
5805
5807 @file_store_meta_value_type.setter
5806 @file_store_meta_value_type.setter
5808 def file_store_meta_value_type(self, val):
5807 def file_store_meta_value_type(self, val):
5809 # e.g unicode.encrypted
5808 # e.g unicode.encrypted
5810 self.valid_value_type(val)
5809 self.valid_value_type(val)
5811 self._file_store_meta_value_type = val
5810 self._file_store_meta_value_type = val
5812
5811
5813 def __json__(self):
5812 def __json__(self):
5814 data = {
5813 data = {
5815 'artifact': self.file_store.file_uid,
5814 'artifact': self.file_store.file_uid,
5816 'section': self.file_store_meta_section,
5815 'section': self.file_store_meta_section,
5817 'key': self.file_store_meta_key,
5816 'key': self.file_store_meta_key,
5818 'value': self.file_store_meta_value,
5817 'value': self.file_store_meta_value,
5819 }
5818 }
5820
5819
5821 return data
5820 return data
5822
5821
5823 def __repr__(self):
5822 def __repr__(self):
5824 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.file_store_meta_section,
5823 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.file_store_meta_section,
5825 self.file_store_meta_key, self.file_store_meta_value)
5824 self.file_store_meta_key, self.file_store_meta_value)
5826
5825
5827
5826
5828 class DbMigrateVersion(Base, BaseModel):
5827 class DbMigrateVersion(Base, BaseModel):
5829 __tablename__ = 'db_migrate_version'
5828 __tablename__ = 'db_migrate_version'
5830 __table_args__ = (
5829 __table_args__ = (
5831 base_table_args,
5830 base_table_args,
5832 )
5831 )
5833
5832
5834 repository_id = Column('repository_id', String(250), primary_key=True)
5833 repository_id = Column('repository_id', String(250), primary_key=True)
5835 repository_path = Column('repository_path', Text)
5834 repository_path = Column('repository_path', Text)
5836 version = Column('version', Integer)
5835 version = Column('version', Integer)
5837
5836
5838 @classmethod
5837 @classmethod
5839 def set_version(cls, version):
5838 def set_version(cls, version):
5840 """
5839 """
5841 Helper for forcing a different version, usually for debugging purposes via ishell.
5840 Helper for forcing a different version, usually for debugging purposes via ishell.
5842 """
5841 """
5843 ver = DbMigrateVersion.query().first()
5842 ver = DbMigrateVersion.query().first()
5844 ver.version = version
5843 ver.version = version
5845 Session().commit()
5844 Session().commit()
5846
5845
5847
5846
5848 class DbSession(Base, BaseModel):
5847 class DbSession(Base, BaseModel):
5849 __tablename__ = 'db_session'
5848 __tablename__ = 'db_session'
5850 __table_args__ = (
5849 __table_args__ = (
5851 base_table_args,
5850 base_table_args,
5852 )
5851 )
5853
5852
5854 def __repr__(self):
5853 def __repr__(self):
5855 return f'<DB:DbSession({self.id})>'
5854 return f'<DB:DbSession({self.id})>'
5856
5855
5857 id = Column('id', Integer())
5856 id = Column('id', Integer())
5858 namespace = Column('namespace', String(255), primary_key=True)
5857 namespace = Column('namespace', String(255), primary_key=True)
5859 accessed = Column('accessed', DateTime, nullable=False)
5858 accessed = Column('accessed', DateTime, nullable=False)
5860 created = Column('created', DateTime, nullable=False)
5859 created = Column('created', DateTime, nullable=False)
5861 data = Column('data', PickleType, nullable=False)
5860 data = Column('data', PickleType, nullable=False)
@@ -1,640 +1,639 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 """
19 """
21 this is forms validation classes
20 this is forms validation classes
22 http://formencode.org/module-formencode.validators.html
21 http://formencode.org/module-formencode.validators.html
23 for list off all availible validators
22 for list off all availible validators
24
23
25 we can create our own validators
24 we can create our own validators
26
25
27 The table below outlines the options which can be used in a schema in addition to the validators themselves
26 The table below outlines the options which can be used in a schema in addition to the validators themselves
28 pre_validators [] These validators will be applied before the schema
27 pre_validators [] These validators will be applied before the schema
29 chained_validators [] These validators will be applied after the schema
28 chained_validators [] These validators will be applied after the schema
30 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
29 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
31 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
30 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
32 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value.
31 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value.
33 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
32 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
34
33
35
34
36 <name> = formencode.validators.<name of validator>
35 <name> = formencode.validators.<name of validator>
37 <name> must equal form name
36 <name> must equal form name
38 list=[1,2,3,4,5]
37 list=[1,2,3,4,5]
39 for SELECT use formencode.All(OneOf(list), Int())
38 for SELECT use formencode.All(OneOf(list), Int())
40
39
41 """
40 """
42
41
43 import deform
42 import deform
44 import logging
43 import logging
45 import formencode
44 import formencode
46
45
47 from pkg_resources import resource_filename
46 from pkg_resources import resource_filename
48 from formencode import All, Pipe
47 from formencode import All, Pipe
49
48
50 from pyramid.threadlocal import get_current_request
49 from pyramid.threadlocal import get_current_request
51
50
52 from rhodecode import BACKENDS
51 from rhodecode import BACKENDS
53 from rhodecode.lib import helpers
52 from rhodecode.lib import helpers
54 from rhodecode.model import validators as v
53 from rhodecode.model import validators as v
55
54
56 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
57
56
58
57
59 deform_templates = resource_filename('deform', 'templates')
58 deform_templates = resource_filename('deform', 'templates')
60 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
59 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
61 search_path = (rhodecode_templates, deform_templates)
60 search_path = (rhodecode_templates, deform_templates)
62
61
63
62
64 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
63 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
65 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
64 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
66 def __call__(self, template_name, **kw):
65 def __call__(self, template_name, **kw):
67 kw['h'] = helpers
66 kw['h'] = helpers
68 kw['request'] = get_current_request()
67 kw['request'] = get_current_request()
69 return self.load(template_name)(**kw)
68 return self.load(template_name)(**kw)
70
69
71
70
72 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
71 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
73 deform.Form.set_default_renderer(form_renderer)
72 deform.Form.set_default_renderer(form_renderer)
74
73
75
74
76 def LoginForm(localizer):
75 def LoginForm(localizer):
77 _ = localizer
76 _ = localizer
78
77
79 class _LoginForm(formencode.Schema):
78 class _LoginForm(formencode.Schema):
80 allow_extra_fields = True
79 allow_extra_fields = True
81 filter_extra_fields = True
80 filter_extra_fields = True
82 username = v.UnicodeString(
81 username = v.UnicodeString(
83 strip=True,
82 strip=True,
84 min=1,
83 min=1,
85 not_empty=True,
84 not_empty=True,
86 messages={
85 messages={
87 'empty': _(u'Please enter a login'),
86 'empty': _('Please enter a login'),
88 'tooShort': _(u'Enter a value %(min)i characters long or more')
87 'tooShort': _('Enter a value %(min)i characters long or more')
89 }
88 }
90 )
89 )
91
90
92 password = v.UnicodeString(
91 password = v.UnicodeString(
93 strip=False,
92 strip=False,
94 min=3,
93 min=3,
95 max=72,
94 max=72,
96 not_empty=True,
95 not_empty=True,
97 messages={
96 messages={
98 'empty': _(u'Please enter a password'),
97 'empty': _('Please enter a password'),
99 'tooShort': _(u'Enter %(min)i characters or more')}
98 'tooShort': _('Enter %(min)i characters or more')}
100 )
99 )
101
100
102 remember = v.StringBoolean(if_missing=False)
101 remember = v.StringBoolean(if_missing=False)
103
102
104 chained_validators = [v.ValidAuth(localizer)]
103 chained_validators = [v.ValidAuth(localizer)]
105 return _LoginForm
104 return _LoginForm
106
105
107
106
108 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
107 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
109 old_data = old_data or {}
108 old_data = old_data or {}
110 available_languages = available_languages or []
109 available_languages = available_languages or []
111 _ = localizer
110 _ = localizer
112
111
113 class _UserForm(formencode.Schema):
112 class _UserForm(formencode.Schema):
114 allow_extra_fields = True
113 allow_extra_fields = True
115 filter_extra_fields = True
114 filter_extra_fields = True
116 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
115 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
117 v.ValidUsername(localizer, edit, old_data))
116 v.ValidUsername(localizer, edit, old_data))
118 if edit:
117 if edit:
119 new_password = All(
118 new_password = All(
120 v.ValidPassword(localizer),
119 v.ValidPassword(localizer),
121 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
120 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
122 )
121 )
123 password_confirmation = All(
122 password_confirmation = All(
124 v.ValidPassword(localizer),
123 v.ValidPassword(localizer),
125 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
124 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
126 )
125 )
127 admin = v.StringBoolean(if_missing=False)
126 admin = v.StringBoolean(if_missing=False)
128 else:
127 else:
129 password = All(
128 password = All(
130 v.ValidPassword(localizer),
129 v.ValidPassword(localizer),
131 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
130 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
132 )
131 )
133 password_confirmation = All(
132 password_confirmation = All(
134 v.ValidPassword(localizer),
133 v.ValidPassword(localizer),
135 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
134 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
136 )
135 )
137
136
138 password_change = v.StringBoolean(if_missing=False)
137 password_change = v.StringBoolean(if_missing=False)
139 create_repo_group = v.StringBoolean(if_missing=False)
138 create_repo_group = v.StringBoolean(if_missing=False)
140
139
141 active = v.StringBoolean(if_missing=False)
140 active = v.StringBoolean(if_missing=False)
142 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
141 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
143 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
142 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
143 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
145 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
144 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
146 if_missing='')
145 if_missing='')
147 extern_name = v.UnicodeString(strip=True)
146 extern_name = v.UnicodeString(strip=True)
148 extern_type = v.UnicodeString(strip=True)
147 extern_type = v.UnicodeString(strip=True)
149 language = v.OneOf(available_languages, hideList=False,
148 language = v.OneOf(available_languages, hideList=False,
150 testValueList=True, if_missing=None)
149 testValueList=True, if_missing=None)
151 chained_validators = [v.ValidPasswordsMatch(localizer)]
150 chained_validators = [v.ValidPasswordsMatch(localizer)]
152 return _UserForm
151 return _UserForm
153
152
154
153
155 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
156 old_data = old_data or {}
155 old_data = old_data or {}
157 _ = localizer
156 _ = localizer
158
157
159 class _UserGroupForm(formencode.Schema):
158 class _UserGroupForm(formencode.Schema):
160 allow_extra_fields = True
159 allow_extra_fields = True
161 filter_extra_fields = True
160 filter_extra_fields = True
162
161
163 users_group_name = All(
162 users_group_name = All(
164 v.UnicodeString(strip=True, min=1, not_empty=True),
163 v.UnicodeString(strip=True, min=1, not_empty=True),
165 v.ValidUserGroup(localizer, edit, old_data)
164 v.ValidUserGroup(localizer, edit, old_data)
166 )
165 )
167 user_group_description = v.UnicodeString(strip=True, min=1,
166 user_group_description = v.UnicodeString(strip=True, min=1,
168 not_empty=False)
167 not_empty=False)
169
168
170 users_group_active = v.StringBoolean(if_missing=False)
169 users_group_active = v.StringBoolean(if_missing=False)
171
170
172 if edit:
171 if edit:
173 # this is user group owner
172 # this is user group owner
174 user = All(
173 user = All(
175 v.UnicodeString(not_empty=True),
174 v.UnicodeString(not_empty=True),
176 v.ValidRepoUser(localizer, allow_disabled))
175 v.ValidRepoUser(localizer, allow_disabled))
177 return _UserGroupForm
176 return _UserGroupForm
178
177
179
178
180 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
181 can_create_in_root=False, allow_disabled=False):
180 can_create_in_root=False, allow_disabled=False):
182 _ = localizer
181 _ = localizer
183 old_data = old_data or {}
182 old_data = old_data or {}
184 available_groups = available_groups or []
183 available_groups = available_groups or []
185
184
186 class _RepoGroupForm(formencode.Schema):
185 class _RepoGroupForm(formencode.Schema):
187 allow_extra_fields = True
186 allow_extra_fields = True
188 filter_extra_fields = False
187 filter_extra_fields = False
189
188
190 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
191 v.SlugifyName(localizer),)
190 v.SlugifyName(localizer),)
192 group_description = v.UnicodeString(strip=True, min=1,
191 group_description = v.UnicodeString(strip=True, min=1,
193 not_empty=False)
192 not_empty=False)
194 group_copy_permissions = v.StringBoolean(if_missing=False)
193 group_copy_permissions = v.StringBoolean(if_missing=False)
195
194
196 group_parent_id = v.OneOf(available_groups, hideList=False,
195 group_parent_id = v.OneOf(available_groups, hideList=False,
197 testValueList=True, not_empty=True)
196 testValueList=True, not_empty=True)
198 enable_locking = v.StringBoolean(if_missing=False)
197 enable_locking = v.StringBoolean(if_missing=False)
199 chained_validators = [
198 chained_validators = [
200 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
201
200
202 if edit:
201 if edit:
203 # this is repo group owner
202 # this is repo group owner
204 user = All(
203 user = All(
205 v.UnicodeString(not_empty=True),
204 v.UnicodeString(not_empty=True),
206 v.ValidRepoUser(localizer, allow_disabled))
205 v.ValidRepoUser(localizer, allow_disabled))
207 return _RepoGroupForm
206 return _RepoGroupForm
208
207
209
208
210 def RegisterForm(localizer, edit=False, old_data=None):
209 def RegisterForm(localizer, edit=False, old_data=None):
211 _ = localizer
210 _ = localizer
212 old_data = old_data or {}
211 old_data = old_data or {}
213
212
214 class _RegisterForm(formencode.Schema):
213 class _RegisterForm(formencode.Schema):
215 allow_extra_fields = True
214 allow_extra_fields = True
216 filter_extra_fields = True
215 filter_extra_fields = True
217 username = All(
216 username = All(
218 v.ValidUsername(localizer, edit, old_data),
217 v.ValidUsername(localizer, edit, old_data),
219 v.UnicodeString(strip=True, min=1, not_empty=True)
218 v.UnicodeString(strip=True, min=1, not_empty=True)
220 )
219 )
221 password = All(
220 password = All(
222 v.ValidPassword(localizer),
221 v.ValidPassword(localizer),
223 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
224 )
223 )
225 password_confirmation = All(
224 password_confirmation = All(
226 v.ValidPassword(localizer),
225 v.ValidPassword(localizer),
227 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
228 )
227 )
229 active = v.StringBoolean(if_missing=False)
228 active = v.StringBoolean(if_missing=False)
230 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
232 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
233
232
234 chained_validators = [v.ValidPasswordsMatch(localizer)]
233 chained_validators = [v.ValidPasswordsMatch(localizer)]
235 return _RegisterForm
234 return _RegisterForm
236
235
237
236
238 def PasswordResetForm(localizer):
237 def PasswordResetForm(localizer):
239 _ = localizer
238 _ = localizer
240
239
241 class _PasswordResetForm(formencode.Schema):
240 class _PasswordResetForm(formencode.Schema):
242 allow_extra_fields = True
241 allow_extra_fields = True
243 filter_extra_fields = True
242 filter_extra_fields = True
244 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
245 return _PasswordResetForm
244 return _PasswordResetForm
246
245
247
246
248 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
249 _ = localizer
248 _ = localizer
250 old_data = old_data or {}
249 old_data = old_data or {}
251 repo_groups = repo_groups or []
250 repo_groups = repo_groups or []
252 supported_backends = BACKENDS.keys()
251 supported_backends = BACKENDS.keys()
253
252
254 class _RepoForm(formencode.Schema):
253 class _RepoForm(formencode.Schema):
255 allow_extra_fields = True
254 allow_extra_fields = True
256 filter_extra_fields = False
255 filter_extra_fields = False
257 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
256 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
258 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
257 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
259 repo_group = All(v.CanWriteGroup(localizer, old_data),
258 repo_group = All(v.CanWriteGroup(localizer, old_data),
260 v.OneOf(repo_groups, hideList=True))
259 v.OneOf(repo_groups, hideList=True))
261 repo_type = v.OneOf(supported_backends, required=False,
260 repo_type = v.OneOf(supported_backends, required=False,
262 if_missing=old_data.get('repo_type'))
261 if_missing=old_data.get('repo_type'))
263 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
262 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
264 repo_private = v.StringBoolean(if_missing=False)
263 repo_private = v.StringBoolean(if_missing=False)
265 repo_copy_permissions = v.StringBoolean(if_missing=False)
264 repo_copy_permissions = v.StringBoolean(if_missing=False)
266 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
265 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
267
266
268 repo_enable_statistics = v.StringBoolean(if_missing=False)
267 repo_enable_statistics = v.StringBoolean(if_missing=False)
269 repo_enable_downloads = v.StringBoolean(if_missing=False)
268 repo_enable_downloads = v.StringBoolean(if_missing=False)
270 repo_enable_locking = v.StringBoolean(if_missing=False)
269 repo_enable_locking = v.StringBoolean(if_missing=False)
271
270
272 if edit:
271 if edit:
273 # this is repo owner
272 # this is repo owner
274 user = All(
273 user = All(
275 v.UnicodeString(not_empty=True),
274 v.UnicodeString(not_empty=True),
276 v.ValidRepoUser(localizer, allow_disabled))
275 v.ValidRepoUser(localizer, allow_disabled))
277 clone_uri_change = v.UnicodeString(
276 clone_uri_change = v.UnicodeString(
278 not_empty=False, if_missing=v.Missing)
277 not_empty=False, if_missing=v.Missing)
279
278
280 chained_validators = [v.ValidCloneUri(localizer),
279 chained_validators = [v.ValidCloneUri(localizer),
281 v.ValidRepoName(localizer, edit, old_data)]
280 v.ValidRepoName(localizer, edit, old_data)]
282 return _RepoForm
281 return _RepoForm
283
282
284
283
285 def RepoPermsForm(localizer):
284 def RepoPermsForm(localizer):
286 _ = localizer
285 _ = localizer
287
286
288 class _RepoPermsForm(formencode.Schema):
287 class _RepoPermsForm(formencode.Schema):
289 allow_extra_fields = True
288 allow_extra_fields = True
290 filter_extra_fields = False
289 filter_extra_fields = False
291 chained_validators = [v.ValidPerms(localizer, type_='repo')]
290 chained_validators = [v.ValidPerms(localizer, type_='repo')]
292 return _RepoPermsForm
291 return _RepoPermsForm
293
292
294
293
295 def RepoGroupPermsForm(localizer, valid_recursive_choices):
294 def RepoGroupPermsForm(localizer, valid_recursive_choices):
296 _ = localizer
295 _ = localizer
297
296
298 class _RepoGroupPermsForm(formencode.Schema):
297 class _RepoGroupPermsForm(formencode.Schema):
299 allow_extra_fields = True
298 allow_extra_fields = True
300 filter_extra_fields = False
299 filter_extra_fields = False
301 recursive = v.OneOf(valid_recursive_choices)
300 recursive = v.OneOf(valid_recursive_choices)
302 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
301 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
303 return _RepoGroupPermsForm
302 return _RepoGroupPermsForm
304
303
305
304
306 def UserGroupPermsForm(localizer):
305 def UserGroupPermsForm(localizer):
307 _ = localizer
306 _ = localizer
308
307
309 class _UserPermsForm(formencode.Schema):
308 class _UserPermsForm(formencode.Schema):
310 allow_extra_fields = True
309 allow_extra_fields = True
311 filter_extra_fields = False
310 filter_extra_fields = False
312 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
311 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
313 return _UserPermsForm
312 return _UserPermsForm
314
313
315
314
316 def RepoFieldForm(localizer):
315 def RepoFieldForm(localizer):
317 _ = localizer
316 _ = localizer
318
317
319 class _RepoFieldForm(formencode.Schema):
318 class _RepoFieldForm(formencode.Schema):
320 filter_extra_fields = True
319 filter_extra_fields = True
321 allow_extra_fields = True
320 allow_extra_fields = True
322
321
323 new_field_key = All(v.FieldKey(localizer),
322 new_field_key = All(v.FieldKey(localizer),
324 v.UnicodeString(strip=True, min=3, not_empty=True))
323 v.UnicodeString(strip=True, min=3, not_empty=True))
325 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
324 new_field_value = v.UnicodeString(not_empty=False, if_missing='')
326 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
325 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
327 if_missing='str')
326 if_missing='str')
328 new_field_label = v.UnicodeString(not_empty=False)
327 new_field_label = v.UnicodeString(not_empty=False)
329 new_field_desc = v.UnicodeString(not_empty=False)
328 new_field_desc = v.UnicodeString(not_empty=False)
330 return _RepoFieldForm
329 return _RepoFieldForm
331
330
332
331
333 def RepoForkForm(localizer, edit=False, old_data=None,
332 def RepoForkForm(localizer, edit=False, old_data=None,
334 supported_backends=BACKENDS.keys(), repo_groups=None):
333 supported_backends=BACKENDS.keys(), repo_groups=None):
335 _ = localizer
334 _ = localizer
336 old_data = old_data or {}
335 old_data = old_data or {}
337 repo_groups = repo_groups or []
336 repo_groups = repo_groups or []
338
337
339 class _RepoForkForm(formencode.Schema):
338 class _RepoForkForm(formencode.Schema):
340 allow_extra_fields = True
339 allow_extra_fields = True
341 filter_extra_fields = False
340 filter_extra_fields = False
342 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
341 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
343 v.SlugifyName(localizer))
342 v.SlugifyName(localizer))
344 repo_group = All(v.CanWriteGroup(localizer, ),
343 repo_group = All(v.CanWriteGroup(localizer, ),
345 v.OneOf(repo_groups, hideList=True))
344 v.OneOf(repo_groups, hideList=True))
346 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
345 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
347 description = v.UnicodeString(strip=True, min=1, not_empty=True)
346 description = v.UnicodeString(strip=True, min=1, not_empty=True)
348 private = v.StringBoolean(if_missing=False)
347 private = v.StringBoolean(if_missing=False)
349 copy_permissions = v.StringBoolean(if_missing=False)
348 copy_permissions = v.StringBoolean(if_missing=False)
350 fork_parent_id = v.UnicodeString()
349 fork_parent_id = v.UnicodeString()
351 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
350 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
352 return _RepoForkForm
351 return _RepoForkForm
353
352
354
353
355 def ApplicationSettingsForm(localizer):
354 def ApplicationSettingsForm(localizer):
356 _ = localizer
355 _ = localizer
357
356
358 class _ApplicationSettingsForm(formencode.Schema):
357 class _ApplicationSettingsForm(formencode.Schema):
359 allow_extra_fields = True
358 allow_extra_fields = True
360 filter_extra_fields = False
359 filter_extra_fields = False
361 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
360 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
362 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
361 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
363 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
362 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
364 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
363 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
365 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
364 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
366 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
365 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
367 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
366 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
368 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
367 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
369 return _ApplicationSettingsForm
368 return _ApplicationSettingsForm
370
369
371
370
372 def ApplicationVisualisationForm(localizer):
371 def ApplicationVisualisationForm(localizer):
373 from rhodecode.model.db import Repository
372 from rhodecode.model.db import Repository
374 _ = localizer
373 _ = localizer
375
374
376 class _ApplicationVisualisationForm(formencode.Schema):
375 class _ApplicationVisualisationForm(formencode.Schema):
377 allow_extra_fields = True
376 allow_extra_fields = True
378 filter_extra_fields = False
377 filter_extra_fields = False
379 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
378 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
380 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
379 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
381 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
380 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
382
381
383 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
382 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
384 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
383 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
385 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
384 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
386 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
385 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
387 rhodecode_show_version = v.StringBoolean(if_missing=False)
386 rhodecode_show_version = v.StringBoolean(if_missing=False)
388 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
387 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
389 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
388 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
390 rhodecode_gravatar_url = v.UnicodeString(min=3)
389 rhodecode_gravatar_url = v.UnicodeString(min=3)
391 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
390 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
392 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
391 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
393 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
392 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
394 rhodecode_support_url = v.UnicodeString()
393 rhodecode_support_url = v.UnicodeString()
395 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
394 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
396 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
395 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
397 return _ApplicationVisualisationForm
396 return _ApplicationVisualisationForm
398
397
399
398
400 class _BaseVcsSettingsForm(formencode.Schema):
399 class _BaseVcsSettingsForm(formencode.Schema):
401
400
402 allow_extra_fields = True
401 allow_extra_fields = True
403 filter_extra_fields = False
402 filter_extra_fields = False
404 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
403 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
405 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
404 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
406 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
405 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
407
406
408 # PR/Code-review
407 # PR/Code-review
409 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
408 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
410 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
409 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
411
410
412 # hg
411 # hg
413 extensions_largefiles = v.StringBoolean(if_missing=False)
412 extensions_largefiles = v.StringBoolean(if_missing=False)
414 extensions_evolve = v.StringBoolean(if_missing=False)
413 extensions_evolve = v.StringBoolean(if_missing=False)
415 phases_publish = v.StringBoolean(if_missing=False)
414 phases_publish = v.StringBoolean(if_missing=False)
416
415
417 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
416 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
418 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
417 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
419
418
420 # git
419 # git
421 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
420 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
422 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
421 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
423 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
422 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
424
423
425 # svn
424 # svn
426 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
425 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
427 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
426 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
428
427
429 # cache
428 # cache
430 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
429 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
431
430
432
431
433 def ApplicationUiSettingsForm(localizer):
432 def ApplicationUiSettingsForm(localizer):
434 _ = localizer
433 _ = localizer
435
434
436 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
435 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
437 web_push_ssl = v.StringBoolean(if_missing=False)
436 web_push_ssl = v.StringBoolean(if_missing=False)
438 paths_root_path = All(
437 paths_root_path = All(
439 v.ValidPath(localizer),
438 v.ValidPath(localizer),
440 v.UnicodeString(strip=True, min=1, not_empty=True)
439 v.UnicodeString(strip=True, min=1, not_empty=True)
441 )
440 )
442 largefiles_usercache = All(
441 largefiles_usercache = All(
443 v.ValidPath(localizer),
442 v.ValidPath(localizer),
444 v.UnicodeString(strip=True, min=2, not_empty=True))
443 v.UnicodeString(strip=True, min=2, not_empty=True))
445 vcs_git_lfs_store_location = All(
444 vcs_git_lfs_store_location = All(
446 v.ValidPath(localizer),
445 v.ValidPath(localizer),
447 v.UnicodeString(strip=True, min=2, not_empty=True))
446 v.UnicodeString(strip=True, min=2, not_empty=True))
448 extensions_hgsubversion = v.StringBoolean(if_missing=False)
447 extensions_hgsubversion = v.StringBoolean(if_missing=False)
449 extensions_hggit = v.StringBoolean(if_missing=False)
448 extensions_hggit = v.StringBoolean(if_missing=False)
450 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
449 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
451 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
450 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
452 return _ApplicationUiSettingsForm
451 return _ApplicationUiSettingsForm
453
452
454
453
455 def RepoVcsSettingsForm(localizer, repo_name):
454 def RepoVcsSettingsForm(localizer, repo_name):
456 _ = localizer
455 _ = localizer
457
456
458 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
457 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
459 inherit_global_settings = v.StringBoolean(if_missing=False)
458 inherit_global_settings = v.StringBoolean(if_missing=False)
460 new_svn_branch = v.ValidSvnPattern(localizer,
459 new_svn_branch = v.ValidSvnPattern(localizer,
461 section='vcs_svn_branch', repo_name=repo_name)
460 section='vcs_svn_branch', repo_name=repo_name)
462 new_svn_tag = v.ValidSvnPattern(localizer,
461 new_svn_tag = v.ValidSvnPattern(localizer,
463 section='vcs_svn_tag', repo_name=repo_name)
462 section='vcs_svn_tag', repo_name=repo_name)
464 return _RepoVcsSettingsForm
463 return _RepoVcsSettingsForm
465
464
466
465
467 def LabsSettingsForm(localizer):
466 def LabsSettingsForm(localizer):
468 _ = localizer
467 _ = localizer
469
468
470 class _LabSettingsForm(formencode.Schema):
469 class _LabSettingsForm(formencode.Schema):
471 allow_extra_fields = True
470 allow_extra_fields = True
472 filter_extra_fields = False
471 filter_extra_fields = False
473 return _LabSettingsForm
472 return _LabSettingsForm
474
473
475
474
476 def ApplicationPermissionsForm(
475 def ApplicationPermissionsForm(
477 localizer, register_choices, password_reset_choices,
476 localizer, register_choices, password_reset_choices,
478 extern_activate_choices):
477 extern_activate_choices):
479 _ = localizer
478 _ = localizer
480
479
481 class _DefaultPermissionsForm(formencode.Schema):
480 class _DefaultPermissionsForm(formencode.Schema):
482 allow_extra_fields = True
481 allow_extra_fields = True
483 filter_extra_fields = True
482 filter_extra_fields = True
484
483
485 anonymous = v.StringBoolean(if_missing=False)
484 anonymous = v.StringBoolean(if_missing=False)
486 default_register = v.OneOf(register_choices)
485 default_register = v.OneOf(register_choices)
487 default_register_message = v.UnicodeString()
486 default_register_message = v.UnicodeString()
488 default_password_reset = v.OneOf(password_reset_choices)
487 default_password_reset = v.OneOf(password_reset_choices)
489 default_extern_activate = v.OneOf(extern_activate_choices)
488 default_extern_activate = v.OneOf(extern_activate_choices)
490 return _DefaultPermissionsForm
489 return _DefaultPermissionsForm
491
490
492
491
493 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
492 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
494 user_group_perms_choices):
493 user_group_perms_choices):
495 _ = localizer
494 _ = localizer
496
495
497 class _ObjectPermissionsForm(formencode.Schema):
496 class _ObjectPermissionsForm(formencode.Schema):
498 allow_extra_fields = True
497 allow_extra_fields = True
499 filter_extra_fields = True
498 filter_extra_fields = True
500 overwrite_default_repo = v.StringBoolean(if_missing=False)
499 overwrite_default_repo = v.StringBoolean(if_missing=False)
501 overwrite_default_group = v.StringBoolean(if_missing=False)
500 overwrite_default_group = v.StringBoolean(if_missing=False)
502 overwrite_default_user_group = v.StringBoolean(if_missing=False)
501 overwrite_default_user_group = v.StringBoolean(if_missing=False)
503
502
504 default_repo_perm = v.OneOf(repo_perms_choices)
503 default_repo_perm = v.OneOf(repo_perms_choices)
505 default_group_perm = v.OneOf(group_perms_choices)
504 default_group_perm = v.OneOf(group_perms_choices)
506 default_user_group_perm = v.OneOf(user_group_perms_choices)
505 default_user_group_perm = v.OneOf(user_group_perms_choices)
507
506
508 return _ObjectPermissionsForm
507 return _ObjectPermissionsForm
509
508
510
509
511 def BranchPermissionsForm(localizer, branch_perms_choices):
510 def BranchPermissionsForm(localizer, branch_perms_choices):
512 _ = localizer
511 _ = localizer
513
512
514 class _BranchPermissionsForm(formencode.Schema):
513 class _BranchPermissionsForm(formencode.Schema):
515 allow_extra_fields = True
514 allow_extra_fields = True
516 filter_extra_fields = True
515 filter_extra_fields = True
517 overwrite_default_branch = v.StringBoolean(if_missing=False)
516 overwrite_default_branch = v.StringBoolean(if_missing=False)
518 default_branch_perm = v.OneOf(branch_perms_choices)
517 default_branch_perm = v.OneOf(branch_perms_choices)
519
518
520 return _BranchPermissionsForm
519 return _BranchPermissionsForm
521
520
522
521
523 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
522 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
524 repo_group_create_choices, user_group_create_choices,
523 repo_group_create_choices, user_group_create_choices,
525 fork_choices, inherit_default_permissions_choices):
524 fork_choices, inherit_default_permissions_choices):
526 _ = localizer
525 _ = localizer
527
526
528 class _DefaultPermissionsForm(formencode.Schema):
527 class _DefaultPermissionsForm(formencode.Schema):
529 allow_extra_fields = True
528 allow_extra_fields = True
530 filter_extra_fields = True
529 filter_extra_fields = True
531
530
532 anonymous = v.StringBoolean(if_missing=False)
531 anonymous = v.StringBoolean(if_missing=False)
533
532
534 default_repo_create = v.OneOf(create_choices)
533 default_repo_create = v.OneOf(create_choices)
535 default_repo_create_on_write = v.OneOf(create_on_write_choices)
534 default_repo_create_on_write = v.OneOf(create_on_write_choices)
536 default_user_group_create = v.OneOf(user_group_create_choices)
535 default_user_group_create = v.OneOf(user_group_create_choices)
537 default_repo_group_create = v.OneOf(repo_group_create_choices)
536 default_repo_group_create = v.OneOf(repo_group_create_choices)
538 default_fork_create = v.OneOf(fork_choices)
537 default_fork_create = v.OneOf(fork_choices)
539 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
538 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
540 return _DefaultPermissionsForm
539 return _DefaultPermissionsForm
541
540
542
541
543 def UserIndividualPermissionsForm(localizer):
542 def UserIndividualPermissionsForm(localizer):
544 _ = localizer
543 _ = localizer
545
544
546 class _DefaultPermissionsForm(formencode.Schema):
545 class _DefaultPermissionsForm(formencode.Schema):
547 allow_extra_fields = True
546 allow_extra_fields = True
548 filter_extra_fields = True
547 filter_extra_fields = True
549
548
550 inherit_default_permissions = v.StringBoolean(if_missing=False)
549 inherit_default_permissions = v.StringBoolean(if_missing=False)
551 return _DefaultPermissionsForm
550 return _DefaultPermissionsForm
552
551
553
552
554 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
553 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
555 _ = localizer
554 _ = localizer
556 old_data = old_data or {}
555 old_data = old_data or {}
557
556
558 class _DefaultsForm(formencode.Schema):
557 class _DefaultsForm(formencode.Schema):
559 allow_extra_fields = True
558 allow_extra_fields = True
560 filter_extra_fields = True
559 filter_extra_fields = True
561 default_repo_type = v.OneOf(supported_backends)
560 default_repo_type = v.OneOf(supported_backends)
562 default_repo_private = v.StringBoolean(if_missing=False)
561 default_repo_private = v.StringBoolean(if_missing=False)
563 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
562 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
564 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
563 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
565 default_repo_enable_locking = v.StringBoolean(if_missing=False)
564 default_repo_enable_locking = v.StringBoolean(if_missing=False)
566 return _DefaultsForm
565 return _DefaultsForm
567
566
568
567
569 def AuthSettingsForm(localizer):
568 def AuthSettingsForm(localizer):
570 _ = localizer
569 _ = localizer
571
570
572 class _AuthSettingsForm(formencode.Schema):
571 class _AuthSettingsForm(formencode.Schema):
573 allow_extra_fields = True
572 allow_extra_fields = True
574 filter_extra_fields = True
573 filter_extra_fields = True
575 auth_plugins = All(v.ValidAuthPlugins(localizer),
574 auth_plugins = All(v.ValidAuthPlugins(localizer),
576 v.UniqueListFromString(localizer)(not_empty=True))
575 v.UniqueListFromString(localizer)(not_empty=True))
577 return _AuthSettingsForm
576 return _AuthSettingsForm
578
577
579
578
580 def UserExtraEmailForm(localizer):
579 def UserExtraEmailForm(localizer):
581 _ = localizer
580 _ = localizer
582
581
583 class _UserExtraEmailForm(formencode.Schema):
582 class _UserExtraEmailForm(formencode.Schema):
584 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
583 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
585 return _UserExtraEmailForm
584 return _UserExtraEmailForm
586
585
587
586
588 def UserExtraIpForm(localizer):
587 def UserExtraIpForm(localizer):
589 _ = localizer
588 _ = localizer
590
589
591 class _UserExtraIpForm(formencode.Schema):
590 class _UserExtraIpForm(formencode.Schema):
592 ip = v.ValidIp(localizer)(not_empty=True)
591 ip = v.ValidIp(localizer)(not_empty=True)
593 return _UserExtraIpForm
592 return _UserExtraIpForm
594
593
595
594
596 def PullRequestForm(localizer, repo_id):
595 def PullRequestForm(localizer, repo_id):
597 _ = localizer
596 _ = localizer
598
597
599 class ReviewerForm(formencode.Schema):
598 class ReviewerForm(formencode.Schema):
600 user_id = v.Int(not_empty=True)
599 user_id = v.Int(not_empty=True)
601 reasons = All()
600 reasons = All()
602 rules = All(v.UniqueList(localizer, convert=int)())
601 rules = All(v.UniqueList(localizer, convert=int)())
603 mandatory = v.StringBoolean()
602 mandatory = v.StringBoolean()
604 role = v.String(if_missing='reviewer')
603 role = v.String(if_missing='reviewer')
605
604
606 class ObserverForm(formencode.Schema):
605 class ObserverForm(formencode.Schema):
607 user_id = v.Int(not_empty=True)
606 user_id = v.Int(not_empty=True)
608 reasons = All()
607 reasons = All()
609 rules = All(v.UniqueList(localizer, convert=int)())
608 rules = All(v.UniqueList(localizer, convert=int)())
610 mandatory = v.StringBoolean()
609 mandatory = v.StringBoolean()
611 role = v.String(if_missing='observer')
610 role = v.String(if_missing='observer')
612
611
613 class _PullRequestForm(formencode.Schema):
612 class _PullRequestForm(formencode.Schema):
614 allow_extra_fields = True
613 allow_extra_fields = True
615 filter_extra_fields = True
614 filter_extra_fields = True
616
615
617 common_ancestor = v.UnicodeString(strip=True, required=True)
616 common_ancestor = v.UnicodeString(strip=True, required=True)
618 source_repo = v.UnicodeString(strip=True, required=True)
617 source_repo = v.UnicodeString(strip=True, required=True)
619 source_ref = v.UnicodeString(strip=True, required=True)
618 source_ref = v.UnicodeString(strip=True, required=True)
620 target_repo = v.UnicodeString(strip=True, required=True)
619 target_repo = v.UnicodeString(strip=True, required=True)
621 target_ref = v.UnicodeString(strip=True, required=True)
620 target_ref = v.UnicodeString(strip=True, required=True)
622 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
621 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
623 v.UniqueList(localizer)(not_empty=True))
622 v.UniqueList(localizer)(not_empty=True))
624 review_members = formencode.ForEach(ReviewerForm())
623 review_members = formencode.ForEach(ReviewerForm())
625 observer_members = formencode.ForEach(ObserverForm())
624 observer_members = formencode.ForEach(ObserverForm())
626 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
625 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
627 pullrequest_desc = v.UnicodeString(strip=True, required=False)
626 pullrequest_desc = v.UnicodeString(strip=True, required=False)
628 description_renderer = v.UnicodeString(strip=True, required=False)
627 description_renderer = v.UnicodeString(strip=True, required=False)
629
628
630 return _PullRequestForm
629 return _PullRequestForm
631
630
632
631
633 def IssueTrackerPatternsForm(localizer):
632 def IssueTrackerPatternsForm(localizer):
634 _ = localizer
633 _ = localizer
635
634
636 class _IssueTrackerPatternsForm(formencode.Schema):
635 class _IssueTrackerPatternsForm(formencode.Schema):
637 allow_extra_fields = True
636 allow_extra_fields = True
638 filter_extra_fields = False
637 filter_extra_fields = False
639 chained_validators = [v.ValidPattern(localizer)]
638 chained_validators = [v.ValidPattern(localizer)]
640 return _IssueTrackerPatternsForm
639 return _IssueTrackerPatternsForm
@@ -1,256 +1,254 b''
1
2
3 # Copyright (C) 2013-2023 RhodeCode GmbH
1 # Copyright (C) 2013-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 """
19 """
22 gist model for RhodeCode
20 gist model for RhodeCode
23 """
21 """
24
22
25 import os
23 import os
26 import time
24 import time
27 import logging
25 import logging
28 import traceback
26 import traceback
29 import shutil
27 import shutil
30
28
31 from pyramid.threadlocal import get_current_request
29 from pyramid.threadlocal import get_current_request
32
30
33 from rhodecode.lib.utils2 import (
31 from rhodecode.lib.utils2 import (
34 unique_id, safe_int, safe_str, time_to_datetime, AttributeDict)
32 unique_id, safe_int, safe_str, time_to_datetime, AttributeDict)
35 from rhodecode.lib.ext_json import json
33 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.vcs import VCSError
34 from rhodecode.lib.vcs import VCSError
37 from rhodecode.model import BaseModel
35 from rhodecode.model import BaseModel
38 from rhodecode.model.db import Gist
36 from rhodecode.model.db import Gist
39 from rhodecode.model.repo import RepoModel
37 from rhodecode.model.repo import RepoModel
40 from rhodecode.model.scm import ScmModel
38 from rhodecode.model.scm import ScmModel
41
39
42 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
43
41
44 GIST_STORE_LOC = '.rc_gist_store'
42 GIST_STORE_LOC = '.rc_gist_store'
45 GIST_METADATA_FILE = '.rc_gist_metadata'
43 GIST_METADATA_FILE = '.rc_gist_metadata'
46
44
47
45
48 class GistModel(BaseModel):
46 class GistModel(BaseModel):
49 cls = Gist
47 cls = Gist
50 vcs_backend = 'hg'
48 vcs_backend = 'hg'
51
49
52 def _get_gist(self, gist):
50 def _get_gist(self, gist):
53 """
51 """
54 Helper method to get gist by ID, or gist_access_id as a fallback
52 Helper method to get gist by ID, or gist_access_id as a fallback
55
53
56 :param gist: GistID, gist_access_id, or Gist instance
54 :param gist: GistID, gist_access_id, or Gist instance
57 """
55 """
58 return self._get_instance(Gist, gist, callback=Gist.get_by_access_id)
56 return self._get_instance(Gist, gist, callback=Gist.get_by_access_id)
59
57
60 def __delete_gist(self, gist):
58 def __delete_gist(self, gist):
61 """
59 """
62 removes gist from filesystem
60 removes gist from filesystem
63
61
64 :param gist: gist object
62 :param gist: gist object
65 """
63 """
66 root_path = RepoModel().repos_path
64 root_path = RepoModel().repos_path
67 rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
65 rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
68 log.info("Removing %s", rm_path)
66 log.info("Removing %s", rm_path)
69 shutil.rmtree(rm_path)
67 shutil.rmtree(rm_path)
70
68
71 def _store_metadata(self, repo, gist_id, gist_access_id, user_id, username,
69 def _store_metadata(self, repo, gist_id, gist_access_id, user_id, username,
72 gist_type, gist_expires, gist_acl_level):
70 gist_type, gist_expires, gist_acl_level):
73 """
71 """
74 store metadata inside the gist repo, this can be later used for imports
72 store metadata inside the gist repo, this can be later used for imports
75 or gist identification. Currently we use this inside RhodeCode tools
73 or gist identification. Currently we use this inside RhodeCode tools
76 to do cleanup of gists that are in storage but not in database.
74 to do cleanup of gists that are in storage but not in database.
77 """
75 """
78 metadata = {
76 metadata = {
79 'metadata_version': '2',
77 'metadata_version': '2',
80 'gist_db_id': gist_id,
78 'gist_db_id': gist_id,
81 'gist_access_id': gist_access_id,
79 'gist_access_id': gist_access_id,
82 'gist_owner_id': user_id,
80 'gist_owner_id': user_id,
83 'gist_owner_username': username,
81 'gist_owner_username': username,
84 'gist_type': gist_type,
82 'gist_type': gist_type,
85 'gist_expires': gist_expires,
83 'gist_expires': gist_expires,
86 'gist_updated': time.time(),
84 'gist_updated': time.time(),
87 'gist_acl_level': gist_acl_level,
85 'gist_acl_level': gist_acl_level,
88 }
86 }
89 metadata_file = os.path.join(repo.path, '.hg', GIST_METADATA_FILE)
87 metadata_file = os.path.join(repo.path, '.hg', GIST_METADATA_FILE)
90 with open(metadata_file, 'wb') as f:
88 with open(metadata_file, 'wb') as f:
91 f.write(json.dumps(metadata))
89 f.write(json.dumps(metadata))
92
90
93 def get_gist(self, gist):
91 def get_gist(self, gist):
94 return self._get_gist(gist)
92 return self._get_gist(gist)
95
93
96 def get_gist_files(self, gist_access_id, revision=None):
94 def get_gist_files(self, gist_access_id, revision=None):
97 """
95 """
98 Get files for given gist
96 Get files for given gist
99
97
100 :param gist_access_id:
98 :param gist_access_id:
101 """
99 """
102 repo = Gist.get_by_access_id(gist_access_id)
100 repo = Gist.get_by_access_id(gist_access_id)
103 vcs_repo = repo.scm_instance()
101 vcs_repo = repo.scm_instance()
104 if not vcs_repo:
102 if not vcs_repo:
105 raise VCSError('Failed to load gist repository for {}'.format(repo))
103 raise VCSError(f'Failed to load gist repository for {repo}')
106
104
107 commit = vcs_repo.get_commit(commit_id=revision)
105 commit = vcs_repo.get_commit(commit_id=revision)
108 return commit, [n for n in commit.get_node('/')]
106 return commit, [n for n in commit.get_node('/')]
109
107
110 def create(self, description, owner, gist_mapping,
108 def create(self, description, owner, gist_mapping,
111 gist_type=Gist.GIST_PUBLIC, lifetime=-1, gist_id=None,
109 gist_type=Gist.GIST_PUBLIC, lifetime=-1, gist_id=None,
112 gist_acl_level=Gist.ACL_LEVEL_PRIVATE):
110 gist_acl_level=Gist.ACL_LEVEL_PRIVATE):
113 """
111 """
114 Create a gist
112 Create a gist
115
113
116 :param description: description of the gist
114 :param description: description of the gist
117 :param owner: user who created this gist
115 :param owner: user who created this gist
118 :param gist_mapping: mapping [{'filename': 'file1.txt', 'content': content}, ...}]
116 :param gist_mapping: mapping [{'filename': 'file1.txt', 'content': content}, ...}]
119 :param gist_type: type of gist private/public
117 :param gist_type: type of gist private/public
120 :param lifetime: in minutes, -1 == forever
118 :param lifetime: in minutes, -1 == forever
121 :param gist_acl_level: acl level for this gist
119 :param gist_acl_level: acl level for this gist
122 """
120 """
123 owner = self._get_user(owner)
121 owner = self._get_user(owner)
124 gist_id = safe_str(gist_id or unique_id(20))
122 gist_id = safe_str(gist_id or unique_id(20))
125 lifetime = safe_int(lifetime, -1)
123 lifetime = safe_int(lifetime, -1)
126 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
124 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
127 expiration = (time_to_datetime(gist_expires)
125 expiration = (time_to_datetime(gist_expires)
128 if gist_expires != -1 else 'forever')
126 if gist_expires != -1 else 'forever')
129 log.debug('set GIST expiration date to: %s', expiration)
127 log.debug('set GIST expiration date to: %s', expiration)
130 # create the Database version
128 # create the Database version
131 gist = Gist()
129 gist = Gist()
132 gist.gist_description = description
130 gist.gist_description = description
133 gist.gist_access_id = gist_id
131 gist.gist_access_id = gist_id
134 gist.gist_owner = owner.user_id
132 gist.gist_owner = owner.user_id
135 gist.gist_expires = gist_expires
133 gist.gist_expires = gist_expires
136 gist.gist_type = safe_str(gist_type)
134 gist.gist_type = safe_str(gist_type)
137 gist.acl_level = gist_acl_level
135 gist.acl_level = gist_acl_level
138 self.sa.add(gist)
136 self.sa.add(gist)
139 self.sa.flush()
137 self.sa.flush()
140 if gist_type == Gist.GIST_PUBLIC:
138 if gist_type == Gist.GIST_PUBLIC:
141 # use DB ID for easy to use GIST ID
139 # use DB ID for easy to use GIST ID
142 gist_id = safe_str(gist.gist_id)
140 gist_id = safe_str(gist.gist_id)
143 gist.gist_access_id = gist_id
141 gist.gist_access_id = gist_id
144 self.sa.add(gist)
142 self.sa.add(gist)
145
143
146 gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
144 gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
147 log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
145 log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
148 repo = RepoModel()._create_filesystem_repo(
146 repo = RepoModel()._create_filesystem_repo(
149 repo_name=gist_id, repo_type=self.vcs_backend, repo_group=GIST_STORE_LOC,
147 repo_name=gist_id, repo_type=self.vcs_backend, repo_group=GIST_STORE_LOC,
150 use_global_config=True)
148 use_global_config=True)
151
149
152 # now create single multifile commit
150 # now create single multifile commit
153 message = 'added file'
151 message = 'added file'
154 message += 's: ' if len(gist_mapping) > 1 else ': '
152 message += 's: ' if len(gist_mapping) > 1 else ': '
155 message += ', '.join([safe_str(x) for x in gist_mapping])
153 message += ', '.join([safe_str(x) for x in gist_mapping])
156
154
157 # fake RhodeCode Repository object
155 # fake RhodeCode Repository object
158 fake_repo = AttributeDict({
156 fake_repo = AttributeDict({
159 'repo_name': gist_repo_path,
157 'repo_name': gist_repo_path,
160 'scm_instance': lambda *args, **kwargs: repo,
158 'scm_instance': lambda *args, **kwargs: repo,
161 })
159 })
162
160
163 ScmModel().create_nodes(
161 ScmModel().create_nodes(
164 user=owner.user_id, repo=fake_repo,
162 user=owner.user_id, repo=fake_repo,
165 message=message,
163 message=message,
166 nodes=gist_mapping,
164 nodes=gist_mapping,
167 trigger_push_hook=False
165 trigger_push_hook=False
168 )
166 )
169
167
170 self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
168 self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
171 owner.user_id, owner.username, gist.gist_type,
169 owner.user_id, owner.username, gist.gist_type,
172 gist.gist_expires, gist_acl_level)
170 gist.gist_expires, gist_acl_level)
173 return gist
171 return gist
174
172
175 def delete(self, gist, fs_remove=True):
173 def delete(self, gist, fs_remove=True):
176 gist = self._get_gist(gist)
174 gist = self._get_gist(gist)
177 try:
175 try:
178 self.sa.delete(gist)
176 self.sa.delete(gist)
179 if fs_remove:
177 if fs_remove:
180 self.__delete_gist(gist)
178 self.__delete_gist(gist)
181 else:
179 else:
182 log.debug('skipping removal from filesystem')
180 log.debug('skipping removal from filesystem')
183 except Exception:
181 except Exception:
184 log.error(traceback.format_exc())
182 log.error(traceback.format_exc())
185 raise
183 raise
186
184
187 def update(self, gist, description, owner, gist_mapping, lifetime,
185 def update(self, gist, description, owner, gist_mapping, lifetime,
188 gist_acl_level):
186 gist_acl_level):
189 gist = self._get_gist(gist)
187 gist = self._get_gist(gist)
190 gist_repo = gist.scm_instance()
188 gist_repo = gist.scm_instance()
191
189
192 if lifetime == 0: # preserve old value
190 if lifetime == 0: # preserve old value
193 gist_expires = gist.gist_expires
191 gist_expires = gist.gist_expires
194 else:
192 else:
195 gist_expires = (
193 gist_expires = (
196 time.time() + (lifetime * 60) if lifetime != -1 else -1)
194 time.time() + (lifetime * 60) if lifetime != -1 else -1)
197
195
198 # calculate operation type based on given data
196 # calculate operation type based on given data
199 gist_mapping_op = {}
197 gist_mapping_op = {}
200 for k, v in gist_mapping.items():
198 for k, v in gist_mapping.items():
201 # add, mod, del
199 # add, mod, del
202 if not v['filename_org'] and v['filename']:
200 if not v['filename_org'] and v['filename']:
203 op = 'add'
201 op = 'add'
204 elif v['filename_org'] and not v['filename']:
202 elif v['filename_org'] and not v['filename']:
205 op = 'del'
203 op = 'del'
206 else:
204 else:
207 op = 'mod'
205 op = 'mod'
208
206
209 v['op'] = op
207 v['op'] = op
210 gist_mapping_op[k] = v
208 gist_mapping_op[k] = v
211
209
212 gist.gist_description = description
210 gist.gist_description = description
213 gist.gist_expires = gist_expires
211 gist.gist_expires = gist_expires
214 gist.owner = owner
212 gist.owner = owner
215 gist.acl_level = gist_acl_level
213 gist.acl_level = gist_acl_level
216 self.sa.add(gist)
214 self.sa.add(gist)
217 self.sa.flush()
215 self.sa.flush()
218
216
219 message = 'updated file'
217 message = 'updated file'
220 message += 's: ' if len(gist_mapping) > 1 else ': '
218 message += 's: ' if len(gist_mapping) > 1 else ': '
221 message += ', '.join([safe_str(x) for x in gist_mapping])
219 message += ', '.join([safe_str(x) for x in gist_mapping])
222
220
223 # fake RhodeCode Repository object
221 # fake RhodeCode Repository object
224 fake_repo = AttributeDict({
222 fake_repo = AttributeDict({
225 'repo_name': gist_repo.path,
223 'repo_name': gist_repo.path,
226 'scm_instance': lambda *args, **kwargs: gist_repo,
224 'scm_instance': lambda *args, **kwargs: gist_repo,
227 })
225 })
228
226
229 self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
227 self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
230 owner.user_id, owner.username, gist.gist_type,
228 owner.user_id, owner.username, gist.gist_type,
231 gist.gist_expires, gist_acl_level)
229 gist.gist_expires, gist_acl_level)
232
230
233 # this can throw NodeNotChangedError, if changes we're trying to commit
231 # this can throw NodeNotChangedError, if changes we're trying to commit
234 # are not actually changes...
232 # are not actually changes...
235 ScmModel().update_nodes(
233 ScmModel().update_nodes(
236 user=owner.user_id,
234 user=owner.user_id,
237 repo=fake_repo,
235 repo=fake_repo,
238 message=message,
236 message=message,
239 nodes=gist_mapping_op,
237 nodes=gist_mapping_op,
240 trigger_push_hook=False
238 trigger_push_hook=False
241 )
239 )
242
240
243 return gist
241 return gist
244
242
245 def get_url(self, gist, request=None):
243 def get_url(self, gist, request=None):
246 import rhodecode
244 import rhodecode
247
245
248 if not request:
246 if not request:
249 request = get_current_request()
247 request = get_current_request()
250
248
251 alias_url = rhodecode.CONFIG.get('gist_alias_url')
249 alias_url = rhodecode.CONFIG.get('gist_alias_url')
252 if alias_url:
250 if alias_url:
253 return alias_url.replace('{gistid}', gist.gist_access_id)
251 return alias_url.replace('{gistid}', gist.gist_access_id)
254
252
255 return request.route_url('gist_show', gist_id=gist.gist_access_id)
253 return request.route_url('gist_show', gist_id=gist.gist_access_id)
256
254
@@ -1,239 +1,237 b''
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21
19
22 """
20 """
23 Model for integrations
21 Model for integrations
24 """
22 """
25
23
26
24
27 import logging
25 import logging
28
26
29 from sqlalchemy import or_, and_
27 from sqlalchemy import or_, and_
30
28
31 from rhodecode import events
29 from rhodecode import events
32 from rhodecode.integrations.types.base import EEIntegration
30 from rhodecode.integrations.types.base import EEIntegration
33 from rhodecode.lib.caching_query import FromCache
31 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.model import BaseModel
32 from rhodecode.model import BaseModel
35 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case, null
33 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case, null
36 from rhodecode.integrations import integration_type_registry
34 from rhodecode.integrations import integration_type_registry
37
35
38 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
39
37
40
38
41 class IntegrationModel(BaseModel):
39 class IntegrationModel(BaseModel):
42
40
43 cls = Integration
41 cls = Integration
44
42
45 def __get_integration(self, integration):
43 def __get_integration(self, integration):
46 if isinstance(integration, Integration):
44 if isinstance(integration, Integration):
47 return integration
45 return integration
48 elif isinstance(integration, int):
46 elif isinstance(integration, int):
49 return self.sa.query(Integration).get(integration)
47 return self.sa.query(Integration).get(integration)
50 else:
48 else:
51 if integration:
49 if integration:
52 raise Exception('integration must be int or Instance'
50 raise Exception('integration must be int or Instance'
53 ' of Integration got %s' % type(integration))
51 ' of Integration got %s' % type(integration))
54
52
55 def create(self, IntegrationType, name, enabled, repo, repo_group, child_repos_only, settings):
53 def create(self, IntegrationType, name, enabled, repo, repo_group, child_repos_only, settings):
56 """ Create an IntegrationType integration """
54 """ Create an IntegrationType integration """
57 integration = Integration()
55 integration = Integration()
58 integration.integration_type = IntegrationType.key
56 integration.integration_type = IntegrationType.key
59 self.sa.add(integration)
57 self.sa.add(integration)
60 self.update_integration(integration, name, enabled, repo, repo_group,
58 self.update_integration(integration, name, enabled, repo, repo_group,
61 child_repos_only, settings)
59 child_repos_only, settings)
62 self.sa.commit()
60 self.sa.commit()
63 return integration
61 return integration
64
62
65 def update_integration(self, integration, name, enabled, repo, repo_group,
63 def update_integration(self, integration, name, enabled, repo, repo_group,
66 child_repos_only, settings):
64 child_repos_only, settings):
67 integration = self.__get_integration(integration)
65 integration = self.__get_integration(integration)
68
66
69 integration.repo = repo
67 integration.repo = repo
70 integration.repo_group = repo_group
68 integration.repo_group = repo_group
71 integration.child_repos_only = child_repos_only
69 integration.child_repos_only = child_repos_only
72 integration.name = name
70 integration.name = name
73 integration.enabled = enabled
71 integration.enabled = enabled
74 integration.settings = settings
72 integration.settings = settings
75
73
76 return integration
74 return integration
77
75
78 def delete(self, integration):
76 def delete(self, integration):
79 integration = self.__get_integration(integration)
77 integration = self.__get_integration(integration)
80 if integration:
78 if integration:
81 self.sa.delete(integration)
79 self.sa.delete(integration)
82 return True
80 return True
83 return False
81 return False
84
82
85 def get_integration_handler(self, integration):
83 def get_integration_handler(self, integration):
86 TypeClass = integration_type_registry.get(integration.integration_type)
84 TypeClass = integration_type_registry.get(integration.integration_type)
87 if not TypeClass:
85 if not TypeClass:
88 log.error('No class could be found for integration type: {}'.format(
86 log.error('No class could be found for integration type: {}'.format(
89 integration.integration_type))
87 integration.integration_type))
90 return None
88 return None
91 elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration):
89 elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration):
92 log.error('EE integration cannot be '
90 log.error('EE integration cannot be '
93 'executed for integration type: {}'.format(
91 'executed for integration type: {}'.format(
94 integration.integration_type))
92 integration.integration_type))
95 return None
93 return None
96
94
97 return TypeClass(integration.settings)
95 return TypeClass(integration.settings)
98
96
99 def send_event(self, integration, event):
97 def send_event(self, integration, event):
100 """ Send an event to an integration """
98 """ Send an event to an integration """
101 handler = self.get_integration_handler(integration)
99 handler = self.get_integration_handler(integration)
102 if handler:
100 if handler:
103 log.debug(
101 log.debug(
104 'events: sending event %s on integration %s using handler %s',
102 'events: sending event %s on integration %s using handler %s',
105 event, integration, handler)
103 event, integration, handler)
106 handler.send_event(event)
104 handler.send_event(event)
107
105
108 def get_integrations(self, scope, IntegrationType=None):
106 def get_integrations(self, scope, IntegrationType=None):
109 """
107 """
110 Return integrations for a scope, which must be one of:
108 Return integrations for a scope, which must be one of:
111
109
112 'all' - every integration, global/repogroup/repo
110 'all' - every integration, global/repogroup/repo
113 'global' - global integrations only
111 'global' - global integrations only
114 <Repository> instance - integrations for this repo only
112 <Repository> instance - integrations for this repo only
115 <RepoGroup> instance - integrations for this repogroup only
113 <RepoGroup> instance - integrations for this repogroup only
116 """
114 """
117
115
118 if isinstance(scope, Repository):
116 if isinstance(scope, Repository):
119 query = self.sa.query(Integration).filter(
117 query = self.sa.query(Integration).filter(
120 Integration.repo == scope)
118 Integration.repo == scope)
121 elif isinstance(scope, RepoGroup):
119 elif isinstance(scope, RepoGroup):
122 query = self.sa.query(Integration).filter(
120 query = self.sa.query(Integration).filter(
123 Integration.repo_group == scope)
121 Integration.repo_group == scope)
124 elif scope == 'global':
122 elif scope == 'global':
125 # global integrations
123 # global integrations
126 query = self.sa.query(Integration).filter(
124 query = self.sa.query(Integration).filter(
127 and_(Integration.repo_id == None, Integration.repo_group_id == None)
125 and_(Integration.repo_id == None, Integration.repo_group_id == None)
128 )
126 )
129 elif scope == 'root-repos':
127 elif scope == 'root-repos':
130 query = self.sa.query(Integration).filter(
128 query = self.sa.query(Integration).filter(
131 and_(Integration.repo_id == None,
129 and_(Integration.repo_id == None,
132 Integration.repo_group_id == None,
130 Integration.repo_group_id == None,
133 Integration.child_repos_only == true())
131 Integration.child_repos_only == true())
134 )
132 )
135 elif scope == 'all':
133 elif scope == 'all':
136 query = self.sa.query(Integration)
134 query = self.sa.query(Integration)
137 else:
135 else:
138 raise Exception(
136 raise Exception(
139 "invalid `scope`, must be one of: "
137 "invalid `scope`, must be one of: "
140 "['global', 'all', <Repository>, <RepoGroup>]")
138 "['global', 'all', <Repository>, <RepoGroup>]")
141
139
142 if IntegrationType is not None:
140 if IntegrationType is not None:
143 query = query.filter(
141 query = query.filter(
144 Integration.integration_type==IntegrationType.key)
142 Integration.integration_type==IntegrationType.key)
145
143
146 result = []
144 result = []
147 for integration in query.all():
145 for integration in query.all():
148 IntType = integration_type_registry.get(integration.integration_type)
146 IntType = integration_type_registry.get(integration.integration_type)
149 result.append((IntType, integration))
147 result.append((IntType, integration))
150 return result
148 return result
151
149
152 def get_for_event(self, event, cache=False):
150 def get_for_event(self, event, cache=False):
153 """
151 """
154 Get integrations that match an event
152 Get integrations that match an event
155 """
153 """
156 # base query
154 # base query
157 query = self.sa.query(
155 query = self.sa.query(
158 Integration
156 Integration
159 ).filter(
157 ).filter(
160 Integration.enabled == true()
158 Integration.enabled == true()
161 )
159 )
162
160
163 global_integrations_filter = and_(
161 global_integrations_filter = and_(
164 Integration.repo_id == null(),
162 Integration.repo_id == null(),
165 Integration.repo_group_id == null(),
163 Integration.repo_group_id == null(),
166 Integration.child_repos_only == false(),
164 Integration.child_repos_only == false(),
167 )
165 )
168
166
169 if isinstance(event, events.RepoEvent):
167 if isinstance(event, events.RepoEvent):
170 root_repos_integrations_filter = and_(
168 root_repos_integrations_filter = and_(
171 Integration.repo_id == null(),
169 Integration.repo_id == null(),
172 Integration.repo_group_id == null(),
170 Integration.repo_group_id == null(),
173 Integration.child_repos_only == true(),
171 Integration.child_repos_only == true(),
174 )
172 )
175
173
176 clauses = [
174 clauses = [
177 global_integrations_filter,
175 global_integrations_filter,
178 ]
176 ]
179 cases = [
177 cases = [
180 (global_integrations_filter, 1),
178 (global_integrations_filter, 1),
181 (root_repos_integrations_filter, 2),
179 (root_repos_integrations_filter, 2),
182 ]
180 ]
183
181
184 # repo group integrations
182 # repo group integrations
185 if event.repo.group:
183 if event.repo.group:
186 # repo group with only root level repos
184 # repo group with only root level repos
187 group_child_repos_filter = and_(
185 group_child_repos_filter = and_(
188 Integration.repo_group_id == event.repo.group.group_id,
186 Integration.repo_group_id == event.repo.group.group_id,
189 Integration.child_repos_only == true()
187 Integration.child_repos_only == true()
190 )
188 )
191
189
192 clauses.append(group_child_repos_filter)
190 clauses.append(group_child_repos_filter)
193 cases.append(
191 cases.append(
194 (group_child_repos_filter, 3),
192 (group_child_repos_filter, 3),
195 )
193 )
196
194
197 # repo group cascade to kids
195 # repo group cascade to kids
198 group_recursive_repos_filter = and_(
196 group_recursive_repos_filter = and_(
199 Integration.repo_group_id.in_(
197 Integration.repo_group_id.in_(
200 [group.group_id for group in event.repo.groups_with_parents]
198 [group.group_id for group in event.repo.groups_with_parents]
201 ),
199 ),
202 Integration.child_repos_only == false()
200 Integration.child_repos_only == false()
203 )
201 )
204 clauses.append(group_recursive_repos_filter)
202 clauses.append(group_recursive_repos_filter)
205 cases.append(
203 cases.append(
206 (group_recursive_repos_filter, 4),
204 (group_recursive_repos_filter, 4),
207 )
205 )
208
206
209 if not event.repo.group: # root repo
207 if not event.repo.group: # root repo
210 clauses.append(root_repos_integrations_filter)
208 clauses.append(root_repos_integrations_filter)
211
209
212 # repo integrations
210 # repo integrations
213 if event.repo.repo_id: # pre create events dont have a repo_id yet
211 if event.repo.repo_id: # pre create events dont have a repo_id yet
214 specific_repo_filter = Integration.repo_id == event.repo.repo_id
212 specific_repo_filter = Integration.repo_id == event.repo.repo_id
215 clauses.append(specific_repo_filter)
213 clauses.append(specific_repo_filter)
216 cases.append(
214 cases.append(
217 (specific_repo_filter, 5),
215 (specific_repo_filter, 5),
218 )
216 )
219
217
220 order_by_criterion = case(cases)
218 order_by_criterion = case(cases)
221
219
222 query = query.filter(or_(*clauses))
220 query = query.filter(or_(*clauses))
223 query = query.order_by(order_by_criterion)
221 query = query.order_by(order_by_criterion)
224
222
225 if cache:
223 if cache:
226 cache_key = f"get_enabled_repo_integrations_{event.repo.repo_id}"
224 cache_key = f"get_enabled_repo_integrations_{event.repo.repo_id}"
227 query = query.options(
225 query = query.options(
228 FromCache("sql_cache_short", cache_key))
226 FromCache("sql_cache_short", cache_key))
229 else: # only global integrations
227 else: # only global integrations
230 order_by_criterion = Integration.integration_id
228 order_by_criterion = Integration.integration_id
231
229
232 query = query.filter(global_integrations_filter)
230 query = query.filter(global_integrations_filter)
233 query = query.order_by(order_by_criterion)
231 query = query.order_by(order_by_criterion)
234 if cache:
232 if cache:
235 query = query.options(
233 query = query.options(
236 FromCache("sql_cache_short", "get_enabled_global_integrations"))
234 FromCache("sql_cache_short", "get_enabled_global_integrations"))
237
235
238 result = query.all()
236 result = query.all()
239 return result
237 return result
@@ -1,75 +1,74 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 """
19 """
21 SQLAlchemy Metadata and Session object
20 SQLAlchemy Metadata and Session object
22 """
21 """
23
22
24 from sqlalchemy.orm import declarative_base
23 from sqlalchemy.orm import declarative_base
25 from sqlalchemy.orm import scoped_session, sessionmaker
24 from sqlalchemy.orm import scoped_session, sessionmaker
26 from sqlalchemy.orm import Session as SA_Session
25 from sqlalchemy.orm import Session as SA_Session
27 from rhodecode.lib.caching_query import ORMCache
26 from rhodecode.lib.caching_query import ORMCache
28
27
29
28
30 __all__ = [
29 __all__ = [
31 'Base', 'Session', 'SA_Session',
30 'Base', 'Session', 'SA_Session',
32 'raw_query_executor',
31 'raw_query_executor',
33 'bind_engine_to_session',
32 'bind_engine_to_session',
34 'get_engine'
33 'get_engine'
35 ]
34 ]
36
35
37 # scoped_session. Apply our custom CachingQuery class to it,
36 # scoped_session. Apply our custom CachingQuery class to it,
38 # using a callable that will associate the dictionary
37 # using a callable that will associate the dictionary
39 # of regions with the Query.
38 # of regions with the Query.
40 # to use cache use this in query
39 # to use cache use this in query
41 # .options(FromCache("sqlalchemy_cache_type", "cachekey"))
40 # .options(FromCache("sqlalchemy_cache_type", "cachekey"))
42 session_factory = sessionmaker(
41 session_factory = sessionmaker(
43 expire_on_commit=True,
42 expire_on_commit=True,
44 future=True
43 future=True
45 )
44 )
46
45
47 Session = scoped_session(session_factory)
46 Session = scoped_session(session_factory)
48
47
49 # The declarative Base
48 # The declarative Base
50 Base = declarative_base()
49 Base = declarative_base()
51
50
52 # pass empty regions, so we can fetch it on-demand inside ORMCache
51 # pass empty regions, so we can fetch it on-demand inside ORMCache
53 cache = ORMCache(regions={})
52 cache = ORMCache(regions={})
54 cache.listen_on_session(Session)
53 cache.listen_on_session(Session)
55
54
56
55
57 def raw_query_executor(engine=None):
56 def raw_query_executor(engine=None):
58 """
57 """
59
58
60 :param engine:
59 :param engine:
61 :return:
60 :return:
62 """
61 """
63 if not engine:
62 if not engine:
64 engine = Session.bind
63 engine = Session.bind
65 session = SA_Session(engine)
64 session = SA_Session(engine)
66 return session
65 return session
67
66
68
67
69 def get_engine():
68 def get_engine():
70 return Session.bind
69 return Session.bind
71
70
72
71
73 def bind_engine_to_session(engine):
72 def bind_engine_to_session(engine):
74 Session.remove()
73 Session.remove()
75 Session.configure(bind=engine)
74 Session.configure(bind=engine)
@@ -1,457 +1,455 b''
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21
19
22 """
20 """
23 Model for notifications
21 Model for notifications
24 """
22 """
25
23
26 import logging
24 import logging
27 import traceback
25 import traceback
28
26
29 import premailer
27 import premailer
30 from pyramid.threadlocal import get_current_request
28 from pyramid.threadlocal import get_current_request
31 from sqlalchemy.sql.expression import false, true
29 from sqlalchemy.sql.expression import false, true
32
30
33 import rhodecode
31 import rhodecode
34 from rhodecode.lib import helpers as h
32 from rhodecode.lib import helpers as h
35 from rhodecode.model import BaseModel
33 from rhodecode.model import BaseModel
36 from rhodecode.model.db import Notification, User, UserNotification
34 from rhodecode.model.db import Notification, User, UserNotification
37 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
38 from rhodecode.translation import TranslationString
36 from rhodecode.translation import TranslationString
39
37
40 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
41
39
42
40
43 class NotificationModel(BaseModel):
41 class NotificationModel(BaseModel):
44
42
45 cls = Notification
43 cls = Notification
46
44
47 def __get_notification(self, notification):
45 def __get_notification(self, notification):
48 if isinstance(notification, Notification):
46 if isinstance(notification, Notification):
49 return notification
47 return notification
50 elif isinstance(notification, int):
48 elif isinstance(notification, int):
51 return Notification.get(notification)
49 return Notification.get(notification)
52 else:
50 else:
53 if notification:
51 if notification:
54 raise Exception('notification must be int or Instance'
52 raise Exception('notification must be int or Instance'
55 ' of Notification got %s' % type(notification))
53 ' of Notification got %s' % type(notification))
56
54
57 def create(
55 def create(
58 self, created_by, notification_subject='', notification_body='',
56 self, created_by, notification_subject='', notification_body='',
59 notification_type=Notification.TYPE_MESSAGE, recipients=None,
57 notification_type=Notification.TYPE_MESSAGE, recipients=None,
60 mention_recipients=None, with_email=True, email_kwargs=None):
58 mention_recipients=None, with_email=True, email_kwargs=None):
61 """
59 """
62
60
63 Creates notification of given type
61 Creates notification of given type
64
62
65 :param created_by: int, str or User instance. User who created this
63 :param created_by: int, str or User instance. User who created this
66 notification
64 notification
67 :param notification_subject: subject of notification itself,
65 :param notification_subject: subject of notification itself,
68 it will be generated automatically from notification_type if not specified
66 it will be generated automatically from notification_type if not specified
69 :param notification_body: body of notification text
67 :param notification_body: body of notification text
70 it will be generated automatically from notification_type if not specified
68 it will be generated automatically from notification_type if not specified
71 :param notification_type: type of notification, based on that we
69 :param notification_type: type of notification, based on that we
72 pick templates
70 pick templates
73 :param recipients: list of int, str or User objects, when None
71 :param recipients: list of int, str or User objects, when None
74 is given send to all admins
72 is given send to all admins
75 :param mention_recipients: list of int, str or User objects,
73 :param mention_recipients: list of int, str or User objects,
76 that were mentioned
74 that were mentioned
77 :param with_email: send email with this notification
75 :param with_email: send email with this notification
78 :param email_kwargs: dict with arguments to generate email
76 :param email_kwargs: dict with arguments to generate email
79 """
77 """
80
78
81 from rhodecode.lib.celerylib import tasks, run_task
79 from rhodecode.lib.celerylib import tasks, run_task
82
80
83 if recipients and not getattr(recipients, '__iter__', False):
81 if recipients and not getattr(recipients, '__iter__', False):
84 raise Exception('recipients must be an iterable object')
82 raise Exception('recipients must be an iterable object')
85
83
86 if not (notification_subject and notification_body) and not notification_type:
84 if not (notification_subject and notification_body) and not notification_type:
87 raise ValueError('notification_subject, and notification_body '
85 raise ValueError('notification_subject, and notification_body '
88 'cannot be empty when notification_type is not specified')
86 'cannot be empty when notification_type is not specified')
89
87
90 created_by_obj = self._get_user(created_by)
88 created_by_obj = self._get_user(created_by)
91
89
92 if not created_by_obj:
90 if not created_by_obj:
93 raise Exception('unknown user %s' % created_by)
91 raise Exception('unknown user %s' % created_by)
94
92
95 # default MAIN body if not given
93 # default MAIN body if not given
96 email_kwargs = email_kwargs or {'body': notification_body}
94 email_kwargs = email_kwargs or {'body': notification_body}
97 mention_recipients = mention_recipients or set()
95 mention_recipients = mention_recipients or set()
98
96
99 if recipients is None:
97 if recipients is None:
100 # recipients is None means to all admins
98 # recipients is None means to all admins
101 recipients_objs = User.query().filter(User.admin == true()).all()
99 recipients_objs = User.query().filter(User.admin == true()).all()
102 log.debug('sending notifications %s to admins: %s',
100 log.debug('sending notifications %s to admins: %s',
103 notification_type, recipients_objs)
101 notification_type, recipients_objs)
104 else:
102 else:
105 recipients_objs = set()
103 recipients_objs = set()
106 for u in recipients:
104 for u in recipients:
107 obj = self._get_user(u)
105 obj = self._get_user(u)
108 if obj:
106 if obj:
109 recipients_objs.add(obj)
107 recipients_objs.add(obj)
110 else: # we didn't find this user, log the error and carry on
108 else: # we didn't find this user, log the error and carry on
111 log.error('cannot notify unknown user %r', u)
109 log.error('cannot notify unknown user %r', u)
112
110
113 if not recipients_objs:
111 if not recipients_objs:
114 raise Exception('no valid recipients specified')
112 raise Exception('no valid recipients specified')
115
113
116 log.debug('sending notifications %s to %s',
114 log.debug('sending notifications %s to %s',
117 notification_type, recipients_objs)
115 notification_type, recipients_objs)
118
116
119 # add mentioned users into recipients
117 # add mentioned users into recipients
120 final_recipients = set(recipients_objs).union(mention_recipients)
118 final_recipients = set(recipients_objs).union(mention_recipients)
121
119
122 (subject, email_body, email_body_plaintext) = \
120 (subject, email_body, email_body_plaintext) = \
123 EmailNotificationModel().render_email(notification_type, **email_kwargs)
121 EmailNotificationModel().render_email(notification_type, **email_kwargs)
124
122
125 if not notification_subject:
123 if not notification_subject:
126 notification_subject = subject
124 notification_subject = subject
127
125
128 if not notification_body:
126 if not notification_body:
129 notification_body = email_body_plaintext
127 notification_body = email_body_plaintext
130
128
131 notification = Notification.create(
129 notification = Notification.create(
132 created_by=created_by_obj, subject=notification_subject,
130 created_by=created_by_obj, subject=notification_subject,
133 body=notification_body, recipients=final_recipients,
131 body=notification_body, recipients=final_recipients,
134 type_=notification_type
132 type_=notification_type
135 )
133 )
136
134
137 if not with_email: # skip sending email, and just create notification
135 if not with_email: # skip sending email, and just create notification
138 return notification
136 return notification
139
137
140 # don't send email to person who created this comment
138 # don't send email to person who created this comment
141 rec_objs = set(recipients_objs).difference({created_by_obj})
139 rec_objs = set(recipients_objs).difference({created_by_obj})
142
140
143 # now notify all recipients in question
141 # now notify all recipients in question
144
142
145 for recipient in rec_objs.union(mention_recipients):
143 for recipient in rec_objs.union(mention_recipients):
146 # inject current recipient
144 # inject current recipient
147 email_kwargs['recipient'] = recipient
145 email_kwargs['recipient'] = recipient
148 email_kwargs['mention'] = recipient in mention_recipients
146 email_kwargs['mention'] = recipient in mention_recipients
149 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
147 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
150 notification_type, **email_kwargs)
148 notification_type, **email_kwargs)
151
149
152 extra_headers = None
150 extra_headers = None
153 if 'thread_ids' in email_kwargs:
151 if 'thread_ids' in email_kwargs:
154 extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')}
152 extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')}
155
153
156 log.debug('Creating notification email task for user:`%s`', recipient)
154 log.debug('Creating notification email task for user:`%s`', recipient)
157 task = run_task(tasks.send_email, recipient.email, subject,
155 task = run_task(tasks.send_email, recipient.email, subject,
158 email_body_plaintext, email_body, extra_headers=extra_headers)
156 email_body_plaintext, email_body, extra_headers=extra_headers)
159 log.debug('Created email task: %s', task)
157 log.debug('Created email task: %s', task)
160
158
161 return notification
159 return notification
162
160
163 def delete(self, user, notification):
161 def delete(self, user, notification):
164 # we don't want to remove actual notification just the assignment
162 # we don't want to remove actual notification just the assignment
165 try:
163 try:
166 notification = self.__get_notification(notification)
164 notification = self.__get_notification(notification)
167 user = self._get_user(user)
165 user = self._get_user(user)
168 if notification and user:
166 if notification and user:
169 obj = UserNotification.query()\
167 obj = UserNotification.query()\
170 .filter(UserNotification.user == user)\
168 .filter(UserNotification.user == user)\
171 .filter(UserNotification.notification == notification)\
169 .filter(UserNotification.notification == notification)\
172 .one()
170 .one()
173 Session().delete(obj)
171 Session().delete(obj)
174 return True
172 return True
175 except Exception:
173 except Exception:
176 log.error(traceback.format_exc())
174 log.error(traceback.format_exc())
177 raise
175 raise
178
176
179 def get_for_user(self, user, filter_=None):
177 def get_for_user(self, user, filter_=None):
180 """
178 """
181 Get mentions for given user, filter them if filter dict is given
179 Get mentions for given user, filter them if filter dict is given
182 """
180 """
183 user = self._get_user(user)
181 user = self._get_user(user)
184
182
185 q = UserNotification.query()\
183 q = UserNotification.query()\
186 .filter(UserNotification.user == user)\
184 .filter(UserNotification.user == user)\
187 .join((
185 .join((
188 Notification, UserNotification.notification_id ==
186 Notification, UserNotification.notification_id ==
189 Notification.notification_id))
187 Notification.notification_id))
190 if filter_ == ['all']:
188 if filter_ == ['all']:
191 q = q # no filter
189 q = q # no filter
192 elif filter_ == ['unread']:
190 elif filter_ == ['unread']:
193 q = q.filter(UserNotification.read == false())
191 q = q.filter(UserNotification.read == false())
194 elif filter_:
192 elif filter_:
195 q = q.filter(Notification.type_.in_(filter_))
193 q = q.filter(Notification.type_.in_(filter_))
196
194
197 return q
195 return q
198
196
199 def mark_read(self, user, notification):
197 def mark_read(self, user, notification):
200 try:
198 try:
201 notification = self.__get_notification(notification)
199 notification = self.__get_notification(notification)
202 user = self._get_user(user)
200 user = self._get_user(user)
203 if notification and user:
201 if notification and user:
204 obj = UserNotification.query()\
202 obj = UserNotification.query()\
205 .filter(UserNotification.user == user)\
203 .filter(UserNotification.user == user)\
206 .filter(UserNotification.notification == notification)\
204 .filter(UserNotification.notification == notification)\
207 .one()
205 .one()
208 obj.read = True
206 obj.read = True
209 Session().add(obj)
207 Session().add(obj)
210 return True
208 return True
211 except Exception:
209 except Exception:
212 log.error(traceback.format_exc())
210 log.error(traceback.format_exc())
213 raise
211 raise
214
212
215 def mark_all_read_for_user(self, user, filter_=None):
213 def mark_all_read_for_user(self, user, filter_=None):
216 user = self._get_user(user)
214 user = self._get_user(user)
217 q = UserNotification.query()\
215 q = UserNotification.query()\
218 .filter(UserNotification.user == user)\
216 .filter(UserNotification.user == user)\
219 .filter(UserNotification.read == false())\
217 .filter(UserNotification.read == false())\
220 .join((
218 .join((
221 Notification, UserNotification.notification_id ==
219 Notification, UserNotification.notification_id ==
222 Notification.notification_id))
220 Notification.notification_id))
223 if filter_ == ['unread']:
221 if filter_ == ['unread']:
224 q = q.filter(UserNotification.read == false())
222 q = q.filter(UserNotification.read == false())
225 elif filter_:
223 elif filter_:
226 q = q.filter(Notification.type_.in_(filter_))
224 q = q.filter(Notification.type_.in_(filter_))
227
225
228 # this is a little inefficient but sqlalchemy doesn't support
226 # this is a little inefficient but sqlalchemy doesn't support
229 # update on joined tables :(
227 # update on joined tables :(
230 for obj in q.all():
228 for obj in q.all():
231 obj.read = True
229 obj.read = True
232 Session().add(obj)
230 Session().add(obj)
233
231
234 def get_unread_cnt_for_user(self, user):
232 def get_unread_cnt_for_user(self, user):
235 user = self._get_user(user)
233 user = self._get_user(user)
236 return UserNotification.query()\
234 return UserNotification.query()\
237 .filter(UserNotification.read == false())\
235 .filter(UserNotification.read == false())\
238 .filter(UserNotification.user == user).count()
236 .filter(UserNotification.user == user).count()
239
237
240 def get_unread_for_user(self, user):
238 def get_unread_for_user(self, user):
241 user = self._get_user(user)
239 user = self._get_user(user)
242 return [x.notification for x in UserNotification.query()
240 return [x.notification for x in UserNotification.query()
243 .filter(UserNotification.read == false())
241 .filter(UserNotification.read == false())
244 .filter(UserNotification.user == user).all()]
242 .filter(UserNotification.user == user).all()]
245
243
246 def get_user_notification(self, user, notification):
244 def get_user_notification(self, user, notification):
247 user = self._get_user(user)
245 user = self._get_user(user)
248 notification = self.__get_notification(notification)
246 notification = self.__get_notification(notification)
249
247
250 return UserNotification.query()\
248 return UserNotification.query()\
251 .filter(UserNotification.notification == notification)\
249 .filter(UserNotification.notification == notification)\
252 .filter(UserNotification.user == user).scalar()
250 .filter(UserNotification.user == user).scalar()
253
251
254 def make_description(self, notification, translate, show_age=True):
252 def make_description(self, notification, translate, show_age=True):
255 """
253 """
256 Creates a human readable description based on properties
254 Creates a human readable description based on properties
257 of notification object
255 of notification object
258 """
256 """
259 _ = translate
257 _ = translate
260 _map = {
258 _map = {
261 notification.TYPE_CHANGESET_COMMENT: [
259 notification.TYPE_CHANGESET_COMMENT: [
262 _('%(user)s commented on commit %(date_or_age)s'),
260 _('%(user)s commented on commit %(date_or_age)s'),
263 _('%(user)s commented on commit at %(date_or_age)s'),
261 _('%(user)s commented on commit at %(date_or_age)s'),
264 ],
262 ],
265 notification.TYPE_MESSAGE: [
263 notification.TYPE_MESSAGE: [
266 _('%(user)s sent message %(date_or_age)s'),
264 _('%(user)s sent message %(date_or_age)s'),
267 _('%(user)s sent message at %(date_or_age)s'),
265 _('%(user)s sent message at %(date_or_age)s'),
268 ],
266 ],
269 notification.TYPE_MENTION: [
267 notification.TYPE_MENTION: [
270 _('%(user)s mentioned you %(date_or_age)s'),
268 _('%(user)s mentioned you %(date_or_age)s'),
271 _('%(user)s mentioned you at %(date_or_age)s'),
269 _('%(user)s mentioned you at %(date_or_age)s'),
272 ],
270 ],
273 notification.TYPE_REGISTRATION: [
271 notification.TYPE_REGISTRATION: [
274 _('%(user)s registered in RhodeCode %(date_or_age)s'),
272 _('%(user)s registered in RhodeCode %(date_or_age)s'),
275 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
273 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
276 ],
274 ],
277 notification.TYPE_PULL_REQUEST: [
275 notification.TYPE_PULL_REQUEST: [
278 _('%(user)s opened new pull request %(date_or_age)s'),
276 _('%(user)s opened new pull request %(date_or_age)s'),
279 _('%(user)s opened new pull request at %(date_or_age)s'),
277 _('%(user)s opened new pull request at %(date_or_age)s'),
280 ],
278 ],
281 notification.TYPE_PULL_REQUEST_UPDATE: [
279 notification.TYPE_PULL_REQUEST_UPDATE: [
282 _('%(user)s updated pull request %(date_or_age)s'),
280 _('%(user)s updated pull request %(date_or_age)s'),
283 _('%(user)s updated pull request at %(date_or_age)s'),
281 _('%(user)s updated pull request at %(date_or_age)s'),
284 ],
282 ],
285 notification.TYPE_PULL_REQUEST_COMMENT: [
283 notification.TYPE_PULL_REQUEST_COMMENT: [
286 _('%(user)s commented on pull request %(date_or_age)s'),
284 _('%(user)s commented on pull request %(date_or_age)s'),
287 _('%(user)s commented on pull request at %(date_or_age)s'),
285 _('%(user)s commented on pull request at %(date_or_age)s'),
288 ],
286 ],
289 }
287 }
290
288
291 templates = _map[notification.type_]
289 templates = _map[notification.type_]
292
290
293 if show_age:
291 if show_age:
294 template = templates[0]
292 template = templates[0]
295 date_or_age = h.age(notification.created_on)
293 date_or_age = h.age(notification.created_on)
296 if translate:
294 if translate:
297 date_or_age = translate(date_or_age)
295 date_or_age = translate(date_or_age)
298
296
299 if isinstance(date_or_age, TranslationString):
297 if isinstance(date_or_age, TranslationString):
300 date_or_age = date_or_age.interpolate()
298 date_or_age = date_or_age.interpolate()
301
299
302 else:
300 else:
303 template = templates[1]
301 template = templates[1]
304 date_or_age = h.format_date(notification.created_on)
302 date_or_age = h.format_date(notification.created_on)
305
303
306 return template % {
304 return template % {
307 'user': notification.created_by_user.username,
305 'user': notification.created_by_user.username,
308 'date_or_age': date_or_age,
306 'date_or_age': date_or_age,
309 }
307 }
310
308
311
309
312 # Templates for Titles, that could be overwritten by rcextensions
310 # Templates for Titles, that could be overwritten by rcextensions
313 # Title of email for pull-request update
311 # Title of email for pull-request update
314 EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = ''
312 EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = ''
315 # Title of email for request for pull request review
313 # Title of email for request for pull request review
316 EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = ''
314 EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = ''
317
315
318 # Title of email for general comment on pull request
316 # Title of email for general comment on pull request
319 EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = ''
317 EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = ''
320 # Title of email for general comment which includes status change on pull request
318 # Title of email for general comment which includes status change on pull request
321 EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
319 EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
322 # Title of email for inline comment on a file in pull request
320 # Title of email for inline comment on a file in pull request
323 EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = ''
321 EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = ''
324
322
325 # Title of email for general comment on commit
323 # Title of email for general comment on commit
326 EMAIL_COMMENT_SUBJECT_TEMPLATE = ''
324 EMAIL_COMMENT_SUBJECT_TEMPLATE = ''
327 # Title of email for general comment which includes status change on commit
325 # Title of email for general comment which includes status change on commit
328 EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
326 EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
329 # Title of email for inline comment on a file in commit
327 # Title of email for inline comment on a file in commit
330 EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = ''
328 EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = ''
331
329
332 import cssutils
330 import cssutils
333 # hijack css utils logger and replace with ours
331 # hijack css utils logger and replace with ours
334 log = logging.getLogger('rhodecode.cssutils.premailer')
332 log = logging.getLogger('rhodecode.cssutils.premailer')
335 log.setLevel(logging.INFO)
333 log.setLevel(logging.INFO)
336 cssutils.log.setLog(log)
334 cssutils.log.setLog(log)
337
335
338
336
339 class EmailNotificationModel(BaseModel):
337 class EmailNotificationModel(BaseModel):
340 TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT
338 TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT
341 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
339 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
342 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
340 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
343 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
341 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
344 TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE
342 TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE
345 TYPE_MAIN = Notification.TYPE_MESSAGE
343 TYPE_MAIN = Notification.TYPE_MESSAGE
346
344
347 TYPE_PASSWORD_RESET = 'password_reset'
345 TYPE_PASSWORD_RESET = 'password_reset'
348 TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation'
346 TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation'
349 TYPE_EMAIL_TEST = 'email_test'
347 TYPE_EMAIL_TEST = 'email_test'
350 TYPE_EMAIL_EXCEPTION = 'exception'
348 TYPE_EMAIL_EXCEPTION = 'exception'
351 TYPE_UPDATE_AVAILABLE = 'update_available'
349 TYPE_UPDATE_AVAILABLE = 'update_available'
352 TYPE_TEST = 'test'
350 TYPE_TEST = 'test'
353
351
354 email_types = {
352 email_types = {
355 TYPE_MAIN:
353 TYPE_MAIN:
356 'rhodecode:templates/email_templates/main.mako',
354 'rhodecode:templates/email_templates/main.mako',
357 TYPE_TEST:
355 TYPE_TEST:
358 'rhodecode:templates/email_templates/test.mako',
356 'rhodecode:templates/email_templates/test.mako',
359 TYPE_EMAIL_EXCEPTION:
357 TYPE_EMAIL_EXCEPTION:
360 'rhodecode:templates/email_templates/exception_tracker.mako',
358 'rhodecode:templates/email_templates/exception_tracker.mako',
361 TYPE_UPDATE_AVAILABLE:
359 TYPE_UPDATE_AVAILABLE:
362 'rhodecode:templates/email_templates/update_available.mako',
360 'rhodecode:templates/email_templates/update_available.mako',
363 TYPE_EMAIL_TEST:
361 TYPE_EMAIL_TEST:
364 'rhodecode:templates/email_templates/email_test.mako',
362 'rhodecode:templates/email_templates/email_test.mako',
365 TYPE_REGISTRATION:
363 TYPE_REGISTRATION:
366 'rhodecode:templates/email_templates/user_registration.mako',
364 'rhodecode:templates/email_templates/user_registration.mako',
367 TYPE_PASSWORD_RESET:
365 TYPE_PASSWORD_RESET:
368 'rhodecode:templates/email_templates/password_reset.mako',
366 'rhodecode:templates/email_templates/password_reset.mako',
369 TYPE_PASSWORD_RESET_CONFIRMATION:
367 TYPE_PASSWORD_RESET_CONFIRMATION:
370 'rhodecode:templates/email_templates/password_reset_confirmation.mako',
368 'rhodecode:templates/email_templates/password_reset_confirmation.mako',
371 TYPE_COMMIT_COMMENT:
369 TYPE_COMMIT_COMMENT:
372 'rhodecode:templates/email_templates/commit_comment.mako',
370 'rhodecode:templates/email_templates/commit_comment.mako',
373 TYPE_PULL_REQUEST:
371 TYPE_PULL_REQUEST:
374 'rhodecode:templates/email_templates/pull_request_review.mako',
372 'rhodecode:templates/email_templates/pull_request_review.mako',
375 TYPE_PULL_REQUEST_COMMENT:
373 TYPE_PULL_REQUEST_COMMENT:
376 'rhodecode:templates/email_templates/pull_request_comment.mako',
374 'rhodecode:templates/email_templates/pull_request_comment.mako',
377 TYPE_PULL_REQUEST_UPDATE:
375 TYPE_PULL_REQUEST_UPDATE:
378 'rhodecode:templates/email_templates/pull_request_update.mako',
376 'rhodecode:templates/email_templates/pull_request_update.mako',
379 }
377 }
380
378
381 premailer_instance = premailer.Premailer(
379 premailer_instance = premailer.Premailer(
382 #cssutils_logging_handler=log.handlers[0],
380 #cssutils_logging_handler=log.handlers[0],
383 #cssutils_logging_level=logging.INFO
381 #cssutils_logging_level=logging.INFO
384 )
382 )
385
383
386 def __init__(self):
384 def __init__(self):
387 """
385 """
388 Example usage::
386 Example usage::
389
387
390 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
388 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
391 EmailNotificationModel.TYPE_TEST, **email_kwargs)
389 EmailNotificationModel.TYPE_TEST, **email_kwargs)
392
390
393 """
391 """
394 super(EmailNotificationModel, self).__init__()
392 super().__init__()
395 self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title')
393 self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title')
396
394
397 def _update_kwargs_for_render(self, kwargs):
395 def _update_kwargs_for_render(self, kwargs):
398 """
396 """
399 Inject params required for Mako rendering
397 Inject params required for Mako rendering
400
398
401 :param kwargs:
399 :param kwargs:
402 """
400 """
403
401
404 kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name
402 kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name
405 kwargs['rhodecode_version'] = rhodecode.__version__
403 kwargs['rhodecode_version'] = rhodecode.__version__
406 instance_url = h.route_url('home')
404 instance_url = h.route_url('home')
407 _kwargs = {
405 _kwargs = {
408 'instance_url': instance_url,
406 'instance_url': instance_url,
409 'whitespace_filter': self.whitespace_filter,
407 'whitespace_filter': self.whitespace_filter,
410 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE,
408 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE,
411 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE,
409 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE,
412 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE,
410 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE,
413 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
411 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
414 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE,
412 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE,
415 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE,
413 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE,
416 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
414 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
417 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE,
415 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE,
418 }
416 }
419 _kwargs.update(kwargs)
417 _kwargs.update(kwargs)
420 return _kwargs
418 return _kwargs
421
419
422 def whitespace_filter(self, text):
420 def whitespace_filter(self, text):
423 return text.replace('\n', '').replace('\t', '')
421 return text.replace('\n', '').replace('\t', '')
424
422
425 def get_renderer(self, type_, request):
423 def get_renderer(self, type_, request):
426 template_name = self.email_types[type_]
424 template_name = self.email_types[type_]
427 return request.get_partial_renderer(template_name)
425 return request.get_partial_renderer(template_name)
428
426
429 def render_email(self, type_, **kwargs):
427 def render_email(self, type_, **kwargs):
430 """
428 """
431 renders template for email, and returns a tuple of
429 renders template for email, and returns a tuple of
432 (subject, email_headers, email_html_body, email_plaintext_body)
430 (subject, email_headers, email_html_body, email_plaintext_body)
433 """
431 """
434 request = get_current_request()
432 request = get_current_request()
435
433
436 # translator and helpers inject
434 # translator and helpers inject
437 _kwargs = self._update_kwargs_for_render(kwargs)
435 _kwargs = self._update_kwargs_for_render(kwargs)
438 email_template = self.get_renderer(type_, request=request)
436 email_template = self.get_renderer(type_, request=request)
439 subject = email_template.render('subject', **_kwargs)
437 subject = email_template.render('subject', **_kwargs)
440
438
441 try:
439 try:
442 body_plaintext = email_template.render('body_plaintext', **_kwargs)
440 body_plaintext = email_template.render('body_plaintext', **_kwargs)
443 except AttributeError:
441 except AttributeError:
444 # it's not defined in template, ok we can skip it
442 # it's not defined in template, ok we can skip it
445 body_plaintext = ''
443 body_plaintext = ''
446
444
447 # render WHOLE template
445 # render WHOLE template
448 body = email_template.render(None, **_kwargs)
446 body = email_template.render(None, **_kwargs)
449
447
450 try:
448 try:
451 # Inline CSS styles and conversion
449 # Inline CSS styles and conversion
452 body = self.premailer_instance.transform(body)
450 body = self.premailer_instance.transform(body)
453 except Exception:
451 except Exception:
454 log.exception('Failed to parse body with premailer')
452 log.exception('Failed to parse body with premailer')
455 pass
453 pass
456
454
457 return subject, body, body_plaintext
455 return subject, body, body_plaintext
@@ -1,607 +1,606 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 """
19 """
21 permissions model for RhodeCode
20 permissions model for RhodeCode
22 """
21 """
23 import collections
22 import collections
24 import logging
23 import logging
25 import traceback
24 import traceback
26
25
27 from sqlalchemy.exc import DatabaseError
26 from sqlalchemy.exc import DatabaseError
28
27
29 from rhodecode import events
28 from rhodecode import events
30 from rhodecode.model import BaseModel
29 from rhodecode.model import BaseModel
31 from rhodecode.model.db import (
30 from rhodecode.model.db import (
32 User, Permission, UserToPerm, UserRepoToPerm, UserRepoGroupToPerm,
31 User, Permission, UserToPerm, UserRepoToPerm, UserRepoGroupToPerm,
33 UserUserGroupToPerm, UserGroup, UserGroupToPerm, UserToRepoBranchPermission)
32 UserUserGroupToPerm, UserGroup, UserGroupToPerm, UserToRepoBranchPermission)
34 from rhodecode.lib.utils2 import str2bool, safe_int
33 from rhodecode.lib.utils2 import str2bool, safe_int
35
34
36 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
37
36
38
37
39 class PermissionModel(BaseModel):
38 class PermissionModel(BaseModel):
40 """
39 """
41 Permissions model for RhodeCode
40 Permissions model for RhodeCode
42 """
41 """
43 FORKING_DISABLED = 'hg.fork.none'
42 FORKING_DISABLED = 'hg.fork.none'
44 FORKING_ENABLED = 'hg.fork.repository'
43 FORKING_ENABLED = 'hg.fork.repository'
45
44
46 cls = Permission
45 cls = Permission
47 global_perms = {
46 global_perms = {
48 'default_repo_create': None,
47 'default_repo_create': None,
49 # special case for create repos on write access to group
48 # special case for create repos on write access to group
50 'default_repo_create_on_write': None,
49 'default_repo_create_on_write': None,
51 'default_repo_group_create': None,
50 'default_repo_group_create': None,
52 'default_user_group_create': None,
51 'default_user_group_create': None,
53 'default_fork_create': None,
52 'default_fork_create': None,
54 'default_inherit_default_permissions': None,
53 'default_inherit_default_permissions': None,
55 'default_register': None,
54 'default_register': None,
56 'default_password_reset': None,
55 'default_password_reset': None,
57 'default_extern_activate': None,
56 'default_extern_activate': None,
58
57
59 # object permissions below
58 # object permissions below
60 'default_repo_perm': None,
59 'default_repo_perm': None,
61 'default_group_perm': None,
60 'default_group_perm': None,
62 'default_user_group_perm': None,
61 'default_user_group_perm': None,
63
62
64 # branch
63 # branch
65 'default_branch_perm': None,
64 'default_branch_perm': None,
66 }
65 }
67
66
68 def set_global_permission_choices(self, c_obj, gettext_translator):
67 def set_global_permission_choices(self, c_obj, gettext_translator):
69 _ = gettext_translator
68 _ = gettext_translator
70
69
71 c_obj.repo_perms_choices = [
70 c_obj.repo_perms_choices = [
72 ('repository.none', _('None'),),
71 ('repository.none', _('None'),),
73 ('repository.read', _('Read'),),
72 ('repository.read', _('Read'),),
74 ('repository.write', _('Write'),),
73 ('repository.write', _('Write'),),
75 ('repository.admin', _('Admin'),)]
74 ('repository.admin', _('Admin'),)]
76
75
77 c_obj.group_perms_choices = [
76 c_obj.group_perms_choices = [
78 ('group.none', _('None'),),
77 ('group.none', _('None'),),
79 ('group.read', _('Read'),),
78 ('group.read', _('Read'),),
80 ('group.write', _('Write'),),
79 ('group.write', _('Write'),),
81 ('group.admin', _('Admin'),)]
80 ('group.admin', _('Admin'),)]
82
81
83 c_obj.user_group_perms_choices = [
82 c_obj.user_group_perms_choices = [
84 ('usergroup.none', _('None'),),
83 ('usergroup.none', _('None'),),
85 ('usergroup.read', _('Read'),),
84 ('usergroup.read', _('Read'),),
86 ('usergroup.write', _('Write'),),
85 ('usergroup.write', _('Write'),),
87 ('usergroup.admin', _('Admin'),)]
86 ('usergroup.admin', _('Admin'),)]
88
87
89 c_obj.branch_perms_choices = [
88 c_obj.branch_perms_choices = [
90 ('branch.none', _('Protected/No Access'),),
89 ('branch.none', _('Protected/No Access'),),
91 ('branch.merge', _('Web merge'),),
90 ('branch.merge', _('Web merge'),),
92 ('branch.push', _('Push'),),
91 ('branch.push', _('Push'),),
93 ('branch.push_force', _('Force Push'),)]
92 ('branch.push_force', _('Force Push'),)]
94
93
95 c_obj.register_choices = [
94 c_obj.register_choices = [
96 ('hg.register.none', _('Disabled')),
95 ('hg.register.none', _('Disabled')),
97 ('hg.register.manual_activate', _('Allowed with manual account activation')),
96 ('hg.register.manual_activate', _('Allowed with manual account activation')),
98 ('hg.register.auto_activate', _('Allowed with automatic account activation'))]
97 ('hg.register.auto_activate', _('Allowed with automatic account activation'))]
99
98
100 c_obj.password_reset_choices = [
99 c_obj.password_reset_choices = [
101 ('hg.password_reset.enabled', _('Allow password recovery')),
100 ('hg.password_reset.enabled', _('Allow password recovery')),
102 ('hg.password_reset.hidden', _('Hide password recovery link')),
101 ('hg.password_reset.hidden', _('Hide password recovery link')),
103 ('hg.password_reset.disabled', _('Disable password recovery'))]
102 ('hg.password_reset.disabled', _('Disable password recovery'))]
104
103
105 c_obj.extern_activate_choices = [
104 c_obj.extern_activate_choices = [
106 ('hg.extern_activate.manual', _('Manual activation of external account')),
105 ('hg.extern_activate.manual', _('Manual activation of external account')),
107 ('hg.extern_activate.auto', _('Automatic activation of external account'))]
106 ('hg.extern_activate.auto', _('Automatic activation of external account'))]
108
107
109 c_obj.repo_create_choices = [
108 c_obj.repo_create_choices = [
110 ('hg.create.none', _('Disabled')),
109 ('hg.create.none', _('Disabled')),
111 ('hg.create.repository', _('Enabled'))]
110 ('hg.create.repository', _('Enabled'))]
112
111
113 c_obj.repo_create_on_write_choices = [
112 c_obj.repo_create_on_write_choices = [
114 ('hg.create.write_on_repogroup.false', _('Disabled')),
113 ('hg.create.write_on_repogroup.false', _('Disabled')),
115 ('hg.create.write_on_repogroup.true', _('Enabled'))]
114 ('hg.create.write_on_repogroup.true', _('Enabled'))]
116
115
117 c_obj.user_group_create_choices = [
116 c_obj.user_group_create_choices = [
118 ('hg.usergroup.create.false', _('Disabled')),
117 ('hg.usergroup.create.false', _('Disabled')),
119 ('hg.usergroup.create.true', _('Enabled'))]
118 ('hg.usergroup.create.true', _('Enabled'))]
120
119
121 c_obj.repo_group_create_choices = [
120 c_obj.repo_group_create_choices = [
122 ('hg.repogroup.create.false', _('Disabled')),
121 ('hg.repogroup.create.false', _('Disabled')),
123 ('hg.repogroup.create.true', _('Enabled'))]
122 ('hg.repogroup.create.true', _('Enabled'))]
124
123
125 c_obj.fork_choices = [
124 c_obj.fork_choices = [
126 (self.FORKING_DISABLED, _('Disabled')),
125 (self.FORKING_DISABLED, _('Disabled')),
127 (self.FORKING_ENABLED, _('Enabled'))]
126 (self.FORKING_ENABLED, _('Enabled'))]
128
127
129 c_obj.inherit_default_permission_choices = [
128 c_obj.inherit_default_permission_choices = [
130 ('hg.inherit_default_perms.false', _('Disabled')),
129 ('hg.inherit_default_perms.false', _('Disabled')),
131 ('hg.inherit_default_perms.true', _('Enabled'))]
130 ('hg.inherit_default_perms.true', _('Enabled'))]
132
131
133 def get_default_perms(self, object_perms, suffix):
132 def get_default_perms(self, object_perms, suffix):
134 defaults = {}
133 defaults = {}
135 for perm in object_perms:
134 for perm in object_perms:
136 # perms
135 # perms
137 if perm.permission.permission_name.startswith('repository.'):
136 if perm.permission.permission_name.startswith('repository.'):
138 defaults['default_repo_perm' + suffix] = perm.permission.permission_name
137 defaults['default_repo_perm' + suffix] = perm.permission.permission_name
139
138
140 if perm.permission.permission_name.startswith('group.'):
139 if perm.permission.permission_name.startswith('group.'):
141 defaults['default_group_perm' + suffix] = perm.permission.permission_name
140 defaults['default_group_perm' + suffix] = perm.permission.permission_name
142
141
143 if perm.permission.permission_name.startswith('usergroup.'):
142 if perm.permission.permission_name.startswith('usergroup.'):
144 defaults['default_user_group_perm' + suffix] = perm.permission.permission_name
143 defaults['default_user_group_perm' + suffix] = perm.permission.permission_name
145
144
146 # branch
145 # branch
147 if perm.permission.permission_name.startswith('branch.'):
146 if perm.permission.permission_name.startswith('branch.'):
148 defaults['default_branch_perm' + suffix] = perm.permission.permission_name
147 defaults['default_branch_perm' + suffix] = perm.permission.permission_name
149
148
150 # creation of objects
149 # creation of objects
151 if perm.permission.permission_name.startswith('hg.create.write_on_repogroup'):
150 if perm.permission.permission_name.startswith('hg.create.write_on_repogroup'):
152 defaults['default_repo_create_on_write' + suffix] = perm.permission.permission_name
151 defaults['default_repo_create_on_write' + suffix] = perm.permission.permission_name
153
152
154 elif perm.permission.permission_name.startswith('hg.create.'):
153 elif perm.permission.permission_name.startswith('hg.create.'):
155 defaults['default_repo_create' + suffix] = perm.permission.permission_name
154 defaults['default_repo_create' + suffix] = perm.permission.permission_name
156
155
157 if perm.permission.permission_name.startswith('hg.fork.'):
156 if perm.permission.permission_name.startswith('hg.fork.'):
158 defaults['default_fork_create' + suffix] = perm.permission.permission_name
157 defaults['default_fork_create' + suffix] = perm.permission.permission_name
159
158
160 if perm.permission.permission_name.startswith('hg.inherit_default_perms.'):
159 if perm.permission.permission_name.startswith('hg.inherit_default_perms.'):
161 defaults['default_inherit_default_permissions' + suffix] = perm.permission.permission_name
160 defaults['default_inherit_default_permissions' + suffix] = perm.permission.permission_name
162
161
163 if perm.permission.permission_name.startswith('hg.repogroup.'):
162 if perm.permission.permission_name.startswith('hg.repogroup.'):
164 defaults['default_repo_group_create' + suffix] = perm.permission.permission_name
163 defaults['default_repo_group_create' + suffix] = perm.permission.permission_name
165
164
166 if perm.permission.permission_name.startswith('hg.usergroup.'):
165 if perm.permission.permission_name.startswith('hg.usergroup.'):
167 defaults['default_user_group_create' + suffix] = perm.permission.permission_name
166 defaults['default_user_group_create' + suffix] = perm.permission.permission_name
168
167
169 # registration and external account activation
168 # registration and external account activation
170 if perm.permission.permission_name.startswith('hg.register.'):
169 if perm.permission.permission_name.startswith('hg.register.'):
171 defaults['default_register' + suffix] = perm.permission.permission_name
170 defaults['default_register' + suffix] = perm.permission.permission_name
172
171
173 if perm.permission.permission_name.startswith('hg.password_reset.'):
172 if perm.permission.permission_name.startswith('hg.password_reset.'):
174 defaults['default_password_reset' + suffix] = perm.permission.permission_name
173 defaults['default_password_reset' + suffix] = perm.permission.permission_name
175
174
176 if perm.permission.permission_name.startswith('hg.extern_activate.'):
175 if perm.permission.permission_name.startswith('hg.extern_activate.'):
177 defaults['default_extern_activate' + suffix] = perm.permission.permission_name
176 defaults['default_extern_activate' + suffix] = perm.permission.permission_name
178
177
179 return defaults
178 return defaults
180
179
181 def _make_new_user_perm(self, user, perm_name):
180 def _make_new_user_perm(self, user, perm_name):
182 log.debug('Creating new user permission:%s', perm_name)
181 log.debug('Creating new user permission:%s', perm_name)
183 new_perm = Permission.get_by_key(perm_name)
182 new_perm = Permission.get_by_key(perm_name)
184 if not new_perm:
183 if not new_perm:
185 raise ValueError(f'permission with name {perm_name} not found')
184 raise ValueError(f'permission with name {perm_name} not found')
186
185
187 new = UserToPerm()
186 new = UserToPerm()
188 new.user = user
187 new.user = user
189 new.permission = new_perm
188 new.permission = new_perm
190 return new
189 return new
191
190
192 def _make_new_user_group_perm(self, user_group, perm_name):
191 def _make_new_user_group_perm(self, user_group, perm_name):
193 log.debug('Creating new user group permission:%s', perm_name)
192 log.debug('Creating new user group permission:%s', perm_name)
194 new_perm = Permission.get_by_key(perm_name)
193 new_perm = Permission.get_by_key(perm_name)
195 if not new_perm:
194 if not new_perm:
196 raise ValueError(f'permission with name {perm_name} not found')
195 raise ValueError(f'permission with name {perm_name} not found')
197
196
198 new = UserGroupToPerm()
197 new = UserGroupToPerm()
199 new.users_group = user_group
198 new.users_group = user_group
200 new.permission = new_perm
199 new.permission = new_perm
201 return new
200 return new
202
201
203 def _keep_perm(self, perm_name, keep_fields):
202 def _keep_perm(self, perm_name, keep_fields):
204 def get_pat(field_name):
203 def get_pat(field_name):
205 return {
204 return {
206 # global perms
205 # global perms
207 'default_repo_create': 'hg.create.',
206 'default_repo_create': 'hg.create.',
208 # special case for create repos on write access to group
207 # special case for create repos on write access to group
209 'default_repo_create_on_write': 'hg.create.write_on_repogroup.',
208 'default_repo_create_on_write': 'hg.create.write_on_repogroup.',
210 'default_repo_group_create': 'hg.repogroup.create.',
209 'default_repo_group_create': 'hg.repogroup.create.',
211 'default_user_group_create': 'hg.usergroup.create.',
210 'default_user_group_create': 'hg.usergroup.create.',
212 'default_fork_create': 'hg.fork.',
211 'default_fork_create': 'hg.fork.',
213 'default_inherit_default_permissions': 'hg.inherit_default_perms.',
212 'default_inherit_default_permissions': 'hg.inherit_default_perms.',
214
213
215 # application perms
214 # application perms
216 'default_register': 'hg.register.',
215 'default_register': 'hg.register.',
217 'default_password_reset': 'hg.password_reset.',
216 'default_password_reset': 'hg.password_reset.',
218 'default_extern_activate': 'hg.extern_activate.',
217 'default_extern_activate': 'hg.extern_activate.',
219
218
220 # object permissions below
219 # object permissions below
221 'default_repo_perm': 'repository.',
220 'default_repo_perm': 'repository.',
222 'default_group_perm': 'group.',
221 'default_group_perm': 'group.',
223 'default_user_group_perm': 'usergroup.',
222 'default_user_group_perm': 'usergroup.',
224 # branch
223 # branch
225 'default_branch_perm': 'branch.',
224 'default_branch_perm': 'branch.',
226
225
227 }[field_name]
226 }[field_name]
228 for field in keep_fields:
227 for field in keep_fields:
229 pat = get_pat(field)
228 pat = get_pat(field)
230 if perm_name.startswith(pat):
229 if perm_name.startswith(pat):
231 return True
230 return True
232 return False
231 return False
233
232
234 def _clear_object_perm(self, object_perms, preserve=None):
233 def _clear_object_perm(self, object_perms, preserve=None):
235 preserve = preserve or []
234 preserve = preserve or []
236 _deleted = []
235 _deleted = []
237 for perm in object_perms:
236 for perm in object_perms:
238 perm_name = perm.permission.permission_name
237 perm_name = perm.permission.permission_name
239 if not self._keep_perm(perm_name, keep_fields=preserve):
238 if not self._keep_perm(perm_name, keep_fields=preserve):
240 _deleted.append(perm_name)
239 _deleted.append(perm_name)
241 self.sa.delete(perm)
240 self.sa.delete(perm)
242 return _deleted
241 return _deleted
243
242
244 def _clear_user_perms(self, user_id, preserve=None):
243 def _clear_user_perms(self, user_id, preserve=None):
245 perms = self.sa.query(UserToPerm)\
244 perms = self.sa.query(UserToPerm)\
246 .filter(UserToPerm.user_id == user_id)\
245 .filter(UserToPerm.user_id == user_id)\
247 .all()
246 .all()
248 return self._clear_object_perm(perms, preserve=preserve)
247 return self._clear_object_perm(perms, preserve=preserve)
249
248
250 def _clear_user_group_perms(self, user_group_id, preserve=None):
249 def _clear_user_group_perms(self, user_group_id, preserve=None):
251 perms = self.sa.query(UserGroupToPerm)\
250 perms = self.sa.query(UserGroupToPerm)\
252 .filter(UserGroupToPerm.users_group_id == user_group_id)\
251 .filter(UserGroupToPerm.users_group_id == user_group_id)\
253 .all()
252 .all()
254 return self._clear_object_perm(perms, preserve=preserve)
253 return self._clear_object_perm(perms, preserve=preserve)
255
254
256 def _set_new_object_perms(self, obj_type, to_object, form_result, preserve=None):
255 def _set_new_object_perms(self, obj_type, to_object, form_result, preserve=None):
257 # clear current entries, to make this function idempotent
256 # clear current entries, to make this function idempotent
258 # it will fix even if we define more permissions or permissions
257 # it will fix even if we define more permissions or permissions
259 # are somehow missing
258 # are somehow missing
260 preserve = preserve or []
259 preserve = preserve or []
261 _global_perms = self.global_perms.copy()
260 _global_perms = self.global_perms.copy()
262 if obj_type not in ['user', 'user_group']:
261 if obj_type not in ['user', 'user_group']:
263 raise ValueError("obj_type must be on of 'user' or 'user_group'")
262 raise ValueError("obj_type must be on of 'user' or 'user_group'")
264 global_perms = len(_global_perms)
263 global_perms = len(_global_perms)
265 default_user_perms = len(Permission.DEFAULT_USER_PERMISSIONS)
264 default_user_perms = len(Permission.DEFAULT_USER_PERMISSIONS)
266 if global_perms != default_user_perms:
265 if global_perms != default_user_perms:
267 raise Exception(
266 raise Exception(
268 'Inconsistent permissions definition. Got {} vs {}'.format(
267 'Inconsistent permissions definition. Got {} vs {}'.format(
269 global_perms, default_user_perms))
268 global_perms, default_user_perms))
270
269
271 if obj_type == 'user':
270 if obj_type == 'user':
272 self._clear_user_perms(to_object.user_id, preserve)
271 self._clear_user_perms(to_object.user_id, preserve)
273 if obj_type == 'user_group':
272 if obj_type == 'user_group':
274 self._clear_user_group_perms(to_object.users_group_id, preserve)
273 self._clear_user_group_perms(to_object.users_group_id, preserve)
275
274
276 # now kill the keys that we want to preserve from the form.
275 # now kill the keys that we want to preserve from the form.
277 for key in preserve:
276 for key in preserve:
278 del _global_perms[key]
277 del _global_perms[key]
279
278
280 for k in _global_perms.copy():
279 for k in _global_perms.copy():
281 _global_perms[k] = form_result[k]
280 _global_perms[k] = form_result[k]
282
281
283 # at that stage we validate all are passed inside form_result
282 # at that stage we validate all are passed inside form_result
284 for _perm_key, perm_value in _global_perms.items():
283 for _perm_key, perm_value in _global_perms.items():
285 if perm_value is None:
284 if perm_value is None:
286 raise ValueError('Missing permission for %s' % (_perm_key,))
285 raise ValueError('Missing permission for {}'.format(_perm_key))
287
286
288 if obj_type == 'user':
287 if obj_type == 'user':
289 p = self._make_new_user_perm(to_object, perm_value)
288 p = self._make_new_user_perm(to_object, perm_value)
290 self.sa.add(p)
289 self.sa.add(p)
291 if obj_type == 'user_group':
290 if obj_type == 'user_group':
292 p = self._make_new_user_group_perm(to_object, perm_value)
291 p = self._make_new_user_group_perm(to_object, perm_value)
293 self.sa.add(p)
292 self.sa.add(p)
294
293
295 def _set_new_user_perms(self, user, form_result, preserve=None):
294 def _set_new_user_perms(self, user, form_result, preserve=None):
296 return self._set_new_object_perms(
295 return self._set_new_object_perms(
297 'user', user, form_result, preserve)
296 'user', user, form_result, preserve)
298
297
299 def _set_new_user_group_perms(self, user_group, form_result, preserve=None):
298 def _set_new_user_group_perms(self, user_group, form_result, preserve=None):
300 return self._set_new_object_perms(
299 return self._set_new_object_perms(
301 'user_group', user_group, form_result, preserve)
300 'user_group', user_group, form_result, preserve)
302
301
303 def set_new_user_perms(self, user, form_result):
302 def set_new_user_perms(self, user, form_result):
304 # calculate what to preserve from what is given in form_result
303 # calculate what to preserve from what is given in form_result
305 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
304 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
306 return self._set_new_user_perms(user, form_result, preserve)
305 return self._set_new_user_perms(user, form_result, preserve)
307
306
308 def set_new_user_group_perms(self, user_group, form_result):
307 def set_new_user_group_perms(self, user_group, form_result):
309 # calculate what to preserve from what is given in form_result
308 # calculate what to preserve from what is given in form_result
310 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
309 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
311 return self._set_new_user_group_perms(user_group, form_result, preserve)
310 return self._set_new_user_group_perms(user_group, form_result, preserve)
312
311
313 def create_permissions(self):
312 def create_permissions(self):
314 """
313 """
315 Create permissions for whole system
314 Create permissions for whole system
316 """
315 """
317 for p in Permission.PERMS:
316 for p in Permission.PERMS:
318 if not Permission.get_by_key(p[0]):
317 if not Permission.get_by_key(p[0]):
319 new_perm = Permission()
318 new_perm = Permission()
320 new_perm.permission_name = p[0]
319 new_perm.permission_name = p[0]
321 new_perm.permission_longname = p[0] # translation err with p[1]
320 new_perm.permission_longname = p[0] # translation err with p[1]
322 self.sa.add(new_perm)
321 self.sa.add(new_perm)
323
322
324 def _create_default_object_permission(self, obj_type, obj, obj_perms,
323 def _create_default_object_permission(self, obj_type, obj, obj_perms,
325 force=False):
324 force=False):
326 if obj_type not in ['user', 'user_group']:
325 if obj_type not in ['user', 'user_group']:
327 raise ValueError("obj_type must be on of 'user' or 'user_group'")
326 raise ValueError("obj_type must be on of 'user' or 'user_group'")
328
327
329 def _get_group(perm_name):
328 def _get_group(perm_name):
330 return '.'.join(perm_name.split('.')[:1])
329 return '.'.join(perm_name.split('.')[:1])
331
330
332 defined_perms_groups = list(map(
331 defined_perms_groups = list(map(
333 _get_group, (x.permission.permission_name for x in obj_perms)))
332 _get_group, (x.permission.permission_name for x in obj_perms)))
334 log.debug('GOT ALREADY DEFINED:%s', obj_perms)
333 log.debug('GOT ALREADY DEFINED:%s', obj_perms)
335
334
336 if force:
335 if force:
337 self._clear_object_perm(obj_perms)
336 self._clear_object_perm(obj_perms)
338 self.sa.commit()
337 self.sa.commit()
339 defined_perms_groups = []
338 defined_perms_groups = []
340 # for every default permission that needs to be created, we check if
339 # for every default permission that needs to be created, we check if
341 # it's group is already defined, if it's not we create default perm
340 # it's group is already defined, if it's not we create default perm
342 for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
341 for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
343 gr = _get_group(perm_name)
342 gr = _get_group(perm_name)
344 if gr not in defined_perms_groups:
343 if gr not in defined_perms_groups:
345 log.debug('GR:%s not found, creating permission %s',
344 log.debug('GR:%s not found, creating permission %s',
346 gr, perm_name)
345 gr, perm_name)
347 if obj_type == 'user':
346 if obj_type == 'user':
348 new_perm = self._make_new_user_perm(obj, perm_name)
347 new_perm = self._make_new_user_perm(obj, perm_name)
349 self.sa.add(new_perm)
348 self.sa.add(new_perm)
350 if obj_type == 'user_group':
349 if obj_type == 'user_group':
351 new_perm = self._make_new_user_group_perm(obj, perm_name)
350 new_perm = self._make_new_user_group_perm(obj, perm_name)
352 self.sa.add(new_perm)
351 self.sa.add(new_perm)
353
352
354 def create_default_user_permissions(self, user, force=False):
353 def create_default_user_permissions(self, user, force=False):
355 """
354 """
356 Creates only missing default permissions for user, if force is set it
355 Creates only missing default permissions for user, if force is set it
357 resets the default permissions for that user
356 resets the default permissions for that user
358
357
359 :param user:
358 :param user:
360 :param force:
359 :param force:
361 """
360 """
362 user = self._get_user(user)
361 user = self._get_user(user)
363 obj_perms = UserToPerm.query().filter(UserToPerm.user == user).all()
362 obj_perms = UserToPerm.query().filter(UserToPerm.user == user).all()
364 return self._create_default_object_permission(
363 return self._create_default_object_permission(
365 'user', user, obj_perms, force)
364 'user', user, obj_perms, force)
366
365
367 def create_default_user_group_permissions(self, user_group, force=False):
366 def create_default_user_group_permissions(self, user_group, force=False):
368 """
367 """
369 Creates only missing default permissions for user group, if force is
368 Creates only missing default permissions for user group, if force is
370 set it resets the default permissions for that user group
369 set it resets the default permissions for that user group
371
370
372 :param user_group:
371 :param user_group:
373 :param force:
372 :param force:
374 """
373 """
375 user_group = self._get_user_group(user_group)
374 user_group = self._get_user_group(user_group)
376 obj_perms = UserToPerm.query().filter(UserGroupToPerm.users_group == user_group).all()
375 obj_perms = UserToPerm.query().filter(UserGroupToPerm.users_group == user_group).all()
377 return self._create_default_object_permission(
376 return self._create_default_object_permission(
378 'user_group', user_group, obj_perms, force)
377 'user_group', user_group, obj_perms, force)
379
378
380 def update_application_permissions(self, form_result):
379 def update_application_permissions(self, form_result):
381 if 'perm_user_id' in form_result:
380 if 'perm_user_id' in form_result:
382 perm_user = User.get(safe_int(form_result['perm_user_id']))
381 perm_user = User.get(safe_int(form_result['perm_user_id']))
383 else:
382 else:
384 # used mostly to do lookup for default user
383 # used mostly to do lookup for default user
385 perm_user = User.get_by_username(form_result['perm_user_name'])
384 perm_user = User.get_by_username(form_result['perm_user_name'])
386
385
387 try:
386 try:
388 # stage 1 set anonymous access
387 # stage 1 set anonymous access
389 if perm_user.username == User.DEFAULT_USER:
388 if perm_user.username == User.DEFAULT_USER:
390 perm_user.active = str2bool(form_result['anonymous'])
389 perm_user.active = str2bool(form_result['anonymous'])
391 self.sa.add(perm_user)
390 self.sa.add(perm_user)
392
391
393 # stage 2 reset defaults and set them from form data
392 # stage 2 reset defaults and set them from form data
394 self._set_new_user_perms(perm_user, form_result, preserve=[
393 self._set_new_user_perms(perm_user, form_result, preserve=[
395 'default_repo_perm',
394 'default_repo_perm',
396 'default_group_perm',
395 'default_group_perm',
397 'default_user_group_perm',
396 'default_user_group_perm',
398 'default_branch_perm',
397 'default_branch_perm',
399
398
400 'default_repo_group_create',
399 'default_repo_group_create',
401 'default_user_group_create',
400 'default_user_group_create',
402 'default_repo_create_on_write',
401 'default_repo_create_on_write',
403 'default_repo_create',
402 'default_repo_create',
404 'default_fork_create',
403 'default_fork_create',
405 'default_inherit_default_permissions'])
404 'default_inherit_default_permissions'])
406
405
407 self.sa.commit()
406 self.sa.commit()
408 except (DatabaseError,):
407 except (DatabaseError,):
409 log.error(traceback.format_exc())
408 log.error(traceback.format_exc())
410 self.sa.rollback()
409 self.sa.rollback()
411 raise
410 raise
412
411
413 def update_user_permissions(self, form_result):
412 def update_user_permissions(self, form_result):
414 if 'perm_user_id' in form_result:
413 if 'perm_user_id' in form_result:
415 perm_user = User.get(safe_int(form_result['perm_user_id']))
414 perm_user = User.get(safe_int(form_result['perm_user_id']))
416 else:
415 else:
417 # used mostly to do lookup for default user
416 # used mostly to do lookup for default user
418 perm_user = User.get_by_username(form_result['perm_user_name'])
417 perm_user = User.get_by_username(form_result['perm_user_name'])
419 try:
418 try:
420 # stage 2 reset defaults and set them from form data
419 # stage 2 reset defaults and set them from form data
421 self._set_new_user_perms(perm_user, form_result, preserve=[
420 self._set_new_user_perms(perm_user, form_result, preserve=[
422 'default_repo_perm',
421 'default_repo_perm',
423 'default_group_perm',
422 'default_group_perm',
424 'default_user_group_perm',
423 'default_user_group_perm',
425 'default_branch_perm',
424 'default_branch_perm',
426
425
427 'default_register',
426 'default_register',
428 'default_password_reset',
427 'default_password_reset',
429 'default_extern_activate'])
428 'default_extern_activate'])
430 self.sa.commit()
429 self.sa.commit()
431 except (DatabaseError,):
430 except (DatabaseError,):
432 log.error(traceback.format_exc())
431 log.error(traceback.format_exc())
433 self.sa.rollback()
432 self.sa.rollback()
434 raise
433 raise
435
434
436 def update_user_group_permissions(self, form_result):
435 def update_user_group_permissions(self, form_result):
437 if 'perm_user_group_id' in form_result:
436 if 'perm_user_group_id' in form_result:
438 perm_user_group = UserGroup.get(safe_int(form_result['perm_user_group_id']))
437 perm_user_group = UserGroup.get(safe_int(form_result['perm_user_group_id']))
439 else:
438 else:
440 # used mostly to do lookup for default user
439 # used mostly to do lookup for default user
441 perm_user_group = UserGroup.get_by_group_name(form_result['perm_user_group_name'])
440 perm_user_group = UserGroup.get_by_group_name(form_result['perm_user_group_name'])
442 try:
441 try:
443 # stage 2 reset defaults and set them from form data
442 # stage 2 reset defaults and set them from form data
444 self._set_new_user_group_perms(perm_user_group, form_result, preserve=[
443 self._set_new_user_group_perms(perm_user_group, form_result, preserve=[
445 'default_repo_perm',
444 'default_repo_perm',
446 'default_group_perm',
445 'default_group_perm',
447 'default_user_group_perm',
446 'default_user_group_perm',
448 'default_branch_perm',
447 'default_branch_perm',
449
448
450 'default_register',
449 'default_register',
451 'default_password_reset',
450 'default_password_reset',
452 'default_extern_activate'])
451 'default_extern_activate'])
453 self.sa.commit()
452 self.sa.commit()
454 except (DatabaseError,):
453 except (DatabaseError,):
455 log.error(traceback.format_exc())
454 log.error(traceback.format_exc())
456 self.sa.rollback()
455 self.sa.rollback()
457 raise
456 raise
458
457
459 def update_object_permissions(self, form_result):
458 def update_object_permissions(self, form_result):
460 if 'perm_user_id' in form_result:
459 if 'perm_user_id' in form_result:
461 perm_user = User.get(safe_int(form_result['perm_user_id']))
460 perm_user = User.get(safe_int(form_result['perm_user_id']))
462 else:
461 else:
463 # used mostly to do lookup for default user
462 # used mostly to do lookup for default user
464 perm_user = User.get_by_username(form_result['perm_user_name'])
463 perm_user = User.get_by_username(form_result['perm_user_name'])
465 try:
464 try:
466
465
467 # stage 2 reset defaults and set them from form data
466 # stage 2 reset defaults and set them from form data
468 self._set_new_user_perms(perm_user, form_result, preserve=[
467 self._set_new_user_perms(perm_user, form_result, preserve=[
469 'default_repo_group_create',
468 'default_repo_group_create',
470 'default_user_group_create',
469 'default_user_group_create',
471 'default_repo_create_on_write',
470 'default_repo_create_on_write',
472 'default_repo_create',
471 'default_repo_create',
473 'default_fork_create',
472 'default_fork_create',
474 'default_inherit_default_permissions',
473 'default_inherit_default_permissions',
475 'default_branch_perm',
474 'default_branch_perm',
476
475
477 'default_register',
476 'default_register',
478 'default_password_reset',
477 'default_password_reset',
479 'default_extern_activate'])
478 'default_extern_activate'])
480
479
481 # overwrite default repo permissions
480 # overwrite default repo permissions
482 if form_result['overwrite_default_repo']:
481 if form_result['overwrite_default_repo']:
483 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
482 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
484 _def = Permission.get_by_key('repository.' + _def_name)
483 _def = Permission.get_by_key('repository.' + _def_name)
485 for r2p in self.sa.query(UserRepoToPerm)\
484 for r2p in self.sa.query(UserRepoToPerm)\
486 .filter(UserRepoToPerm.user == perm_user)\
485 .filter(UserRepoToPerm.user == perm_user)\
487 .all():
486 .all():
488 # don't reset PRIVATE repositories
487 # don't reset PRIVATE repositories
489 if not r2p.repository.private:
488 if not r2p.repository.private:
490 r2p.permission = _def
489 r2p.permission = _def
491 self.sa.add(r2p)
490 self.sa.add(r2p)
492
491
493 # overwrite default repo group permissions
492 # overwrite default repo group permissions
494 if form_result['overwrite_default_group']:
493 if form_result['overwrite_default_group']:
495 _def_name = form_result['default_group_perm'].split('group.')[-1]
494 _def_name = form_result['default_group_perm'].split('group.')[-1]
496 _def = Permission.get_by_key('group.' + _def_name)
495 _def = Permission.get_by_key('group.' + _def_name)
497 for g2p in self.sa.query(UserRepoGroupToPerm)\
496 for g2p in self.sa.query(UserRepoGroupToPerm)\
498 .filter(UserRepoGroupToPerm.user == perm_user)\
497 .filter(UserRepoGroupToPerm.user == perm_user)\
499 .all():
498 .all():
500 g2p.permission = _def
499 g2p.permission = _def
501 self.sa.add(g2p)
500 self.sa.add(g2p)
502
501
503 # overwrite default user group permissions
502 # overwrite default user group permissions
504 if form_result['overwrite_default_user_group']:
503 if form_result['overwrite_default_user_group']:
505 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
504 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
506 # user groups
505 # user groups
507 _def = Permission.get_by_key('usergroup.' + _def_name)
506 _def = Permission.get_by_key('usergroup.' + _def_name)
508 for g2p in self.sa.query(UserUserGroupToPerm)\
507 for g2p in self.sa.query(UserUserGroupToPerm)\
509 .filter(UserUserGroupToPerm.user == perm_user)\
508 .filter(UserUserGroupToPerm.user == perm_user)\
510 .all():
509 .all():
511 g2p.permission = _def
510 g2p.permission = _def
512 self.sa.add(g2p)
511 self.sa.add(g2p)
513
512
514 # COMMIT
513 # COMMIT
515 self.sa.commit()
514 self.sa.commit()
516 except (DatabaseError,):
515 except (DatabaseError,):
517 log.exception('Failed to set default object permissions')
516 log.exception('Failed to set default object permissions')
518 self.sa.rollback()
517 self.sa.rollback()
519 raise
518 raise
520
519
521 def update_branch_permissions(self, form_result):
520 def update_branch_permissions(self, form_result):
522 if 'perm_user_id' in form_result:
521 if 'perm_user_id' in form_result:
523 perm_user = User.get(safe_int(form_result['perm_user_id']))
522 perm_user = User.get(safe_int(form_result['perm_user_id']))
524 else:
523 else:
525 # used mostly to do lookup for default user
524 # used mostly to do lookup for default user
526 perm_user = User.get_by_username(form_result['perm_user_name'])
525 perm_user = User.get_by_username(form_result['perm_user_name'])
527 try:
526 try:
528
527
529 # stage 2 reset defaults and set them from form data
528 # stage 2 reset defaults and set them from form data
530 self._set_new_user_perms(perm_user, form_result, preserve=[
529 self._set_new_user_perms(perm_user, form_result, preserve=[
531 'default_repo_perm',
530 'default_repo_perm',
532 'default_group_perm',
531 'default_group_perm',
533 'default_user_group_perm',
532 'default_user_group_perm',
534
533
535 'default_repo_group_create',
534 'default_repo_group_create',
536 'default_user_group_create',
535 'default_user_group_create',
537 'default_repo_create_on_write',
536 'default_repo_create_on_write',
538 'default_repo_create',
537 'default_repo_create',
539 'default_fork_create',
538 'default_fork_create',
540 'default_inherit_default_permissions',
539 'default_inherit_default_permissions',
541
540
542 'default_register',
541 'default_register',
543 'default_password_reset',
542 'default_password_reset',
544 'default_extern_activate'])
543 'default_extern_activate'])
545
544
546 # overwrite default branch permissions
545 # overwrite default branch permissions
547 if form_result['overwrite_default_branch']:
546 if form_result['overwrite_default_branch']:
548 _def_name = \
547 _def_name = \
549 form_result['default_branch_perm'].split('branch.')[-1]
548 form_result['default_branch_perm'].split('branch.')[-1]
550
549
551 _def = Permission.get_by_key('branch.' + _def_name)
550 _def = Permission.get_by_key('branch.' + _def_name)
552
551
553 user_perms = UserToRepoBranchPermission.query()\
552 user_perms = UserToRepoBranchPermission.query()\
554 .join(UserToRepoBranchPermission.user_repo_to_perm)\
553 .join(UserToRepoBranchPermission.user_repo_to_perm)\
555 .filter(UserRepoToPerm.user == perm_user).all()
554 .filter(UserRepoToPerm.user == perm_user).all()
556
555
557 for g2p in user_perms:
556 for g2p in user_perms:
558 g2p.permission = _def
557 g2p.permission = _def
559 self.sa.add(g2p)
558 self.sa.add(g2p)
560
559
561 # COMMIT
560 # COMMIT
562 self.sa.commit()
561 self.sa.commit()
563 except (DatabaseError,):
562 except (DatabaseError,):
564 log.exception('Failed to set default branch permissions')
563 log.exception('Failed to set default branch permissions')
565 self.sa.rollback()
564 self.sa.rollback()
566 raise
565 raise
567
566
568 def get_users_with_repo_write(self, db_repo):
567 def get_users_with_repo_write(self, db_repo):
569 write_plus = ['repository.write', 'repository.admin']
568 write_plus = ['repository.write', 'repository.admin']
570 default_user_id = User.get_default_user_id()
569 default_user_id = User.get_default_user_id()
571 user_write_permissions = collections.OrderedDict()
570 user_write_permissions = collections.OrderedDict()
572
571
573 # write or higher and DEFAULT user for inheritance
572 # write or higher and DEFAULT user for inheritance
574 for perm in db_repo.permissions():
573 for perm in db_repo.permissions():
575 if perm.permission in write_plus or perm.user_id == default_user_id:
574 if perm.permission in write_plus or perm.user_id == default_user_id:
576 user_write_permissions[perm.user_id] = perm
575 user_write_permissions[perm.user_id] = perm
577 return user_write_permissions
576 return user_write_permissions
578
577
579 def get_user_groups_with_repo_write(self, db_repo):
578 def get_user_groups_with_repo_write(self, db_repo):
580 write_plus = ['repository.write', 'repository.admin']
579 write_plus = ['repository.write', 'repository.admin']
581 user_group_write_permissions = collections.OrderedDict()
580 user_group_write_permissions = collections.OrderedDict()
582
581
583 # write or higher and DEFAULT user for inheritance
582 # write or higher and DEFAULT user for inheritance
584 for p in db_repo.permission_user_groups():
583 for p in db_repo.permission_user_groups():
585 if p.permission in write_plus:
584 if p.permission in write_plus:
586 user_group_write_permissions[p.users_group_id] = p
585 user_group_write_permissions[p.users_group_id] = p
587 return user_group_write_permissions
586 return user_group_write_permissions
588
587
589 def trigger_permission_flush(self, affected_user_ids=None):
588 def trigger_permission_flush(self, affected_user_ids=None):
590 affected_user_ids = affected_user_ids or User.get_all_user_ids()
589 affected_user_ids = affected_user_ids or User.get_all_user_ids()
591 events.trigger(events.UserPermissionsChange(affected_user_ids))
590 events.trigger(events.UserPermissionsChange(affected_user_ids))
592
591
593 def flush_user_permission_caches(self, changes, affected_user_ids=None):
592 def flush_user_permission_caches(self, changes, affected_user_ids=None):
594 affected_user_ids = affected_user_ids or []
593 affected_user_ids = affected_user_ids or []
595
594
596 for change in changes['added'] + changes['updated'] + changes['deleted']:
595 for change in changes['added'] + changes['updated'] + changes['deleted']:
597 if change['type'] == 'user':
596 if change['type'] == 'user':
598 affected_user_ids.append(change['id'])
597 affected_user_ids.append(change['id'])
599 if change['type'] == 'user_group':
598 if change['type'] == 'user_group':
600 user_group = UserGroup.get(safe_int(change['id']))
599 user_group = UserGroup.get(safe_int(change['id']))
601 if user_group:
600 if user_group:
602 group_members_ids = [x.user_id for x in user_group.members]
601 group_members_ids = [x.user_id for x in user_group.members]
603 affected_user_ids.extend(group_members_ids)
602 affected_user_ids.extend(group_members_ids)
604
603
605 self.trigger_permission_flush(affected_user_ids)
604 self.trigger_permission_flush(affected_user_ids)
606
605
607 return affected_user_ids
606 return affected_user_ids
@@ -1,2394 +1,2392 b''
1
2
3 # Copyright (C) 2012-2023 RhodeCode GmbH
1 # Copyright (C) 2012-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21
19
22 """
20 """
23 pull request model for RhodeCode
21 pull request model for RhodeCode
24 """
22 """
25
23
26 import logging
24 import logging
27 import os
25 import os
28
26
29 import datetime
27 import datetime
30 import urllib.request
28 import urllib.request
31 import urllib.parse
29 import urllib.parse
32 import urllib.error
30 import urllib.error
33 import collections
31 import collections
34
32
35 import dataclasses as dataclasses
33 import dataclasses as dataclasses
36 from pyramid.threadlocal import get_current_request
34 from pyramid.threadlocal import get_current_request
37
35
38 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
39 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 from rhodecode.lib import audit_logger
39 from rhodecode.lib import audit_logger
42 from collections import OrderedDict
40 from collections import OrderedDict
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.ext_json import sjson as json
42 from rhodecode.lib.ext_json import sjson as json
45 from rhodecode.lib.markup_renderer import (
43 from rhodecode.lib.markup_renderer import (
46 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
47 from rhodecode.lib.hash_utils import md5_safe
45 from rhodecode.lib.hash_utils import md5_safe
48 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.str_utils import safe_str
49 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
47 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
50 from rhodecode.lib.vcs.backends.base import (
48 from rhodecode.lib.vcs.backends.base import (
51 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
52 TargetRefMissing, SourceRefMissing)
50 TargetRefMissing, SourceRefMissing)
53 from rhodecode.lib.vcs.conf import settings as vcs_settings
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
54 from rhodecode.lib.vcs.exceptions import (
52 from rhodecode.lib.vcs.exceptions import (
55 CommitDoesNotExistError, EmptyRepositoryError)
53 CommitDoesNotExistError, EmptyRepositoryError)
56 from rhodecode.model import BaseModel
54 from rhodecode.model import BaseModel
57 from rhodecode.model.changeset_status import ChangesetStatusModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
58 from rhodecode.model.comment import CommentsModel
56 from rhodecode.model.comment import CommentsModel
59 from rhodecode.model.db import (
57 from rhodecode.model.db import (
60 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
58 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
61 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
62 from rhodecode.model.meta import Session
60 from rhodecode.model.meta import Session
63 from rhodecode.model.notification import NotificationModel, \
61 from rhodecode.model.notification import NotificationModel, \
64 EmailNotificationModel
62 EmailNotificationModel
65 from rhodecode.model.scm import ScmModel
63 from rhodecode.model.scm import ScmModel
66 from rhodecode.model.settings import VcsSettingsModel
64 from rhodecode.model.settings import VcsSettingsModel
67
65
68
66
69 log = logging.getLogger(__name__)
67 log = logging.getLogger(__name__)
70
68
71
69
72 # Data structure to hold the response data when updating commits during a pull
70 # Data structure to hold the response data when updating commits during a pull
73 # request update.
71 # request update.
74 class UpdateResponse(object):
72 class UpdateResponse(object):
75
73
76 def __init__(self, executed, reason, new, old, common_ancestor_id,
74 def __init__(self, executed, reason, new, old, common_ancestor_id,
77 commit_changes, source_changed, target_changed):
75 commit_changes, source_changed, target_changed):
78
76
79 self.executed = executed
77 self.executed = executed
80 self.reason = reason
78 self.reason = reason
81 self.new = new
79 self.new = new
82 self.old = old
80 self.old = old
83 self.common_ancestor_id = common_ancestor_id
81 self.common_ancestor_id = common_ancestor_id
84 self.changes = commit_changes
82 self.changes = commit_changes
85 self.source_changed = source_changed
83 self.source_changed = source_changed
86 self.target_changed = target_changed
84 self.target_changed = target_changed
87
85
88
86
89 def get_diff_info(
87 def get_diff_info(
90 source_repo, source_ref, target_repo, target_ref, get_authors=False,
88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
91 get_commit_authors=True):
89 get_commit_authors=True):
92 """
90 """
93 Calculates detailed diff information for usage in preview of creation of a pull-request.
91 Calculates detailed diff information for usage in preview of creation of a pull-request.
94 This is also used for default reviewers logic
92 This is also used for default reviewers logic
95 """
93 """
96
94
97 source_scm = source_repo.scm_instance()
95 source_scm = source_repo.scm_instance()
98 target_scm = target_repo.scm_instance()
96 target_scm = target_repo.scm_instance()
99
97
100 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
101 if not ancestor_id:
99 if not ancestor_id:
102 raise ValueError(
100 raise ValueError(
103 'cannot calculate diff info without a common ancestor. '
101 'cannot calculate diff info without a common ancestor. '
104 'Make sure both repositories are related, and have a common forking commit.')
102 'Make sure both repositories are related, and have a common forking commit.')
105
103
106 # case here is that want a simple diff without incoming commits,
104 # case here is that want a simple diff without incoming commits,
107 # previewing what will be merged based only on commits in the source.
105 # previewing what will be merged based only on commits in the source.
108 log.debug('Using ancestor %s as source_ref instead of %s',
106 log.debug('Using ancestor %s as source_ref instead of %s',
109 ancestor_id, source_ref)
107 ancestor_id, source_ref)
110
108
111 # source of changes now is the common ancestor
109 # source of changes now is the common ancestor
112 source_commit = source_scm.get_commit(commit_id=ancestor_id)
110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
113 # target commit becomes the source ref as it is the last commit
111 # target commit becomes the source ref as it is the last commit
114 # for diff generation this logic gives proper diff
112 # for diff generation this logic gives proper diff
115 target_commit = source_scm.get_commit(commit_id=source_ref)
113 target_commit = source_scm.get_commit(commit_id=source_ref)
116
114
117 vcs_diff = \
115 vcs_diff = \
118 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
119 ignore_whitespace=False, context=3)
117 ignore_whitespace=False, context=3)
120
118
121 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
119 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
122 diff_limit=0, file_limit=0, show_full_diff=True)
120 diff_limit=0, file_limit=0, show_full_diff=True)
123
121
124 _parsed = diff_processor.prepare()
122 _parsed = diff_processor.prepare()
125
123
126 all_files = []
124 all_files = []
127 all_files_changes = []
125 all_files_changes = []
128 changed_lines = {}
126 changed_lines = {}
129 stats = [0, 0]
127 stats = [0, 0]
130 for f in _parsed:
128 for f in _parsed:
131 all_files.append(f['filename'])
129 all_files.append(f['filename'])
132 all_files_changes.append({
130 all_files_changes.append({
133 'filename': f['filename'],
131 'filename': f['filename'],
134 'stats': f['stats']
132 'stats': f['stats']
135 })
133 })
136 stats[0] += f['stats']['added']
134 stats[0] += f['stats']['added']
137 stats[1] += f['stats']['deleted']
135 stats[1] += f['stats']['deleted']
138
136
139 changed_lines[f['filename']] = []
137 changed_lines[f['filename']] = []
140 if len(f['chunks']) < 2:
138 if len(f['chunks']) < 2:
141 continue
139 continue
142 # first line is "context" information
140 # first line is "context" information
143 for chunks in f['chunks'][1:]:
141 for chunks in f['chunks'][1:]:
144 for chunk in chunks['lines']:
142 for chunk in chunks['lines']:
145 if chunk['action'] not in ('del', 'mod'):
143 if chunk['action'] not in ('del', 'mod'):
146 continue
144 continue
147 changed_lines[f['filename']].append(chunk['old_lineno'])
145 changed_lines[f['filename']].append(chunk['old_lineno'])
148
146
149 commit_authors = []
147 commit_authors = []
150 user_counts = {}
148 user_counts = {}
151 email_counts = {}
149 email_counts = {}
152 author_counts = {}
150 author_counts = {}
153 _commit_cache = {}
151 _commit_cache = {}
154
152
155 commits = []
153 commits = []
156 if get_commit_authors:
154 if get_commit_authors:
157 log.debug('Obtaining commit authors from set of commits')
155 log.debug('Obtaining commit authors from set of commits')
158 _compare_data = target_scm.compare(
156 _compare_data = target_scm.compare(
159 target_ref, source_ref, source_scm, merge=True,
157 target_ref, source_ref, source_scm, merge=True,
160 pre_load=["author", "date", "message"]
158 pre_load=["author", "date", "message"]
161 )
159 )
162
160
163 for commit in _compare_data:
161 for commit in _compare_data:
164 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
162 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
165 # at this function which is later called via JSON serialization
163 # at this function which is later called via JSON serialization
166 serialized_commit = dict(
164 serialized_commit = dict(
167 author=commit.author,
165 author=commit.author,
168 date=commit.date,
166 date=commit.date,
169 message=commit.message,
167 message=commit.message,
170 commit_id=commit.raw_id,
168 commit_id=commit.raw_id,
171 raw_id=commit.raw_id
169 raw_id=commit.raw_id
172 )
170 )
173 commits.append(serialized_commit)
171 commits.append(serialized_commit)
174 user = User.get_from_cs_author(serialized_commit['author'])
172 user = User.get_from_cs_author(serialized_commit['author'])
175 if user and user not in commit_authors:
173 if user and user not in commit_authors:
176 commit_authors.append(user)
174 commit_authors.append(user)
177
175
178 # lines
176 # lines
179 if get_authors:
177 if get_authors:
180 log.debug('Calculating authors of changed files')
178 log.debug('Calculating authors of changed files')
181 target_commit = source_repo.get_commit(ancestor_id)
179 target_commit = source_repo.get_commit(ancestor_id)
182
180
183 for fname, lines in changed_lines.items():
181 for fname, lines in changed_lines.items():
184
182
185 try:
183 try:
186 node = target_commit.get_node(fname, pre_load=["is_binary"])
184 node = target_commit.get_node(fname, pre_load=["is_binary"])
187 except Exception:
185 except Exception:
188 log.exception("Failed to load node with path %s", fname)
186 log.exception("Failed to load node with path %s", fname)
189 continue
187 continue
190
188
191 if not isinstance(node, FileNode):
189 if not isinstance(node, FileNode):
192 continue
190 continue
193
191
194 # NOTE(marcink): for binary node we don't do annotation, just use last author
192 # NOTE(marcink): for binary node we don't do annotation, just use last author
195 if node.is_binary:
193 if node.is_binary:
196 author = node.last_commit.author
194 author = node.last_commit.author
197 email = node.last_commit.author_email
195 email = node.last_commit.author_email
198
196
199 user = User.get_from_cs_author(author)
197 user = User.get_from_cs_author(author)
200 if user:
198 if user:
201 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
199 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
202 author_counts[author] = author_counts.get(author, 0) + 1
200 author_counts[author] = author_counts.get(author, 0) + 1
203 email_counts[email] = email_counts.get(email, 0) + 1
201 email_counts[email] = email_counts.get(email, 0) + 1
204
202
205 continue
203 continue
206
204
207 for annotation in node.annotate:
205 for annotation in node.annotate:
208 line_no, commit_id, get_commit_func, line_text = annotation
206 line_no, commit_id, get_commit_func, line_text = annotation
209 if line_no in lines:
207 if line_no in lines:
210 if commit_id not in _commit_cache:
208 if commit_id not in _commit_cache:
211 _commit_cache[commit_id] = get_commit_func()
209 _commit_cache[commit_id] = get_commit_func()
212 commit = _commit_cache[commit_id]
210 commit = _commit_cache[commit_id]
213 author = commit.author
211 author = commit.author
214 email = commit.author_email
212 email = commit.author_email
215 user = User.get_from_cs_author(author)
213 user = User.get_from_cs_author(author)
216 if user:
214 if user:
217 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
215 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
218 author_counts[author] = author_counts.get(author, 0) + 1
216 author_counts[author] = author_counts.get(author, 0) + 1
219 email_counts[email] = email_counts.get(email, 0) + 1
217 email_counts[email] = email_counts.get(email, 0) + 1
220
218
221 log.debug('Default reviewers processing finished')
219 log.debug('Default reviewers processing finished')
222
220
223 return {
221 return {
224 'commits': commits,
222 'commits': commits,
225 'files': all_files_changes,
223 'files': all_files_changes,
226 'stats': stats,
224 'stats': stats,
227 'ancestor': ancestor_id,
225 'ancestor': ancestor_id,
228 # original authors of modified files
226 # original authors of modified files
229 'original_authors': {
227 'original_authors': {
230 'users': user_counts,
228 'users': user_counts,
231 'authors': author_counts,
229 'authors': author_counts,
232 'emails': email_counts,
230 'emails': email_counts,
233 },
231 },
234 'commit_authors': commit_authors
232 'commit_authors': commit_authors
235 }
233 }
236
234
237
235
238 class PullRequestModel(BaseModel):
236 class PullRequestModel(BaseModel):
239
237
240 cls = PullRequest
238 cls = PullRequest
241
239
242 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
240 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
243
241
244 UPDATE_STATUS_MESSAGES = {
242 UPDATE_STATUS_MESSAGES = {
245 UpdateFailureReason.NONE: lazy_ugettext(
243 UpdateFailureReason.NONE: lazy_ugettext(
246 'Pull request update successful.'),
244 'Pull request update successful.'),
247 UpdateFailureReason.UNKNOWN: lazy_ugettext(
245 UpdateFailureReason.UNKNOWN: lazy_ugettext(
248 'Pull request update failed because of an unknown error.'),
246 'Pull request update failed because of an unknown error.'),
249 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
247 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
250 'No update needed because the source and target have not changed.'),
248 'No update needed because the source and target have not changed.'),
251 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
249 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
252 'Pull request cannot be updated because the reference type is '
250 'Pull request cannot be updated because the reference type is '
253 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
251 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
254 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
252 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
255 'This pull request cannot be updated because the target '
253 'This pull request cannot be updated because the target '
256 'reference is missing.'),
254 'reference is missing.'),
257 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
255 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
258 'This pull request cannot be updated because the source '
256 'This pull request cannot be updated because the source '
259 'reference is missing.'),
257 'reference is missing.'),
260 }
258 }
261 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
259 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
262 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
260 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263
261
264 def __get_pull_request(self, pull_request):
262 def __get_pull_request(self, pull_request):
265 return self._get_instance((
263 return self._get_instance((
266 PullRequest, PullRequestVersion), pull_request)
264 PullRequest, PullRequestVersion), pull_request)
267
265
268 def _check_perms(self, perms, pull_request, user, api=False):
266 def _check_perms(self, perms, pull_request, user, api=False):
269 if not api:
267 if not api:
270 return h.HasRepoPermissionAny(*perms)(
268 return h.HasRepoPermissionAny(*perms)(
271 user=user, repo_name=pull_request.target_repo.repo_name)
269 user=user, repo_name=pull_request.target_repo.repo_name)
272 else:
270 else:
273 return h.HasRepoPermissionAnyApi(*perms)(
271 return h.HasRepoPermissionAnyApi(*perms)(
274 user=user, repo_name=pull_request.target_repo.repo_name)
272 user=user, repo_name=pull_request.target_repo.repo_name)
275
273
276 def check_user_read(self, pull_request, user, api=False):
274 def check_user_read(self, pull_request, user, api=False):
277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
275 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 return self._check_perms(_perms, pull_request, user, api)
276 return self._check_perms(_perms, pull_request, user, api)
279
277
280 def check_user_merge(self, pull_request, user, api=False):
278 def check_user_merge(self, pull_request, user, api=False):
281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
279 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 return self._check_perms(_perms, pull_request, user, api)
280 return self._check_perms(_perms, pull_request, user, api)
283
281
284 def check_user_update(self, pull_request, user, api=False):
282 def check_user_update(self, pull_request, user, api=False):
285 owner = user.user_id == pull_request.user_id
283 owner = user.user_id == pull_request.user_id
286 return self.check_user_merge(pull_request, user, api) or owner
284 return self.check_user_merge(pull_request, user, api) or owner
287
285
288 def check_user_delete(self, pull_request, user):
286 def check_user_delete(self, pull_request, user):
289 owner = user.user_id == pull_request.user_id
287 owner = user.user_id == pull_request.user_id
290 _perms = ('repository.admin',)
288 _perms = ('repository.admin',)
291 return self._check_perms(_perms, pull_request, user) or owner
289 return self._check_perms(_perms, pull_request, user) or owner
292
290
293 def is_user_reviewer(self, pull_request, user):
291 def is_user_reviewer(self, pull_request, user):
294 return user.user_id in [
292 return user.user_id in [
295 x.user_id for x in
293 x.user_id for x in
296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
294 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 if x.user
295 if x.user
298 ]
296 ]
299
297
300 def check_user_change_status(self, pull_request, user, api=False):
298 def check_user_change_status(self, pull_request, user, api=False):
301 return self.check_user_update(pull_request, user, api) \
299 return self.check_user_update(pull_request, user, api) \
302 or self.is_user_reviewer(pull_request, user)
300 or self.is_user_reviewer(pull_request, user)
303
301
304 def check_user_comment(self, pull_request, user):
302 def check_user_comment(self, pull_request, user):
305 owner = user.user_id == pull_request.user_id
303 owner = user.user_id == pull_request.user_id
306 return self.check_user_read(pull_request, user) or owner
304 return self.check_user_read(pull_request, user) or owner
307
305
308 def get(self, pull_request):
306 def get(self, pull_request):
309 return self.__get_pull_request(pull_request)
307 return self.__get_pull_request(pull_request)
310
308
311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
309 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
312 statuses=None, opened_by=None, order_by=None,
310 statuses=None, opened_by=None, order_by=None,
313 order_dir='desc', only_created=False):
311 order_dir='desc', only_created=False):
314 repo = None
312 repo = None
315 if repo_name:
313 if repo_name:
316 repo = self._get_repo(repo_name)
314 repo = self._get_repo(repo_name)
317
315
318 q = PullRequest.query()
316 q = PullRequest.query()
319
317
320 if search_q:
318 if search_q:
321 like_expression = u'%{}%'.format(safe_str(search_q))
319 like_expression = u'%{}%'.format(safe_str(search_q))
322 q = q.join(User, User.user_id == PullRequest.user_id)
320 q = q.join(User, User.user_id == PullRequest.user_id)
323 q = q.filter(or_(
321 q = q.filter(or_(
324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
322 cast(PullRequest.pull_request_id, String).ilike(like_expression),
325 User.username.ilike(like_expression),
323 User.username.ilike(like_expression),
326 PullRequest.title.ilike(like_expression),
324 PullRequest.title.ilike(like_expression),
327 PullRequest.description.ilike(like_expression),
325 PullRequest.description.ilike(like_expression),
328 ))
326 ))
329
327
330 # source or target
328 # source or target
331 if repo and source:
329 if repo and source:
332 q = q.filter(PullRequest.source_repo == repo)
330 q = q.filter(PullRequest.source_repo == repo)
333 elif repo:
331 elif repo:
334 q = q.filter(PullRequest.target_repo == repo)
332 q = q.filter(PullRequest.target_repo == repo)
335
333
336 # closed,opened
334 # closed,opened
337 if statuses:
335 if statuses:
338 q = q.filter(PullRequest.status.in_(statuses))
336 q = q.filter(PullRequest.status.in_(statuses))
339
337
340 # opened by filter
338 # opened by filter
341 if opened_by:
339 if opened_by:
342 q = q.filter(PullRequest.user_id.in_(opened_by))
340 q = q.filter(PullRequest.user_id.in_(opened_by))
343
341
344 # only get those that are in "created" state
342 # only get those that are in "created" state
345 if only_created:
343 if only_created:
346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
344 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
347
345
348 order_map = {
346 order_map = {
349 'name_raw': PullRequest.pull_request_id,
347 'name_raw': PullRequest.pull_request_id,
350 'id': PullRequest.pull_request_id,
348 'id': PullRequest.pull_request_id,
351 'title': PullRequest.title,
349 'title': PullRequest.title,
352 'updated_on_raw': PullRequest.updated_on,
350 'updated_on_raw': PullRequest.updated_on,
353 'target_repo': PullRequest.target_repo_id
351 'target_repo': PullRequest.target_repo_id
354 }
352 }
355 if order_by and order_by in order_map:
353 if order_by and order_by in order_map:
356 if order_dir == 'asc':
354 if order_dir == 'asc':
357 q = q.order_by(order_map[order_by].asc())
355 q = q.order_by(order_map[order_by].asc())
358 else:
356 else:
359 q = q.order_by(order_map[order_by].desc())
357 q = q.order_by(order_map[order_by].desc())
360
358
361 return q
359 return q
362
360
363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
361 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 opened_by=None):
362 opened_by=None):
365 """
363 """
366 Count the number of pull requests for a specific repository.
364 Count the number of pull requests for a specific repository.
367
365
368 :param repo_name: target or source repo
366 :param repo_name: target or source repo
369 :param search_q: filter by text
367 :param search_q: filter by text
370 :param source: boolean flag to specify if repo_name refers to source
368 :param source: boolean flag to specify if repo_name refers to source
371 :param statuses: list of pull request statuses
369 :param statuses: list of pull request statuses
372 :param opened_by: author user of the pull request
370 :param opened_by: author user of the pull request
373 :returns: int number of pull requests
371 :returns: int number of pull requests
374 """
372 """
375 q = self._prepare_get_all_query(
373 q = self._prepare_get_all_query(
376 repo_name, search_q=search_q, source=source, statuses=statuses,
374 repo_name, search_q=search_q, source=source, statuses=statuses,
377 opened_by=opened_by)
375 opened_by=opened_by)
378
376
379 return q.count()
377 return q.count()
380
378
381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
379 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
380 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 """
381 """
384 Get all pull requests for a specific repository.
382 Get all pull requests for a specific repository.
385
383
386 :param repo_name: target or source repo
384 :param repo_name: target or source repo
387 :param search_q: filter by text
385 :param search_q: filter by text
388 :param source: boolean flag to specify if repo_name refers to source
386 :param source: boolean flag to specify if repo_name refers to source
389 :param statuses: list of pull request statuses
387 :param statuses: list of pull request statuses
390 :param opened_by: author user of the pull request
388 :param opened_by: author user of the pull request
391 :param offset: pagination offset
389 :param offset: pagination offset
392 :param length: length of returned list
390 :param length: length of returned list
393 :param order_by: order of the returned list
391 :param order_by: order of the returned list
394 :param order_dir: 'asc' or 'desc' ordering direction
392 :param order_dir: 'asc' or 'desc' ordering direction
395 :returns: list of pull requests
393 :returns: list of pull requests
396 """
394 """
397 q = self._prepare_get_all_query(
395 q = self._prepare_get_all_query(
398 repo_name, search_q=search_q, source=source, statuses=statuses,
396 repo_name, search_q=search_q, source=source, statuses=statuses,
399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
397 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400
398
401 if length:
399 if length:
402 pull_requests = q.limit(length).offset(offset).all()
400 pull_requests = q.limit(length).offset(offset).all()
403 else:
401 else:
404 pull_requests = q.all()
402 pull_requests = q.all()
405
403
406 return pull_requests
404 return pull_requests
407
405
408 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
406 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
409 """
407 """
410 Count the number of pull requests for a specific repository that are
408 Count the number of pull requests for a specific repository that are
411 awaiting review.
409 awaiting review.
412
410
413 :param repo_name: target or source repo
411 :param repo_name: target or source repo
414 :param search_q: filter by text
412 :param search_q: filter by text
415 :param statuses: list of pull request statuses
413 :param statuses: list of pull request statuses
416 :returns: int number of pull requests
414 :returns: int number of pull requests
417 """
415 """
418 pull_requests = self.get_awaiting_review(
416 pull_requests = self.get_awaiting_review(
419 repo_name, search_q=search_q, statuses=statuses)
417 repo_name, search_q=search_q, statuses=statuses)
420
418
421 return len(pull_requests)
419 return len(pull_requests)
422
420
423 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
                        offset=0, length=None, order_by=None, order_dir='desc'):
    """
    Get all pull requests for a specific repository that are awaiting
    review.

    :param repo_name: target or source repo
    :param search_q: filter by text
    :param statuses: list of pull request statuses
    :param offset: pagination offset
    :param length: length of returned list
    :param order_by: order of the returned list
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: list of pull requests
    """
    # statuses that mean the pull request still needs a review action
    awaiting_statuses = (
        ChangesetStatus.STATUS_NOT_REVIEWED,
        ChangesetStatus.STATUS_UNDER_REVIEW,
    )

    candidates = self.get_all(
        repo_name, search_q=search_q, statuses=statuses,
        order_by=order_by, order_dir=order_dir)

    # filtering happens in python because the status is computed per-PR
    awaiting = [
        pr for pr in candidates
        if pr.calculated_review_status() in awaiting_statuses
    ]

    if not length:
        return awaiting
    return awaiting[offset:offset + length]
452
450
def _prepare_awaiting_my_review_review_query(
        self, repo_name, user_id, search_q=None, statuses=None,
        order_by=None, order_dir='desc'):
    """
    Build a SQLAlchemy query for pull requests in ``repo_name`` that still
    await a review action from ``user_id``.

    A pull request qualifies when the user is registered as a reviewer and
    either has no recorded review status yet (NULL from the outer join), or
    the recorded status is "not reviewed"/"under review".

    :param repo_name: name of the target repository to look in
    :param user_id: reviewer user of the pull request
    :param search_q: optional free-text filter (id, author, title, description)
    :param statuses: optional list of pull request statuses to narrow by
    :param order_by: column key to order by, see ``order_map`` below
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: un-executed SQLAlchemy query object
    """

    # review statuses that mean "still needs my attention"
    for_review_statuses = [
        ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
    ]

    # aliases so the joins below do not clash with other uses of these tables
    pull_request_alias = aliased(PullRequest)
    status_alias = aliased(ChangesetStatus)
    reviewers_alias = aliased(PullRequestReviewers)
    repo_alias = aliased(Repository)

    # correlated subquery: smallest status ``version`` recorded for this
    # reviewer/pull-request pair — used below to pick the effective status row.
    # NOTE(review): assumes the relevant row carries the minimal version;
    # confirm against how ChangesetStatus.version is assigned.
    last_ver_subq = Session()\
        .query(func.min(ChangesetStatus.version)) \
        .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
        .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
        .subquery()

    # outer-join statuses so reviewers with no status row at all still match
    # (their status columns are NULL and pass the or_() filters below)
    q = Session().query(pull_request_alias) \
        .options(lazyload(pull_request_alias.author)) \
        .join(reviewers_alias,
              reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
        .join(repo_alias,
              repo_alias.repo_id == pull_request_alias.target_repo_id) \
        .outerjoin(status_alias,
                   and_(status_alias.user_id == reviewers_alias.user_id,
                        status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
        .filter(or_(status_alias.version == null(),
                    status_alias.version == last_ver_subq)) \
        .filter(reviewers_alias.user_id == user_id) \
        .filter(repo_alias.repo_name == repo_name) \
        .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
        .group_by(pull_request_alias)

    # closed,opened
    if statuses:
        q = q.filter(pull_request_alias.status.in_(statuses))

    # free-text search over pr id, author username, title and description
    if search_q:
        like_expression = u'%{}%'.format(safe_str(search_q))
        q = q.join(User, User.user_id == pull_request_alias.user_id)
        q = q.filter(or_(
            cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
            User.username.ilike(like_expression),
            pull_request_alias.title.ilike(like_expression),
            pull_request_alias.description.ilike(like_expression),
        ))

    # whitelist of sortable columns; unknown keys leave order unspecified
    order_map = {
        'name_raw': pull_request_alias.pull_request_id,
        'title': pull_request_alias.title,
        'updated_on_raw': pull_request_alias.updated_on,
        'target_repo': pull_request_alias.target_repo_id
    }
    if order_by and order_by in order_map:
        if order_dir == 'asc':
            q = q.order_by(order_map[order_by].asc())
        else:
            q = q.order_by(order_map[order_by].desc())

    return q
515
513
def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
    """
    Count the number of pull requests for a specific repository that are
    awaiting review from a specific user.

    :param repo_name: target or source repo
    :param user_id: reviewer user of the pull request
    :param search_q: filter by text
    :param statuses: list of pull request statuses
    :returns: int number of pull requests
    """
    # delegate filtering to the shared query builder and reduce to a count
    return self._prepare_awaiting_my_review_review_query(
        repo_name, user_id, search_q=search_q, statuses=statuses).count()
530
528
def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
                           offset=0, length=None, order_by=None, order_dir='desc'):
    """
    Get all pull requests for a specific repository that are awaiting
    review from a specific user.

    :param repo_name: target or source repo
    :param user_id: reviewer user of the pull request
    :param search_q: filter by text
    :param statuses: list of pull request statuses
    :param offset: pagination offset
    :param length: length of returned list
    :param order_by: order of the returned list
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: list of pull requests
    """
    awaiting_q = self._prepare_awaiting_my_review_review_query(
        repo_name, user_id, search_q=search_q, statuses=statuses,
        order_by=order_by, order_dir=order_dir)

    # without a page length the full result set is returned
    if not length:
        return awaiting_q.all()
    return awaiting_q.limit(length).offset(offset).all()
558
556
def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
                                    order_by=None, order_dir='desc'):
    """
    Build a query of pull requests the user created or was added to as a
    reviewer. Without ``user_id`` every pull request is matched.

    :param user_id: author or reviewer user id; None disables user filtering
    :param statuses: optional list of pull request statuses (closed, opened)
    :param query: optional free-text filter
    :param order_by: column key to sort on
    :param order_dir: 'asc' or 'desc'
    :returns: un-executed SQLAlchemy query object
    """
    pr_query = PullRequest.query()

    if user_id:
        # sub-select of pull-request ids the user is a reviewer on
        reviewed_ids = select(PullRequestReviewers)\
            .where(PullRequestReviewers.user_id == user_id)\
            .with_only_columns(PullRequestReviewers.pull_request_id)

        pr_query = PullRequest.query().filter(or_(
            PullRequest.user_id == user_id,
            PullRequest.pull_request_id.in_(reviewed_ids),
        ))

    # closed,opened
    if statuses:
        pr_query = pr_query.filter(PullRequest.status.in_(statuses))

    # free-text filter over pr id, author username, title and description
    if query:
        like_expression = u'%{}%'.format(safe_str(query))
        pr_query = pr_query.join(User, User.user_id == PullRequest.user_id)
        pr_query = pr_query.filter(or_(
            cast(PullRequest.pull_request_id, String).ilike(like_expression),
            User.username.ilike(like_expression),
            PullRequest.title.ilike(like_expression),
            PullRequest.description.ilike(like_expression),
        ))

    # whitelist of sortable columns; unknown keys leave order unspecified
    sortable_columns = {
        'name_raw': PullRequest.pull_request_id,
        'title': PullRequest.title,
        'updated_on_raw': PullRequest.updated_on,
        'target_repo': PullRequest.target_repo_id,
    }
    sort_column = sortable_columns.get(order_by) if order_by else None
    if sort_column is not None:
        if order_dir == 'asc':
            pr_query = pr_query.order_by(sort_column.asc())
        else:
            pr_query = pr_query.order_by(sort_column.desc())

    return pr_query
604
602
def count_im_participating_in(self, user_id=None, statuses=None, query=''):
    # same filtering as get_im_participating_in, reduced to a row count
    participating_q = self._prepare_im_participating_query(
        user_id, statuses=statuses, query=query)
    return participating_q.count()
608
606
def get_im_participating_in(
        self, user_id=None, statuses=None, query='', offset=0,
        length=None, order_by=None, order_dir='desc'):
    """
    Get all Pull requests that i'm participating in as a reviewer, or i have opened
    """
    participating_q = self._prepare_im_participating_query(
        user_id, statuses=statuses, query=query, order_by=order_by,
        order_dir=order_dir)

    # without a page length the full result set is returned
    if not length:
        return participating_q.all()
    return participating_q.limit(length).offset(offset).all()
626
624
def _prepare_participating_in_for_review_query(
        self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
    """
    Build a SQLAlchemy query for pull requests (across all repositories)
    that still need a review action from ``user_id``.

    Same selection logic as ``_prepare_awaiting_my_review_review_query``
    but without narrowing to a single repository.

    :param user_id: reviewer user of the pull request
    :param statuses: optional list of pull request statuses to narrow by
    :param query: optional free-text filter (id, author, title, description)
    :param order_by: column key to order by, see ``order_map`` below
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: un-executed SQLAlchemy query object
    """

    # review statuses that mean "still needs my attention"
    for_review_statuses = [
        ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
    ]

    # aliases so the joins below do not clash with other uses of these tables
    pull_request_alias = aliased(PullRequest)
    status_alias = aliased(ChangesetStatus)
    reviewers_alias = aliased(PullRequestReviewers)

    # correlated subquery: smallest status ``version`` recorded for this
    # reviewer/pull-request pair — used below to pick the effective status row.
    # NOTE(review): assumes the relevant row carries the minimal version;
    # confirm against how ChangesetStatus.version is assigned.
    last_ver_subq = Session()\
        .query(func.min(ChangesetStatus.version)) \
        .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
        .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
        .subquery()

    # outer-join statuses so reviewers with no status row at all still match
    # (their status columns are NULL and pass the or_() filters below)
    q = Session().query(pull_request_alias) \
        .options(lazyload(pull_request_alias.author)) \
        .join(reviewers_alias,
              reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
        .outerjoin(status_alias,
                   and_(status_alias.user_id == reviewers_alias.user_id,
                        status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
        .filter(or_(status_alias.version == null(),
                    status_alias.version == last_ver_subq)) \
        .filter(reviewers_alias.user_id == user_id) \
        .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
        .group_by(pull_request_alias)

    # closed,opened
    if statuses:
        q = q.filter(pull_request_alias.status.in_(statuses))

    # free-text search over pr id, author username, title and description
    if query:
        like_expression = u'%{}%'.format(safe_str(query))
        q = q.join(User, User.user_id == pull_request_alias.user_id)
        q = q.filter(or_(
            cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
            User.username.ilike(like_expression),
            pull_request_alias.title.ilike(like_expression),
            pull_request_alias.description.ilike(like_expression),
        ))

    # whitelist of sortable columns; unknown keys leave order unspecified
    order_map = {
        'name_raw': pull_request_alias.pull_request_id,
        'title': pull_request_alias.title,
        'updated_on_raw': pull_request_alias.updated_on,
        'target_repo': pull_request_alias.target_repo_id
    }
    if order_by and order_by in order_map:
        if order_dir == 'asc':
            q = q.order_by(order_map[order_by].asc())
        else:
            q = q.order_by(order_map[order_by].desc())

    return q
684
682
def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
    # same filtering as get_im_participating_in_for_review, reduced to a count
    return self._prepare_participating_in_for_review_query(
        user_id, statuses=statuses, query=query).count()
688
686
def get_im_participating_in_for_review(
        self, user_id, statuses=None, query='', offset=0,
        length=None, order_by=None, order_dir='desc'):
    """
    Get all Pull requests that needs user approval or rejection
    """
    for_review_q = self._prepare_participating_in_for_review_query(
        user_id, statuses=statuses, query=query, order_by=order_by,
        order_dir=order_dir)

    # without a page length the full result set is returned
    if not length:
        return for_review_q.all()
    return for_review_q.limit(length).offset(offset).all()
706
704
def get_versions(self, pull_request):
    """
    Return all saved versions of the given pull request, sorted by
    version ID ascending (oldest version first).

    :param pull_request: the PullRequest to fetch versions of
    :returns: list of PullRequestVersion objects
    """
    return PullRequestVersion.query()\
        .filter(PullRequestVersion.pull_request == pull_request)\
        .order_by(PullRequestVersion.pull_request_version_id.asc())\
        .all()
715
713
def get_pr_version(self, pull_request_id, version=None):
    """
    Resolve a pull request together with an optionally requested version.

    :param pull_request_id: id of the pull request
    :param version: a PullRequestVersion id, the string 'latest', or None
        for the plain (unversioned) pull request
    :returns: 4-tuple of (original pull request object, object to operate
        on — the PR itself or the selected version, display object built
        from both, at_version marker — a version id, 'latest' or None)
    """
    at_version = None

    if version and version == 'latest':
        # NOTE(review): this branch uses .get() (may return None) while the
        # others use get_or_404 — confirm whether a missing id should really
        # be tolerated for 'latest'.
        pull_request_ver = PullRequest.get(pull_request_id)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_obj
        at_version = 'latest'
    elif version:
        # a concrete saved version was requested
        pull_request_ver = PullRequestVersion.get_or_404(version)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_ver.pull_request
        at_version = pull_request_ver.pull_request_version_id
    else:
        # no version: original and operated-on object are the same PR
        _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
            pull_request_id)

    # wrap into a display object that knows both the shown and the original PR
    pull_request_display_obj = PullRequest.get_pr_display_object(
        pull_request_obj, _org_pull_request_obj)

    return _org_pull_request_obj, pull_request_obj, \
        pull_request_display_obj, at_version
738
736
def pr_commits_versions(self, versions):
    """
    Maps the pull-request commits into all known PR versions. This way we can obtain
    each pr version the commit was introduced in.

    :param versions: iterable of PullRequestVersion objects
    :returns: collections.defaultdict mapping commit_id -> list of
        version indices the commit appears in
    """
    commit_versions = collections.defaultdict(list)
    num_versions = [x.pull_request_version_id for x in versions]
    for ver in versions:
        # the index depends only on the version, not on the commit:
        # compute it once per version instead of once per commit
        ver_idx = ChangesetComment.get_index_from_version(
            ver.pull_request_version_id, num_versions=num_versions)
        for commit_id in ver.revisions:
            commit_versions[commit_id].append(ver_idx)
    return commit_versions
752
750
def create(self, created_by, source_repo, source_ref, target_repo,
           target_ref, revisions, reviewers, observers, title, description=None,
           common_ancestor_id=None,
           description_renderer=None,
           reviewer_data=None, translator=None, auth_user=None):
    """
    Create a new pull request with its reviewers and observers, mark its
    commits as "under review", and run an initial merge simulation.

    :param created_by: user (or user id) creating the pull request
    :param source_repo: source repository (or its name)
    :param source_ref: source ref string
    :param target_repo: target repository (or its name)
    :param target_ref: target ref string
    :param revisions: list of commit ids making up the pull request
    :param reviewers: iterable of (user_id, reasons, mandatory, role, rules)
    :param observers: iterable of (user_id, reasons, mandatory, role, rules)
    :param title: pull request title
    :param description: optional description text
    :param common_ancestor_id: optional pre-computed common ancestor id
    :param description_renderer: renderer used for the description
    :param reviewer_data: extra data on how reviewers were computed
    :param translator: translation callable; defaults to current request's
    :param auth_user: acting auth user; defaults to the creator's AuthUser
    :returns: the created PullRequest (re-fetched after the early commit)
    """
    translator = translator or get_current_request().translate

    created_by_user = self._get_user(created_by)
    auth_user = auth_user or created_by_user.AuthUser()
    source_repo = self._get_repo(source_repo)
    target_repo = self._get_repo(target_repo)

    # build the pull request row; state starts as CREATING until the merge
    # simulation below finishes
    pull_request = PullRequest()
    pull_request.source_repo = source_repo
    pull_request.source_ref = source_ref
    pull_request.target_repo = target_repo
    pull_request.target_ref = target_ref
    pull_request.revisions = revisions
    pull_request.title = title
    pull_request.description = description
    pull_request.description_renderer = description_renderer
    pull_request.author = created_by_user
    pull_request.reviewer_data = reviewer_data
    pull_request.pull_request_state = pull_request.STATE_CREATING
    pull_request.common_ancestor_id = common_ancestor_id

    # flush so the pull request gets an id reviewers can reference
    Session().add(pull_request)
    Session().flush()

    reviewer_ids = set()
    # members / reviewers
    for reviewer_object in reviewers:
        user_id, reasons, mandatory, role, rules = reviewer_object
        user = self._get_user(user_id)

        # skip duplicates
        if user.user_id in reviewer_ids:
            continue

        reviewer_ids.add(user.user_id)

        reviewer = PullRequestReviewers()
        reviewer.user = user
        reviewer.pull_request = pull_request
        reviewer.reasons = reasons
        reviewer.mandatory = mandatory
        reviewer.role = role

        # NOTE(marcink): pick only first rule for now
        rule_id = list(rules)[0] if rules else None
        rule = RepoReviewRule.get(rule_id) if rule_id else None
        if rule:
            review_group = rule.user_group_vote_rule(user_id)
            # we check if this particular reviewer is member of a voting group
            if review_group:
                # NOTE(marcink):
                # can be that user is member of more but we pick the first same,
                # same as default reviewers algo
                review_group = review_group[0]

                rule_data = {
                    'rule_name':
                        rule.review_rule_name,
                    'rule_user_group_entry_id':
                        review_group.repo_review_rule_users_group_id,
                    'rule_user_group_name':
                        review_group.users_group.users_group_name,
                    'rule_user_group_members':
                        [x.user.username for x in review_group.users_group.members],
                    'rule_user_group_members_id':
                        [x.user.user_id for x in review_group.users_group.members],
                }
                # e.g {'vote_rule': -1, 'mandatory': True}
                rule_data.update(review_group.rule_data())

                reviewer.rule_data = rule_data

        Session().add(reviewer)
        Session().flush()

    # observers share the reviewers table but with the observer role
    for observer_object in observers:
        user_id, reasons, mandatory, role, rules = observer_object
        user = self._get_user(user_id)

        # skip duplicates from reviewers
        if user.user_id in reviewer_ids:
            continue

        #reviewer_ids.add(user.user_id)

        observer = PullRequestReviewers()
        observer.user = user
        observer.pull_request = pull_request
        observer.reasons = reasons
        observer.mandatory = mandatory
        observer.role = role

        # NOTE(marcink): pick only first rule for now
        rule_id = list(rules)[0] if rules else None
        rule = RepoReviewRule.get(rule_id) if rule_id else None
        if rule:
            # TODO(marcink): do we need this for observers ??
            pass

        Session().add(observer)
        Session().flush()

    # Set approval status to "Under Review" for all commits which are
    # part of this pull request.
    ChangesetStatusModel().set_status(
        repo=target_repo,
        status=ChangesetStatus.STATUS_UNDER_REVIEW,
        user=created_by_user,
        pull_request=pull_request
    )
    # we commit early at this point. This has to do with a fact
    # that before queries do some row-locking. And because of that
    # we need to commit and finish transaction before below validate call
    # that for large repos could be long resulting in long row locks
    Session().commit()

    # prepare workspace, and run initial merge simulation. Set state during that
    # operation
    pull_request = PullRequest.get(pull_request.pull_request_id)

    # set as merging, for merge simulation, and if finished to created so we mark
    # simulation is working fine
    with pull_request.set_state(PullRequest.STATE_MERGING,
                                final_state=PullRequest.STATE_CREATED) as state_obj:
        # state_obj is unused; the context manager handles the transitions
        MergeCheck.validate(
            pull_request, auth_user=auth_user, translator=translator)

    # notify + fire hooks only after the PR is fully set up
    self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
    self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

    creation_data = pull_request.get_api_data(with_merge_state=False)
    self._log_audit_action(
        'repo.pull_request.create', {'data': creation_data},
        auth_user, pull_request)

    return pull_request
894
892
def trigger_pull_request_hook(self, pull_request, user, action, data=None):
    """
    Fire the integration hook matching ``action`` for the given pull
    request; unknown actions are silently ignored.
    """
    pull_request = self.__get_pull_request(pull_request)
    target_scm = pull_request.target_repo.scm_instance()

    # dispatch table: pull-request action -> hook trigger function
    hook_dispatch = {
        'create': hooks_utils.trigger_create_pull_request_hook,
        'merge': hooks_utils.trigger_merge_pull_request_hook,
        'close': hooks_utils.trigger_close_pull_request_hook,
        'review_status_change': hooks_utils.trigger_review_pull_request_hook,
        'update': hooks_utils.trigger_update_pull_request_hook,
        'comment': hooks_utils.trigger_comment_pull_request_hook,
        'comment_edit': hooks_utils.trigger_comment_pull_request_edit_hook,
    }
    trigger_hook = hook_dispatch.get(action)
    if trigger_hook is None:
        return

    log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
              pull_request, action, trigger_hook)
    trigger_hook(
        username=user.username,
        repo_name=pull_request.target_repo.repo_name,
        repo_type=target_scm.alias,
        pull_request=pull_request,
        data=data)
923
921
924 def _get_commit_ids(self, pull_request):
922 def _get_commit_ids(self, pull_request):
925 """
923 """
926 Return the commit ids of the merged pull request.
924 Return the commit ids of the merged pull request.
927
925
928 This method is not dealing correctly yet with the lack of autoupdates
926 This method is not dealing correctly yet with the lack of autoupdates
929 nor with the implicit target updates.
927 nor with the implicit target updates.
930 For example: if a commit in the source repo is already in the target it
928 For example: if a commit in the source repo is already in the target it
931 will be reported anyways.
929 will be reported anyways.
932 """
930 """
933 merge_rev = pull_request.merge_rev
931 merge_rev = pull_request.merge_rev
934 if merge_rev is None:
932 if merge_rev is None:
935 raise ValueError('This pull request was not merged yet')
933 raise ValueError('This pull request was not merged yet')
936
934
937 commit_ids = list(pull_request.revisions)
935 commit_ids = list(pull_request.revisions)
938 if merge_rev not in commit_ids:
936 if merge_rev not in commit_ids:
939 commit_ids.append(merge_rev)
937 commit_ids.append(merge_rev)
940
938
941 return commit_ids
939 return commit_ids
942
940
943 def merge_repo(self, pull_request, user, extras):
941 def merge_repo(self, pull_request, user, extras):
944 repo_type = pull_request.source_repo.repo_type
942 repo_type = pull_request.source_repo.repo_type
945 log.debug("Merging pull request %s", pull_request)
943 log.debug("Merging pull request %s", pull_request)
946
944
947 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
945 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
948 merge_state = self._merge_pull_request(pull_request, user, extras)
946 merge_state = self._merge_pull_request(pull_request, user, extras)
949 if merge_state.executed:
947 if merge_state.executed:
950 log.debug("Merge was successful, updating the pull request comments.")
948 log.debug("Merge was successful, updating the pull request comments.")
951 self._comment_and_close_pr(pull_request, user, merge_state)
949 self._comment_and_close_pr(pull_request, user, merge_state)
952
950
953 self._log_audit_action(
951 self._log_audit_action(
954 'repo.pull_request.merge',
952 'repo.pull_request.merge',
955 {'merge_state': merge_state.__dict__},
953 {'merge_state': merge_state.__dict__},
956 user, pull_request)
954 user, pull_request)
957
955
958 else:
956 else:
959 log.warning("Merge failed, not updating the pull request.")
957 log.warning("Merge failed, not updating the pull request.")
960 return merge_state
958 return merge_state
961
959
962 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
960 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
963 target_vcs = pull_request.target_repo.scm_instance()
961 target_vcs = pull_request.target_repo.scm_instance()
964 source_vcs = pull_request.source_repo.scm_instance()
962 source_vcs = pull_request.source_repo.scm_instance()
965
963
966 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
964 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
967 pr_id=pull_request.pull_request_id,
965 pr_id=pull_request.pull_request_id,
968 pr_title=pull_request.title,
966 pr_title=pull_request.title,
969 pr_desc=pull_request.description,
967 pr_desc=pull_request.description,
970 source_repo=source_vcs.name,
968 source_repo=source_vcs.name,
971 source_ref_name=pull_request.source_ref_parts.name,
969 source_ref_name=pull_request.source_ref_parts.name,
972 target_repo=target_vcs.name,
970 target_repo=target_vcs.name,
973 target_ref_name=pull_request.target_ref_parts.name,
971 target_ref_name=pull_request.target_ref_parts.name,
974 )
972 )
975
973
976 workspace_id = self._workspace_id(pull_request)
974 workspace_id = self._workspace_id(pull_request)
977 repo_id = pull_request.target_repo.repo_id
975 repo_id = pull_request.target_repo.repo_id
978 use_rebase = self._use_rebase_for_merging(pull_request)
976 use_rebase = self._use_rebase_for_merging(pull_request)
979 close_branch = self._close_branch_before_merging(pull_request)
977 close_branch = self._close_branch_before_merging(pull_request)
980 user_name = self._user_name_for_merging(pull_request, user)
978 user_name = self._user_name_for_merging(pull_request, user)
981
979
982 target_ref = self._refresh_reference(
980 target_ref = self._refresh_reference(
983 pull_request.target_ref_parts, target_vcs)
981 pull_request.target_ref_parts, target_vcs)
984
982
985 callback_daemon, extras = prepare_callback_daemon(
983 callback_daemon, extras = prepare_callback_daemon(
986 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
984 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
987 host=vcs_settings.HOOKS_HOST,
985 host=vcs_settings.HOOKS_HOST,
988 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
986 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
989
987
990 with callback_daemon:
988 with callback_daemon:
991 # TODO: johbo: Implement a clean way to run a config_override
989 # TODO: johbo: Implement a clean way to run a config_override
992 # for a single call.
990 # for a single call.
993 target_vcs.config.set(
991 target_vcs.config.set(
994 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
992 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
995
993
996 merge_state = target_vcs.merge(
994 merge_state = target_vcs.merge(
997 repo_id, workspace_id, target_ref, source_vcs,
995 repo_id, workspace_id, target_ref, source_vcs,
998 pull_request.source_ref_parts,
996 pull_request.source_ref_parts,
999 user_name=user_name, user_email=user.email,
997 user_name=user_name, user_email=user.email,
1000 message=message, use_rebase=use_rebase,
998 message=message, use_rebase=use_rebase,
1001 close_branch=close_branch)
999 close_branch=close_branch)
1002
1000
1003 return merge_state
1001 return merge_state
1004
1002
1005 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1003 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1006 pull_request.merge_rev = merge_state.merge_ref.commit_id
1004 pull_request.merge_rev = merge_state.merge_ref.commit_id
1007 pull_request.updated_on = datetime.datetime.now()
1005 pull_request.updated_on = datetime.datetime.now()
1008 close_msg = close_msg or 'Pull request merged and closed'
1006 close_msg = close_msg or 'Pull request merged and closed'
1009
1007
1010 CommentsModel().create(
1008 CommentsModel().create(
1011 text=safe_str(close_msg),
1009 text=safe_str(close_msg),
1012 repo=pull_request.target_repo.repo_id,
1010 repo=pull_request.target_repo.repo_id,
1013 user=user.user_id,
1011 user=user.user_id,
1014 pull_request=pull_request.pull_request_id,
1012 pull_request=pull_request.pull_request_id,
1015 f_path=None,
1013 f_path=None,
1016 line_no=None,
1014 line_no=None,
1017 closing_pr=True
1015 closing_pr=True
1018 )
1016 )
1019
1017
1020 Session().add(pull_request)
1018 Session().add(pull_request)
1021 Session().flush()
1019 Session().flush()
1022 # TODO: paris: replace invalidation with less radical solution
1020 # TODO: paris: replace invalidation with less radical solution
1023 ScmModel().mark_for_invalidation(
1021 ScmModel().mark_for_invalidation(
1024 pull_request.target_repo.repo_name)
1022 pull_request.target_repo.repo_name)
1025 self.trigger_pull_request_hook(pull_request, user, 'merge')
1023 self.trigger_pull_request_hook(pull_request, user, 'merge')
1026
1024
1027 def has_valid_update_type(self, pull_request):
1025 def has_valid_update_type(self, pull_request):
1028 source_ref_type = pull_request.source_ref_parts.type
1026 source_ref_type = pull_request.source_ref_parts.type
1029 return source_ref_type in self.REF_TYPES
1027 return source_ref_type in self.REF_TYPES
1030
1028
1031 def get_flow_commits(self, pull_request):
1029 def get_flow_commits(self, pull_request):
1032
1030
1033 # source repo
1031 # source repo
1034 source_ref_name = pull_request.source_ref_parts.name
1032 source_ref_name = pull_request.source_ref_parts.name
1035 source_ref_type = pull_request.source_ref_parts.type
1033 source_ref_type = pull_request.source_ref_parts.type
1036 source_ref_id = pull_request.source_ref_parts.commit_id
1034 source_ref_id = pull_request.source_ref_parts.commit_id
1037 source_repo = pull_request.source_repo.scm_instance()
1035 source_repo = pull_request.source_repo.scm_instance()
1038
1036
1039 try:
1037 try:
1040 if source_ref_type in self.REF_TYPES:
1038 if source_ref_type in self.REF_TYPES:
1041 source_commit = source_repo.get_commit(
1039 source_commit = source_repo.get_commit(
1042 source_ref_name, reference_obj=pull_request.source_ref_parts)
1040 source_ref_name, reference_obj=pull_request.source_ref_parts)
1043 else:
1041 else:
1044 source_commit = source_repo.get_commit(source_ref_id)
1042 source_commit = source_repo.get_commit(source_ref_id)
1045 except CommitDoesNotExistError:
1043 except CommitDoesNotExistError:
1046 raise SourceRefMissing()
1044 raise SourceRefMissing()
1047
1045
1048 # target repo
1046 # target repo
1049 target_ref_name = pull_request.target_ref_parts.name
1047 target_ref_name = pull_request.target_ref_parts.name
1050 target_ref_type = pull_request.target_ref_parts.type
1048 target_ref_type = pull_request.target_ref_parts.type
1051 target_ref_id = pull_request.target_ref_parts.commit_id
1049 target_ref_id = pull_request.target_ref_parts.commit_id
1052 target_repo = pull_request.target_repo.scm_instance()
1050 target_repo = pull_request.target_repo.scm_instance()
1053
1051
1054 try:
1052 try:
1055 if target_ref_type in self.REF_TYPES:
1053 if target_ref_type in self.REF_TYPES:
1056 target_commit = target_repo.get_commit(
1054 target_commit = target_repo.get_commit(
1057 target_ref_name, reference_obj=pull_request.target_ref_parts)
1055 target_ref_name, reference_obj=pull_request.target_ref_parts)
1058 else:
1056 else:
1059 target_commit = target_repo.get_commit(target_ref_id)
1057 target_commit = target_repo.get_commit(target_ref_id)
1060 except CommitDoesNotExistError:
1058 except CommitDoesNotExistError:
1061 raise TargetRefMissing()
1059 raise TargetRefMissing()
1062
1060
1063 return source_commit, target_commit
1061 return source_commit, target_commit
1064
1062
1065 def update_commits(self, pull_request, updating_user):
1063 def update_commits(self, pull_request, updating_user):
1066 """
1064 """
1067 Get the updated list of commits for the pull request
1065 Get the updated list of commits for the pull request
1068 and return the new pull request version and the list
1066 and return the new pull request version and the list
1069 of commits processed by this update action
1067 of commits processed by this update action
1070
1068
1071 updating_user is the user_object who triggered the update
1069 updating_user is the user_object who triggered the update
1072 """
1070 """
1073 pull_request = self.__get_pull_request(pull_request)
1071 pull_request = self.__get_pull_request(pull_request)
1074 source_ref_type = pull_request.source_ref_parts.type
1072 source_ref_type = pull_request.source_ref_parts.type
1075 source_ref_name = pull_request.source_ref_parts.name
1073 source_ref_name = pull_request.source_ref_parts.name
1076 source_ref_id = pull_request.source_ref_parts.commit_id
1074 source_ref_id = pull_request.source_ref_parts.commit_id
1077
1075
1078 target_ref_type = pull_request.target_ref_parts.type
1076 target_ref_type = pull_request.target_ref_parts.type
1079 target_ref_name = pull_request.target_ref_parts.name
1077 target_ref_name = pull_request.target_ref_parts.name
1080 target_ref_id = pull_request.target_ref_parts.commit_id
1078 target_ref_id = pull_request.target_ref_parts.commit_id
1081
1079
1082 if not self.has_valid_update_type(pull_request):
1080 if not self.has_valid_update_type(pull_request):
1083 log.debug("Skipping update of pull request %s due to ref type: %s",
1081 log.debug("Skipping update of pull request %s due to ref type: %s",
1084 pull_request, source_ref_type)
1082 pull_request, source_ref_type)
1085 return UpdateResponse(
1083 return UpdateResponse(
1086 executed=False,
1084 executed=False,
1087 reason=UpdateFailureReason.WRONG_REF_TYPE,
1085 reason=UpdateFailureReason.WRONG_REF_TYPE,
1088 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1086 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1089 source_changed=False, target_changed=False)
1087 source_changed=False, target_changed=False)
1090
1088
1091 try:
1089 try:
1092 source_commit, target_commit = self.get_flow_commits(pull_request)
1090 source_commit, target_commit = self.get_flow_commits(pull_request)
1093 except SourceRefMissing:
1091 except SourceRefMissing:
1094 return UpdateResponse(
1092 return UpdateResponse(
1095 executed=False,
1093 executed=False,
1096 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1094 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1097 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1095 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1098 source_changed=False, target_changed=False)
1096 source_changed=False, target_changed=False)
1099 except TargetRefMissing:
1097 except TargetRefMissing:
1100 return UpdateResponse(
1098 return UpdateResponse(
1101 executed=False,
1099 executed=False,
1102 reason=UpdateFailureReason.MISSING_TARGET_REF,
1100 reason=UpdateFailureReason.MISSING_TARGET_REF,
1103 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1101 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1104 source_changed=False, target_changed=False)
1102 source_changed=False, target_changed=False)
1105
1103
1106 source_changed = source_ref_id != source_commit.raw_id
1104 source_changed = source_ref_id != source_commit.raw_id
1107 target_changed = target_ref_id != target_commit.raw_id
1105 target_changed = target_ref_id != target_commit.raw_id
1108
1106
1109 if not (source_changed or target_changed):
1107 if not (source_changed or target_changed):
1110 log.debug("Nothing changed in pull request %s", pull_request)
1108 log.debug("Nothing changed in pull request %s", pull_request)
1111 return UpdateResponse(
1109 return UpdateResponse(
1112 executed=False,
1110 executed=False,
1113 reason=UpdateFailureReason.NO_CHANGE,
1111 reason=UpdateFailureReason.NO_CHANGE,
1114 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1112 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1115 source_changed=target_changed, target_changed=source_changed)
1113 source_changed=target_changed, target_changed=source_changed)
1116
1114
1117 change_in_found = 'target repo' if target_changed else 'source repo'
1115 change_in_found = 'target repo' if target_changed else 'source repo'
1118 log.debug('Updating pull request because of change in %s detected',
1116 log.debug('Updating pull request because of change in %s detected',
1119 change_in_found)
1117 change_in_found)
1120
1118
1121 # Finally there is a need for an update, in case of source change
1119 # Finally there is a need for an update, in case of source change
1122 # we create a new version, else just an update
1120 # we create a new version, else just an update
1123 if source_changed:
1121 if source_changed:
1124 pull_request_version = self._create_version_from_snapshot(pull_request)
1122 pull_request_version = self._create_version_from_snapshot(pull_request)
1125 self._link_comments_to_version(pull_request_version)
1123 self._link_comments_to_version(pull_request_version)
1126 else:
1124 else:
1127 try:
1125 try:
1128 ver = pull_request.versions[-1]
1126 ver = pull_request.versions[-1]
1129 except IndexError:
1127 except IndexError:
1130 ver = None
1128 ver = None
1131
1129
1132 pull_request.pull_request_version_id = \
1130 pull_request.pull_request_version_id = \
1133 ver.pull_request_version_id if ver else None
1131 ver.pull_request_version_id if ver else None
1134 pull_request_version = pull_request
1132 pull_request_version = pull_request
1135
1133
1136 source_repo = pull_request.source_repo.scm_instance()
1134 source_repo = pull_request.source_repo.scm_instance()
1137 target_repo = pull_request.target_repo.scm_instance()
1135 target_repo = pull_request.target_repo.scm_instance()
1138
1136
1139 # re-compute commit ids
1137 # re-compute commit ids
1140 old_commit_ids = pull_request.revisions
1138 old_commit_ids = pull_request.revisions
1141 pre_load = ["author", "date", "message", "branch"]
1139 pre_load = ["author", "date", "message", "branch"]
1142 commit_ranges = target_repo.compare(
1140 commit_ranges = target_repo.compare(
1143 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1141 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1144 pre_load=pre_load)
1142 pre_load=pre_load)
1145
1143
1146 target_ref = target_commit.raw_id
1144 target_ref = target_commit.raw_id
1147 source_ref = source_commit.raw_id
1145 source_ref = source_commit.raw_id
1148 ancestor_commit_id = target_repo.get_common_ancestor(
1146 ancestor_commit_id = target_repo.get_common_ancestor(
1149 target_ref, source_ref, source_repo)
1147 target_ref, source_ref, source_repo)
1150
1148
1151 if not ancestor_commit_id:
1149 if not ancestor_commit_id:
1152 raise ValueError(
1150 raise ValueError(
1153 'cannot calculate diff info without a common ancestor. '
1151 'cannot calculate diff info without a common ancestor. '
1154 'Make sure both repositories are related, and have a common forking commit.')
1152 'Make sure both repositories are related, and have a common forking commit.')
1155
1153
1156 pull_request.common_ancestor_id = ancestor_commit_id
1154 pull_request.common_ancestor_id = ancestor_commit_id
1157
1155
1158 pull_request.source_ref = '%s:%s:%s' % (
1156 pull_request.source_ref = '%s:%s:%s' % (
1159 source_ref_type, source_ref_name, source_commit.raw_id)
1157 source_ref_type, source_ref_name, source_commit.raw_id)
1160 pull_request.target_ref = '%s:%s:%s' % (
1158 pull_request.target_ref = '%s:%s:%s' % (
1161 target_ref_type, target_ref_name, ancestor_commit_id)
1159 target_ref_type, target_ref_name, ancestor_commit_id)
1162
1160
1163 pull_request.revisions = [
1161 pull_request.revisions = [
1164 commit.raw_id for commit in reversed(commit_ranges)]
1162 commit.raw_id for commit in reversed(commit_ranges)]
1165 pull_request.updated_on = datetime.datetime.now()
1163 pull_request.updated_on = datetime.datetime.now()
1166 Session().add(pull_request)
1164 Session().add(pull_request)
1167 new_commit_ids = pull_request.revisions
1165 new_commit_ids = pull_request.revisions
1168
1166
1169 old_diff_data, new_diff_data = self._generate_update_diffs(
1167 old_diff_data, new_diff_data = self._generate_update_diffs(
1170 pull_request, pull_request_version)
1168 pull_request, pull_request_version)
1171
1169
1172 # calculate commit and file changes
1170 # calculate commit and file changes
1173 commit_changes = self._calculate_commit_id_changes(
1171 commit_changes = self._calculate_commit_id_changes(
1174 old_commit_ids, new_commit_ids)
1172 old_commit_ids, new_commit_ids)
1175 file_changes = self._calculate_file_changes(
1173 file_changes = self._calculate_file_changes(
1176 old_diff_data, new_diff_data)
1174 old_diff_data, new_diff_data)
1177
1175
1178 # set comments as outdated if DIFFS changed
1176 # set comments as outdated if DIFFS changed
1179 CommentsModel().outdate_comments(
1177 CommentsModel().outdate_comments(
1180 pull_request, old_diff_data=old_diff_data,
1178 pull_request, old_diff_data=old_diff_data,
1181 new_diff_data=new_diff_data)
1179 new_diff_data=new_diff_data)
1182
1180
1183 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1181 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1184 file_node_changes = (
1182 file_node_changes = (
1185 file_changes.added or file_changes.modified or file_changes.removed)
1183 file_changes.added or file_changes.modified or file_changes.removed)
1186 pr_has_changes = valid_commit_changes or file_node_changes
1184 pr_has_changes = valid_commit_changes or file_node_changes
1187
1185
1188 # Add an automatic comment to the pull request, in case
1186 # Add an automatic comment to the pull request, in case
1189 # anything has changed
1187 # anything has changed
1190 if pr_has_changes:
1188 if pr_has_changes:
1191 update_comment = CommentsModel().create(
1189 update_comment = CommentsModel().create(
1192 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1190 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1193 repo=pull_request.target_repo,
1191 repo=pull_request.target_repo,
1194 user=pull_request.author,
1192 user=pull_request.author,
1195 pull_request=pull_request,
1193 pull_request=pull_request,
1196 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1194 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1197
1195
1198 # Update status to "Under Review" for added commits
1196 # Update status to "Under Review" for added commits
1199 for commit_id in commit_changes.added:
1197 for commit_id in commit_changes.added:
1200 ChangesetStatusModel().set_status(
1198 ChangesetStatusModel().set_status(
1201 repo=pull_request.source_repo,
1199 repo=pull_request.source_repo,
1202 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1200 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1203 comment=update_comment,
1201 comment=update_comment,
1204 user=pull_request.author,
1202 user=pull_request.author,
1205 pull_request=pull_request,
1203 pull_request=pull_request,
1206 revision=commit_id)
1204 revision=commit_id)
1207
1205
1208 # initial commit
1206 # initial commit
1209 Session().commit()
1207 Session().commit()
1210
1208
1211 if pr_has_changes:
1209 if pr_has_changes:
1212 # send update email to users
1210 # send update email to users
1213 try:
1211 try:
1214 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1212 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1215 ancestor_commit_id=ancestor_commit_id,
1213 ancestor_commit_id=ancestor_commit_id,
1216 commit_changes=commit_changes,
1214 commit_changes=commit_changes,
1217 file_changes=file_changes)
1215 file_changes=file_changes)
1218 Session().commit()
1216 Session().commit()
1219 except Exception:
1217 except Exception:
1220 log.exception('Failed to send email notification to users')
1218 log.exception('Failed to send email notification to users')
1221 Session().rollback()
1219 Session().rollback()
1222
1220
1223 log.debug(
1221 log.debug(
1224 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1222 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1225 'removed_ids: %s', pull_request.pull_request_id,
1223 'removed_ids: %s', pull_request.pull_request_id,
1226 commit_changes.added, commit_changes.common, commit_changes.removed)
1224 commit_changes.added, commit_changes.common, commit_changes.removed)
1227 log.debug(
1225 log.debug(
1228 'Updated pull request with the following file changes: %s',
1226 'Updated pull request with the following file changes: %s',
1229 file_changes)
1227 file_changes)
1230
1228
1231 log.info(
1229 log.info(
1232 "Updated pull request %s from commit %s to commit %s, "
1230 "Updated pull request %s from commit %s to commit %s, "
1233 "stored new version %s of this pull request.",
1231 "stored new version %s of this pull request.",
1234 pull_request.pull_request_id, source_ref_id,
1232 pull_request.pull_request_id, source_ref_id,
1235 pull_request.source_ref_parts.commit_id,
1233 pull_request.source_ref_parts.commit_id,
1236 pull_request_version.pull_request_version_id)
1234 pull_request_version.pull_request_version_id)
1237
1235
1238 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1236 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1239
1237
1240 return UpdateResponse(
1238 return UpdateResponse(
1241 executed=True, reason=UpdateFailureReason.NONE,
1239 executed=True, reason=UpdateFailureReason.NONE,
1242 old=pull_request, new=pull_request_version,
1240 old=pull_request, new=pull_request_version,
1243 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1241 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1244 source_changed=source_changed, target_changed=target_changed)
1242 source_changed=source_changed, target_changed=target_changed)
1245
1243
1246 def _create_version_from_snapshot(self, pull_request):
1244 def _create_version_from_snapshot(self, pull_request):
1247 version = PullRequestVersion()
1245 version = PullRequestVersion()
1248 version.title = pull_request.title
1246 version.title = pull_request.title
1249 version.description = pull_request.description
1247 version.description = pull_request.description
1250 version.status = pull_request.status
1248 version.status = pull_request.status
1251 version.pull_request_state = pull_request.pull_request_state
1249 version.pull_request_state = pull_request.pull_request_state
1252 version.created_on = datetime.datetime.now()
1250 version.created_on = datetime.datetime.now()
1253 version.updated_on = pull_request.updated_on
1251 version.updated_on = pull_request.updated_on
1254 version.user_id = pull_request.user_id
1252 version.user_id = pull_request.user_id
1255 version.source_repo = pull_request.source_repo
1253 version.source_repo = pull_request.source_repo
1256 version.source_ref = pull_request.source_ref
1254 version.source_ref = pull_request.source_ref
1257 version.target_repo = pull_request.target_repo
1255 version.target_repo = pull_request.target_repo
1258 version.target_ref = pull_request.target_ref
1256 version.target_ref = pull_request.target_ref
1259
1257
1260 version._last_merge_source_rev = pull_request._last_merge_source_rev
1258 version._last_merge_source_rev = pull_request._last_merge_source_rev
1261 version._last_merge_target_rev = pull_request._last_merge_target_rev
1259 version._last_merge_target_rev = pull_request._last_merge_target_rev
1262 version.last_merge_status = pull_request.last_merge_status
1260 version.last_merge_status = pull_request.last_merge_status
1263 version.last_merge_metadata = pull_request.last_merge_metadata
1261 version.last_merge_metadata = pull_request.last_merge_metadata
1264 version.shadow_merge_ref = pull_request.shadow_merge_ref
1262 version.shadow_merge_ref = pull_request.shadow_merge_ref
1265 version.merge_rev = pull_request.merge_rev
1263 version.merge_rev = pull_request.merge_rev
1266 version.reviewer_data = pull_request.reviewer_data
1264 version.reviewer_data = pull_request.reviewer_data
1267
1265
1268 version.revisions = pull_request.revisions
1266 version.revisions = pull_request.revisions
1269 version.common_ancestor_id = pull_request.common_ancestor_id
1267 version.common_ancestor_id = pull_request.common_ancestor_id
1270 version.pull_request = pull_request
1268 version.pull_request = pull_request
1271 Session().add(version)
1269 Session().add(version)
1272 Session().flush()
1270 Session().flush()
1273
1271
1274 return version
1272 return version
1275
1273
1276 def _generate_update_diffs(self, pull_request, pull_request_version):
1274 def _generate_update_diffs(self, pull_request, pull_request_version):
1277
1275
1278 diff_context = (
1276 diff_context = (
1279 self.DIFF_CONTEXT +
1277 self.DIFF_CONTEXT +
1280 CommentsModel.needed_extra_diff_context())
1278 CommentsModel.needed_extra_diff_context())
1281 hide_whitespace_changes = False
1279 hide_whitespace_changes = False
1282 source_repo = pull_request_version.source_repo
1280 source_repo = pull_request_version.source_repo
1283 source_ref_id = pull_request_version.source_ref_parts.commit_id
1281 source_ref_id = pull_request_version.source_ref_parts.commit_id
1284 target_ref_id = pull_request_version.target_ref_parts.commit_id
1282 target_ref_id = pull_request_version.target_ref_parts.commit_id
1285 old_diff = self._get_diff_from_pr_or_version(
1283 old_diff = self._get_diff_from_pr_or_version(
1286 source_repo, source_ref_id, target_ref_id,
1284 source_repo, source_ref_id, target_ref_id,
1287 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1285 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1288
1286
1289 source_repo = pull_request.source_repo
1287 source_repo = pull_request.source_repo
1290 source_ref_id = pull_request.source_ref_parts.commit_id
1288 source_ref_id = pull_request.source_ref_parts.commit_id
1291 target_ref_id = pull_request.target_ref_parts.commit_id
1289 target_ref_id = pull_request.target_ref_parts.commit_id
1292
1290
1293 new_diff = self._get_diff_from_pr_or_version(
1291 new_diff = self._get_diff_from_pr_or_version(
1294 source_repo, source_ref_id, target_ref_id,
1292 source_repo, source_ref_id, target_ref_id,
1295 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1293 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1296
1294
1297 # NOTE: this was using diff_format='gitdiff'
1295 # NOTE: this was using diff_format='gitdiff'
1298 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1296 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1299 old_diff_data.prepare()
1297 old_diff_data.prepare()
1300 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1298 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1301 new_diff_data.prepare()
1299 new_diff_data.prepare()
1302
1300
1303 return old_diff_data, new_diff_data
1301 return old_diff_data, new_diff_data
1304
1302
1305 def _link_comments_to_version(self, pull_request_version):
1303 def _link_comments_to_version(self, pull_request_version):
1306 """
1304 """
1307 Link all unlinked comments of this pull request to the given version.
1305 Link all unlinked comments of this pull request to the given version.
1308
1306
1309 :param pull_request_version: The `PullRequestVersion` to which
1307 :param pull_request_version: The `PullRequestVersion` to which
1310 the comments shall be linked.
1308 the comments shall be linked.
1311
1309
1312 """
1310 """
1313 pull_request = pull_request_version.pull_request
1311 pull_request = pull_request_version.pull_request
1314 comments = ChangesetComment.query()\
1312 comments = ChangesetComment.query()\
1315 .filter(
1313 .filter(
1316 # TODO: johbo: Should we query for the repo at all here?
1314 # TODO: johbo: Should we query for the repo at all here?
1317 # Pending decision on how comments of PRs are to be related
1315 # Pending decision on how comments of PRs are to be related
1318 # to either the source repo, the target repo or no repo at all.
1316 # to either the source repo, the target repo or no repo at all.
1319 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1317 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1320 ChangesetComment.pull_request == pull_request,
1318 ChangesetComment.pull_request == pull_request,
1321 ChangesetComment.pull_request_version == None)\
1319 ChangesetComment.pull_request_version == None)\
1322 .order_by(ChangesetComment.comment_id.asc())
1320 .order_by(ChangesetComment.comment_id.asc())
1323
1321
1324 # TODO: johbo: Find out why this breaks if it is done in a bulk
1322 # TODO: johbo: Find out why this breaks if it is done in a bulk
1325 # operation.
1323 # operation.
1326 for comment in comments:
1324 for comment in comments:
1327 comment.pull_request_version_id = (
1325 comment.pull_request_version_id = (
1328 pull_request_version.pull_request_version_id)
1326 pull_request_version.pull_request_version_id)
1329 Session().add(comment)
1327 Session().add(comment)
1330
1328
1331 def _calculate_commit_id_changes(self, old_ids, new_ids):
1329 def _calculate_commit_id_changes(self, old_ids, new_ids):
1332 added = [x for x in new_ids if x not in old_ids]
1330 added = [x for x in new_ids if x not in old_ids]
1333 common = [x for x in new_ids if x in old_ids]
1331 common = [x for x in new_ids if x in old_ids]
1334 removed = [x for x in old_ids if x not in new_ids]
1332 removed = [x for x in old_ids if x not in new_ids]
1335 total = new_ids
1333 total = new_ids
1336 return ChangeTuple(added, common, removed, total)
1334 return ChangeTuple(added, common, removed, total)
1337
1335
1338 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1336 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1339
1337
1340 old_files = OrderedDict()
1338 old_files = OrderedDict()
1341 for diff_data in old_diff_data.parsed_diff:
1339 for diff_data in old_diff_data.parsed_diff:
1342 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1340 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1343
1341
1344 added_files = []
1342 added_files = []
1345 modified_files = []
1343 modified_files = []
1346 removed_files = []
1344 removed_files = []
1347 for diff_data in new_diff_data.parsed_diff:
1345 for diff_data in new_diff_data.parsed_diff:
1348 new_filename = diff_data['filename']
1346 new_filename = diff_data['filename']
1349 new_hash = md5_safe(diff_data['raw_diff'])
1347 new_hash = md5_safe(diff_data['raw_diff'])
1350
1348
1351 old_hash = old_files.get(new_filename)
1349 old_hash = old_files.get(new_filename)
1352 if not old_hash:
1350 if not old_hash:
1353 # file is not present in old diff, we have to figure out from parsed diff
1351 # file is not present in old diff, we have to figure out from parsed diff
1354 # operation ADD/REMOVE
1352 # operation ADD/REMOVE
1355 operations_dict = diff_data['stats']['ops']
1353 operations_dict = diff_data['stats']['ops']
1356 if diffs.DEL_FILENODE in operations_dict:
1354 if diffs.DEL_FILENODE in operations_dict:
1357 removed_files.append(new_filename)
1355 removed_files.append(new_filename)
1358 else:
1356 else:
1359 added_files.append(new_filename)
1357 added_files.append(new_filename)
1360 else:
1358 else:
1361 if new_hash != old_hash:
1359 if new_hash != old_hash:
1362 modified_files.append(new_filename)
1360 modified_files.append(new_filename)
1363 # now remove a file from old, since we have seen it already
1361 # now remove a file from old, since we have seen it already
1364 del old_files[new_filename]
1362 del old_files[new_filename]
1365
1363
1366 # removed files is when there are present in old, but not in NEW,
1364 # removed files is when there are present in old, but not in NEW,
1367 # since we remove old files that are present in new diff, left-overs
1365 # since we remove old files that are present in new diff, left-overs
1368 # if any should be the removed files
1366 # if any should be the removed files
1369 removed_files.extend(old_files.keys())
1367 removed_files.extend(old_files.keys())
1370
1368
1371 return FileChangeTuple(added_files, modified_files, removed_files)
1369 return FileChangeTuple(added_files, modified_files, removed_files)
1372
1370
1373 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1371 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1374 """
1372 """
1375 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1373 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1376 so it's always looking the same disregarding on which default
1374 so it's always looking the same disregarding on which default
1377 renderer system is using.
1375 renderer system is using.
1378
1376
1379 :param ancestor_commit_id: ancestor raw_id
1377 :param ancestor_commit_id: ancestor raw_id
1380 :param changes: changes named tuple
1378 :param changes: changes named tuple
1381 :param file_changes: file changes named tuple
1379 :param file_changes: file changes named tuple
1382
1380
1383 """
1381 """
1384 new_status = ChangesetStatus.get_status_lbl(
1382 new_status = ChangesetStatus.get_status_lbl(
1385 ChangesetStatus.STATUS_UNDER_REVIEW)
1383 ChangesetStatus.STATUS_UNDER_REVIEW)
1386
1384
1387 changed_files = (
1385 changed_files = (
1388 file_changes.added + file_changes.modified + file_changes.removed)
1386 file_changes.added + file_changes.modified + file_changes.removed)
1389
1387
1390 params = {
1388 params = {
1391 'under_review_label': new_status,
1389 'under_review_label': new_status,
1392 'added_commits': changes.added,
1390 'added_commits': changes.added,
1393 'removed_commits': changes.removed,
1391 'removed_commits': changes.removed,
1394 'changed_files': changed_files,
1392 'changed_files': changed_files,
1395 'added_files': file_changes.added,
1393 'added_files': file_changes.added,
1396 'modified_files': file_changes.modified,
1394 'modified_files': file_changes.modified,
1397 'removed_files': file_changes.removed,
1395 'removed_files': file_changes.removed,
1398 'ancestor_commit_id': ancestor_commit_id
1396 'ancestor_commit_id': ancestor_commit_id
1399 }
1397 }
1400 renderer = RstTemplateRenderer()
1398 renderer = RstTemplateRenderer()
1401 return renderer.render('pull_request_update.mako', **params)
1399 return renderer.render('pull_request_update.mako', **params)
1402
1400
1403 def edit(self, pull_request, title, description, description_renderer, user):
1401 def edit(self, pull_request, title, description, description_renderer, user):
1404 pull_request = self.__get_pull_request(pull_request)
1402 pull_request = self.__get_pull_request(pull_request)
1405 old_data = pull_request.get_api_data(with_merge_state=False)
1403 old_data = pull_request.get_api_data(with_merge_state=False)
1406 if pull_request.is_closed():
1404 if pull_request.is_closed():
1407 raise ValueError('This pull request is closed')
1405 raise ValueError('This pull request is closed')
1408 if title:
1406 if title:
1409 pull_request.title = title
1407 pull_request.title = title
1410 pull_request.description = description
1408 pull_request.description = description
1411 pull_request.updated_on = datetime.datetime.now()
1409 pull_request.updated_on = datetime.datetime.now()
1412 pull_request.description_renderer = description_renderer
1410 pull_request.description_renderer = description_renderer
1413 Session().add(pull_request)
1411 Session().add(pull_request)
1414 self._log_audit_action(
1412 self._log_audit_action(
1415 'repo.pull_request.edit', {'old_data': old_data},
1413 'repo.pull_request.edit', {'old_data': old_data},
1416 user, pull_request)
1414 user, pull_request)
1417
1415
1418 def update_reviewers(self, pull_request, reviewer_data, user):
1416 def update_reviewers(self, pull_request, reviewer_data, user):
1419 """
1417 """
1420 Update the reviewers in the pull request
1418 Update the reviewers in the pull request
1421
1419
1422 :param pull_request: the pr to update
1420 :param pull_request: the pr to update
1423 :param reviewer_data: list of tuples
1421 :param reviewer_data: list of tuples
1424 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1422 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1425 :param user: current use who triggers this action
1423 :param user: current use who triggers this action
1426 """
1424 """
1427
1425
1428 pull_request = self.__get_pull_request(pull_request)
1426 pull_request = self.__get_pull_request(pull_request)
1429 if pull_request.is_closed():
1427 if pull_request.is_closed():
1430 raise ValueError('This pull request is closed')
1428 raise ValueError('This pull request is closed')
1431
1429
1432 reviewers = {}
1430 reviewers = {}
1433 for user_id, reasons, mandatory, role, rules in reviewer_data:
1431 for user_id, reasons, mandatory, role, rules in reviewer_data:
1434 if isinstance(user_id, (int, str)):
1432 if isinstance(user_id, (int, str)):
1435 user_id = self._get_user(user_id).user_id
1433 user_id = self._get_user(user_id).user_id
1436 reviewers[user_id] = {
1434 reviewers[user_id] = {
1437 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1435 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1438
1436
1439 reviewers_ids = set(reviewers.keys())
1437 reviewers_ids = set(reviewers.keys())
1440 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1438 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1441 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1439 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1442
1440
1443 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1441 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1444
1442
1445 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1443 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1446 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1444 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1447
1445
1448 log.debug("Adding %s reviewers", ids_to_add)
1446 log.debug("Adding %s reviewers", ids_to_add)
1449 log.debug("Removing %s reviewers", ids_to_remove)
1447 log.debug("Removing %s reviewers", ids_to_remove)
1450 changed = False
1448 changed = False
1451 added_audit_reviewers = []
1449 added_audit_reviewers = []
1452 removed_audit_reviewers = []
1450 removed_audit_reviewers = []
1453
1451
1454 for uid in ids_to_add:
1452 for uid in ids_to_add:
1455 changed = True
1453 changed = True
1456 _usr = self._get_user(uid)
1454 _usr = self._get_user(uid)
1457 reviewer = PullRequestReviewers()
1455 reviewer = PullRequestReviewers()
1458 reviewer.user = _usr
1456 reviewer.user = _usr
1459 reviewer.pull_request = pull_request
1457 reviewer.pull_request = pull_request
1460 reviewer.reasons = reviewers[uid]['reasons']
1458 reviewer.reasons = reviewers[uid]['reasons']
1461 # NOTE(marcink): mandatory shouldn't be changed now
1459 # NOTE(marcink): mandatory shouldn't be changed now
1462 # reviewer.mandatory = reviewers[uid]['reasons']
1460 # reviewer.mandatory = reviewers[uid]['reasons']
1463 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1461 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1464 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1462 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1465 Session().add(reviewer)
1463 Session().add(reviewer)
1466 added_audit_reviewers.append(reviewer.get_dict())
1464 added_audit_reviewers.append(reviewer.get_dict())
1467
1465
1468 for uid in ids_to_remove:
1466 for uid in ids_to_remove:
1469 changed = True
1467 changed = True
1470 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1468 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1471 # This is an edge case that handles previous state of having the same reviewer twice.
1469 # This is an edge case that handles previous state of having the same reviewer twice.
1472 # this CAN happen due to the lack of DB checks
1470 # this CAN happen due to the lack of DB checks
1473 reviewers = PullRequestReviewers.query()\
1471 reviewers = PullRequestReviewers.query()\
1474 .filter(PullRequestReviewers.user_id == uid,
1472 .filter(PullRequestReviewers.user_id == uid,
1475 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1473 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1476 PullRequestReviewers.pull_request == pull_request)\
1474 PullRequestReviewers.pull_request == pull_request)\
1477 .all()
1475 .all()
1478
1476
1479 for obj in reviewers:
1477 for obj in reviewers:
1480 added_audit_reviewers.append(obj.get_dict())
1478 added_audit_reviewers.append(obj.get_dict())
1481 Session().delete(obj)
1479 Session().delete(obj)
1482
1480
1483 if changed:
1481 if changed:
1484 Session().expire_all()
1482 Session().expire_all()
1485 pull_request.updated_on = datetime.datetime.now()
1483 pull_request.updated_on = datetime.datetime.now()
1486 Session().add(pull_request)
1484 Session().add(pull_request)
1487
1485
1488 # finally store audit logs
1486 # finally store audit logs
1489 for user_data in added_audit_reviewers:
1487 for user_data in added_audit_reviewers:
1490 self._log_audit_action(
1488 self._log_audit_action(
1491 'repo.pull_request.reviewer.add', {'data': user_data},
1489 'repo.pull_request.reviewer.add', {'data': user_data},
1492 user, pull_request)
1490 user, pull_request)
1493 for user_data in removed_audit_reviewers:
1491 for user_data in removed_audit_reviewers:
1494 self._log_audit_action(
1492 self._log_audit_action(
1495 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1493 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1496 user, pull_request)
1494 user, pull_request)
1497
1495
1498 self.notify_reviewers(pull_request, ids_to_add, user)
1496 self.notify_reviewers(pull_request, ids_to_add, user)
1499 return ids_to_add, ids_to_remove
1497 return ids_to_add, ids_to_remove
1500
1498
1501 def update_observers(self, pull_request, observer_data, user):
1499 def update_observers(self, pull_request, observer_data, user):
1502 """
1500 """
1503 Update the observers in the pull request
1501 Update the observers in the pull request
1504
1502
1505 :param pull_request: the pr to update
1503 :param pull_request: the pr to update
1506 :param observer_data: list of tuples
1504 :param observer_data: list of tuples
1507 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1505 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1508 :param user: current use who triggers this action
1506 :param user: current use who triggers this action
1509 """
1507 """
1510 pull_request = self.__get_pull_request(pull_request)
1508 pull_request = self.__get_pull_request(pull_request)
1511 if pull_request.is_closed():
1509 if pull_request.is_closed():
1512 raise ValueError('This pull request is closed')
1510 raise ValueError('This pull request is closed')
1513
1511
1514 observers = {}
1512 observers = {}
1515 for user_id, reasons, mandatory, role, rules in observer_data:
1513 for user_id, reasons, mandatory, role, rules in observer_data:
1516 if isinstance(user_id, (int, str)):
1514 if isinstance(user_id, (int, str)):
1517 user_id = self._get_user(user_id).user_id
1515 user_id = self._get_user(user_id).user_id
1518 observers[user_id] = {
1516 observers[user_id] = {
1519 'reasons': reasons, 'observers': mandatory, 'role': role}
1517 'reasons': reasons, 'observers': mandatory, 'role': role}
1520
1518
1521 observers_ids = set(observers.keys())
1519 observers_ids = set(observers.keys())
1522 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1520 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1523 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1521 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1524
1522
1525 current_observers_ids = set([x.user.user_id for x in current_observers])
1523 current_observers_ids = set([x.user.user_id for x in current_observers])
1526
1524
1527 ids_to_add = observers_ids.difference(current_observers_ids)
1525 ids_to_add = observers_ids.difference(current_observers_ids)
1528 ids_to_remove = current_observers_ids.difference(observers_ids)
1526 ids_to_remove = current_observers_ids.difference(observers_ids)
1529
1527
1530 log.debug("Adding %s observer", ids_to_add)
1528 log.debug("Adding %s observer", ids_to_add)
1531 log.debug("Removing %s observer", ids_to_remove)
1529 log.debug("Removing %s observer", ids_to_remove)
1532 changed = False
1530 changed = False
1533 added_audit_observers = []
1531 added_audit_observers = []
1534 removed_audit_observers = []
1532 removed_audit_observers = []
1535
1533
1536 for uid in ids_to_add:
1534 for uid in ids_to_add:
1537 changed = True
1535 changed = True
1538 _usr = self._get_user(uid)
1536 _usr = self._get_user(uid)
1539 observer = PullRequestReviewers()
1537 observer = PullRequestReviewers()
1540 observer.user = _usr
1538 observer.user = _usr
1541 observer.pull_request = pull_request
1539 observer.pull_request = pull_request
1542 observer.reasons = observers[uid]['reasons']
1540 observer.reasons = observers[uid]['reasons']
1543 # NOTE(marcink): mandatory shouldn't be changed now
1541 # NOTE(marcink): mandatory shouldn't be changed now
1544 # observer.mandatory = observer[uid]['reasons']
1542 # observer.mandatory = observer[uid]['reasons']
1545
1543
1546 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1544 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1547 observer.role = PullRequestReviewers.ROLE_OBSERVER
1545 observer.role = PullRequestReviewers.ROLE_OBSERVER
1548 Session().add(observer)
1546 Session().add(observer)
1549 added_audit_observers.append(observer.get_dict())
1547 added_audit_observers.append(observer.get_dict())
1550
1548
1551 for uid in ids_to_remove:
1549 for uid in ids_to_remove:
1552 changed = True
1550 changed = True
1553 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1551 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1554 # This is an edge case that handles previous state of having the same reviewer twice.
1552 # This is an edge case that handles previous state of having the same reviewer twice.
1555 # this CAN happen due to the lack of DB checks
1553 # this CAN happen due to the lack of DB checks
1556 observers = PullRequestReviewers.query()\
1554 observers = PullRequestReviewers.query()\
1557 .filter(PullRequestReviewers.user_id == uid,
1555 .filter(PullRequestReviewers.user_id == uid,
1558 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1556 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1559 PullRequestReviewers.pull_request == pull_request)\
1557 PullRequestReviewers.pull_request == pull_request)\
1560 .all()
1558 .all()
1561
1559
1562 for obj in observers:
1560 for obj in observers:
1563 added_audit_observers.append(obj.get_dict())
1561 added_audit_observers.append(obj.get_dict())
1564 Session().delete(obj)
1562 Session().delete(obj)
1565
1563
1566 if changed:
1564 if changed:
1567 Session().expire_all()
1565 Session().expire_all()
1568 pull_request.updated_on = datetime.datetime.now()
1566 pull_request.updated_on = datetime.datetime.now()
1569 Session().add(pull_request)
1567 Session().add(pull_request)
1570
1568
1571 # finally store audit logs
1569 # finally store audit logs
1572 for user_data in added_audit_observers:
1570 for user_data in added_audit_observers:
1573 self._log_audit_action(
1571 self._log_audit_action(
1574 'repo.pull_request.observer.add', {'data': user_data},
1572 'repo.pull_request.observer.add', {'data': user_data},
1575 user, pull_request)
1573 user, pull_request)
1576 for user_data in removed_audit_observers:
1574 for user_data in removed_audit_observers:
1577 self._log_audit_action(
1575 self._log_audit_action(
1578 'repo.pull_request.observer.delete', {'old_data': user_data},
1576 'repo.pull_request.observer.delete', {'old_data': user_data},
1579 user, pull_request)
1577 user, pull_request)
1580
1578
1581 self.notify_observers(pull_request, ids_to_add, user)
1579 self.notify_observers(pull_request, ids_to_add, user)
1582 return ids_to_add, ids_to_remove
1580 return ids_to_add, ids_to_remove
1583
1581
1584 def get_url(self, pull_request, request=None, permalink=False):
1582 def get_url(self, pull_request, request=None, permalink=False):
1585 if not request:
1583 if not request:
1586 request = get_current_request()
1584 request = get_current_request()
1587
1585
1588 if permalink:
1586 if permalink:
1589 return request.route_url(
1587 return request.route_url(
1590 'pull_requests_global',
1588 'pull_requests_global',
1591 pull_request_id=pull_request.pull_request_id,)
1589 pull_request_id=pull_request.pull_request_id,)
1592 else:
1590 else:
1593 return request.route_url('pullrequest_show',
1591 return request.route_url('pullrequest_show',
1594 repo_name=safe_str(pull_request.target_repo.repo_name),
1592 repo_name=safe_str(pull_request.target_repo.repo_name),
1595 pull_request_id=pull_request.pull_request_id,)
1593 pull_request_id=pull_request.pull_request_id,)
1596
1594
1597 def get_shadow_clone_url(self, pull_request, request=None):
1595 def get_shadow_clone_url(self, pull_request, request=None):
1598 """
1596 """
1599 Returns qualified url pointing to the shadow repository. If this pull
1597 Returns qualified url pointing to the shadow repository. If this pull
1600 request is closed there is no shadow repository and ``None`` will be
1598 request is closed there is no shadow repository and ``None`` will be
1601 returned.
1599 returned.
1602 """
1600 """
1603 if pull_request.is_closed():
1601 if pull_request.is_closed():
1604 return None
1602 return None
1605 else:
1603 else:
1606 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1604 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1607 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
1605 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
1608
1606
1609 def _notify_reviewers(self, pull_request, user_ids, role, user):
1607 def _notify_reviewers(self, pull_request, user_ids, role, user):
1610 # notification to reviewers/observers
1608 # notification to reviewers/observers
1611 if not user_ids:
1609 if not user_ids:
1612 return
1610 return
1613
1611
1614 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1612 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1615
1613
1616 pull_request_obj = pull_request
1614 pull_request_obj = pull_request
1617 # get the current participants of this pull request
1615 # get the current participants of this pull request
1618 recipients = user_ids
1616 recipients = user_ids
1619 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1617 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1620
1618
1621 pr_source_repo = pull_request_obj.source_repo
1619 pr_source_repo = pull_request_obj.source_repo
1622 pr_target_repo = pull_request_obj.target_repo
1620 pr_target_repo = pull_request_obj.target_repo
1623
1621
1624 pr_url = h.route_url('pullrequest_show',
1622 pr_url = h.route_url('pullrequest_show',
1625 repo_name=pr_target_repo.repo_name,
1623 repo_name=pr_target_repo.repo_name,
1626 pull_request_id=pull_request_obj.pull_request_id,)
1624 pull_request_id=pull_request_obj.pull_request_id,)
1627
1625
1628 # set some variables for email notification
1626 # set some variables for email notification
1629 pr_target_repo_url = h.route_url(
1627 pr_target_repo_url = h.route_url(
1630 'repo_summary', repo_name=pr_target_repo.repo_name)
1628 'repo_summary', repo_name=pr_target_repo.repo_name)
1631
1629
1632 pr_source_repo_url = h.route_url(
1630 pr_source_repo_url = h.route_url(
1633 'repo_summary', repo_name=pr_source_repo.repo_name)
1631 'repo_summary', repo_name=pr_source_repo.repo_name)
1634
1632
1635 # pull request specifics
1633 # pull request specifics
1636 pull_request_commits = [
1634 pull_request_commits = [
1637 (x.raw_id, x.message)
1635 (x.raw_id, x.message)
1638 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1636 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1639
1637
1640 current_rhodecode_user = user
1638 current_rhodecode_user = user
1641 kwargs = {
1639 kwargs = {
1642 'user': current_rhodecode_user,
1640 'user': current_rhodecode_user,
1643 'pull_request_author': pull_request.author,
1641 'pull_request_author': pull_request.author,
1644 'pull_request': pull_request_obj,
1642 'pull_request': pull_request_obj,
1645 'pull_request_commits': pull_request_commits,
1643 'pull_request_commits': pull_request_commits,
1646
1644
1647 'pull_request_target_repo': pr_target_repo,
1645 'pull_request_target_repo': pr_target_repo,
1648 'pull_request_target_repo_url': pr_target_repo_url,
1646 'pull_request_target_repo_url': pr_target_repo_url,
1649
1647
1650 'pull_request_source_repo': pr_source_repo,
1648 'pull_request_source_repo': pr_source_repo,
1651 'pull_request_source_repo_url': pr_source_repo_url,
1649 'pull_request_source_repo_url': pr_source_repo_url,
1652
1650
1653 'pull_request_url': pr_url,
1651 'pull_request_url': pr_url,
1654 'thread_ids': [pr_url],
1652 'thread_ids': [pr_url],
1655 'user_role': role
1653 'user_role': role
1656 }
1654 }
1657
1655
1658 # create notification objects, and emails
1656 # create notification objects, and emails
1659 NotificationModel().create(
1657 NotificationModel().create(
1660 created_by=current_rhodecode_user,
1658 created_by=current_rhodecode_user,
1661 notification_subject='', # Filled in based on the notification_type
1659 notification_subject='', # Filled in based on the notification_type
1662 notification_body='', # Filled in based on the notification_type
1660 notification_body='', # Filled in based on the notification_type
1663 notification_type=notification_type,
1661 notification_type=notification_type,
1664 recipients=recipients,
1662 recipients=recipients,
1665 email_kwargs=kwargs,
1663 email_kwargs=kwargs,
1666 )
1664 )
1667
1665
1668 def notify_reviewers(self, pull_request, reviewers_ids, user):
1666 def notify_reviewers(self, pull_request, reviewers_ids, user):
1669 return self._notify_reviewers(pull_request, reviewers_ids,
1667 return self._notify_reviewers(pull_request, reviewers_ids,
1670 PullRequestReviewers.ROLE_REVIEWER, user)
1668 PullRequestReviewers.ROLE_REVIEWER, user)
1671
1669
1672 def notify_observers(self, pull_request, observers_ids, user):
1670 def notify_observers(self, pull_request, observers_ids, user):
1673 return self._notify_reviewers(pull_request, observers_ids,
1671 return self._notify_reviewers(pull_request, observers_ids,
1674 PullRequestReviewers.ROLE_OBSERVER, user)
1672 PullRequestReviewers.ROLE_OBSERVER, user)
1675
1673
1676 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1674 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1677 commit_changes, file_changes):
1675 commit_changes, file_changes):
1678
1676
1679 updating_user_id = updating_user.user_id
1677 updating_user_id = updating_user.user_id
1680 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1678 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1681 # NOTE(marcink): send notification to all other users except to
1679 # NOTE(marcink): send notification to all other users except to
1682 # person who updated the PR
1680 # person who updated the PR
1683 recipients = reviewers.difference(set([updating_user_id]))
1681 recipients = reviewers.difference(set([updating_user_id]))
1684
1682
1685 log.debug('Notify following recipients about pull-request update %s', recipients)
1683 log.debug('Notify following recipients about pull-request update %s', recipients)
1686
1684
1687 pull_request_obj = pull_request
1685 pull_request_obj = pull_request
1688
1686
1689 # send email about the update
1687 # send email about the update
1690 changed_files = (
1688 changed_files = (
1691 file_changes.added + file_changes.modified + file_changes.removed)
1689 file_changes.added + file_changes.modified + file_changes.removed)
1692
1690
1693 pr_source_repo = pull_request_obj.source_repo
1691 pr_source_repo = pull_request_obj.source_repo
1694 pr_target_repo = pull_request_obj.target_repo
1692 pr_target_repo = pull_request_obj.target_repo
1695
1693
1696 pr_url = h.route_url('pullrequest_show',
1694 pr_url = h.route_url('pullrequest_show',
1697 repo_name=pr_target_repo.repo_name,
1695 repo_name=pr_target_repo.repo_name,
1698 pull_request_id=pull_request_obj.pull_request_id,)
1696 pull_request_id=pull_request_obj.pull_request_id,)
1699
1697
1700 # set some variables for email notification
1698 # set some variables for email notification
1701 pr_target_repo_url = h.route_url(
1699 pr_target_repo_url = h.route_url(
1702 'repo_summary', repo_name=pr_target_repo.repo_name)
1700 'repo_summary', repo_name=pr_target_repo.repo_name)
1703
1701
1704 pr_source_repo_url = h.route_url(
1702 pr_source_repo_url = h.route_url(
1705 'repo_summary', repo_name=pr_source_repo.repo_name)
1703 'repo_summary', repo_name=pr_source_repo.repo_name)
1706
1704
1707 email_kwargs = {
1705 email_kwargs = {
1708 'date': datetime.datetime.now(),
1706 'date': datetime.datetime.now(),
1709 'updating_user': updating_user,
1707 'updating_user': updating_user,
1710
1708
1711 'pull_request': pull_request_obj,
1709 'pull_request': pull_request_obj,
1712
1710
1713 'pull_request_target_repo': pr_target_repo,
1711 'pull_request_target_repo': pr_target_repo,
1714 'pull_request_target_repo_url': pr_target_repo_url,
1712 'pull_request_target_repo_url': pr_target_repo_url,
1715
1713
1716 'pull_request_source_repo': pr_source_repo,
1714 'pull_request_source_repo': pr_source_repo,
1717 'pull_request_source_repo_url': pr_source_repo_url,
1715 'pull_request_source_repo_url': pr_source_repo_url,
1718
1716
1719 'pull_request_url': pr_url,
1717 'pull_request_url': pr_url,
1720
1718
1721 'ancestor_commit_id': ancestor_commit_id,
1719 'ancestor_commit_id': ancestor_commit_id,
1722 'added_commits': commit_changes.added,
1720 'added_commits': commit_changes.added,
1723 'removed_commits': commit_changes.removed,
1721 'removed_commits': commit_changes.removed,
1724 'changed_files': changed_files,
1722 'changed_files': changed_files,
1725 'added_files': file_changes.added,
1723 'added_files': file_changes.added,
1726 'modified_files': file_changes.modified,
1724 'modified_files': file_changes.modified,
1727 'removed_files': file_changes.removed,
1725 'removed_files': file_changes.removed,
1728 'thread_ids': [pr_url],
1726 'thread_ids': [pr_url],
1729 }
1727 }
1730
1728
1731 # create notification objects, and emails
1729 # create notification objects, and emails
1732 NotificationModel().create(
1730 NotificationModel().create(
1733 created_by=updating_user,
1731 created_by=updating_user,
1734 notification_subject='', # Filled in based on the notification_type
1732 notification_subject='', # Filled in based on the notification_type
1735 notification_body='', # Filled in based on the notification_type
1733 notification_body='', # Filled in based on the notification_type
1736 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1734 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1737 recipients=recipients,
1735 recipients=recipients,
1738 email_kwargs=email_kwargs,
1736 email_kwargs=email_kwargs,
1739 )
1737 )
1740
1738
1741 def delete(self, pull_request, user=None):
1739 def delete(self, pull_request, user=None):
1742 if not user:
1740 if not user:
1743 user = getattr(get_current_rhodecode_user(), 'username', None)
1741 user = getattr(get_current_rhodecode_user(), 'username', None)
1744
1742
1745 pull_request = self.__get_pull_request(pull_request)
1743 pull_request = self.__get_pull_request(pull_request)
1746 old_data = pull_request.get_api_data(with_merge_state=False)
1744 old_data = pull_request.get_api_data(with_merge_state=False)
1747 self._cleanup_merge_workspace(pull_request)
1745 self._cleanup_merge_workspace(pull_request)
1748 self._log_audit_action(
1746 self._log_audit_action(
1749 'repo.pull_request.delete', {'old_data': old_data},
1747 'repo.pull_request.delete', {'old_data': old_data},
1750 user, pull_request)
1748 user, pull_request)
1751 Session().delete(pull_request)
1749 Session().delete(pull_request)
1752
1750
1753 def close_pull_request(self, pull_request, user):
1751 def close_pull_request(self, pull_request, user):
1754 pull_request = self.__get_pull_request(pull_request)
1752 pull_request = self.__get_pull_request(pull_request)
1755 self._cleanup_merge_workspace(pull_request)
1753 self._cleanup_merge_workspace(pull_request)
1756 pull_request.status = PullRequest.STATUS_CLOSED
1754 pull_request.status = PullRequest.STATUS_CLOSED
1757 pull_request.updated_on = datetime.datetime.now()
1755 pull_request.updated_on = datetime.datetime.now()
1758 Session().add(pull_request)
1756 Session().add(pull_request)
1759 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1757 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1760
1758
1761 pr_data = pull_request.get_api_data(with_merge_state=False)
1759 pr_data = pull_request.get_api_data(with_merge_state=False)
1762 self._log_audit_action(
1760 self._log_audit_action(
1763 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1761 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1764
1762
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close the pull request, attaching a closing comment and a final
        review status.

        The recorded status is APPROVED only when the calculated review
        status is already approved (voting consent); any other state is
        recorded as REJECTED.

        :param pull_request: pull request instance to close
        :param user: user performing the close
        :param repo: repository the pull request belongs to
        :param message: optional closing comment text; falls back to a
            generated status-change message
        :param auth_user: authenticated user passed through to comment creation
        :return: tuple of (comment, status)
        """

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # persist comment + status before firing hooks that may read them
        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1822
1820
1823 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1821 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1824 _ = translator or get_current_request().translate
1822 _ = translator or get_current_request().translate
1825
1823
1826 if not self._is_merge_enabled(pull_request):
1824 if not self._is_merge_enabled(pull_request):
1827 return None, False, _('Server-side pull request merging is disabled.')
1825 return None, False, _('Server-side pull request merging is disabled.')
1828
1826
1829 if pull_request.is_closed():
1827 if pull_request.is_closed():
1830 return None, False, _('This pull request is closed.')
1828 return None, False, _('This pull request is closed.')
1831
1829
1832 merge_possible, msg = self._check_repo_requirements(
1830 merge_possible, msg = self._check_repo_requirements(
1833 target=pull_request.target_repo, source=pull_request.source_repo,
1831 target=pull_request.target_repo, source=pull_request.source_repo,
1834 translator=_)
1832 translator=_)
1835 if not merge_possible:
1833 if not merge_possible:
1836 return None, merge_possible, msg
1834 return None, merge_possible, msg
1837
1835
1838 try:
1836 try:
1839 merge_response = self._try_merge(
1837 merge_response = self._try_merge(
1840 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1838 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1841 log.debug("Merge response: %s", merge_response)
1839 log.debug("Merge response: %s", merge_response)
1842 return merge_response, merge_response.possible, merge_response.merge_status_message
1840 return merge_response, merge_response.possible, merge_response.merge_status_message
1843 except NotImplementedError:
1841 except NotImplementedError:
1844 return None, False, _('Pull request merging is not supported.')
1842 return None, False, _('Pull request merging is not supported.')
1845
1843
1846 def _check_repo_requirements(self, target, source, translator):
1844 def _check_repo_requirements(self, target, source, translator):
1847 """
1845 """
1848 Check if `target` and `source` have compatible requirements.
1846 Check if `target` and `source` have compatible requirements.
1849
1847
1850 Currently this is just checking for largefiles.
1848 Currently this is just checking for largefiles.
1851 """
1849 """
1852 _ = translator
1850 _ = translator
1853 target_has_largefiles = self._has_largefiles(target)
1851 target_has_largefiles = self._has_largefiles(target)
1854 source_has_largefiles = self._has_largefiles(source)
1852 source_has_largefiles = self._has_largefiles(source)
1855 merge_possible = True
1853 merge_possible = True
1856 message = u''
1854 message = u''
1857
1855
1858 if target_has_largefiles != source_has_largefiles:
1856 if target_has_largefiles != source_has_largefiles:
1859 merge_possible = False
1857 merge_possible = False
1860 if source_has_largefiles:
1858 if source_has_largefiles:
1861 message = _(
1859 message = _(
1862 'Target repository large files support is disabled.')
1860 'Target repository large files support is disabled.')
1863 else:
1861 else:
1864 message = _(
1862 message = _(
1865 'Source repository large files support is disabled.')
1863 'Source repository large files support is disabled.')
1866
1864
1867 return merge_possible, message
1865 return merge_possible, message
1868
1866
1869 def _has_largefiles(self, repo):
1867 def _has_largefiles(self, repo):
1870 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1868 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1871 'extensions', 'largefiles')
1869 'extensions', 'largefiles')
1872 return largefiles_ui and largefiles_ui[0].active
1870 return largefiles_ui and largefiles_ui[0].active
1873
1871
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Performs a dry-run merge (or reuses the cached merge state when it
        is still valid) and returns a ``MergeResponse``.

        :param pull_request: pull request to evaluate
        :param force_shadow_repo_refresh: always recompute via a fresh
            dry-run merge in the shadow repository
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # target ref vanished (e.g. branch deleted) -> merge impossible
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            # a locked target repo blocks merging outright
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            # cached merge state is stale (or a refresh was forced)
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is still valid; rebuild the response from it
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata_parsed)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    # NOTE(review): separator '\n,' produces "a\n,b" — possibly
                    # intended to be ',\n'; confirm before changing.
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1927
1925
1928 def _refresh_reference(self, reference, vcs_repository):
1926 def _refresh_reference(self, reference, vcs_repository):
1929 if reference.type in self.UPDATABLE_REF_TYPES:
1927 if reference.type in self.UPDATABLE_REF_TYPES:
1930 name_or_id = reference.name
1928 name_or_id = reference.name
1931 else:
1929 else:
1932 name_or_id = reference.commit_id
1930 name_or_id = reference.commit_id
1933
1931
1934 refreshed_commit = vcs_repository.get_commit(name_or_id)
1932 refreshed_commit = vcs_repository.get_commit(name_or_id)
1935 refreshed_reference = Reference(
1933 refreshed_reference = Reference(
1936 reference.type, reference.name, refreshed_commit.raw_id)
1934 reference.type, reference.name, refreshed_commit.raw_id)
1937 return refreshed_reference
1935 return refreshed_reference
1938
1936
1939 def _needs_merge_state_refresh(self, pull_request, target_reference):
1937 def _needs_merge_state_refresh(self, pull_request, target_reference):
1940 return not(
1938 return not(
1941 pull_request.revisions and
1939 pull_request.revisions and
1942 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1940 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1943 target_reference.commit_id == pull_request._last_merge_target_rev)
1941 target_reference.commit_id == pull_request._last_merge_target_rev)
1944
1942
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow repository and cache the result
        on the pull request.

        :param pull_request: pull request being evaluated
        :param target_vcs: scm instance of the target repository
        :param target_reference: freshly resolved target reference
        :return: the ``MergeResponse`` from the dry-run merge
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        # dry_run=True: simulate the merge in the shadow repo only
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            # remember which revisions this result was computed for, so the
            # cache can be invalidated by _needs_merge_state_refresh
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1970
1968
1971 def _workspace_id(self, pull_request):
1969 def _workspace_id(self, pull_request):
1972 workspace_id = 'pr-%s' % pull_request.pull_request_id
1970 workspace_id = 'pr-%s' % pull_request.pull_request_id
1973 return workspace_id
1971 return workspace_id
1974
1972
1975 def generate_repo_data(self, repo, commit_id=None, branch=None,
1973 def generate_repo_data(self, repo, commit_id=None, branch=None,
1976 bookmark=None, translator=None):
1974 bookmark=None, translator=None):
1977 from rhodecode.model.repo import RepoModel
1975 from rhodecode.model.repo import RepoModel
1978
1976
1979 all_refs, selected_ref = \
1977 all_refs, selected_ref = \
1980 self._get_repo_pullrequest_sources(
1978 self._get_repo_pullrequest_sources(
1981 repo.scm_instance(), commit_id=commit_id,
1979 repo.scm_instance(), commit_id=commit_id,
1982 branch=branch, bookmark=bookmark, translator=translator)
1980 branch=branch, bookmark=bookmark, translator=translator)
1983
1981
1984 refs_select2 = []
1982 refs_select2 = []
1985 for element in all_refs:
1983 for element in all_refs:
1986 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1984 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1987 refs_select2.append({'text': element[1], 'children': children})
1985 refs_select2.append({'text': element[1], 'children': children})
1988
1986
1989 return {
1987 return {
1990 'user': {
1988 'user': {
1991 'user_id': repo.user.user_id,
1989 'user_id': repo.user.user_id,
1992 'username': repo.user.username,
1990 'username': repo.user.username,
1993 'firstname': repo.user.first_name,
1991 'firstname': repo.user.first_name,
1994 'lastname': repo.user.last_name,
1992 'lastname': repo.user.last_name,
1995 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1993 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1996 },
1994 },
1997 'name': repo.repo_name,
1995 'name': repo.repo_name,
1998 'link': RepoModel().get_url(repo),
1996 'link': RepoModel().get_url(repo),
1999 'description': h.chop_at_smart(repo.description_safe, '\n'),
1997 'description': h.chop_at_smart(repo.description_safe, '\n'),
2000 'refs': {
1998 'refs': {
2001 'all_refs': all_refs,
1999 'all_refs': all_refs,
2002 'selected_ref': selected_ref,
2000 'selected_ref': selected_ref,
2003 'select2_refs': refs_select2
2001 'select2_refs': refs_select2
2004 }
2002 }
2005 }
2003 }
2006
2004
2007 def generate_pullrequest_title(self, source, source_ref, target):
2005 def generate_pullrequest_title(self, source, source_ref, target):
2008 return u'{source}#{at_ref} to {target}'.format(
2006 return u'{source}#{at_ref} to {target}'.format(
2009 source=source,
2007 source=source,
2010 at_ref=source_ref,
2008 at_ref=source_ref,
2011 target=target,
2009 target=target,
2012 )
2010 )
2013
2011
2014 def _cleanup_merge_workspace(self, pull_request):
2012 def _cleanup_merge_workspace(self, pull_request):
2015 # Merging related cleanup
2013 # Merging related cleanup
2016 repo_id = pull_request.target_repo.repo_id
2014 repo_id = pull_request.target_repo.repo_id
2017 target_scm = pull_request.target_repo.scm_instance()
2015 target_scm = pull_request.target_repo.scm_instance()
2018 workspace_id = self._workspace_id(pull_request)
2016 workspace_id = self._workspace_id(pull_request)
2019
2017
2020 try:
2018 try:
2021 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2019 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2022 except NotImplementedError:
2020 except NotImplementedError:
2023 pass
2021 pass
2024
2022
2025 def _get_repo_pullrequest_sources(
2023 def _get_repo_pullrequest_sources(
2026 self, repo, commit_id=None, branch=None, bookmark=None,
2024 self, repo, commit_id=None, branch=None, bookmark=None,
2027 translator=None):
2025 translator=None):
2028 """
2026 """
2029 Return a structure with repo's interesting commits, suitable for
2027 Return a structure with repo's interesting commits, suitable for
2030 the selectors in pullrequest controller
2028 the selectors in pullrequest controller
2031
2029
2032 :param commit_id: a commit that must be in the list somehow
2030 :param commit_id: a commit that must be in the list somehow
2033 and selected by default
2031 and selected by default
2034 :param branch: a branch that must be in the list and selected
2032 :param branch: a branch that must be in the list and selected
2035 by default - even if closed
2033 by default - even if closed
2036 :param bookmark: a bookmark that must be in the list and selected
2034 :param bookmark: a bookmark that must be in the list and selected
2037 """
2035 """
2038 _ = translator or get_current_request().translate
2036 _ = translator or get_current_request().translate
2039
2037
2040 commit_id = safe_str(commit_id) if commit_id else None
2038 commit_id = safe_str(commit_id) if commit_id else None
2041 branch = safe_str(branch) if branch else None
2039 branch = safe_str(branch) if branch else None
2042 bookmark = safe_str(bookmark) if bookmark else None
2040 bookmark = safe_str(bookmark) if bookmark else None
2043
2041
2044 selected = None
2042 selected = None
2045
2043
2046 # order matters: first source that has commit_id in it will be selected
2044 # order matters: first source that has commit_id in it will be selected
2047 sources = []
2045 sources = []
2048 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2046 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2049 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2047 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2050
2048
2051 if commit_id:
2049 if commit_id:
2052 ref_commit = (h.short_id(commit_id), commit_id)
2050 ref_commit = (h.short_id(commit_id), commit_id)
2053 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2051 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2054
2052
2055 sources.append(
2053 sources.append(
2056 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2054 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2057 )
2055 )
2058
2056
2059 groups = []
2057 groups = []
2060
2058
2061 for group_key, ref_list, group_name, match in sources:
2059 for group_key, ref_list, group_name, match in sources:
2062 group_refs = []
2060 group_refs = []
2063 for ref_name, ref_id in ref_list:
2061 for ref_name, ref_id in ref_list:
2064 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2062 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2065 group_refs.append((ref_key, ref_name))
2063 group_refs.append((ref_key, ref_name))
2066
2064
2067 if not selected:
2065 if not selected:
2068 if set([commit_id, match]) & set([ref_id, ref_name]):
2066 if set([commit_id, match]) & set([ref_id, ref_name]):
2069 selected = ref_key
2067 selected = ref_key
2070
2068
2071 if group_refs:
2069 if group_refs:
2072 groups.append((group_refs, group_name))
2070 groups.append((group_refs, group_name))
2073
2071
2074 if not selected:
2072 if not selected:
2075 ref = commit_id or branch or bookmark
2073 ref = commit_id or branch or bookmark
2076 if ref:
2074 if ref:
2077 raise CommitDoesNotExistError(
2075 raise CommitDoesNotExistError(
2078 u'No commit refs could be found matching: {}'.format(ref))
2076 u'No commit refs could be found matching: {}'.format(ref))
2079 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2077 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2080 selected = u'branch:{}:{}'.format(
2078 selected = u'branch:{}:{}'.format(
2081 safe_str(repo.DEFAULT_BRANCH_NAME),
2079 safe_str(repo.DEFAULT_BRANCH_NAME),
2082 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2080 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2083 )
2081 )
2084 elif repo.commit_ids:
2082 elif repo.commit_ids:
2085 # make the user select in this case
2083 # make the user select in this case
2086 selected = None
2084 selected = None
2087 else:
2085 else:
2088 raise EmptyRepositoryError()
2086 raise EmptyRepositoryError()
2089 return groups, selected
2087 return groups, selected
2090
2088
2091 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2089 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2092 hide_whitespace_changes, diff_context):
2090 hide_whitespace_changes, diff_context):
2093
2091
2094 return self._get_diff_from_pr_or_version(
2092 return self._get_diff_from_pr_or_version(
2095 source_repo, source_ref_id, target_ref_id,
2093 source_repo, source_ref_id, target_ref_id,
2096 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2094 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2097
2095
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):
        """
        Compute the diff between ``target_ref_id`` and ``source_ref_id``
        within the source repository.

        :param source_repo: a ``Repository`` db object or a vcs repo instance
        :param source_ref_id: commit id of the source side
        :param target_ref_id: commit id of the target side
        :param hide_whitespace_changes: ignore whitespace-only changes
        :param diff_context: number of context lines around changes
        :return: the vcs diff object
        """

        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        # maybe_unreachable: after a PR update the old source commit may no
        # longer be reachable from any ref
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            # force the original ref id onto the empty-commit placeholder
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_str(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff
2128
2126
2129 def _is_merge_enabled(self, pull_request):
2127 def _is_merge_enabled(self, pull_request):
2130 return self._get_general_setting(
2128 return self._get_general_setting(
2131 pull_request, 'rhodecode_pr_merge_enabled')
2129 pull_request, 'rhodecode_pr_merge_enabled')
2132
2130
2133 def _use_rebase_for_merging(self, pull_request):
2131 def _use_rebase_for_merging(self, pull_request):
2134 repo_type = pull_request.target_repo.repo_type
2132 repo_type = pull_request.target_repo.repo_type
2135 if repo_type == 'hg':
2133 if repo_type == 'hg':
2136 return self._get_general_setting(
2134 return self._get_general_setting(
2137 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2135 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2138 elif repo_type == 'git':
2136 elif repo_type == 'git':
2139 return self._get_general_setting(
2137 return self._get_general_setting(
2140 pull_request, 'rhodecode_git_use_rebase_for_merging')
2138 pull_request, 'rhodecode_git_use_rebase_for_merging')
2141
2139
2142 return False
2140 return False
2143
2141
2144 def _user_name_for_merging(self, pull_request, user):
2142 def _user_name_for_merging(self, pull_request, user):
2145 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2143 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2146 if env_user_name_attr and hasattr(user, env_user_name_attr):
2144 if env_user_name_attr and hasattr(user, env_user_name_attr):
2147 user_name_attr = env_user_name_attr
2145 user_name_attr = env_user_name_attr
2148 else:
2146 else:
2149 user_name_attr = 'short_contact'
2147 user_name_attr = 'short_contact'
2150
2148
2151 user_name = getattr(user, user_name_attr)
2149 user_name = getattr(user, user_name_attr)
2152 return user_name
2150 return user_name
2153
2151
2154 def _close_branch_before_merging(self, pull_request):
2152 def _close_branch_before_merging(self, pull_request):
2155 repo_type = pull_request.target_repo.repo_type
2153 repo_type = pull_request.target_repo.repo_type
2156 if repo_type == 'hg':
2154 if repo_type == 'hg':
2157 return self._get_general_setting(
2155 return self._get_general_setting(
2158 pull_request, 'rhodecode_hg_close_branch_before_merging')
2156 pull_request, 'rhodecode_hg_close_branch_before_merging')
2159 elif repo_type == 'git':
2157 elif repo_type == 'git':
2160 return self._get_general_setting(
2158 return self._get_general_setting(
2161 pull_request, 'rhodecode_git_close_branch_before_merging')
2159 pull_request, 'rhodecode_git_close_branch_before_merging')
2162
2160
2163 return False
2161 return False
2164
2162
2165 def _get_general_setting(self, pull_request, settings_key, default=False):
2163 def _get_general_setting(self, pull_request, settings_key, default=False):
2166 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2164 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2167 settings = settings_model.get_general_settings()
2165 settings = settings_model.get_general_settings()
2168 return settings.get(settings_key, default)
2166 return settings.get(settings_key, default)
2169
2167
2170 def _log_audit_action(self, action, action_data, user, pull_request):
2168 def _log_audit_action(self, action, action_data, user, pull_request):
2171 audit_logger.store(
2169 audit_logger.store(
2172 action=action,
2170 action=action,
2173 action_data=action_data,
2171 action_data=action_data,
2174 user=user,
2172 user=user,
2175 repo=pull_request.target_repo)
2173 repo=pull_request.target_repo)
2176
2174
2177 def get_reviewer_functions(self):
2175 def get_reviewer_functions(self):
2178 """
2176 """
2179 Fetches functions for validation and fetching default reviewers.
2177 Fetches functions for validation and fetching default reviewers.
2180 If available we use the EE package, else we fallback to CE
2178 If available we use the EE package, else we fallback to CE
2181 package functions
2179 package functions
2182 """
2180 """
2183 try:
2181 try:
2184 from rc_reviewers.utils import get_default_reviewers_data
2182 from rc_reviewers.utils import get_default_reviewers_data
2185 from rc_reviewers.utils import validate_default_reviewers
2183 from rc_reviewers.utils import validate_default_reviewers
2186 from rc_reviewers.utils import validate_observers
2184 from rc_reviewers.utils import validate_observers
2187 except ImportError:
2185 except ImportError:
2188 from rhodecode.apps.repository.utils import get_default_reviewers_data
2186 from rhodecode.apps.repository.utils import get_default_reviewers_data
2189 from rhodecode.apps.repository.utils import validate_default_reviewers
2187 from rhodecode.apps.repository.utils import validate_default_reviewers
2190 from rhodecode.apps.repository.utils import validate_observers
2188 from rhodecode.apps.repository.utils import validate_observers
2191
2189
2192 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2190 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2193
2191
2194
2192
2195 class MergeCheck(object):
2193 class MergeCheck(object):
2196 """
2194 """
2197 Perform Merge Checks and returns a check object which stores information
2195 Perform Merge Checks and returns a check object which stores information
2198 about merge errors, and merge conditions
2196 about merge errors, and merge conditions
2199 """
2197 """
2200 TODO_CHECK = 'todo'
2198 TODO_CHECK = 'todo'
2201 PERM_CHECK = 'perm'
2199 PERM_CHECK = 'perm'
2202 REVIEW_CHECK = 'review'
2200 REVIEW_CHECK = 'review'
2203 MERGE_CHECK = 'merge'
2201 MERGE_CHECK = 'merge'
2204 WIP_CHECK = 'wip'
2202 WIP_CHECK = 'wip'
2205
2203
2206 def __init__(self):
2204 def __init__(self):
2207 self.review_status = None
2205 self.review_status = None
2208 self.merge_possible = None
2206 self.merge_possible = None
2209 self.merge_msg = ''
2207 self.merge_msg = ''
2210 self.merge_response = None
2208 self.merge_response = None
2211 self.failed = None
2209 self.failed = None
2212 self.errors = []
2210 self.errors = []
2213 self.error_details = OrderedDict()
2211 self.error_details = OrderedDict()
2214 self.source_commit = AttributeDict()
2212 self.source_commit = AttributeDict()
2215 self.target_commit = AttributeDict()
2213 self.target_commit = AttributeDict()
2216 self.reviewers_count = 0
2214 self.reviewers_count = 0
2217 self.observers_count = 0
2215 self.observers_count = 0
2218
2216
2219 def __repr__(self):
2217 def __repr__(self):
2220 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2218 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2221 self.merge_possible, self.failed, self.errors)
2219 self.merge_possible, self.failed, self.errors)
2222
2220
2223 def push_error(self, error_type, message, error_key, details):
2221 def push_error(self, error_type, message, error_key, details):
2224 self.failed = True
2222 self.failed = True
2225 self.errors.append([error_type, message])
2223 self.errors.append([error_type, message])
2226 self.error_details[error_key] = dict(
2224 self.error_details[error_key] = dict(
2227 details=details,
2225 details=details,
2228 error_type=error_type,
2226 error_type=error_type,
2229 message=message
2227 message=message
2230 )
2228 )
2231
2229
2232 @classmethod
2230 @classmethod
2233 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2231 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2234 force_shadow_repo_refresh=False):
2232 force_shadow_repo_refresh=False):
2235 _ = translator
2233 _ = translator
2236 merge_check = cls()
2234 merge_check = cls()
2237
2235
2238 # title has WIP:
2236 # title has WIP:
2239 if pull_request.work_in_progress:
2237 if pull_request.work_in_progress:
2240 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2238 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2241
2239
2242 msg = _('WIP marker in title prevents from accidental merge.')
2240 msg = _('WIP marker in title prevents from accidental merge.')
2243 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2241 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2244 if fail_early:
2242 if fail_early:
2245 return merge_check
2243 return merge_check
2246
2244
2247 # permissions to merge
2245 # permissions to merge
2248 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2246 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2249 if not user_allowed_to_merge:
2247 if not user_allowed_to_merge:
2250 log.debug("MergeCheck: cannot merge, approval is pending.")
2248 log.debug("MergeCheck: cannot merge, approval is pending.")
2251
2249
2252 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2250 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2253 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2251 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2254 if fail_early:
2252 if fail_early:
2255 return merge_check
2253 return merge_check
2256
2254
2257 # permission to merge into the target branch
2255 # permission to merge into the target branch
2258 target_commit_id = pull_request.target_ref_parts.commit_id
2256 target_commit_id = pull_request.target_ref_parts.commit_id
2259 if pull_request.target_ref_parts.type == 'branch':
2257 if pull_request.target_ref_parts.type == 'branch':
2260 branch_name = pull_request.target_ref_parts.name
2258 branch_name = pull_request.target_ref_parts.name
2261 else:
2259 else:
2262 # for mercurial we can always figure out the branch from the commit
2260 # for mercurial we can always figure out the branch from the commit
2263 # in case of bookmark
2261 # in case of bookmark
2264 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2262 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2265 branch_name = target_commit.branch
2263 branch_name = target_commit.branch
2266
2264
2267 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2265 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2268 pull_request.target_repo.repo_name, branch_name)
2266 pull_request.target_repo.repo_name, branch_name)
2269 if branch_perm and branch_perm == 'branch.none':
2267 if branch_perm and branch_perm == 'branch.none':
2270 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2268 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2271 branch_name, rule)
2269 branch_name, rule)
2272 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2270 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2273 if fail_early:
2271 if fail_early:
2274 return merge_check
2272 return merge_check
2275
2273
2276 # review status, must be always present
2274 # review status, must be always present
2277 review_status = pull_request.calculated_review_status()
2275 review_status = pull_request.calculated_review_status()
2278 merge_check.review_status = review_status
2276 merge_check.review_status = review_status
2279 merge_check.reviewers_count = pull_request.reviewers_count
2277 merge_check.reviewers_count = pull_request.reviewers_count
2280 merge_check.observers_count = pull_request.observers_count
2278 merge_check.observers_count = pull_request.observers_count
2281
2279
2282 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2280 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2283 if not status_approved and merge_check.reviewers_count:
2281 if not status_approved and merge_check.reviewers_count:
2284 log.debug("MergeCheck: cannot merge, approval is pending.")
2282 log.debug("MergeCheck: cannot merge, approval is pending.")
2285 msg = _('Pull request reviewer approval is pending.')
2283 msg = _('Pull request reviewer approval is pending.')
2286
2284
2287 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2285 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2288
2286
2289 if fail_early:
2287 if fail_early:
2290 return merge_check
2288 return merge_check
2291
2289
2292 # left over TODOs
2290 # left over TODOs
2293 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2291 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2294 if todos:
2292 if todos:
2295 log.debug("MergeCheck: cannot merge, {} "
2293 log.debug("MergeCheck: cannot merge, {} "
2296 "unresolved TODOs left.".format(len(todos)))
2294 "unresolved TODOs left.".format(len(todos)))
2297
2295
2298 if len(todos) == 1:
2296 if len(todos) == 1:
2299 msg = _('Cannot merge, {} TODO still not resolved.').format(
2297 msg = _('Cannot merge, {} TODO still not resolved.').format(
2300 len(todos))
2298 len(todos))
2301 else:
2299 else:
2302 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2300 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2303 len(todos))
2301 len(todos))
2304
2302
2305 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2303 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2306
2304
2307 if fail_early:
2305 if fail_early:
2308 return merge_check
2306 return merge_check
2309
2307
2310 # merge possible, here is the filesystem simulation + shadow repo
2308 # merge possible, here is the filesystem simulation + shadow repo
2311 merge_response, merge_status, msg = PullRequestModel().merge_status(
2309 merge_response, merge_status, msg = PullRequestModel().merge_status(
2312 pull_request, translator=translator,
2310 pull_request, translator=translator,
2313 force_shadow_repo_refresh=force_shadow_repo_refresh)
2311 force_shadow_repo_refresh=force_shadow_repo_refresh)
2314
2312
2315 merge_check.merge_possible = merge_status
2313 merge_check.merge_possible = merge_status
2316 merge_check.merge_msg = msg
2314 merge_check.merge_msg = msg
2317 merge_check.merge_response = merge_response
2315 merge_check.merge_response = merge_response
2318
2316
2319 source_ref_id = pull_request.source_ref_parts.commit_id
2317 source_ref_id = pull_request.source_ref_parts.commit_id
2320 target_ref_id = pull_request.target_ref_parts.commit_id
2318 target_ref_id = pull_request.target_ref_parts.commit_id
2321
2319
2322 try:
2320 try:
2323 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2321 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2324 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2322 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2325 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2323 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2326 merge_check.source_commit.current_raw_id = source_commit.raw_id
2324 merge_check.source_commit.current_raw_id = source_commit.raw_id
2327 merge_check.source_commit.previous_raw_id = source_ref_id
2325 merge_check.source_commit.previous_raw_id = source_ref_id
2328
2326
2329 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2327 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2330 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2328 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2331 merge_check.target_commit.current_raw_id = target_commit.raw_id
2329 merge_check.target_commit.current_raw_id = target_commit.raw_id
2332 merge_check.target_commit.previous_raw_id = target_ref_id
2330 merge_check.target_commit.previous_raw_id = target_ref_id
2333 except (SourceRefMissing, TargetRefMissing):
2331 except (SourceRefMissing, TargetRefMissing):
2334 pass
2332 pass
2335
2333
2336 if not merge_status:
2334 if not merge_status:
2337 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2335 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2338 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2336 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2339
2337
2340 if fail_early:
2338 if fail_early:
2341 return merge_check
2339 return merge_check
2342
2340
2343 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2341 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2344 return merge_check
2342 return merge_check
2345
2343
2346 @classmethod
2344 @classmethod
2347 def get_merge_conditions(cls, pull_request, translator):
2345 def get_merge_conditions(cls, pull_request, translator):
2348 _ = translator
2346 _ = translator
2349 merge_details = {}
2347 merge_details = {}
2350
2348
2351 model = PullRequestModel()
2349 model = PullRequestModel()
2352 use_rebase = model._use_rebase_for_merging(pull_request)
2350 use_rebase = model._use_rebase_for_merging(pull_request)
2353
2351
2354 if use_rebase:
2352 if use_rebase:
2355 merge_details['merge_strategy'] = dict(
2353 merge_details['merge_strategy'] = dict(
2356 details={},
2354 details={},
2357 message=_('Merge strategy: rebase')
2355 message=_('Merge strategy: rebase')
2358 )
2356 )
2359 else:
2357 else:
2360 merge_details['merge_strategy'] = dict(
2358 merge_details['merge_strategy'] = dict(
2361 details={},
2359 details={},
2362 message=_('Merge strategy: explicit merge commit')
2360 message=_('Merge strategy: explicit merge commit')
2363 )
2361 )
2364
2362
2365 close_branch = model._close_branch_before_merging(pull_request)
2363 close_branch = model._close_branch_before_merging(pull_request)
2366 if close_branch:
2364 if close_branch:
2367 repo_type = pull_request.target_repo.repo_type
2365 repo_type = pull_request.target_repo.repo_type
2368 close_msg = ''
2366 close_msg = ''
2369 if repo_type == 'hg':
2367 if repo_type == 'hg':
2370 close_msg = _('Source branch will be closed before the merge.')
2368 close_msg = _('Source branch will be closed before the merge.')
2371 elif repo_type == 'git':
2369 elif repo_type == 'git':
2372 close_msg = _('Source branch will be deleted after the merge.')
2370 close_msg = _('Source branch will be deleted after the merge.')
2373
2371
2374 merge_details['close_branch'] = dict(
2372 merge_details['close_branch'] = dict(
2375 details={},
2373 details={},
2376 message=close_msg
2374 message=close_msg
2377 )
2375 )
2378
2376
2379 return merge_details
2377 return merge_details
2380
2378
2381
2379
2382 @dataclasses.dataclass
2380 @dataclasses.dataclass
2383 class ChangeTuple:
2381 class ChangeTuple:
2384 added: list
2382 added: list
2385 common: list
2383 common: list
2386 removed: list
2384 removed: list
2387 total: list
2385 total: list
2388
2386
2389
2387
2390 @dataclasses.dataclass
2388 @dataclasses.dataclass
2391 class FileChangeTuple:
2389 class FileChangeTuple:
2392 added: list
2390 added: list
2393 modified: list
2391 modified: list
2394 removed: list
2392 removed: list
@@ -1,1199 +1,1198 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 import os
19 import os
21 import re
20 import re
22 import shutil
21 import shutil
23 import time
22 import time
24 import logging
23 import logging
25 import traceback
24 import traceback
26 import datetime
25 import datetime
27
26
28 from pyramid.threadlocal import get_current_request
27 from pyramid.threadlocal import get_current_request
29 from sqlalchemy.orm import aliased
28 from sqlalchemy.orm import aliased
30 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
31
30
32 from rhodecode import events
31 from rhodecode import events
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
32 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 from rhodecode.lib.caching_query import FromCache
33 from rhodecode.lib.caching_query import FromCache
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 from rhodecode.lib import hooks_base
35 from rhodecode.lib import hooks_base
37 from rhodecode.lib.user_log_filter import user_log_filter
36 from rhodecode.lib.user_log_filter import user_log_filter
38 from rhodecode.lib.utils import make_db_config
37 from rhodecode.lib.utils import make_db_config
39 from rhodecode.lib.utils2 import (
38 from rhodecode.lib.utils2 import (
40 safe_str, remove_prefix, obfuscate_url_pw,
39 safe_str, remove_prefix, obfuscate_url_pw,
41 get_current_rhodecode_user, safe_int, action_logger_generic)
40 get_current_rhodecode_user, safe_int, action_logger_generic)
42 from rhodecode.lib.vcs.backends import get_backend
41 from rhodecode.lib.vcs.backends import get_backend
43 from rhodecode.model import BaseModel
42 from rhodecode.model import BaseModel
44 from rhodecode.model.db import (
43 from rhodecode.model.db import (
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
44 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
45 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
46 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
47 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 from rhodecode.model.permission import PermissionModel
48 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.settings import VcsSettingsModel
49 from rhodecode.model.settings import VcsSettingsModel
51
50
52 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
53
52
54
53
55 class RepoModel(BaseModel):
54 class RepoModel(BaseModel):
56
55
57 cls = Repository
56 cls = Repository
58
57
59 def _get_user_group(self, users_group):
58 def _get_user_group(self, users_group):
60 return self._get_instance(UserGroup, users_group,
59 return self._get_instance(UserGroup, users_group,
61 callback=UserGroup.get_by_group_name)
60 callback=UserGroup.get_by_group_name)
62
61
63 def _get_repo_group(self, repo_group):
62 def _get_repo_group(self, repo_group):
64 return self._get_instance(RepoGroup, repo_group,
63 return self._get_instance(RepoGroup, repo_group,
65 callback=RepoGroup.get_by_group_name)
64 callback=RepoGroup.get_by_group_name)
66
65
67 def _create_default_perms(self, repository, private):
66 def _create_default_perms(self, repository, private):
68 # create default permission
67 # create default permission
69 default = 'repository.read'
68 default = 'repository.read'
70 def_user = User.get_default_user()
69 def_user = User.get_default_user()
71 for p in def_user.user_perms:
70 for p in def_user.user_perms:
72 if p.permission.permission_name.startswith('repository.'):
71 if p.permission.permission_name.startswith('repository.'):
73 default = p.permission.permission_name
72 default = p.permission.permission_name
74 break
73 break
75
74
76 default_perm = 'repository.none' if private else default
75 default_perm = 'repository.none' if private else default
77
76
78 repo_to_perm = UserRepoToPerm()
77 repo_to_perm = UserRepoToPerm()
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
78 repo_to_perm.permission = Permission.get_by_key(default_perm)
80
79
81 repo_to_perm.repository = repository
80 repo_to_perm.repository = repository
82 repo_to_perm.user = def_user
81 repo_to_perm.user = def_user
83
82
84 return repo_to_perm
83 return repo_to_perm
85
84
86 @LazyProperty
85 @LazyProperty
87 def repos_path(self):
86 def repos_path(self):
88 """
87 """
89 Gets the repositories root path from database
88 Gets the repositories root path from database
90 """
89 """
91 settings_model = VcsSettingsModel(sa=self.sa)
90 settings_model = VcsSettingsModel(sa=self.sa)
92 return settings_model.get_repos_location()
91 return settings_model.get_repos_location()
93
92
94 def get(self, repo_id):
93 def get(self, repo_id):
95 repo = self.sa.query(Repository) \
94 repo = self.sa.query(Repository) \
96 .filter(Repository.repo_id == repo_id)
95 .filter(Repository.repo_id == repo_id)
97
96
98 return repo.scalar()
97 return repo.scalar()
99
98
100 def get_repo(self, repository):
99 def get_repo(self, repository):
101 return self._get_repo(repository)
100 return self._get_repo(repository)
102
101
103 def get_by_repo_name(self, repo_name, cache=False):
102 def get_by_repo_name(self, repo_name, cache=False):
104 repo = self.sa.query(Repository) \
103 repo = self.sa.query(Repository) \
105 .filter(Repository.repo_name == repo_name)
104 .filter(Repository.repo_name == repo_name)
106
105
107 if cache:
106 if cache:
108 name_key = _hash_key(repo_name)
107 name_key = _hash_key(repo_name)
109 repo = repo.options(
108 repo = repo.options(
110 FromCache("sql_cache_short", f"get_repo_{name_key}"))
109 FromCache("sql_cache_short", f"get_repo_{name_key}"))
111 return repo.scalar()
110 return repo.scalar()
112
111
113 def _extract_id_from_repo_name(self, repo_name):
112 def _extract_id_from_repo_name(self, repo_name):
114 if repo_name.startswith('/'):
113 if repo_name.startswith('/'):
115 repo_name = repo_name.lstrip('/')
114 repo_name = repo_name.lstrip('/')
116 by_id_match = re.match(r'^_(\d+)', repo_name)
115 by_id_match = re.match(r'^_(\d+)', repo_name)
117 if by_id_match:
116 if by_id_match:
118 return by_id_match.groups()[0]
117 return by_id_match.groups()[0]
119
118
120 def get_repo_by_id(self, repo_name):
119 def get_repo_by_id(self, repo_name):
121 """
120 """
122 Extracts repo_name by id from special urls.
121 Extracts repo_name by id from special urls.
123 Example url is _11/repo_name
122 Example url is _11/repo_name
124
123
125 :param repo_name:
124 :param repo_name:
126 :return: repo object if matched else None
125 :return: repo object if matched else None
127 """
126 """
128 _repo_id = None
127 _repo_id = None
129 try:
128 try:
130 _repo_id = self._extract_id_from_repo_name(repo_name)
129 _repo_id = self._extract_id_from_repo_name(repo_name)
131 if _repo_id:
130 if _repo_id:
132 return self.get(_repo_id)
131 return self.get(_repo_id)
133 except Exception:
132 except Exception:
134 log.exception('Failed to extract repo_name from URL')
133 log.exception('Failed to extract repo_name from URL')
135 if _repo_id:
134 if _repo_id:
136 Session().rollback()
135 Session().rollback()
137
136
138 return None
137 return None
139
138
140 def get_repos_for_root(self, root, traverse=False):
139 def get_repos_for_root(self, root, traverse=False):
141 if traverse:
140 if traverse:
142 like_expression = u'{}%'.format(safe_str(root))
141 like_expression = u'{}%'.format(safe_str(root))
143 repos = Repository.query().filter(
142 repos = Repository.query().filter(
144 Repository.repo_name.like(like_expression)).all()
143 Repository.repo_name.like(like_expression)).all()
145 else:
144 else:
146 if root and not isinstance(root, RepoGroup):
145 if root and not isinstance(root, RepoGroup):
147 raise ValueError(
146 raise ValueError(
148 'Root must be an instance '
147 'Root must be an instance '
149 'of RepoGroup, got:{} instead'.format(type(root)))
148 'of RepoGroup, got:{} instead'.format(type(root)))
150 repos = Repository.query().filter(Repository.group == root).all()
149 repos = Repository.query().filter(Repository.group == root).all()
151 return repos
150 return repos
152
151
153 def get_url(self, repo, request=None, permalink=False):
152 def get_url(self, repo, request=None, permalink=False):
154 if not request:
153 if not request:
155 request = get_current_request()
154 request = get_current_request()
156
155
157 if not request:
156 if not request:
158 return
157 return
159
158
160 if permalink:
159 if permalink:
161 return request.route_url(
160 return request.route_url(
162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
161 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
163 else:
162 else:
164 return request.route_url(
163 return request.route_url(
165 'repo_summary', repo_name=safe_str(repo.repo_name))
164 'repo_summary', repo_name=safe_str(repo.repo_name))
166
165
167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
166 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
168 if not request:
167 if not request:
169 request = get_current_request()
168 request = get_current_request()
170
169
171 if not request:
170 if not request:
172 return
171 return
173
172
174 if permalink:
173 if permalink:
175 return request.route_url(
174 return request.route_url(
176 'repo_commit', repo_name=safe_str(repo.repo_id),
175 'repo_commit', repo_name=safe_str(repo.repo_id),
177 commit_id=commit_id)
176 commit_id=commit_id)
178
177
179 else:
178 else:
180 return request.route_url(
179 return request.route_url(
181 'repo_commit', repo_name=safe_str(repo.repo_name),
180 'repo_commit', repo_name=safe_str(repo.repo_name),
182 commit_id=commit_id)
181 commit_id=commit_id)
183
182
184 def get_repo_log(self, repo, filter_term):
183 def get_repo_log(self, repo, filter_term):
185 repo_log = UserLog.query()\
184 repo_log = UserLog.query()\
186 .filter(or_(UserLog.repository_id == repo.repo_id,
185 .filter(or_(UserLog.repository_id == repo.repo_id,
187 UserLog.repository_name == repo.repo_name))\
186 UserLog.repository_name == repo.repo_name))\
188 .options(joinedload(UserLog.user))\
187 .options(joinedload(UserLog.user))\
189 .options(joinedload(UserLog.repository))\
188 .options(joinedload(UserLog.repository))\
190 .order_by(UserLog.action_date.desc())
189 .order_by(UserLog.action_date.desc())
191
190
192 repo_log = user_log_filter(repo_log, filter_term)
191 repo_log = user_log_filter(repo_log, filter_term)
193 return repo_log
192 return repo_log
194
193
195 @classmethod
194 @classmethod
196 def update_commit_cache(cls, repositories=None):
195 def update_commit_cache(cls, repositories=None):
197 if not repositories:
196 if not repositories:
198 repositories = Repository.getAll()
197 repositories = Repository.getAll()
199 for repo in repositories:
198 for repo in repositories:
200 repo.update_commit_cache()
199 repo.update_commit_cache()
201
200
202 def get_repos_as_dict(self, repo_list=None, admin=False,
201 def get_repos_as_dict(self, repo_list=None, admin=False,
203 super_user_actions=False, short_name=None):
202 super_user_actions=False, short_name=None):
204
203
205 _render = get_current_request().get_partial_renderer(
204 _render = get_current_request().get_partial_renderer(
206 'rhodecode:templates/data_table/_dt_elements.mako')
205 'rhodecode:templates/data_table/_dt_elements.mako')
207 c = _render.get_call_context()
206 c = _render.get_call_context()
208 h = _render.get_helpers()
207 h = _render.get_helpers()
209
208
210 def quick_menu(repo_name):
209 def quick_menu(repo_name):
211 return _render('quick_menu', repo_name)
210 return _render('quick_menu', repo_name)
212
211
213 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
212 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
214 if short_name is not None:
213 if short_name is not None:
215 short_name_var = short_name
214 short_name_var = short_name
216 else:
215 else:
217 short_name_var = not admin
216 short_name_var = not admin
218 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
217 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
219 short_name=short_name_var, admin=False)
218 short_name=short_name_var, admin=False)
220
219
221 def last_change(last_change):
220 def last_change(last_change):
222 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
221 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
223 ts = time.time()
222 ts = time.time()
224 utc_offset = (datetime.datetime.fromtimestamp(ts)
223 utc_offset = (datetime.datetime.fromtimestamp(ts)
225 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
224 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
226 last_change = last_change + datetime.timedelta(seconds=utc_offset)
225 last_change = last_change + datetime.timedelta(seconds=utc_offset)
227
226
228 return _render("last_change", last_change)
227 return _render("last_change", last_change)
229
228
230 def rss_lnk(repo_name):
229 def rss_lnk(repo_name):
231 return _render("rss", repo_name)
230 return _render("rss", repo_name)
232
231
233 def atom_lnk(repo_name):
232 def atom_lnk(repo_name):
234 return _render("atom", repo_name)
233 return _render("atom", repo_name)
235
234
236 def last_rev(repo_name, cs_cache):
235 def last_rev(repo_name, cs_cache):
237 return _render('revision', repo_name, cs_cache.get('revision'),
236 return _render('revision', repo_name, cs_cache.get('revision'),
238 cs_cache.get('raw_id'), cs_cache.get('author'),
237 cs_cache.get('raw_id'), cs_cache.get('author'),
239 cs_cache.get('message'), cs_cache.get('date'))
238 cs_cache.get('message'), cs_cache.get('date'))
240
239
241 def desc(desc):
240 def desc(desc):
242 return _render('repo_desc', desc, c.visual.stylify_metatags)
241 return _render('repo_desc', desc, c.visual.stylify_metatags)
243
242
244 def state(repo_state):
243 def state(repo_state):
245 return _render("repo_state", repo_state)
244 return _render("repo_state", repo_state)
246
245
247 def repo_actions(repo_name):
246 def repo_actions(repo_name):
248 return _render('repo_actions', repo_name, super_user_actions)
247 return _render('repo_actions', repo_name, super_user_actions)
249
248
250 def user_profile(username):
249 def user_profile(username):
251 return _render('user_profile', username)
250 return _render('user_profile', username)
252
251
253 repos_data = []
252 repos_data = []
254 for repo in repo_list:
253 for repo in repo_list:
255 # NOTE(marcink): because we use only raw column we need to load it like that
254 # NOTE(marcink): because we use only raw column we need to load it like that
256 changeset_cache = Repository._load_changeset_cache(
255 changeset_cache = Repository._load_changeset_cache(
257 repo.repo_id, repo._changeset_cache)
256 repo.repo_id, repo._changeset_cache)
258
257
259 row = {
258 row = {
260 "menu": quick_menu(repo.repo_name),
259 "menu": quick_menu(repo.repo_name),
261
260
262 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
261 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
263 repo.private, repo.archived, repo.fork_repo_name),
262 repo.private, repo.archived, repo.fork_repo_name),
264
263
265 "desc": desc(h.escape(repo.description)),
264 "desc": desc(h.escape(repo.description)),
266
265
267 "last_change": last_change(repo.updated_on),
266 "last_change": last_change(repo.updated_on),
268
267
269 "last_changeset": last_rev(repo.repo_name, changeset_cache),
268 "last_changeset": last_rev(repo.repo_name, changeset_cache),
270 "last_changeset_raw": changeset_cache.get('revision'),
269 "last_changeset_raw": changeset_cache.get('revision'),
271
270
272 "owner": user_profile(repo.owner_username),
271 "owner": user_profile(repo.owner_username),
273
272
274 "state": state(repo.repo_state),
273 "state": state(repo.repo_state),
275 "rss": rss_lnk(repo.repo_name),
274 "rss": rss_lnk(repo.repo_name),
276 "atom": atom_lnk(repo.repo_name),
275 "atom": atom_lnk(repo.repo_name),
277 }
276 }
278 if admin:
277 if admin:
279 row.update({
278 row.update({
280 "action": repo_actions(repo.repo_name),
279 "action": repo_actions(repo.repo_name),
281 })
280 })
282 repos_data.append(row)
281 repos_data.append(row)
283
282
284 return repos_data
283 return repos_data
285
284
286 def get_repos_data_table(
285 def get_repos_data_table(
287 self, draw, start, limit,
286 self, draw, start, limit,
288 search_q, order_by, order_dir,
287 search_q, order_by, order_dir,
289 auth_user, repo_group_id):
288 auth_user, repo_group_id):
290 from rhodecode.model.scm import RepoList
289 from rhodecode.model.scm import RepoList
291
290
292 _perms = ['repository.read', 'repository.write', 'repository.admin']
291 _perms = ['repository.read', 'repository.write', 'repository.admin']
293
292
294 repos = Repository.query() \
293 repos = Repository.query() \
295 .filter(Repository.group_id == repo_group_id) \
294 .filter(Repository.group_id == repo_group_id) \
296 .all()
295 .all()
297 auth_repo_list = RepoList(
296 auth_repo_list = RepoList(
298 repos, perm_set=_perms,
297 repos, perm_set=_perms,
299 extra_kwargs=dict(user=auth_user))
298 extra_kwargs=dict(user=auth_user))
300
299
301 allowed_ids = [-1]
300 allowed_ids = [-1]
302 for repo in auth_repo_list:
301 for repo in auth_repo_list:
303 allowed_ids.append(repo.repo_id)
302 allowed_ids.append(repo.repo_id)
304
303
305 repos_data_total_count = Repository.query() \
304 repos_data_total_count = Repository.query() \
306 .filter(Repository.group_id == repo_group_id) \
305 .filter(Repository.group_id == repo_group_id) \
307 .filter(or_(
306 .filter(or_(
308 # generate multiple IN to fix limitation problems
307 # generate multiple IN to fix limitation problems
309 *in_filter_generator(Repository.repo_id, allowed_ids))
308 *in_filter_generator(Repository.repo_id, allowed_ids))
310 ) \
309 ) \
311 .count()
310 .count()
312
311
313 RepoFork = aliased(Repository)
312 RepoFork = aliased(Repository)
314 OwnerUser = aliased(User)
313 OwnerUser = aliased(User)
315 base_q = Session.query(
314 base_q = Session.query(
316 Repository.repo_id,
315 Repository.repo_id,
317 Repository.repo_name,
316 Repository.repo_name,
318 Repository.description,
317 Repository.description,
319 Repository.repo_type,
318 Repository.repo_type,
320 Repository.repo_state,
319 Repository.repo_state,
321 Repository.private,
320 Repository.private,
322 Repository.archived,
321 Repository.archived,
323 Repository.updated_on,
322 Repository.updated_on,
324 Repository._changeset_cache,
323 Repository._changeset_cache,
325 RepoFork.repo_name.label('fork_repo_name'),
324 RepoFork.repo_name.label('fork_repo_name'),
326 OwnerUser.username.label('owner_username'),
325 OwnerUser.username.label('owner_username'),
327 ) \
326 ) \
328 .filter(Repository.group_id == repo_group_id) \
327 .filter(Repository.group_id == repo_group_id) \
329 .filter(or_(
328 .filter(or_(
330 # generate multiple IN to fix limitation problems
329 # generate multiple IN to fix limitation problems
331 *in_filter_generator(Repository.repo_id, allowed_ids))
330 *in_filter_generator(Repository.repo_id, allowed_ids))
332 ) \
331 ) \
333 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
332 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
334 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
333 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
335
334
336 repos_data_total_filtered_count = base_q.count()
335 repos_data_total_filtered_count = base_q.count()
337
336
338 sort_defined = False
337 sort_defined = False
339 if order_by == 'repo_name':
338 if order_by == 'repo_name':
340 sort_col = func.lower(Repository.repo_name)
339 sort_col = func.lower(Repository.repo_name)
341 sort_defined = True
340 sort_defined = True
342 elif order_by == 'user_username':
341 elif order_by == 'user_username':
343 sort_col = User.username
342 sort_col = User.username
344 else:
343 else:
345 sort_col = getattr(Repository, order_by, None)
344 sort_col = getattr(Repository, order_by, None)
346
345
347 if sort_defined or sort_col:
346 if sort_defined or sort_col:
348 if order_dir == 'asc':
347 if order_dir == 'asc':
349 sort_col = sort_col.asc()
348 sort_col = sort_col.asc()
350 else:
349 else:
351 sort_col = sort_col.desc()
350 sort_col = sort_col.desc()
352
351
353 base_q = base_q.order_by(sort_col)
352 base_q = base_q.order_by(sort_col)
354 base_q = base_q.offset(start).limit(limit)
353 base_q = base_q.offset(start).limit(limit)
355
354
356 repos_list = base_q.all()
355 repos_list = base_q.all()
357
356
358 repos_data = RepoModel().get_repos_as_dict(
357 repos_data = RepoModel().get_repos_as_dict(
359 repo_list=repos_list, admin=False)
358 repo_list=repos_list, admin=False)
360
359
361 data = ({
360 data = ({
362 'draw': draw,
361 'draw': draw,
363 'data': repos_data,
362 'data': repos_data,
364 'recordsTotal': repos_data_total_count,
363 'recordsTotal': repos_data_total_count,
365 'recordsFiltered': repos_data_total_filtered_count,
364 'recordsFiltered': repos_data_total_filtered_count,
366 })
365 })
367 return data
366 return data
368
367
369 def _get_defaults(self, repo_name):
368 def _get_defaults(self, repo_name):
370 """
369 """
371 Gets information about repository, and returns a dict for
370 Gets information about repository, and returns a dict for
372 usage in forms
371 usage in forms
373
372
374 :param repo_name:
373 :param repo_name:
375 """
374 """
376
375
377 repo_info = Repository.get_by_repo_name(repo_name)
376 repo_info = Repository.get_by_repo_name(repo_name)
378
377
379 if repo_info is None:
378 if repo_info is None:
380 return None
379 return None
381
380
382 defaults = repo_info.get_dict()
381 defaults = repo_info.get_dict()
383 defaults['repo_name'] = repo_info.just_name
382 defaults['repo_name'] = repo_info.just_name
384
383
385 groups = repo_info.groups_with_parents
384 groups = repo_info.groups_with_parents
386 parent_group = groups[-1] if groups else None
385 parent_group = groups[-1] if groups else None
387
386
388 # we use -1 as this is how in HTML, we mark an empty group
387 # we use -1 as this is how in HTML, we mark an empty group
389 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
388 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
390
389
391 keys_to_process = (
390 keys_to_process = (
392 {'k': 'repo_type', 'strip': False},
391 {'k': 'repo_type', 'strip': False},
393 {'k': 'repo_enable_downloads', 'strip': True},
392 {'k': 'repo_enable_downloads', 'strip': True},
394 {'k': 'repo_description', 'strip': True},
393 {'k': 'repo_description', 'strip': True},
395 {'k': 'repo_enable_locking', 'strip': True},
394 {'k': 'repo_enable_locking', 'strip': True},
396 {'k': 'repo_landing_rev', 'strip': True},
395 {'k': 'repo_landing_rev', 'strip': True},
397 {'k': 'clone_uri', 'strip': False},
396 {'k': 'clone_uri', 'strip': False},
398 {'k': 'push_uri', 'strip': False},
397 {'k': 'push_uri', 'strip': False},
399 {'k': 'repo_private', 'strip': True},
398 {'k': 'repo_private', 'strip': True},
400 {'k': 'repo_enable_statistics', 'strip': True}
399 {'k': 'repo_enable_statistics', 'strip': True}
401 )
400 )
402
401
403 for item in keys_to_process:
402 for item in keys_to_process:
404 attr = item['k']
403 attr = item['k']
405 if item['strip']:
404 if item['strip']:
406 attr = remove_prefix(item['k'], 'repo_')
405 attr = remove_prefix(item['k'], 'repo_')
407
406
408 val = defaults[attr]
407 val = defaults[attr]
409 if item['k'] == 'repo_landing_rev':
408 if item['k'] == 'repo_landing_rev':
410 val = ':'.join(defaults[attr])
409 val = ':'.join(defaults[attr])
411 defaults[item['k']] = val
410 defaults[item['k']] = val
412 if item['k'] == 'clone_uri':
411 if item['k'] == 'clone_uri':
413 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
412 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
414 if item['k'] == 'push_uri':
413 if item['k'] == 'push_uri':
415 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
414 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
416
415
417 # fill owner
416 # fill owner
418 if repo_info.user:
417 if repo_info.user:
419 defaults.update({'user': repo_info.user.username})
418 defaults.update({'user': repo_info.user.username})
420 else:
419 else:
421 replacement_user = User.get_first_super_admin().username
420 replacement_user = User.get_first_super_admin().username
422 defaults.update({'user': replacement_user})
421 defaults.update({'user': replacement_user})
423
422
424 return defaults
423 return defaults
425
424
426 def update(self, repo, **kwargs):
425 def update(self, repo, **kwargs):
427 try:
426 try:
428 cur_repo = self._get_repo(repo)
427 cur_repo = self._get_repo(repo)
429 source_repo_name = cur_repo.repo_name
428 source_repo_name = cur_repo.repo_name
430
429
431 affected_user_ids = []
430 affected_user_ids = []
432 if 'user' in kwargs:
431 if 'user' in kwargs:
433 old_owner_id = cur_repo.user.user_id
432 old_owner_id = cur_repo.user.user_id
434 new_owner = User.get_by_username(kwargs['user'])
433 new_owner = User.get_by_username(kwargs['user'])
435 cur_repo.user = new_owner
434 cur_repo.user = new_owner
436
435
437 if old_owner_id != new_owner.user_id:
436 if old_owner_id != new_owner.user_id:
438 affected_user_ids = [new_owner.user_id, old_owner_id]
437 affected_user_ids = [new_owner.user_id, old_owner_id]
439
438
440 if 'repo_group' in kwargs:
439 if 'repo_group' in kwargs:
441 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
440 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
442 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
441 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
443
442
444 update_keys = [
443 update_keys = [
445 (1, 'repo_description'),
444 (1, 'repo_description'),
446 (1, 'repo_landing_rev'),
445 (1, 'repo_landing_rev'),
447 (1, 'repo_private'),
446 (1, 'repo_private'),
448 (1, 'repo_enable_downloads'),
447 (1, 'repo_enable_downloads'),
449 (1, 'repo_enable_locking'),
448 (1, 'repo_enable_locking'),
450 (1, 'repo_enable_statistics'),
449 (1, 'repo_enable_statistics'),
451 (0, 'clone_uri'),
450 (0, 'clone_uri'),
452 (0, 'push_uri'),
451 (0, 'push_uri'),
453 (0, 'fork_id')
452 (0, 'fork_id')
454 ]
453 ]
455 for strip, k in update_keys:
454 for strip, k in update_keys:
456 if k in kwargs:
455 if k in kwargs:
457 val = kwargs[k]
456 val = kwargs[k]
458 if strip:
457 if strip:
459 k = remove_prefix(k, 'repo_')
458 k = remove_prefix(k, 'repo_')
460
459
461 setattr(cur_repo, k, val)
460 setattr(cur_repo, k, val)
462
461
463 new_name = cur_repo.get_new_name(kwargs['repo_name'])
462 new_name = cur_repo.get_new_name(kwargs['repo_name'])
464 cur_repo.repo_name = new_name
463 cur_repo.repo_name = new_name
465
464
466 # if private flag is set, reset default permission to NONE
465 # if private flag is set, reset default permission to NONE
467 if kwargs.get('repo_private'):
466 if kwargs.get('repo_private'):
468 EMPTY_PERM = 'repository.none'
467 EMPTY_PERM = 'repository.none'
469 RepoModel().grant_user_permission(
468 RepoModel().grant_user_permission(
470 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
469 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
471 )
470 )
472 if kwargs.get('repo_landing_rev'):
471 if kwargs.get('repo_landing_rev'):
473 landing_rev_val = kwargs['repo_landing_rev']
472 landing_rev_val = kwargs['repo_landing_rev']
474 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
473 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
475
474
476 # handle extra fields
475 # handle extra fields
477 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
476 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
478 k = RepositoryField.un_prefix_key(field)
477 k = RepositoryField.un_prefix_key(field)
479 ex_field = RepositoryField.get_by_key_name(
478 ex_field = RepositoryField.get_by_key_name(
480 key=k, repo=cur_repo)
479 key=k, repo=cur_repo)
481 if ex_field:
480 if ex_field:
482 ex_field.field_value = kwargs[field]
481 ex_field.field_value = kwargs[field]
483 self.sa.add(ex_field)
482 self.sa.add(ex_field)
484
483
485 self.sa.add(cur_repo)
484 self.sa.add(cur_repo)
486
485
487 if source_repo_name != new_name:
486 if source_repo_name != new_name:
488 # rename repository
487 # rename repository
489 self._rename_filesystem_repo(
488 self._rename_filesystem_repo(
490 old=source_repo_name, new=new_name)
489 old=source_repo_name, new=new_name)
491
490
492 if affected_user_ids:
491 if affected_user_ids:
493 PermissionModel().trigger_permission_flush(affected_user_ids)
492 PermissionModel().trigger_permission_flush(affected_user_ids)
494
493
495 return cur_repo
494 return cur_repo
496 except Exception:
495 except Exception:
497 log.error(traceback.format_exc())
496 log.error(traceback.format_exc())
498 raise
497 raise
499
498
500 def _create_repo(self, repo_name, repo_type, description, owner,
499 def _create_repo(self, repo_name, repo_type, description, owner,
501 private=False, clone_uri=None, repo_group=None,
500 private=False, clone_uri=None, repo_group=None,
502 landing_rev=None, fork_of=None,
501 landing_rev=None, fork_of=None,
503 copy_fork_permissions=False, enable_statistics=False,
502 copy_fork_permissions=False, enable_statistics=False,
504 enable_locking=False, enable_downloads=False,
503 enable_locking=False, enable_downloads=False,
505 copy_group_permissions=False,
504 copy_group_permissions=False,
506 state=Repository.STATE_PENDING):
505 state=Repository.STATE_PENDING):
507 """
506 """
508 Create repository inside database with PENDING state, this should be
507 Create repository inside database with PENDING state, this should be
509 only executed by create() repo. With exception of importing existing
508 only executed by create() repo. With exception of importing existing
510 repos
509 repos
511 """
510 """
512 from rhodecode.model.scm import ScmModel
511 from rhodecode.model.scm import ScmModel
513
512
514 owner = self._get_user(owner)
513 owner = self._get_user(owner)
515 fork_of = self._get_repo(fork_of)
514 fork_of = self._get_repo(fork_of)
516 repo_group = self._get_repo_group(safe_int(repo_group))
515 repo_group = self._get_repo_group(safe_int(repo_group))
517 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
516 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
518 landing_rev = landing_rev or default_landing_ref
517 landing_rev = landing_rev or default_landing_ref
519
518
520 try:
519 try:
521 repo_name = safe_str(repo_name)
520 repo_name = safe_str(repo_name)
522 description = safe_str(description)
521 description = safe_str(description)
523 # repo name is just a name of repository
522 # repo name is just a name of repository
524 # while repo_name_full is a full qualified name that is combined
523 # while repo_name_full is a full qualified name that is combined
525 # with name and path of group
524 # with name and path of group
526 repo_name_full = repo_name
525 repo_name_full = repo_name
527 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
526 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
528
527
529 new_repo = Repository()
528 new_repo = Repository()
530 new_repo.repo_state = state
529 new_repo.repo_state = state
531 new_repo.enable_statistics = False
530 new_repo.enable_statistics = False
532 new_repo.repo_name = repo_name_full
531 new_repo.repo_name = repo_name_full
533 new_repo.repo_type = repo_type
532 new_repo.repo_type = repo_type
534 new_repo.user = owner
533 new_repo.user = owner
535 new_repo.group = repo_group
534 new_repo.group = repo_group
536 new_repo.description = description or repo_name
535 new_repo.description = description or repo_name
537 new_repo.private = private
536 new_repo.private = private
538 new_repo.archived = False
537 new_repo.archived = False
539 new_repo.clone_uri = clone_uri
538 new_repo.clone_uri = clone_uri
540 new_repo.landing_rev = landing_rev
539 new_repo.landing_rev = landing_rev
541
540
542 new_repo.enable_statistics = enable_statistics
541 new_repo.enable_statistics = enable_statistics
543 new_repo.enable_locking = enable_locking
542 new_repo.enable_locking = enable_locking
544 new_repo.enable_downloads = enable_downloads
543 new_repo.enable_downloads = enable_downloads
545
544
546 if repo_group:
545 if repo_group:
547 new_repo.enable_locking = repo_group.enable_locking
546 new_repo.enable_locking = repo_group.enable_locking
548
547
549 if fork_of:
548 if fork_of:
550 parent_repo = fork_of
549 parent_repo = fork_of
551 new_repo.fork = parent_repo
550 new_repo.fork = parent_repo
552
551
553 events.trigger(events.RepoPreCreateEvent(new_repo))
552 events.trigger(events.RepoPreCreateEvent(new_repo))
554
553
555 self.sa.add(new_repo)
554 self.sa.add(new_repo)
556
555
557 EMPTY_PERM = 'repository.none'
556 EMPTY_PERM = 'repository.none'
558 if fork_of and copy_fork_permissions:
557 if fork_of and copy_fork_permissions:
559 repo = fork_of
558 repo = fork_of
560 user_perms = UserRepoToPerm.query() \
559 user_perms = UserRepoToPerm.query() \
561 .filter(UserRepoToPerm.repository == repo).all()
560 .filter(UserRepoToPerm.repository == repo).all()
562 group_perms = UserGroupRepoToPerm.query() \
561 group_perms = UserGroupRepoToPerm.query() \
563 .filter(UserGroupRepoToPerm.repository == repo).all()
562 .filter(UserGroupRepoToPerm.repository == repo).all()
564
563
565 for perm in user_perms:
564 for perm in user_perms:
566 UserRepoToPerm.create(
565 UserRepoToPerm.create(
567 perm.user, new_repo, perm.permission)
566 perm.user, new_repo, perm.permission)
568
567
569 for perm in group_perms:
568 for perm in group_perms:
570 UserGroupRepoToPerm.create(
569 UserGroupRepoToPerm.create(
571 perm.users_group, new_repo, perm.permission)
570 perm.users_group, new_repo, perm.permission)
572 # in case we copy permissions and also set this repo to private
571 # in case we copy permissions and also set this repo to private
573 # override the default user permission to make it a private repo
572 # override the default user permission to make it a private repo
574 if private:
573 if private:
575 RepoModel(self.sa).grant_user_permission(
574 RepoModel(self.sa).grant_user_permission(
576 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
575 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
577
576
578 elif repo_group and copy_group_permissions:
577 elif repo_group and copy_group_permissions:
579 user_perms = UserRepoGroupToPerm.query() \
578 user_perms = UserRepoGroupToPerm.query() \
580 .filter(UserRepoGroupToPerm.group == repo_group).all()
579 .filter(UserRepoGroupToPerm.group == repo_group).all()
581
580
582 group_perms = UserGroupRepoGroupToPerm.query() \
581 group_perms = UserGroupRepoGroupToPerm.query() \
583 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
582 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
584
583
585 for perm in user_perms:
584 for perm in user_perms:
586 perm_name = perm.permission.permission_name.replace(
585 perm_name = perm.permission.permission_name.replace(
587 'group.', 'repository.')
586 'group.', 'repository.')
588 perm_obj = Permission.get_by_key(perm_name)
587 perm_obj = Permission.get_by_key(perm_name)
589 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
588 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
590
589
591 for perm in group_perms:
590 for perm in group_perms:
592 perm_name = perm.permission.permission_name.replace(
591 perm_name = perm.permission.permission_name.replace(
593 'group.', 'repository.')
592 'group.', 'repository.')
594 perm_obj = Permission.get_by_key(perm_name)
593 perm_obj = Permission.get_by_key(perm_name)
595 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
594 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
596
595
597 if private:
596 if private:
598 RepoModel(self.sa).grant_user_permission(
597 RepoModel(self.sa).grant_user_permission(
599 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
598 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
600
599
601 else:
600 else:
602 perm_obj = self._create_default_perms(new_repo, private)
601 perm_obj = self._create_default_perms(new_repo, private)
603 self.sa.add(perm_obj)
602 self.sa.add(perm_obj)
604
603
605 # now automatically start following this repository as owner
604 # now automatically start following this repository as owner
606 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
605 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
607
606
608 # we need to flush here, in order to check if database won't
607 # we need to flush here, in order to check if database won't
609 # throw any exceptions, create filesystem dirs at the very end
608 # throw any exceptions, create filesystem dirs at the very end
610 self.sa.flush()
609 self.sa.flush()
611 events.trigger(events.RepoCreateEvent(new_repo))
610 events.trigger(events.RepoCreateEvent(new_repo))
612 return new_repo
611 return new_repo
613
612
614 except Exception:
613 except Exception:
615 log.error(traceback.format_exc())
614 log.error(traceback.format_exc())
616 raise
615 raise
617
616
618 def create(self, form_data, cur_user):
617 def create(self, form_data, cur_user):
619 """
618 """
620 Create repository using celery tasks
619 Create repository using celery tasks
621
620
622 :param form_data:
621 :param form_data:
623 :param cur_user:
622 :param cur_user:
624 """
623 """
625 from rhodecode.lib.celerylib import tasks, run_task
624 from rhodecode.lib.celerylib import tasks, run_task
626 return run_task(tasks.create_repo, form_data, cur_user)
625 return run_task(tasks.create_repo, form_data, cur_user)
627
626
628 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
627 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
629 perm_deletions=None, check_perms=True,
628 perm_deletions=None, check_perms=True,
630 cur_user=None):
629 cur_user=None):
631 if not perm_additions:
630 if not perm_additions:
632 perm_additions = []
631 perm_additions = []
633 if not perm_updates:
632 if not perm_updates:
634 perm_updates = []
633 perm_updates = []
635 if not perm_deletions:
634 if not perm_deletions:
636 perm_deletions = []
635 perm_deletions = []
637
636
638 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
637 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
639
638
640 changes = {
639 changes = {
641 'added': [],
640 'added': [],
642 'updated': [],
641 'updated': [],
643 'deleted': [],
642 'deleted': [],
644 'default_user_changed': None
643 'default_user_changed': None
645 }
644 }
646
645
647 repo = self._get_repo(repo)
646 repo = self._get_repo(repo)
648
647
649 # update permissions
648 # update permissions
650 for member_id, perm, member_type in perm_updates:
649 for member_id, perm, member_type in perm_updates:
651 member_id = int(member_id)
650 member_id = int(member_id)
652 if member_type == 'user':
651 if member_type == 'user':
653 member_name = User.get(member_id).username
652 member_name = User.get(member_id).username
654 if member_name == User.DEFAULT_USER:
653 if member_name == User.DEFAULT_USER:
655 # NOTE(dan): detect if we changed permissions for default user
654 # NOTE(dan): detect if we changed permissions for default user
656 perm_obj = self.sa.query(UserRepoToPerm) \
655 perm_obj = self.sa.query(UserRepoToPerm) \
657 .filter(UserRepoToPerm.user_id == member_id) \
656 .filter(UserRepoToPerm.user_id == member_id) \
658 .filter(UserRepoToPerm.repository == repo) \
657 .filter(UserRepoToPerm.repository == repo) \
659 .scalar()
658 .scalar()
660 if perm_obj and perm_obj.permission.permission_name != perm:
659 if perm_obj and perm_obj.permission.permission_name != perm:
661 changes['default_user_changed'] = True
660 changes['default_user_changed'] = True
662
661
663 # this updates also current one if found
662 # this updates also current one if found
664 self.grant_user_permission(
663 self.grant_user_permission(
665 repo=repo, user=member_id, perm=perm)
664 repo=repo, user=member_id, perm=perm)
666 elif member_type == 'user_group':
665 elif member_type == 'user_group':
667 # check if we have permissions to alter this usergroup
666 # check if we have permissions to alter this usergroup
668 member_name = UserGroup.get(member_id).users_group_name
667 member_name = UserGroup.get(member_id).users_group_name
669 if not check_perms or HasUserGroupPermissionAny(
668 if not check_perms or HasUserGroupPermissionAny(
670 *req_perms)(member_name, user=cur_user):
669 *req_perms)(member_name, user=cur_user):
671 self.grant_user_group_permission(
670 self.grant_user_group_permission(
672 repo=repo, group_name=member_id, perm=perm)
671 repo=repo, group_name=member_id, perm=perm)
673 else:
672 else:
674 raise ValueError("member_type must be 'user' or 'user_group' "
673 raise ValueError("member_type must be 'user' or 'user_group' "
675 "got {} instead".format(member_type))
674 "got {} instead".format(member_type))
676 changes['updated'].append({'type': member_type, 'id': member_id,
675 changes['updated'].append({'type': member_type, 'id': member_id,
677 'name': member_name, 'new_perm': perm})
676 'name': member_name, 'new_perm': perm})
678
677
679 # set new permissions
678 # set new permissions
680 for member_id, perm, member_type in perm_additions:
679 for member_id, perm, member_type in perm_additions:
681 member_id = int(member_id)
680 member_id = int(member_id)
682 if member_type == 'user':
681 if member_type == 'user':
683 member_name = User.get(member_id).username
682 member_name = User.get(member_id).username
684 self.grant_user_permission(
683 self.grant_user_permission(
685 repo=repo, user=member_id, perm=perm)
684 repo=repo, user=member_id, perm=perm)
686 elif member_type == 'user_group':
685 elif member_type == 'user_group':
687 # check if we have permissions to alter this usergroup
686 # check if we have permissions to alter this usergroup
688 member_name = UserGroup.get(member_id).users_group_name
687 member_name = UserGroup.get(member_id).users_group_name
689 if not check_perms or HasUserGroupPermissionAny(
688 if not check_perms or HasUserGroupPermissionAny(
690 *req_perms)(member_name, user=cur_user):
689 *req_perms)(member_name, user=cur_user):
691 self.grant_user_group_permission(
690 self.grant_user_group_permission(
692 repo=repo, group_name=member_id, perm=perm)
691 repo=repo, group_name=member_id, perm=perm)
693 else:
692 else:
694 raise ValueError("member_type must be 'user' or 'user_group' "
693 raise ValueError("member_type must be 'user' or 'user_group' "
695 "got {} instead".format(member_type))
694 "got {} instead".format(member_type))
696
695
697 changes['added'].append({'type': member_type, 'id': member_id,
696 changes['added'].append({'type': member_type, 'id': member_id,
698 'name': member_name, 'new_perm': perm})
697 'name': member_name, 'new_perm': perm})
699 # delete permissions
698 # delete permissions
700 for member_id, perm, member_type in perm_deletions:
699 for member_id, perm, member_type in perm_deletions:
701 member_id = int(member_id)
700 member_id = int(member_id)
702 if member_type == 'user':
701 if member_type == 'user':
703 member_name = User.get(member_id).username
702 member_name = User.get(member_id).username
704 self.revoke_user_permission(repo=repo, user=member_id)
703 self.revoke_user_permission(repo=repo, user=member_id)
705 elif member_type == 'user_group':
704 elif member_type == 'user_group':
706 # check if we have permissions to alter this usergroup
705 # check if we have permissions to alter this usergroup
707 member_name = UserGroup.get(member_id).users_group_name
706 member_name = UserGroup.get(member_id).users_group_name
708 if not check_perms or HasUserGroupPermissionAny(
707 if not check_perms or HasUserGroupPermissionAny(
709 *req_perms)(member_name, user=cur_user):
708 *req_perms)(member_name, user=cur_user):
710 self.revoke_user_group_permission(
709 self.revoke_user_group_permission(
711 repo=repo, group_name=member_id)
710 repo=repo, group_name=member_id)
712 else:
711 else:
713 raise ValueError("member_type must be 'user' or 'user_group' "
712 raise ValueError("member_type must be 'user' or 'user_group' "
714 "got {} instead".format(member_type))
713 "got {} instead".format(member_type))
715
714
716 changes['deleted'].append({'type': member_type, 'id': member_id,
715 changes['deleted'].append({'type': member_type, 'id': member_id,
717 'name': member_name, 'new_perm': perm})
716 'name': member_name, 'new_perm': perm})
718 return changes
717 return changes
719
718
720 def create_fork(self, form_data, cur_user):
719 def create_fork(self, form_data, cur_user):
721 """
720 """
722 Simple wrapper into executing celery task for fork creation
721 Simple wrapper into executing celery task for fork creation
723
722
724 :param form_data:
723 :param form_data:
725 :param cur_user:
724 :param cur_user:
726 """
725 """
727 from rhodecode.lib.celerylib import tasks, run_task
726 from rhodecode.lib.celerylib import tasks, run_task
728 return run_task(tasks.create_repo_fork, form_data, cur_user)
727 return run_task(tasks.create_repo_fork, form_data, cur_user)
729
728
730 def archive(self, repo):
729 def archive(self, repo):
731 """
730 """
732 Archive given repository. Set archive flag.
731 Archive given repository. Set archive flag.
733
732
734 :param repo:
733 :param repo:
735 """
734 """
736 repo = self._get_repo(repo)
735 repo = self._get_repo(repo)
737 if repo:
736 if repo:
738
737
739 try:
738 try:
740 repo.archived = True
739 repo.archived = True
741 self.sa.add(repo)
740 self.sa.add(repo)
742 self.sa.commit()
741 self.sa.commit()
743 except Exception:
742 except Exception:
744 log.error(traceback.format_exc())
743 log.error(traceback.format_exc())
745 raise
744 raise
746
745
747 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
746 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
748 """
747 """
749 Delete given repository, forks parameter defines what do do with
748 Delete given repository, forks parameter defines what do do with
750 attached forks. Throws AttachedForksError if deleted repo has attached
749 attached forks. Throws AttachedForksError if deleted repo has attached
751 forks
750 forks
752
751
753 :param repo:
752 :param repo:
754 :param forks: str 'delete' or 'detach'
753 :param forks: str 'delete' or 'detach'
755 :param pull_requests: str 'delete' or None
754 :param pull_requests: str 'delete' or None
756 :param fs_remove: remove(archive) repo from filesystem
755 :param fs_remove: remove(archive) repo from filesystem
757 """
756 """
758 if not cur_user:
757 if not cur_user:
759 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
758 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
760 repo = self._get_repo(repo)
759 repo = self._get_repo(repo)
761 if repo:
760 if repo:
762 if forks == 'detach':
761 if forks == 'detach':
763 for r in repo.forks:
762 for r in repo.forks:
764 r.fork = None
763 r.fork = None
765 self.sa.add(r)
764 self.sa.add(r)
766 elif forks == 'delete':
765 elif forks == 'delete':
767 for r in repo.forks:
766 for r in repo.forks:
768 self.delete(r, forks='delete')
767 self.delete(r, forks='delete')
769 elif [f for f in repo.forks]:
768 elif [f for f in repo.forks]:
770 raise AttachedForksError()
769 raise AttachedForksError()
771
770
772 # check for pull requests
771 # check for pull requests
773 pr_sources = repo.pull_requests_source
772 pr_sources = repo.pull_requests_source
774 pr_targets = repo.pull_requests_target
773 pr_targets = repo.pull_requests_target
775 if pull_requests != 'delete' and (pr_sources or pr_targets):
774 if pull_requests != 'delete' and (pr_sources or pr_targets):
776 raise AttachedPullRequestsError()
775 raise AttachedPullRequestsError()
777
776
778 old_repo_dict = repo.get_dict()
777 old_repo_dict = repo.get_dict()
779 events.trigger(events.RepoPreDeleteEvent(repo))
778 events.trigger(events.RepoPreDeleteEvent(repo))
780 try:
779 try:
781 self.sa.delete(repo)
780 self.sa.delete(repo)
782 if fs_remove:
781 if fs_remove:
783 self._delete_filesystem_repo(repo)
782 self._delete_filesystem_repo(repo)
784 else:
783 else:
785 log.debug('skipping removal from filesystem')
784 log.debug('skipping removal from filesystem')
786 old_repo_dict.update({
785 old_repo_dict.update({
787 'deleted_by': cur_user,
786 'deleted_by': cur_user,
788 'deleted_on': time.time(),
787 'deleted_on': time.time(),
789 })
788 })
790 hooks_base.delete_repository(**old_repo_dict)
789 hooks_base.delete_repository(**old_repo_dict)
791 events.trigger(events.RepoDeleteEvent(repo))
790 events.trigger(events.RepoDeleteEvent(repo))
792 except Exception:
791 except Exception:
793 log.error(traceback.format_exc())
792 log.error(traceback.format_exc())
794 raise
793 raise
795
794
796 def grant_user_permission(self, repo, user, perm):
795 def grant_user_permission(self, repo, user, perm):
797 """
796 """
798 Grant permission for user on given repository, or update existing one
797 Grant permission for user on given repository, or update existing one
799 if found
798 if found
800
799
801 :param repo: Instance of Repository, repository_id, or repository name
800 :param repo: Instance of Repository, repository_id, or repository name
802 :param user: Instance of User, user_id or username
801 :param user: Instance of User, user_id or username
803 :param perm: Instance of Permission, or permission_name
802 :param perm: Instance of Permission, or permission_name
804 """
803 """
805 user = self._get_user(user)
804 user = self._get_user(user)
806 repo = self._get_repo(repo)
805 repo = self._get_repo(repo)
807 permission = self._get_perm(perm)
806 permission = self._get_perm(perm)
808
807
809 # check if we have that permission already
808 # check if we have that permission already
810 obj = self.sa.query(UserRepoToPerm) \
809 obj = self.sa.query(UserRepoToPerm) \
811 .filter(UserRepoToPerm.user == user) \
810 .filter(UserRepoToPerm.user == user) \
812 .filter(UserRepoToPerm.repository == repo) \
811 .filter(UserRepoToPerm.repository == repo) \
813 .scalar()
812 .scalar()
814 if obj is None:
813 if obj is None:
815 # create new !
814 # create new !
816 obj = UserRepoToPerm()
815 obj = UserRepoToPerm()
817 obj.repository = repo
816 obj.repository = repo
818 obj.user = user
817 obj.user = user
819 obj.permission = permission
818 obj.permission = permission
820 self.sa.add(obj)
819 self.sa.add(obj)
821 log.debug('Granted perm %s to %s on %s', perm, user, repo)
820 log.debug('Granted perm %s to %s on %s', perm, user, repo)
822 action_logger_generic(
821 action_logger_generic(
823 'granted permission: {} to user: {} on repo: {}'.format(
822 'granted permission: {} to user: {} on repo: {}'.format(
824 perm, user, repo), namespace='security.repo')
823 perm, user, repo), namespace='security.repo')
825 return obj
824 return obj
826
825
827 def revoke_user_permission(self, repo, user):
826 def revoke_user_permission(self, repo, user):
828 """
827 """
829 Revoke permission for user on given repository
828 Revoke permission for user on given repository
830
829
831 :param repo: Instance of Repository, repository_id, or repository name
830 :param repo: Instance of Repository, repository_id, or repository name
832 :param user: Instance of User, user_id or username
831 :param user: Instance of User, user_id or username
833 """
832 """
834
833
835 user = self._get_user(user)
834 user = self._get_user(user)
836 repo = self._get_repo(repo)
835 repo = self._get_repo(repo)
837
836
838 obj = self.sa.query(UserRepoToPerm) \
837 obj = self.sa.query(UserRepoToPerm) \
839 .filter(UserRepoToPerm.repository == repo) \
838 .filter(UserRepoToPerm.repository == repo) \
840 .filter(UserRepoToPerm.user == user) \
839 .filter(UserRepoToPerm.user == user) \
841 .scalar()
840 .scalar()
842 if obj:
841 if obj:
843 self.sa.delete(obj)
842 self.sa.delete(obj)
844 log.debug('Revoked perm on %s on %s', repo, user)
843 log.debug('Revoked perm on %s on %s', repo, user)
845 action_logger_generic(
844 action_logger_generic(
846 'revoked permission from user: {} on repo: {}'.format(
845 'revoked permission from user: {} on repo: {}'.format(
847 user, repo), namespace='security.repo')
846 user, repo), namespace='security.repo')
848
847
849 def grant_user_group_permission(self, repo, group_name, perm):
848 def grant_user_group_permission(self, repo, group_name, perm):
850 """
849 """
851 Grant permission for user group on given repository, or update
850 Grant permission for user group on given repository, or update
852 existing one if found
851 existing one if found
853
852
854 :param repo: Instance of Repository, repository_id, or repository name
853 :param repo: Instance of Repository, repository_id, or repository name
855 :param group_name: Instance of UserGroup, users_group_id,
854 :param group_name: Instance of UserGroup, users_group_id,
856 or user group name
855 or user group name
857 :param perm: Instance of Permission, or permission_name
856 :param perm: Instance of Permission, or permission_name
858 """
857 """
859 repo = self._get_repo(repo)
858 repo = self._get_repo(repo)
860 group_name = self._get_user_group(group_name)
859 group_name = self._get_user_group(group_name)
861 permission = self._get_perm(perm)
860 permission = self._get_perm(perm)
862
861
863 # check if we have that permission already
862 # check if we have that permission already
864 obj = self.sa.query(UserGroupRepoToPerm) \
863 obj = self.sa.query(UserGroupRepoToPerm) \
865 .filter(UserGroupRepoToPerm.users_group == group_name) \
864 .filter(UserGroupRepoToPerm.users_group == group_name) \
866 .filter(UserGroupRepoToPerm.repository == repo) \
865 .filter(UserGroupRepoToPerm.repository == repo) \
867 .scalar()
866 .scalar()
868
867
869 if obj is None:
868 if obj is None:
870 # create new
869 # create new
871 obj = UserGroupRepoToPerm()
870 obj = UserGroupRepoToPerm()
872
871
873 obj.repository = repo
872 obj.repository = repo
874 obj.users_group = group_name
873 obj.users_group = group_name
875 obj.permission = permission
874 obj.permission = permission
876 self.sa.add(obj)
875 self.sa.add(obj)
877 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
876 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
878 action_logger_generic(
877 action_logger_generic(
879 'granted permission: {} to usergroup: {} on repo: {}'.format(
878 'granted permission: {} to usergroup: {} on repo: {}'.format(
880 perm, group_name, repo), namespace='security.repo')
879 perm, group_name, repo), namespace='security.repo')
881
880
882 return obj
881 return obj
883
882
884 def revoke_user_group_permission(self, repo, group_name):
883 def revoke_user_group_permission(self, repo, group_name):
885 """
884 """
886 Revoke permission for user group on given repository
885 Revoke permission for user group on given repository
887
886
888 :param repo: Instance of Repository, repository_id, or repository name
887 :param repo: Instance of Repository, repository_id, or repository name
889 :param group_name: Instance of UserGroup, users_group_id,
888 :param group_name: Instance of UserGroup, users_group_id,
890 or user group name
889 or user group name
891 """
890 """
892 repo = self._get_repo(repo)
891 repo = self._get_repo(repo)
893 group_name = self._get_user_group(group_name)
892 group_name = self._get_user_group(group_name)
894
893
895 obj = self.sa.query(UserGroupRepoToPerm) \
894 obj = self.sa.query(UserGroupRepoToPerm) \
896 .filter(UserGroupRepoToPerm.repository == repo) \
895 .filter(UserGroupRepoToPerm.repository == repo) \
897 .filter(UserGroupRepoToPerm.users_group == group_name) \
896 .filter(UserGroupRepoToPerm.users_group == group_name) \
898 .scalar()
897 .scalar()
899 if obj:
898 if obj:
900 self.sa.delete(obj)
899 self.sa.delete(obj)
901 log.debug('Revoked perm to %s on %s', repo, group_name)
900 log.debug('Revoked perm to %s on %s', repo, group_name)
902 action_logger_generic(
901 action_logger_generic(
903 'revoked permission from usergroup: {} on repo: {}'.format(
902 'revoked permission from usergroup: {} on repo: {}'.format(
904 group_name, repo), namespace='security.repo')
903 group_name, repo), namespace='security.repo')
905
904
906 def delete_stats(self, repo_name):
905 def delete_stats(self, repo_name):
907 """
906 """
908 removes stats for given repo
907 removes stats for given repo
909
908
910 :param repo_name:
909 :param repo_name:
911 """
910 """
912 repo = self._get_repo(repo_name)
911 repo = self._get_repo(repo_name)
913 try:
912 try:
914 obj = self.sa.query(Statistics) \
913 obj = self.sa.query(Statistics) \
915 .filter(Statistics.repository == repo).scalar()
914 .filter(Statistics.repository == repo).scalar()
916 if obj:
915 if obj:
917 self.sa.delete(obj)
916 self.sa.delete(obj)
918 except Exception:
917 except Exception:
919 log.error(traceback.format_exc())
918 log.error(traceback.format_exc())
920 raise
919 raise
921
920
922 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
921 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
923 field_type='str', field_desc=''):
922 field_type='str', field_desc=''):
924
923
925 repo = self._get_repo(repo_name)
924 repo = self._get_repo(repo_name)
926
925
927 new_field = RepositoryField()
926 new_field = RepositoryField()
928 new_field.repository = repo
927 new_field.repository = repo
929 new_field.field_key = field_key
928 new_field.field_key = field_key
930 new_field.field_type = field_type # python type
929 new_field.field_type = field_type # python type
931 new_field.field_value = field_value
930 new_field.field_value = field_value
932 new_field.field_desc = field_desc
931 new_field.field_desc = field_desc
933 new_field.field_label = field_label
932 new_field.field_label = field_label
934 self.sa.add(new_field)
933 self.sa.add(new_field)
935 return new_field
934 return new_field
936
935
937 def delete_repo_field(self, repo_name, field_key):
936 def delete_repo_field(self, repo_name, field_key):
938 repo = self._get_repo(repo_name)
937 repo = self._get_repo(repo_name)
939 field = RepositoryField.get_by_key_name(field_key, repo)
938 field = RepositoryField.get_by_key_name(field_key, repo)
940 if field:
939 if field:
941 self.sa.delete(field)
940 self.sa.delete(field)
942
941
943 def set_landing_rev(self, repo, landing_rev_name):
942 def set_landing_rev(self, repo, landing_rev_name):
944 if landing_rev_name.startswith('branch:'):
943 if landing_rev_name.startswith('branch:'):
945 landing_rev_name = landing_rev_name.split('branch:')[-1]
944 landing_rev_name = landing_rev_name.split('branch:')[-1]
946 scm_instance = repo.scm_instance()
945 scm_instance = repo.scm_instance()
947 if scm_instance:
946 if scm_instance:
948 return scm_instance._remote.set_head_ref(landing_rev_name)
947 return scm_instance._remote.set_head_ref(landing_rev_name)
949
948
950 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
949 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
951 clone_uri=None, repo_store_location=None,
950 clone_uri=None, repo_store_location=None,
952 use_global_config=False, install_hooks=True):
951 use_global_config=False, install_hooks=True):
953 """
952 """
954 makes repository on filesystem. It's group aware means it'll create
953 makes repository on filesystem. It's group aware means it'll create
955 a repository within a group, and alter the paths accordingly of
954 a repository within a group, and alter the paths accordingly of
956 group location
955 group location
957
956
958 :param repo_name:
957 :param repo_name:
959 :param alias:
958 :param alias:
960 :param parent:
959 :param parent:
961 :param clone_uri:
960 :param clone_uri:
962 :param repo_store_location:
961 :param repo_store_location:
963 """
962 """
964 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
963 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
965 from rhodecode.model.scm import ScmModel
964 from rhodecode.model.scm import ScmModel
966
965
967 if Repository.NAME_SEP in repo_name:
966 if Repository.NAME_SEP in repo_name:
968 raise ValueError(
967 raise ValueError(
969 'repo_name must not contain groups got `%s`' % repo_name)
968 'repo_name must not contain groups got `%s`' % repo_name)
970
969
971 if isinstance(repo_group, RepoGroup):
970 if isinstance(repo_group, RepoGroup):
972 new_parent_path = os.sep.join(repo_group.full_path_splitted)
971 new_parent_path = os.sep.join(repo_group.full_path_splitted)
973 else:
972 else:
974 new_parent_path = repo_group or ''
973 new_parent_path = repo_group or ''
975
974
976 if repo_store_location:
975 if repo_store_location:
977 _paths = [repo_store_location]
976 _paths = [repo_store_location]
978 else:
977 else:
979 _paths = [self.repos_path, new_parent_path, repo_name]
978 _paths = [self.repos_path, new_parent_path, repo_name]
980 # we need to make it str for mercurial
979 # we need to make it str for mercurial
981 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
980 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
982
981
983 # check if this path is not a repository
982 # check if this path is not a repository
984 if is_valid_repo(repo_path, self.repos_path):
983 if is_valid_repo(repo_path, self.repos_path):
985 raise Exception(f'This path {repo_path} is a valid repository')
984 raise Exception(f'This path {repo_path} is a valid repository')
986
985
987 # check if this path is a group
986 # check if this path is a group
988 if is_valid_repo_group(repo_path, self.repos_path):
987 if is_valid_repo_group(repo_path, self.repos_path):
989 raise Exception(f'This path {repo_path} is a valid group')
988 raise Exception(f'This path {repo_path} is a valid group')
990
989
991 log.info('creating repo %s in %s from url: `%s`',
990 log.info('creating repo %s in %s from url: `%s`',
992 repo_name, safe_str(repo_path),
991 repo_name, safe_str(repo_path),
993 obfuscate_url_pw(clone_uri))
992 obfuscate_url_pw(clone_uri))
994
993
995 backend = get_backend(repo_type)
994 backend = get_backend(repo_type)
996
995
997 config_repo = None if use_global_config else repo_name
996 config_repo = None if use_global_config else repo_name
998 if config_repo and new_parent_path:
997 if config_repo and new_parent_path:
999 config_repo = Repository.NAME_SEP.join(
998 config_repo = Repository.NAME_SEP.join(
1000 (new_parent_path, config_repo))
999 (new_parent_path, config_repo))
1001 config = make_db_config(clear_session=False, repo=config_repo)
1000 config = make_db_config(clear_session=False, repo=config_repo)
1002 config.set('extensions', 'largefiles', '')
1001 config.set('extensions', 'largefiles', '')
1003
1002
1004 # patch and reset hooks section of UI config to not run any
1003 # patch and reset hooks section of UI config to not run any
1005 # hooks on creating remote repo
1004 # hooks on creating remote repo
1006 config.clear_section('hooks')
1005 config.clear_section('hooks')
1007
1006
1008 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1007 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1009 if repo_type == 'git':
1008 if repo_type == 'git':
1010 repo = backend(
1009 repo = backend(
1011 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1010 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1012 with_wire={"cache": False})
1011 with_wire={"cache": False})
1013 else:
1012 else:
1014 repo = backend(
1013 repo = backend(
1015 repo_path, config=config, create=True, src_url=clone_uri,
1014 repo_path, config=config, create=True, src_url=clone_uri,
1016 with_wire={"cache": False})
1015 with_wire={"cache": False})
1017
1016
1018 if install_hooks:
1017 if install_hooks:
1019 repo.install_hooks()
1018 repo.install_hooks()
1020
1019
1021 log.debug('Created repo %s with %s backend',
1020 log.debug('Created repo %s with %s backend',
1022 safe_str(repo_name), safe_str(repo_type))
1021 safe_str(repo_name), safe_str(repo_type))
1023 return repo
1022 return repo
1024
1023
1025 def _rename_filesystem_repo(self, old, new):
1024 def _rename_filesystem_repo(self, old, new):
1026 """
1025 """
1027 renames repository on filesystem
1026 renames repository on filesystem
1028
1027
1029 :param old: old name
1028 :param old: old name
1030 :param new: new name
1029 :param new: new name
1031 """
1030 """
1032 log.info('renaming repo from %s to %s', old, new)
1031 log.info('renaming repo from %s to %s', old, new)
1033
1032
1034 old_path = os.path.join(self.repos_path, old)
1033 old_path = os.path.join(self.repos_path, old)
1035 new_path = os.path.join(self.repos_path, new)
1034 new_path = os.path.join(self.repos_path, new)
1036 if os.path.isdir(new_path):
1035 if os.path.isdir(new_path):
1037 raise Exception(
1036 raise Exception(
1038 'Was trying to rename to already existing dir %s' % new_path
1037 'Was trying to rename to already existing dir %s' % new_path
1039 )
1038 )
1040 shutil.move(old_path, new_path)
1039 shutil.move(old_path, new_path)
1041
1040
1042 def _delete_filesystem_repo(self, repo):
1041 def _delete_filesystem_repo(self, repo):
1043 """
1042 """
1044 removes repo from filesystem, the removal is actually made by
1043 removes repo from filesystem, the removal is actually made by
1045 added rm__ prefix into dir, and rename internal .hg/.git dirs so this
1044 added rm__ prefix into dir, and rename internal .hg/.git dirs so this
1046 repository is no longer valid for rhodecode, can be undeleted later on
1045 repository is no longer valid for rhodecode, can be undeleted later on
1047 by reverting the renames on this repository
1046 by reverting the renames on this repository
1048
1047
1049 :param repo: repo object
1048 :param repo: repo object
1050 """
1049 """
1051 rm_path = os.path.join(self.repos_path, repo.repo_name)
1050 rm_path = os.path.join(self.repos_path, repo.repo_name)
1052 repo_group = repo.group
1051 repo_group = repo.group
1053 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1052 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1054 # disable hg/git internal that it doesn't get detected as repo
1053 # disable hg/git internal that it doesn't get detected as repo
1055 alias = repo.repo_type
1054 alias = repo.repo_type
1056
1055
1057 config = make_db_config(clear_session=False)
1056 config = make_db_config(clear_session=False)
1058 config.set('extensions', 'largefiles', '')
1057 config.set('extensions', 'largefiles', '')
1059 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1058 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1060
1059
1061 # skip this for bare git repos
1060 # skip this for bare git repos
1062 if not bare:
1061 if not bare:
1063 # disable VCS repo
1062 # disable VCS repo
1064 vcs_path = os.path.join(rm_path, '.%s' % alias)
1063 vcs_path = os.path.join(rm_path, '.%s' % alias)
1065 if os.path.exists(vcs_path):
1064 if os.path.exists(vcs_path):
1066 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1065 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1067
1066
1068 _now = datetime.datetime.now()
1067 _now = datetime.datetime.now()
1069 _ms = str(_now.microsecond).rjust(6, '0')
1068 _ms = str(_now.microsecond).rjust(6, '0')
1070 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1069 _d = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1071 repo.just_name)
1070 repo.just_name)
1072 if repo_group:
1071 if repo_group:
1073 # if repository is in group, prefix the removal path with the group
1072 # if repository is in group, prefix the removal path with the group
1074 args = repo_group.full_path_splitted + [_d]
1073 args = repo_group.full_path_splitted + [_d]
1075 _d = os.path.join(*args)
1074 _d = os.path.join(*args)
1076
1075
1077 if os.path.isdir(rm_path):
1076 if os.path.isdir(rm_path):
1078 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1077 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1079
1078
1080 # finally cleanup diff-cache if it exists
1079 # finally cleanup diff-cache if it exists
1081 cached_diffs_dir = repo.cached_diffs_dir
1080 cached_diffs_dir = repo.cached_diffs_dir
1082 if os.path.isdir(cached_diffs_dir):
1081 if os.path.isdir(cached_diffs_dir):
1083 shutil.rmtree(cached_diffs_dir)
1082 shutil.rmtree(cached_diffs_dir)
1084
1083
1085
1084
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    # matches e.g. "README", "readme.md"; group(1) is the dot-prefixed
    # extension or None when there is no extension
    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower value == higher priority; keyed by dot-prefixed extension
    # (None == extension-less readme)
    default_priorities = {
        None: 0,
        '.rst': 1,
        '.md': 1,
        '.rest': 2,
        '.mkdn': 2,
        '.text': 2,
        '.txt': 3,
        '.mdown': 3,
        '.markdown': 4,
    }

    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # NOTE: was 'mkdn' (without the dot), which could never match the
        # dot-prefixed extensions captured by `readme_re`
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Find a readme in the given `commit` under `path`, recursing into
        documentation-like sub-directories when no direct match exists.
        """
        nodes = commit.get_nodes(path)
        matches = self._sort_according_to_priority(self._match_readmes(nodes))
        if matches:
            return matches[0].node

        # no direct readme; descend into doc-like directories by priority
        dir_paths = self._sort_paths_according_to_priority(
            self._match_paths(nodes))
        for dir_path in dir_paths:
            match = self.search(commit, path=dir_path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        """Yield a ReadmeMatch for every file node that looks like a readme."""
        for node in nodes:
            if not node.is_file():
                continue
            filename = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(filename)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        """Yield paths of directory nodes that look like doc directories."""
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        """
        Return a sort key (renderer_priority, extension_priority); extensions
        handled by the configured default renderer sort first.
        """
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):
        """Sort ReadmeMatch objects by priority, then path for stability."""

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):
        """Sort directory paths by configured priority, then name."""

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)
1185
1184
1186
1185
class ReadmeMatch:
    """Value object pairing a readme node with its regex match and sort key."""

    def __init__(self, node, match, priority):
        # node: file node of the matched readme
        self.node = node
        # match object from ReadmeFinder.readme_re
        self._match = match
        # (renderer_priority, extension_priority) tuple used for sorting
        self.priority = priority

    @property
    def path(self):
        """Path of the underlying readme node."""
        return self.node.path

    def __repr__(self):
        # fixed: repr was missing the closing '>'
        return f'<ReadmeMatch {self.path} priority={self.priority}>'
@@ -1,897 +1,895 b''
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21
19
22 """
20 """
23 repo group model for RhodeCode
21 repo group model for RhodeCode
24 """
22 """
25
23
26 import os
24 import os
27 import datetime
25 import datetime
28 import itertools
26 import itertools
29 import logging
27 import logging
30 import shutil
28 import shutil
31 import time
29 import time
32 import traceback
30 import traceback
33 import string
31 import string
34
32
35 from zope.cachedescriptors.property import Lazy as LazyProperty
33 from zope.cachedescriptors.property import Lazy as LazyProperty
36
34
37 from rhodecode import events
35 from rhodecode import events
38 from rhodecode.model import BaseModel
36 from rhodecode.model import BaseModel
39 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
37 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
40 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
38 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
41 UserGroup, Repository)
39 UserGroup, Repository)
42 from rhodecode.model.permission import PermissionModel
40 from rhodecode.model.permission import PermissionModel
43 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
41 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
44 from rhodecode.lib.caching_query import FromCache
42 from rhodecode.lib.caching_query import FromCache
45 from rhodecode.lib.utils2 import action_logger_generic
43 from rhodecode.lib.utils2 import action_logger_generic
46
44
47 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
48
46
49
47
50 class RepoGroupModel(BaseModel):
48 class RepoGroupModel(BaseModel):
51
49
52 cls = RepoGroup
50 cls = RepoGroup
53 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
51 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
54 PERSONAL_GROUP_PATTERN = '${username}' # default
52 PERSONAL_GROUP_PATTERN = '${username}' # default
55
53
56 def _get_user_group(self, users_group):
54 def _get_user_group(self, users_group):
57 return self._get_instance(UserGroup, users_group,
55 return self._get_instance(UserGroup, users_group,
58 callback=UserGroup.get_by_group_name)
56 callback=UserGroup.get_by_group_name)
59
57
60 def _get_repo_group(self, repo_group):
58 def _get_repo_group(self, repo_group):
61 return self._get_instance(RepoGroup, repo_group,
59 return self._get_instance(RepoGroup, repo_group,
62 callback=RepoGroup.get_by_group_name)
60 callback=RepoGroup.get_by_group_name)
63
61
64 def get_repo_group(self, repo_group):
62 def get_repo_group(self, repo_group):
65 return self._get_repo_group(repo_group)
63 return self._get_repo_group(repo_group)
66
64
67 @LazyProperty
65 @LazyProperty
68 def repos_path(self):
66 def repos_path(self):
69 """
67 """
70 Gets the repositories root path from database
68 Gets the repositories root path from database
71 """
69 """
72
70
73 settings_model = VcsSettingsModel(sa=self.sa)
71 settings_model = VcsSettingsModel(sa=self.sa)
74 return settings_model.get_repos_location()
72 return settings_model.get_repos_location()
75
73
76 def get_by_group_name(self, repo_group_name, cache=None):
74 def get_by_group_name(self, repo_group_name, cache=None):
77 repo = self.sa.query(RepoGroup) \
75 repo = self.sa.query(RepoGroup) \
78 .filter(RepoGroup.group_name == repo_group_name)
76 .filter(RepoGroup.group_name == repo_group_name)
79
77
80 if cache:
78 if cache:
81 name_key = _hash_key(repo_group_name)
79 name_key = _hash_key(repo_group_name)
82 repo = repo.options(
80 repo = repo.options(
83 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
81 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
84 return repo.scalar()
82 return repo.scalar()
85
83
86 def get_default_create_personal_repo_group(self):
84 def get_default_create_personal_repo_group(self):
87 value = SettingsModel().get_setting_by_name(
85 value = SettingsModel().get_setting_by_name(
88 'create_personal_repo_group')
86 'create_personal_repo_group')
89 return value.app_settings_value if value else None or False
87 return value.app_settings_value if value else None or False
90
88
91 def get_personal_group_name_pattern(self):
89 def get_personal_group_name_pattern(self):
92 value = SettingsModel().get_setting_by_name(
90 value = SettingsModel().get_setting_by_name(
93 'personal_repo_group_pattern')
91 'personal_repo_group_pattern')
94 val = value.app_settings_value if value else None
92 val = value.app_settings_value if value else None
95 group_template = val or self.PERSONAL_GROUP_PATTERN
93 group_template = val or self.PERSONAL_GROUP_PATTERN
96
94
97 group_template = group_template.lstrip('/')
95 group_template = group_template.lstrip('/')
98 return group_template
96 return group_template
99
97
100 def get_personal_group_name(self, user):
98 def get_personal_group_name(self, user):
101 template = self.get_personal_group_name_pattern()
99 template = self.get_personal_group_name_pattern()
102 return string.Template(template).safe_substitute(
100 return string.Template(template).safe_substitute(
103 username=user.username,
101 username=user.username,
104 user_id=user.user_id,
102 user_id=user.user_id,
105 first_name=user.first_name,
103 first_name=user.first_name,
106 last_name=user.last_name,
104 last_name=user.last_name,
107 )
105 )
108
106
109 def create_personal_repo_group(self, user, commit_early=True):
107 def create_personal_repo_group(self, user, commit_early=True):
110 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
108 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
111 personal_repo_group_name = self.get_personal_group_name(user)
109 personal_repo_group_name = self.get_personal_group_name(user)
112
110
113 # create a new one
111 # create a new one
114 RepoGroupModel().create(
112 RepoGroupModel().create(
115 group_name=personal_repo_group_name,
113 group_name=personal_repo_group_name,
116 group_description=desc,
114 group_description=desc,
117 owner=user.username,
115 owner=user.username,
118 personal=True,
116 personal=True,
119 commit_early=commit_early)
117 commit_early=commit_early)
120
118
121 def _create_default_perms(self, new_group):
119 def _create_default_perms(self, new_group):
122 # create default permission
120 # create default permission
123 default_perm = 'group.read'
121 default_perm = 'group.read'
124 def_user = User.get_default_user()
122 def_user = User.get_default_user()
125 for p in def_user.user_perms:
123 for p in def_user.user_perms:
126 if p.permission.permission_name.startswith('group.'):
124 if p.permission.permission_name.startswith('group.'):
127 default_perm = p.permission.permission_name
125 default_perm = p.permission.permission_name
128 break
126 break
129
127
130 repo_group_to_perm = UserRepoGroupToPerm()
128 repo_group_to_perm = UserRepoGroupToPerm()
131 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
129 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
132
130
133 repo_group_to_perm.group = new_group
131 repo_group_to_perm.group = new_group
134 repo_group_to_perm.user = def_user
132 repo_group_to_perm.user = def_user
135 return repo_group_to_perm
133 return repo_group_to_perm
136
134
137 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
135 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
138 get_object=False):
136 get_object=False):
139 """
137 """
140 Get's the group name and a parent group name from given group name.
138 Get's the group name and a parent group name from given group name.
141 If repo_in_path is set to truth, we asume the full path also includes
139 If repo_in_path is set to truth, we asume the full path also includes
142 repo name, in such case we clean the last element.
140 repo name, in such case we clean the last element.
143
141
144 :param group_name_full:
142 :param group_name_full:
145 """
143 """
146 split_paths = 1
144 split_paths = 1
147 if repo_in_path:
145 if repo_in_path:
148 split_paths = 2
146 split_paths = 2
149 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
147 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
150
148
151 if repo_in_path and len(_parts) > 1:
149 if repo_in_path and len(_parts) > 1:
152 # such case last element is the repo_name
150 # such case last element is the repo_name
153 _parts.pop(-1)
151 _parts.pop(-1)
154 group_name_cleaned = _parts[-1] # just the group name
152 group_name_cleaned = _parts[-1] # just the group name
155 parent_repo_group_name = None
153 parent_repo_group_name = None
156
154
157 if len(_parts) > 1:
155 if len(_parts) > 1:
158 parent_repo_group_name = _parts[0]
156 parent_repo_group_name = _parts[0]
159
157
160 parent_group = None
158 parent_group = None
161 if parent_repo_group_name:
159 if parent_repo_group_name:
162 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
160 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
163
161
164 if get_object:
162 if get_object:
165 return group_name_cleaned, parent_repo_group_name, parent_group
163 return group_name_cleaned, parent_repo_group_name, parent_group
166
164
167 return group_name_cleaned, parent_repo_group_name
165 return group_name_cleaned, parent_repo_group_name
168
166
169 def check_exist_filesystem(self, group_name, exc_on_failure=True):
167 def check_exist_filesystem(self, group_name, exc_on_failure=True):
170 create_path = os.path.join(self.repos_path, group_name)
168 create_path = os.path.join(self.repos_path, group_name)
171 log.debug('creating new group in %s', create_path)
169 log.debug('creating new group in %s', create_path)
172
170
173 if os.path.isdir(create_path):
171 if os.path.isdir(create_path):
174 if exc_on_failure:
172 if exc_on_failure:
175 abs_create_path = os.path.abspath(create_path)
173 abs_create_path = os.path.abspath(create_path)
176 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
174 raise Exception(f'Directory `{abs_create_path}` already exists !')
177 return False
175 return False
178 return True
176 return True
179
177
180 def _create_group(self, group_name):
178 def _create_group(self, group_name):
181 """
179 """
182 makes repository group on filesystem
180 makes repository group on filesystem
183
181
184 :param repo_name:
182 :param repo_name:
185 :param parent_id:
183 :param parent_id:
186 """
184 """
187
185
188 self.check_exist_filesystem(group_name)
186 self.check_exist_filesystem(group_name)
189 create_path = os.path.join(self.repos_path, group_name)
187 create_path = os.path.join(self.repos_path, group_name)
190 log.debug('creating new group in %s', create_path)
188 log.debug('creating new group in %s', create_path)
191 os.makedirs(create_path, mode=0o755)
189 os.makedirs(create_path, mode=0o755)
192 log.debug('created group in %s', create_path)
190 log.debug('created group in %s', create_path)
193
191
194 def _rename_group(self, old, new):
192 def _rename_group(self, old, new):
195 """
193 """
196 Renames a group on filesystem
194 Renames a group on filesystem
197
195
198 :param group_name:
196 :param group_name:
199 """
197 """
200
198
201 if old == new:
199 if old == new:
202 log.debug('skipping group rename')
200 log.debug('skipping group rename')
203 return
201 return
204
202
205 log.debug('renaming repository group from %s to %s', old, new)
203 log.debug('renaming repository group from %s to %s', old, new)
206
204
207 old_path = os.path.join(self.repos_path, old)
205 old_path = os.path.join(self.repos_path, old)
208 new_path = os.path.join(self.repos_path, new)
206 new_path = os.path.join(self.repos_path, new)
209
207
210 log.debug('renaming repos paths from %s to %s', old_path, new_path)
208 log.debug('renaming repos paths from %s to %s', old_path, new_path)
211
209
212 if os.path.isdir(new_path):
210 if os.path.isdir(new_path):
213 raise Exception('Was trying to rename to already '
211 raise Exception('Was trying to rename to already '
214 'existing dir %s' % new_path)
212 'existing dir %s' % new_path)
215 shutil.move(old_path, new_path)
213 shutil.move(old_path, new_path)
216
214
217 def _delete_filesystem_group(self, group, force_delete=False):
215 def _delete_filesystem_group(self, group, force_delete=False):
218 """
216 """
219 Deletes a group from a filesystem
217 Deletes a group from a filesystem
220
218
221 :param group: instance of group from database
219 :param group: instance of group from database
222 :param force_delete: use shutil rmtree to remove all objects
220 :param force_delete: use shutil rmtree to remove all objects
223 """
221 """
224 paths = group.full_path.split(RepoGroup.url_sep())
222 paths = group.full_path.split(RepoGroup.url_sep())
225 paths = os.sep.join(paths)
223 paths = os.sep.join(paths)
226
224
227 rm_path = os.path.join(self.repos_path, paths)
225 rm_path = os.path.join(self.repos_path, paths)
228 log.info("Removing group %s", rm_path)
226 log.info("Removing group %s", rm_path)
229 # delete only if that path really exists
227 # delete only if that path really exists
230 if os.path.isdir(rm_path):
228 if os.path.isdir(rm_path):
231 if force_delete:
229 if force_delete:
232 shutil.rmtree(rm_path)
230 shutil.rmtree(rm_path)
233 else:
231 else:
234 # archive that group`
232 # archive that group`
235 _now = datetime.datetime.now()
233 _now = datetime.datetime.now()
236 _ms = str(_now.microsecond).rjust(6, '0')
234 _ms = str(_now.microsecond).rjust(6, '0')
237 _d = 'rm__%s_GROUP_%s' % (
235 _d = 'rm__{}_GROUP_{}'.format(
238 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
236 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
239 shutil.move(rm_path, os.path.join(self.repos_path, _d))
237 shutil.move(rm_path, os.path.join(self.repos_path, _d))
240
238
241 def create(self, group_name, group_description, owner, just_db=False,
239 def create(self, group_name, group_description, owner, just_db=False,
242 copy_permissions=False, personal=None, commit_early=True):
240 copy_permissions=False, personal=None, commit_early=True):
243
241
244 (group_name_cleaned,
242 (group_name_cleaned,
245 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
243 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
246
244
247 parent_group = None
245 parent_group = None
248 if parent_group_name:
246 if parent_group_name:
249 parent_group = self._get_repo_group(parent_group_name)
247 parent_group = self._get_repo_group(parent_group_name)
250 if not parent_group:
248 if not parent_group:
251 # we tried to create a nested group, but the parent is not
249 # we tried to create a nested group, but the parent is not
252 # existing
250 # existing
253 raise ValueError(
251 raise ValueError(
254 'Parent group `%s` given in `%s` group name '
252 'Parent group `%s` given in `%s` group name '
255 'is not yet existing.' % (parent_group_name, group_name))
253 'is not yet existing.' % (parent_group_name, group_name))
256
254
257 # because we are doing a cleanup, we need to check if such directory
255 # because we are doing a cleanup, we need to check if such directory
258 # already exists. If we don't do that we can accidentally delete
256 # already exists. If we don't do that we can accidentally delete
259 # existing directory via cleanup that can cause data issues, since
257 # existing directory via cleanup that can cause data issues, since
260 # delete does a folder rename to special syntax later cleanup
258 # delete does a folder rename to special syntax later cleanup
261 # functions can delete this
259 # functions can delete this
262 cleanup_group = self.check_exist_filesystem(group_name,
260 cleanup_group = self.check_exist_filesystem(group_name,
263 exc_on_failure=False)
261 exc_on_failure=False)
264 user = self._get_user(owner)
262 user = self._get_user(owner)
265 if not user:
263 if not user:
266 raise ValueError('Owner %s not found as rhodecode user', owner)
264 raise ValueError('Owner %s not found as rhodecode user', owner)
267
265
268 try:
266 try:
269 new_repo_group = RepoGroup()
267 new_repo_group = RepoGroup()
270 new_repo_group.user = user
268 new_repo_group.user = user
271 new_repo_group.group_description = group_description or group_name
269 new_repo_group.group_description = group_description or group_name
272 new_repo_group.parent_group = parent_group
270 new_repo_group.parent_group = parent_group
273 new_repo_group.group_name = group_name
271 new_repo_group.group_name = group_name
274 new_repo_group.personal = personal
272 new_repo_group.personal = personal
275
273
276 self.sa.add(new_repo_group)
274 self.sa.add(new_repo_group)
277
275
278 # create an ADMIN permission for owner except if we're super admin,
276 # create an ADMIN permission for owner except if we're super admin,
279 # later owner should go into the owner field of groups
277 # later owner should go into the owner field of groups
280 if not user.is_admin:
278 if not user.is_admin:
281 self.grant_user_permission(repo_group=new_repo_group,
279 self.grant_user_permission(repo_group=new_repo_group,
282 user=owner, perm='group.admin')
280 user=owner, perm='group.admin')
283
281
284 if parent_group and copy_permissions:
282 if parent_group and copy_permissions:
285 # copy permissions from parent
283 # copy permissions from parent
286 user_perms = UserRepoGroupToPerm.query() \
284 user_perms = UserRepoGroupToPerm.query() \
287 .filter(UserRepoGroupToPerm.group == parent_group).all()
285 .filter(UserRepoGroupToPerm.group == parent_group).all()
288
286
289 group_perms = UserGroupRepoGroupToPerm.query() \
287 group_perms = UserGroupRepoGroupToPerm.query() \
290 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
288 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
291
289
292 for perm in user_perms:
290 for perm in user_perms:
293 # don't copy over the permission for user who is creating
291 # don't copy over the permission for user who is creating
294 # this group, if he is not super admin he get's admin
292 # this group, if he is not super admin he get's admin
295 # permission set above
293 # permission set above
296 if perm.user != user or user.is_admin:
294 if perm.user != user or user.is_admin:
297 UserRepoGroupToPerm.create(
295 UserRepoGroupToPerm.create(
298 perm.user, new_repo_group, perm.permission)
296 perm.user, new_repo_group, perm.permission)
299
297
300 for perm in group_perms:
298 for perm in group_perms:
301 UserGroupRepoGroupToPerm.create(
299 UserGroupRepoGroupToPerm.create(
302 perm.users_group, new_repo_group, perm.permission)
300 perm.users_group, new_repo_group, perm.permission)
303 else:
301 else:
304 perm_obj = self._create_default_perms(new_repo_group)
302 perm_obj = self._create_default_perms(new_repo_group)
305 self.sa.add(perm_obj)
303 self.sa.add(perm_obj)
306
304
307 # now commit the changes, earlier so we are sure everything is in
305 # now commit the changes, earlier so we are sure everything is in
308 # the database.
306 # the database.
309 if commit_early:
307 if commit_early:
310 self.sa.commit()
308 self.sa.commit()
311 if not just_db:
309 if not just_db:
312 self._create_group(new_repo_group.group_name)
310 self._create_group(new_repo_group.group_name)
313
311
314 # trigger the post hook
312 # trigger the post hook
315 from rhodecode.lib import hooks_base
313 from rhodecode.lib import hooks_base
316 repo_group = RepoGroup.get_by_group_name(group_name)
314 repo_group = RepoGroup.get_by_group_name(group_name)
317
315
318 # update repo group commit caches initially
316 # update repo group commit caches initially
319 repo_group.update_commit_cache()
317 repo_group.update_commit_cache()
320
318
321 hooks_base.create_repository_group(
319 hooks_base.create_repository_group(
322 created_by=user.username, **repo_group.get_dict())
320 created_by=user.username, **repo_group.get_dict())
323
321
324 # Trigger create event.
322 # Trigger create event.
325 events.trigger(events.RepoGroupCreateEvent(repo_group))
323 events.trigger(events.RepoGroupCreateEvent(repo_group))
326
324
327 return new_repo_group
325 return new_repo_group
328 except Exception:
326 except Exception:
329 self.sa.rollback()
327 self.sa.rollback()
330 log.exception('Exception occurred when creating repository group, '
328 log.exception('Exception occurred when creating repository group, '
331 'doing cleanup...')
329 'doing cleanup...')
332 # rollback things manually !
330 # rollback things manually !
333 repo_group = RepoGroup.get_by_group_name(group_name)
331 repo_group = RepoGroup.get_by_group_name(group_name)
334 if repo_group:
332 if repo_group:
335 RepoGroup.delete(repo_group.group_id)
333 RepoGroup.delete(repo_group.group_id)
336 self.sa.commit()
334 self.sa.commit()
337 if cleanup_group:
335 if cleanup_group:
338 RepoGroupModel()._delete_filesystem_group(repo_group)
336 RepoGroupModel()._delete_filesystem_group(repo_group)
339 raise
337 raise
340
338
341 def update_permissions(
339 def update_permissions(
342 self, repo_group, perm_additions=None, perm_updates=None,
340 self, repo_group, perm_additions=None, perm_updates=None,
343 perm_deletions=None, recursive=None, check_perms=True,
341 perm_deletions=None, recursive=None, check_perms=True,
344 cur_user=None):
342 cur_user=None):
345 from rhodecode.model.repo import RepoModel
343 from rhodecode.model.repo import RepoModel
346 from rhodecode.lib.auth import HasUserGroupPermissionAny
344 from rhodecode.lib.auth import HasUserGroupPermissionAny
347
345
348 if not perm_additions:
346 if not perm_additions:
349 perm_additions = []
347 perm_additions = []
350 if not perm_updates:
348 if not perm_updates:
351 perm_updates = []
349 perm_updates = []
352 if not perm_deletions:
350 if not perm_deletions:
353 perm_deletions = []
351 perm_deletions = []
354
352
355 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
353 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
356
354
357 changes = {
355 changes = {
358 'added': [],
356 'added': [],
359 'updated': [],
357 'updated': [],
360 'deleted': [],
358 'deleted': [],
361 'default_user_changed': None
359 'default_user_changed': None
362 }
360 }
363
361
364 def _set_perm_user(obj, user, perm):
362 def _set_perm_user(obj, user, perm):
365 if isinstance(obj, RepoGroup):
363 if isinstance(obj, RepoGroup):
366 self.grant_user_permission(
364 self.grant_user_permission(
367 repo_group=obj, user=user, perm=perm)
365 repo_group=obj, user=user, perm=perm)
368 elif isinstance(obj, Repository):
366 elif isinstance(obj, Repository):
369 # private repos will not allow to change the default
367 # private repos will not allow to change the default
370 # permissions using recursive mode
368 # permissions using recursive mode
371 if obj.private and user == User.DEFAULT_USER:
369 if obj.private and user == User.DEFAULT_USER:
372 return
370 return
373
371
374 # we set group permission but we have to switch to repo
372 # we set group permission but we have to switch to repo
375 # permission
373 # permission
376 perm = perm.replace('group.', 'repository.')
374 perm = perm.replace('group.', 'repository.')
377 RepoModel().grant_user_permission(
375 RepoModel().grant_user_permission(
378 repo=obj, user=user, perm=perm)
376 repo=obj, user=user, perm=perm)
379
377
380 def _set_perm_group(obj, users_group, perm):
378 def _set_perm_group(obj, users_group, perm):
381 if isinstance(obj, RepoGroup):
379 if isinstance(obj, RepoGroup):
382 self.grant_user_group_permission(
380 self.grant_user_group_permission(
383 repo_group=obj, group_name=users_group, perm=perm)
381 repo_group=obj, group_name=users_group, perm=perm)
384 elif isinstance(obj, Repository):
382 elif isinstance(obj, Repository):
385 # we set group permission but we have to switch to repo
383 # we set group permission but we have to switch to repo
386 # permission
384 # permission
387 perm = perm.replace('group.', 'repository.')
385 perm = perm.replace('group.', 'repository.')
388 RepoModel().grant_user_group_permission(
386 RepoModel().grant_user_group_permission(
389 repo=obj, group_name=users_group, perm=perm)
387 repo=obj, group_name=users_group, perm=perm)
390
388
391 def _revoke_perm_user(obj, user):
389 def _revoke_perm_user(obj, user):
392 if isinstance(obj, RepoGroup):
390 if isinstance(obj, RepoGroup):
393 self.revoke_user_permission(repo_group=obj, user=user)
391 self.revoke_user_permission(repo_group=obj, user=user)
394 elif isinstance(obj, Repository):
392 elif isinstance(obj, Repository):
395 RepoModel().revoke_user_permission(repo=obj, user=user)
393 RepoModel().revoke_user_permission(repo=obj, user=user)
396
394
397 def _revoke_perm_group(obj, user_group):
395 def _revoke_perm_group(obj, user_group):
398 if isinstance(obj, RepoGroup):
396 if isinstance(obj, RepoGroup):
399 self.revoke_user_group_permission(
397 self.revoke_user_group_permission(
400 repo_group=obj, group_name=user_group)
398 repo_group=obj, group_name=user_group)
401 elif isinstance(obj, Repository):
399 elif isinstance(obj, Repository):
402 RepoModel().revoke_user_group_permission(
400 RepoModel().revoke_user_group_permission(
403 repo=obj, group_name=user_group)
401 repo=obj, group_name=user_group)
404
402
405 # start updates
403 # start updates
406 log.debug('Now updating permissions for %s in recursive mode:%s',
404 log.debug('Now updating permissions for %s in recursive mode:%s',
407 repo_group, recursive)
405 repo_group, recursive)
408
406
409 # initialize check function, we'll call that multiple times
407 # initialize check function, we'll call that multiple times
410 has_group_perm = HasUserGroupPermissionAny(*req_perms)
408 has_group_perm = HasUserGroupPermissionAny(*req_perms)
411
409
412 for obj in repo_group.recursive_groups_and_repos():
410 for obj in repo_group.recursive_groups_and_repos():
413 # iterated obj is an instance of a repos group or repository in
411 # iterated obj is an instance of a repos group or repository in
414 # that group, recursive option can be: none, repos, groups, all
412 # that group, recursive option can be: none, repos, groups, all
415 if recursive == 'all':
413 if recursive == 'all':
416 obj = obj
414 obj = obj
417 elif recursive == 'repos':
415 elif recursive == 'repos':
418 # skip groups, other than this one
416 # skip groups, other than this one
419 if isinstance(obj, RepoGroup) and not obj == repo_group:
417 if isinstance(obj, RepoGroup) and not obj == repo_group:
420 continue
418 continue
421 elif recursive == 'groups':
419 elif recursive == 'groups':
422 # skip repos
420 # skip repos
423 if isinstance(obj, Repository):
421 if isinstance(obj, Repository):
424 continue
422 continue
425 else: # recursive == 'none':
423 else: # recursive == 'none':
426 # DEFAULT option - don't apply to iterated objects
424 # DEFAULT option - don't apply to iterated objects
427 # also we do a break at the end of this loop. if we are not
425 # also we do a break at the end of this loop. if we are not
428 # in recursive mode
426 # in recursive mode
429 obj = repo_group
427 obj = repo_group
430
428
431 change_obj = obj.get_api_data()
429 change_obj = obj.get_api_data()
432
430
433 # update permissions
431 # update permissions
434 for member_id, perm, member_type in perm_updates:
432 for member_id, perm, member_type in perm_updates:
435 member_id = int(member_id)
433 member_id = int(member_id)
436 if member_type == 'user':
434 if member_type == 'user':
437 member_name = User.get(member_id).username
435 member_name = User.get(member_id).username
438 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
436 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
439 # NOTE(dan): detect if we changed permissions for default user
437 # NOTE(dan): detect if we changed permissions for default user
440 perm_obj = self.sa.query(UserRepoGroupToPerm) \
438 perm_obj = self.sa.query(UserRepoGroupToPerm) \
441 .filter(UserRepoGroupToPerm.user_id == member_id) \
439 .filter(UserRepoGroupToPerm.user_id == member_id) \
442 .filter(UserRepoGroupToPerm.group == repo_group) \
440 .filter(UserRepoGroupToPerm.group == repo_group) \
443 .scalar()
441 .scalar()
444 if perm_obj and perm_obj.permission.permission_name != perm:
442 if perm_obj and perm_obj.permission.permission_name != perm:
445 changes['default_user_changed'] = True
443 changes['default_user_changed'] = True
446
444
447 # this updates also current one if found
445 # this updates also current one if found
448 _set_perm_user(obj, user=member_id, perm=perm)
446 _set_perm_user(obj, user=member_id, perm=perm)
449 elif member_type == 'user_group':
447 elif member_type == 'user_group':
450 member_name = UserGroup.get(member_id).users_group_name
448 member_name = UserGroup.get(member_id).users_group_name
451 if not check_perms or has_group_perm(member_name,
449 if not check_perms or has_group_perm(member_name,
452 user=cur_user):
450 user=cur_user):
453 _set_perm_group(obj, users_group=member_id, perm=perm)
451 _set_perm_group(obj, users_group=member_id, perm=perm)
454 else:
452 else:
455 raise ValueError("member_type must be 'user' or 'user_group' "
453 raise ValueError("member_type must be 'user' or 'user_group' "
456 "got {} instead".format(member_type))
454 "got {} instead".format(member_type))
457
455
458 changes['updated'].append(
456 changes['updated'].append(
459 {'change_obj': change_obj, 'type': member_type,
457 {'change_obj': change_obj, 'type': member_type,
460 'id': member_id, 'name': member_name, 'new_perm': perm})
458 'id': member_id, 'name': member_name, 'new_perm': perm})
461
459
462 # set new permissions
460 # set new permissions
463 for member_id, perm, member_type in perm_additions:
461 for member_id, perm, member_type in perm_additions:
464 member_id = int(member_id)
462 member_id = int(member_id)
465 if member_type == 'user':
463 if member_type == 'user':
466 member_name = User.get(member_id).username
464 member_name = User.get(member_id).username
467 _set_perm_user(obj, user=member_id, perm=perm)
465 _set_perm_user(obj, user=member_id, perm=perm)
468 elif member_type == 'user_group':
466 elif member_type == 'user_group':
469 # check if we have permissions to alter this usergroup
467 # check if we have permissions to alter this usergroup
470 member_name = UserGroup.get(member_id).users_group_name
468 member_name = UserGroup.get(member_id).users_group_name
471 if not check_perms or has_group_perm(member_name,
469 if not check_perms or has_group_perm(member_name,
472 user=cur_user):
470 user=cur_user):
473 _set_perm_group(obj, users_group=member_id, perm=perm)
471 _set_perm_group(obj, users_group=member_id, perm=perm)
474 else:
472 else:
475 raise ValueError("member_type must be 'user' or 'user_group' "
473 raise ValueError("member_type must be 'user' or 'user_group' "
476 "got {} instead".format(member_type))
474 "got {} instead".format(member_type))
477
475
478 changes['added'].append(
476 changes['added'].append(
479 {'change_obj': change_obj, 'type': member_type,
477 {'change_obj': change_obj, 'type': member_type,
480 'id': member_id, 'name': member_name, 'new_perm': perm})
478 'id': member_id, 'name': member_name, 'new_perm': perm})
481
479
482 # delete permissions
480 # delete permissions
483 for member_id, perm, member_type in perm_deletions:
481 for member_id, perm, member_type in perm_deletions:
484 member_id = int(member_id)
482 member_id = int(member_id)
485 if member_type == 'user':
483 if member_type == 'user':
486 member_name = User.get(member_id).username
484 member_name = User.get(member_id).username
487 _revoke_perm_user(obj, user=member_id)
485 _revoke_perm_user(obj, user=member_id)
488 elif member_type == 'user_group':
486 elif member_type == 'user_group':
489 # check if we have permissions to alter this usergroup
487 # check if we have permissions to alter this usergroup
490 member_name = UserGroup.get(member_id).users_group_name
488 member_name = UserGroup.get(member_id).users_group_name
491 if not check_perms or has_group_perm(member_name,
489 if not check_perms or has_group_perm(member_name,
492 user=cur_user):
490 user=cur_user):
493 _revoke_perm_group(obj, user_group=member_id)
491 _revoke_perm_group(obj, user_group=member_id)
494 else:
492 else:
495 raise ValueError("member_type must be 'user' or 'user_group' "
493 raise ValueError("member_type must be 'user' or 'user_group' "
496 "got {} instead".format(member_type))
494 "got {} instead".format(member_type))
497
495
498 changes['deleted'].append(
496 changes['deleted'].append(
499 {'change_obj': change_obj, 'type': member_type,
497 {'change_obj': change_obj, 'type': member_type,
500 'id': member_id, 'name': member_name, 'new_perm': perm})
498 'id': member_id, 'name': member_name, 'new_perm': perm})
501
499
502 # if it's not recursive call for all,repos,groups
500 # if it's not recursive call for all,repos,groups
503 # break the loop and don't proceed with other changes
501 # break the loop and don't proceed with other changes
504 if recursive not in ['all', 'repos', 'groups']:
502 if recursive not in ['all', 'repos', 'groups']:
505 break
503 break
506
504
507 return changes
505 return changes
508
506
509 def update(self, repo_group, form_data):
507 def update(self, repo_group, form_data):
510 try:
508 try:
511 repo_group = self._get_repo_group(repo_group)
509 repo_group = self._get_repo_group(repo_group)
512 old_path = repo_group.full_path
510 old_path = repo_group.full_path
513
511
514 # change properties
512 # change properties
515 if 'group_description' in form_data:
513 if 'group_description' in form_data:
516 repo_group.group_description = form_data['group_description']
514 repo_group.group_description = form_data['group_description']
517
515
518 if 'enable_locking' in form_data:
516 if 'enable_locking' in form_data:
519 repo_group.enable_locking = form_data['enable_locking']
517 repo_group.enable_locking = form_data['enable_locking']
520
518
521 if 'group_parent_id' in form_data:
519 if 'group_parent_id' in form_data:
522 parent_group = (
520 parent_group = (
523 self._get_repo_group(form_data['group_parent_id']))
521 self._get_repo_group(form_data['group_parent_id']))
524 repo_group.group_parent_id = (
522 repo_group.group_parent_id = (
525 parent_group.group_id if parent_group else None)
523 parent_group.group_id if parent_group else None)
526 repo_group.parent_group = parent_group
524 repo_group.parent_group = parent_group
527
525
528 # mikhail: to update the full_path, we have to explicitly
526 # mikhail: to update the full_path, we have to explicitly
529 # update group_name
527 # update group_name
530 group_name = form_data.get('group_name', repo_group.name)
528 group_name = form_data.get('group_name', repo_group.name)
531 repo_group.group_name = repo_group.get_new_name(group_name)
529 repo_group.group_name = repo_group.get_new_name(group_name)
532
530
533 new_path = repo_group.full_path
531 new_path = repo_group.full_path
534
532
535 affected_user_ids = []
533 affected_user_ids = []
536 if 'user' in form_data:
534 if 'user' in form_data:
537 old_owner_id = repo_group.user.user_id
535 old_owner_id = repo_group.user.user_id
538 new_owner = User.get_by_username(form_data['user'])
536 new_owner = User.get_by_username(form_data['user'])
539 repo_group.user = new_owner
537 repo_group.user = new_owner
540
538
541 if old_owner_id != new_owner.user_id:
539 if old_owner_id != new_owner.user_id:
542 affected_user_ids = [new_owner.user_id, old_owner_id]
540 affected_user_ids = [new_owner.user_id, old_owner_id]
543
541
544 self.sa.add(repo_group)
542 self.sa.add(repo_group)
545
543
546 # iterate over all members of this groups and do fixes
544 # iterate over all members of this groups and do fixes
547 # set locking if given
545 # set locking if given
548 # if obj is a repoGroup also fix the name of the group according
546 # if obj is a repoGroup also fix the name of the group according
549 # to the parent
547 # to the parent
550 # if obj is a Repo fix it's name
548 # if obj is a Repo fix it's name
551 # this can be potentially heavy operation
549 # this can be potentially heavy operation
552 for obj in repo_group.recursive_groups_and_repos():
550 for obj in repo_group.recursive_groups_and_repos():
553 # set the value from it's parent
551 # set the value from it's parent
554 obj.enable_locking = repo_group.enable_locking
552 obj.enable_locking = repo_group.enable_locking
555 if isinstance(obj, RepoGroup):
553 if isinstance(obj, RepoGroup):
556 new_name = obj.get_new_name(obj.name)
554 new_name = obj.get_new_name(obj.name)
557 log.debug('Fixing group %s to new name %s',
555 log.debug('Fixing group %s to new name %s',
558 obj.group_name, new_name)
556 obj.group_name, new_name)
559 obj.group_name = new_name
557 obj.group_name = new_name
560
558
561 elif isinstance(obj, Repository):
559 elif isinstance(obj, Repository):
562 # we need to get all repositories from this new group and
560 # we need to get all repositories from this new group and
563 # rename them accordingly to new group path
561 # rename them accordingly to new group path
564 new_name = obj.get_new_name(obj.just_name)
562 new_name = obj.get_new_name(obj.just_name)
565 log.debug('Fixing repo %s to new name %s',
563 log.debug('Fixing repo %s to new name %s',
566 obj.repo_name, new_name)
564 obj.repo_name, new_name)
567 obj.repo_name = new_name
565 obj.repo_name = new_name
568
566
569 self.sa.add(obj)
567 self.sa.add(obj)
570
568
571 self._rename_group(old_path, new_path)
569 self._rename_group(old_path, new_path)
572
570
573 # Trigger update event.
571 # Trigger update event.
574 events.trigger(events.RepoGroupUpdateEvent(repo_group))
572 events.trigger(events.RepoGroupUpdateEvent(repo_group))
575
573
576 if affected_user_ids:
574 if affected_user_ids:
577 PermissionModel().trigger_permission_flush(affected_user_ids)
575 PermissionModel().trigger_permission_flush(affected_user_ids)
578
576
579 return repo_group
577 return repo_group
580 except Exception:
578 except Exception:
581 log.error(traceback.format_exc())
579 log.error(traceback.format_exc())
582 raise
580 raise
583
581
584 def delete(self, repo_group, force_delete=False, fs_remove=True):
582 def delete(self, repo_group, force_delete=False, fs_remove=True):
585 repo_group = self._get_repo_group(repo_group)
583 repo_group = self._get_repo_group(repo_group)
586 if not repo_group:
584 if not repo_group:
587 return False
585 return False
588 try:
586 try:
589 self.sa.delete(repo_group)
587 self.sa.delete(repo_group)
590 if fs_remove:
588 if fs_remove:
591 self._delete_filesystem_group(repo_group, force_delete)
589 self._delete_filesystem_group(repo_group, force_delete)
592 else:
590 else:
593 log.debug('skipping removal from filesystem')
591 log.debug('skipping removal from filesystem')
594
592
595 # Trigger delete event.
593 # Trigger delete event.
596 events.trigger(events.RepoGroupDeleteEvent(repo_group))
594 events.trigger(events.RepoGroupDeleteEvent(repo_group))
597 return True
595 return True
598
596
599 except Exception:
597 except Exception:
600 log.error('Error removing repo_group %s', repo_group)
598 log.error('Error removing repo_group %s', repo_group)
601 raise
599 raise
602
600
603 def grant_user_permission(self, repo_group, user, perm):
601 def grant_user_permission(self, repo_group, user, perm):
604 """
602 """
605 Grant permission for user on given repository group, or update
603 Grant permission for user on given repository group, or update
606 existing one if found
604 existing one if found
607
605
608 :param repo_group: Instance of RepoGroup, repositories_group_id,
606 :param repo_group: Instance of RepoGroup, repositories_group_id,
609 or repositories_group name
607 or repositories_group name
610 :param user: Instance of User, user_id or username
608 :param user: Instance of User, user_id or username
611 :param perm: Instance of Permission, or permission_name
609 :param perm: Instance of Permission, or permission_name
612 """
610 """
613
611
614 repo_group = self._get_repo_group(repo_group)
612 repo_group = self._get_repo_group(repo_group)
615 user = self._get_user(user)
613 user = self._get_user(user)
616 permission = self._get_perm(perm)
614 permission = self._get_perm(perm)
617
615
618 # check if we have that permission already
616 # check if we have that permission already
619 obj = self.sa.query(UserRepoGroupToPerm)\
617 obj = self.sa.query(UserRepoGroupToPerm)\
620 .filter(UserRepoGroupToPerm.user == user)\
618 .filter(UserRepoGroupToPerm.user == user)\
621 .filter(UserRepoGroupToPerm.group == repo_group)\
619 .filter(UserRepoGroupToPerm.group == repo_group)\
622 .scalar()
620 .scalar()
623 if obj is None:
621 if obj is None:
624 # create new !
622 # create new !
625 obj = UserRepoGroupToPerm()
623 obj = UserRepoGroupToPerm()
626 obj.group = repo_group
624 obj.group = repo_group
627 obj.user = user
625 obj.user = user
628 obj.permission = permission
626 obj.permission = permission
629 self.sa.add(obj)
627 self.sa.add(obj)
630 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
628 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
631 action_logger_generic(
629 action_logger_generic(
632 'granted permission: {} to user: {} on repogroup: {}'.format(
630 'granted permission: {} to user: {} on repogroup: {}'.format(
633 perm, user, repo_group), namespace='security.repogroup')
631 perm, user, repo_group), namespace='security.repogroup')
634 return obj
632 return obj
635
633
636 def revoke_user_permission(self, repo_group, user):
634 def revoke_user_permission(self, repo_group, user):
637 """
635 """
638 Revoke permission for user on given repository group
636 Revoke permission for user on given repository group
639
637
640 :param repo_group: Instance of RepoGroup, repositories_group_id,
638 :param repo_group: Instance of RepoGroup, repositories_group_id,
641 or repositories_group name
639 or repositories_group name
642 :param user: Instance of User, user_id or username
640 :param user: Instance of User, user_id or username
643 """
641 """
644
642
645 repo_group = self._get_repo_group(repo_group)
643 repo_group = self._get_repo_group(repo_group)
646 user = self._get_user(user)
644 user = self._get_user(user)
647
645
648 obj = self.sa.query(UserRepoGroupToPerm)\
646 obj = self.sa.query(UserRepoGroupToPerm)\
649 .filter(UserRepoGroupToPerm.user == user)\
647 .filter(UserRepoGroupToPerm.user == user)\
650 .filter(UserRepoGroupToPerm.group == repo_group)\
648 .filter(UserRepoGroupToPerm.group == repo_group)\
651 .scalar()
649 .scalar()
652 if obj:
650 if obj:
653 self.sa.delete(obj)
651 self.sa.delete(obj)
654 log.debug('Revoked perm on %s on %s', repo_group, user)
652 log.debug('Revoked perm on %s on %s', repo_group, user)
655 action_logger_generic(
653 action_logger_generic(
656 'revoked permission from user: {} on repogroup: {}'.format(
654 'revoked permission from user: {} on repogroup: {}'.format(
657 user, repo_group), namespace='security.repogroup')
655 user, repo_group), namespace='security.repogroup')
658
656
659 def grant_user_group_permission(self, repo_group, group_name, perm):
657 def grant_user_group_permission(self, repo_group, group_name, perm):
660 """
658 """
661 Grant permission for user group on given repository group, or update
659 Grant permission for user group on given repository group, or update
662 existing one if found
660 existing one if found
663
661
664 :param repo_group: Instance of RepoGroup, repositories_group_id,
662 :param repo_group: Instance of RepoGroup, repositories_group_id,
665 or repositories_group name
663 or repositories_group name
666 :param group_name: Instance of UserGroup, users_group_id,
664 :param group_name: Instance of UserGroup, users_group_id,
667 or user group name
665 or user group name
668 :param perm: Instance of Permission, or permission_name
666 :param perm: Instance of Permission, or permission_name
669 """
667 """
670 repo_group = self._get_repo_group(repo_group)
668 repo_group = self._get_repo_group(repo_group)
671 group_name = self._get_user_group(group_name)
669 group_name = self._get_user_group(group_name)
672 permission = self._get_perm(perm)
670 permission = self._get_perm(perm)
673
671
674 # check if we have that permission already
672 # check if we have that permission already
675 obj = self.sa.query(UserGroupRepoGroupToPerm)\
673 obj = self.sa.query(UserGroupRepoGroupToPerm)\
676 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
674 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
677 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
675 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
678 .scalar()
676 .scalar()
679
677
680 if obj is None:
678 if obj is None:
681 # create new
679 # create new
682 obj = UserGroupRepoGroupToPerm()
680 obj = UserGroupRepoGroupToPerm()
683
681
684 obj.group = repo_group
682 obj.group = repo_group
685 obj.users_group = group_name
683 obj.users_group = group_name
686 obj.permission = permission
684 obj.permission = permission
687 self.sa.add(obj)
685 self.sa.add(obj)
688 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
686 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
689 action_logger_generic(
687 action_logger_generic(
690 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
688 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
691 perm, group_name, repo_group), namespace='security.repogroup')
689 perm, group_name, repo_group), namespace='security.repogroup')
692 return obj
690 return obj
693
691
694 def revoke_user_group_permission(self, repo_group, group_name):
692 def revoke_user_group_permission(self, repo_group, group_name):
695 """
693 """
696 Revoke permission for user group on given repository group
694 Revoke permission for user group on given repository group
697
695
698 :param repo_group: Instance of RepoGroup, repositories_group_id,
696 :param repo_group: Instance of RepoGroup, repositories_group_id,
699 or repositories_group name
697 or repositories_group name
700 :param group_name: Instance of UserGroup, users_group_id,
698 :param group_name: Instance of UserGroup, users_group_id,
701 or user group name
699 or user group name
702 """
700 """
703 repo_group = self._get_repo_group(repo_group)
701 repo_group = self._get_repo_group(repo_group)
704 group_name = self._get_user_group(group_name)
702 group_name = self._get_user_group(group_name)
705
703
706 obj = self.sa.query(UserGroupRepoGroupToPerm)\
704 obj = self.sa.query(UserGroupRepoGroupToPerm)\
707 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
705 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
708 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
706 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
709 .scalar()
707 .scalar()
710 if obj:
708 if obj:
711 self.sa.delete(obj)
709 self.sa.delete(obj)
712 log.debug('Revoked perm to %s on %s', repo_group, group_name)
710 log.debug('Revoked perm to %s on %s', repo_group, group_name)
713 action_logger_generic(
711 action_logger_generic(
714 'revoked permission from usergroup: {} on repogroup: {}'.format(
712 'revoked permission from usergroup: {} on repogroup: {}'.format(
715 group_name, repo_group), namespace='security.repogroup')
713 group_name, repo_group), namespace='security.repogroup')
716
714
717 @classmethod
715 @classmethod
718 def update_commit_cache(cls, repo_groups=None):
716 def update_commit_cache(cls, repo_groups=None):
719 if not repo_groups:
717 if not repo_groups:
720 repo_groups = RepoGroup.getAll()
718 repo_groups = RepoGroup.getAll()
721 for repo_group in repo_groups:
719 for repo_group in repo_groups:
722 repo_group.update_commit_cache()
720 repo_group.update_commit_cache()
723
721
724 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
722 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
725 super_user_actions=False):
723 super_user_actions=False):
726
724
727 from pyramid.threadlocal import get_current_request
725 from pyramid.threadlocal import get_current_request
728 _render = get_current_request().get_partial_renderer(
726 _render = get_current_request().get_partial_renderer(
729 'rhodecode:templates/data_table/_dt_elements.mako')
727 'rhodecode:templates/data_table/_dt_elements.mako')
730 c = _render.get_call_context()
728 c = _render.get_call_context()
731 h = _render.get_helpers()
729 h = _render.get_helpers()
732
730
733 def quick_menu(repo_group_name):
731 def quick_menu(repo_group_name):
734 return _render('quick_repo_group_menu', repo_group_name)
732 return _render('quick_repo_group_menu', repo_group_name)
735
733
736 def repo_group_lnk(repo_group_name):
734 def repo_group_lnk(repo_group_name):
737 return _render('repo_group_name', repo_group_name)
735 return _render('repo_group_name', repo_group_name)
738
736
739 def last_change(last_change):
737 def last_change(last_change):
740 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
738 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
741 ts = time.time()
739 ts = time.time()
742 utc_offset = (datetime.datetime.fromtimestamp(ts)
740 utc_offset = (datetime.datetime.fromtimestamp(ts)
743 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
741 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
744 last_change = last_change + datetime.timedelta(seconds=utc_offset)
742 last_change = last_change + datetime.timedelta(seconds=utc_offset)
745 return _render("last_change", last_change)
743 return _render("last_change", last_change)
746
744
747 def desc(desc, personal):
745 def desc(desc, personal):
748 return _render(
746 return _render(
749 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
747 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
750
748
751 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
749 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
752 return _render(
750 return _render(
753 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
751 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
754
752
755 def repo_group_name(repo_group_name, children_groups):
753 def repo_group_name(repo_group_name, children_groups):
756 return _render("repo_group_name", repo_group_name, children_groups)
754 return _render("repo_group_name", repo_group_name, children_groups)
757
755
758 def user_profile(username):
756 def user_profile(username):
759 return _render('user_profile', username)
757 return _render('user_profile', username)
760
758
761 repo_group_data = []
759 repo_group_data = []
762 for group in repo_group_list:
760 for group in repo_group_list:
763 # NOTE(marcink): because we use only raw column we need to load it like that
761 # NOTE(marcink): because we use only raw column we need to load it like that
764 changeset_cache = RepoGroup._load_changeset_cache(
762 changeset_cache = RepoGroup._load_changeset_cache(
765 '', group._changeset_cache)
763 '', group._changeset_cache)
766 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
764 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
767 row = {
765 row = {
768 "menu": quick_menu(group.group_name),
766 "menu": quick_menu(group.group_name),
769 "name": repo_group_lnk(group.group_name),
767 "name": repo_group_lnk(group.group_name),
770 "name_raw": group.group_name,
768 "name_raw": group.group_name,
771
769
772 "last_change": last_change(last_commit_change),
770 "last_change": last_change(last_commit_change),
773
771
774 "last_changeset": "",
772 "last_changeset": "",
775 "last_changeset_raw": "",
773 "last_changeset_raw": "",
776
774
777 "desc": desc(h.escape(group.group_description), group.personal),
775 "desc": desc(h.escape(group.group_description), group.personal),
778 "top_level_repos": 0,
776 "top_level_repos": 0,
779 "owner": user_profile(group.User.username)
777 "owner": user_profile(group.User.username)
780 }
778 }
781 if admin:
779 if admin:
782 repo_count = group.repositories.count()
780 repo_count = group.repositories.count()
783 children_groups = list(map(
781 children_groups = list(map(
784 h.safe_str,
782 h.safe_str,
785 itertools.chain((g.name for g in group.parents),
783 itertools.chain((g.name for g in group.parents),
786 (x.name for x in [group]))))
784 (x.name for x in [group]))))
787 row.update({
785 row.update({
788 "action": repo_group_actions(
786 "action": repo_group_actions(
789 group.group_id, group.group_name, repo_count),
787 group.group_id, group.group_name, repo_count),
790 "top_level_repos": repo_count,
788 "top_level_repos": repo_count,
791 "name": repo_group_name(group.group_name, children_groups),
789 "name": repo_group_name(group.group_name, children_groups),
792
790
793 })
791 })
794 repo_group_data.append(row)
792 repo_group_data.append(row)
795
793
796 return repo_group_data
794 return repo_group_data
797
795
798 def get_repo_groups_data_table(
796 def get_repo_groups_data_table(
799 self, draw, start, limit,
797 self, draw, start, limit,
800 search_q, order_by, order_dir,
798 search_q, order_by, order_dir,
801 auth_user, repo_group_id):
799 auth_user, repo_group_id):
802 from rhodecode.model.scm import RepoGroupList
800 from rhodecode.model.scm import RepoGroupList
803
801
804 _perms = ['group.read', 'group.write', 'group.admin']
802 _perms = ['group.read', 'group.write', 'group.admin']
805 repo_groups = RepoGroup.query() \
803 repo_groups = RepoGroup.query() \
806 .filter(RepoGroup.group_parent_id == repo_group_id) \
804 .filter(RepoGroup.group_parent_id == repo_group_id) \
807 .all()
805 .all()
808 auth_repo_group_list = RepoGroupList(
806 auth_repo_group_list = RepoGroupList(
809 repo_groups, perm_set=_perms,
807 repo_groups, perm_set=_perms,
810 extra_kwargs=dict(user=auth_user))
808 extra_kwargs=dict(user=auth_user))
811
809
812 allowed_ids = [-1]
810 allowed_ids = [-1]
813 for repo_group in auth_repo_group_list:
811 for repo_group in auth_repo_group_list:
814 allowed_ids.append(repo_group.group_id)
812 allowed_ids.append(repo_group.group_id)
815
813
816 repo_groups_data_total_count = RepoGroup.query() \
814 repo_groups_data_total_count = RepoGroup.query() \
817 .filter(RepoGroup.group_parent_id == repo_group_id) \
815 .filter(RepoGroup.group_parent_id == repo_group_id) \
818 .filter(or_(
816 .filter(or_(
819 # generate multiple IN to fix limitation problems
817 # generate multiple IN to fix limitation problems
820 *in_filter_generator(RepoGroup.group_id, allowed_ids))
818 *in_filter_generator(RepoGroup.group_id, allowed_ids))
821 ) \
819 ) \
822 .count()
820 .count()
823
821
824 base_q = Session.query(
822 base_q = Session.query(
825 RepoGroup.group_name,
823 RepoGroup.group_name,
826 RepoGroup.group_name_hash,
824 RepoGroup.group_name_hash,
827 RepoGroup.group_description,
825 RepoGroup.group_description,
828 RepoGroup.group_id,
826 RepoGroup.group_id,
829 RepoGroup.personal,
827 RepoGroup.personal,
830 RepoGroup.updated_on,
828 RepoGroup.updated_on,
831 RepoGroup._changeset_cache,
829 RepoGroup._changeset_cache,
832 User,
830 User,
833 ) \
831 ) \
834 .filter(RepoGroup.group_parent_id == repo_group_id) \
832 .filter(RepoGroup.group_parent_id == repo_group_id) \
835 .filter(or_(
833 .filter(or_(
836 # generate multiple IN to fix limitation problems
834 # generate multiple IN to fix limitation problems
837 *in_filter_generator(RepoGroup.group_id, allowed_ids))
835 *in_filter_generator(RepoGroup.group_id, allowed_ids))
838 ) \
836 ) \
839 .join(User, User.user_id == RepoGroup.user_id) \
837 .join(User, User.user_id == RepoGroup.user_id) \
840 .group_by(RepoGroup, User)
838 .group_by(RepoGroup, User)
841
839
842 repo_groups_data_total_filtered_count = base_q.count()
840 repo_groups_data_total_filtered_count = base_q.count()
843
841
844 sort_defined = False
842 sort_defined = False
845
843
846 if order_by == 'group_name':
844 if order_by == 'group_name':
847 sort_col = func.lower(RepoGroup.group_name)
845 sort_col = func.lower(RepoGroup.group_name)
848 sort_defined = True
846 sort_defined = True
849 elif order_by == 'user_username':
847 elif order_by == 'user_username':
850 sort_col = User.username
848 sort_col = User.username
851 else:
849 else:
852 sort_col = getattr(RepoGroup, order_by, None)
850 sort_col = getattr(RepoGroup, order_by, None)
853
851
854 if sort_defined or sort_col:
852 if sort_defined or sort_col:
855 if order_dir == 'asc':
853 if order_dir == 'asc':
856 sort_col = sort_col.asc()
854 sort_col = sort_col.asc()
857 else:
855 else:
858 sort_col = sort_col.desc()
856 sort_col = sort_col.desc()
859
857
860 base_q = base_q.order_by(sort_col)
858 base_q = base_q.order_by(sort_col)
861 base_q = base_q.offset(start).limit(limit)
859 base_q = base_q.offset(start).limit(limit)
862
860
863 repo_group_list = base_q.all()
861 repo_group_list = base_q.all()
864
862
865 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
863 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
866 repo_group_list=repo_group_list, admin=False)
864 repo_group_list=repo_group_list, admin=False)
867
865
868 data = ({
866 data = ({
869 'draw': draw,
867 'draw': draw,
870 'data': repo_groups_data,
868 'data': repo_groups_data,
871 'recordsTotal': repo_groups_data_total_count,
869 'recordsTotal': repo_groups_data_total_count,
872 'recordsFiltered': repo_groups_data_total_filtered_count,
870 'recordsFiltered': repo_groups_data_total_filtered_count,
873 })
871 })
874 return data
872 return data
875
873
876 def _get_defaults(self, repo_group_name):
874 def _get_defaults(self, repo_group_name):
877 repo_group = RepoGroup.get_by_group_name(repo_group_name)
875 repo_group = RepoGroup.get_by_group_name(repo_group_name)
878
876
879 if repo_group is None:
877 if repo_group is None:
880 return None
878 return None
881
879
882 defaults = repo_group.get_dict()
880 defaults = repo_group.get_dict()
883 defaults['repo_group_name'] = repo_group.name
881 defaults['repo_group_name'] = repo_group.name
884 defaults['repo_group_description'] = repo_group.group_description
882 defaults['repo_group_description'] = repo_group.group_description
885 defaults['repo_group_enable_locking'] = repo_group.enable_locking
883 defaults['repo_group_enable_locking'] = repo_group.enable_locking
886
884
887 # we use -1 as this is how in HTML, we mark an empty group
885 # we use -1 as this is how in HTML, we mark an empty group
888 defaults['repo_group'] = defaults['group_parent_id'] or -1
886 defaults['repo_group'] = defaults['group_parent_id'] or -1
889
887
890 # fill owner
888 # fill owner
891 if repo_group.user:
889 if repo_group.user:
892 defaults.update({'user': repo_group.user.username})
890 defaults.update({'user': repo_group.user.username})
893 else:
891 else:
894 replacement_user = User.get_first_super_admin().username
892 replacement_user = User.get_first_super_admin().username
895 defaults.update({'user': replacement_user})
893 defaults.update({'user': replacement_user})
896
894
897 return defaults
895 return defaults
@@ -1,102 +1,100 b''
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21
19
22 """
20 """
23 repository permission model for RhodeCode
21 repository permission model for RhodeCode
24 """
22 """
25
23
26 import logging
24 import logging
27 from rhodecode.model import BaseModel
25 from rhodecode.model import BaseModel
28 from rhodecode.model.db import UserRepoToPerm, UserGroupRepoToPerm, \
26 from rhodecode.model.db import UserRepoToPerm, UserGroupRepoToPerm, \
29 Permission
27 Permission
30
28
31 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
32
30
33
31
34 class RepositoryPermissionModel(BaseModel):
32 class RepositoryPermissionModel(BaseModel):
35
33
36 cls = UserRepoToPerm
34 cls = UserRepoToPerm
37
35
38 def get_user_permission(self, repository, user):
36 def get_user_permission(self, repository, user):
39 repository = self._get_repo(repository)
37 repository = self._get_repo(repository)
40 user = self._get_user(user)
38 user = self._get_user(user)
41
39
42 return UserRepoToPerm.query() \
40 return UserRepoToPerm.query() \
43 .filter(UserRepoToPerm.user == user) \
41 .filter(UserRepoToPerm.user == user) \
44 .filter(UserRepoToPerm.repository == repository) \
42 .filter(UserRepoToPerm.repository == repository) \
45 .scalar()
43 .scalar()
46
44
47 def update_user_permission(self, repository, user, permission):
45 def update_user_permission(self, repository, user, permission):
48 permission = Permission.get_by_key(permission)
46 permission = Permission.get_by_key(permission)
49 current = self.get_user_permission(repository, user)
47 current = self.get_user_permission(repository, user)
50 if current:
48 if current:
51 if current.permission is not permission:
49 if current.permission is not permission:
52 current.permission = permission
50 current.permission = permission
53 else:
51 else:
54 p = UserRepoToPerm()
52 p = UserRepoToPerm()
55 p.user = user
53 p.user = user
56 p.repository = repository
54 p.repository = repository
57 p.permission = permission
55 p.permission = permission
58 self.sa.add(p)
56 self.sa.add(p)
59
57
60 def delete_user_permission(self, repository, user):
58 def delete_user_permission(self, repository, user):
61 current = self.get_user_permission(repository, user)
59 current = self.get_user_permission(repository, user)
62 if current:
60 if current:
63 self.sa.delete(current)
61 self.sa.delete(current)
64
62
65 def get_users_group_permission(self, repository, users_group):
63 def get_users_group_permission(self, repository, users_group):
66 return UserGroupRepoToPerm.query() \
64 return UserGroupRepoToPerm.query() \
67 .filter(UserGroupRepoToPerm.users_group == users_group) \
65 .filter(UserGroupRepoToPerm.users_group == users_group) \
68 .filter(UserGroupRepoToPerm.repository == repository) \
66 .filter(UserGroupRepoToPerm.repository == repository) \
69 .scalar()
67 .scalar()
70
68
71 def update_user_group_permission(self, repository, users_group,
69 def update_user_group_permission(self, repository, users_group,
72 permission):
70 permission):
73 permission = Permission.get_by_key(permission)
71 permission = Permission.get_by_key(permission)
74 current = self.get_users_group_permission(repository, users_group)
72 current = self.get_users_group_permission(repository, users_group)
75 if current:
73 if current:
76 if current.permission is not permission:
74 if current.permission is not permission:
77 current.permission = permission
75 current.permission = permission
78 else:
76 else:
79 p = UserGroupRepoToPerm()
77 p = UserGroupRepoToPerm()
80 p.users_group = users_group
78 p.users_group = users_group
81 p.repository = repository
79 p.repository = repository
82 p.permission = permission
80 p.permission = permission
83 self.sa.add(p)
81 self.sa.add(p)
84
82
85 def delete_users_group_permission(self, repository, users_group):
83 def delete_users_group_permission(self, repository, users_group):
86 current = self.get_users_group_permission(repository, users_group)
84 current = self.get_users_group_permission(repository, users_group)
87 if current:
85 if current:
88 self.sa.delete(current)
86 self.sa.delete(current)
89
87
90 def update_or_delete_user_permission(self, repository, user, permission):
88 def update_or_delete_user_permission(self, repository, user, permission):
91 if permission:
89 if permission:
92 self.update_user_permission(repository, user, permission)
90 self.update_user_permission(repository, user, permission)
93 else:
91 else:
94 self.delete_user_permission(repository, user)
92 self.delete_user_permission(repository, user)
95
93
96 def update_or_delete_users_group_permission(
94 def update_or_delete_users_group_permission(
97 self, repository, user_group, permission):
95 self, repository, user_group, permission):
98 if permission:
96 if permission:
99 self.update_user_group_permission(
97 self.update_user_group_permission(
100 repository, user_group, permission)
98 repository, user_group, permission)
101 else:
99 else:
102 self.delete_users_group_permission(repository, user_group)
100 self.delete_users_group_permission(repository, user_group)
@@ -1,1042 +1,1041 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 """
19 """
21 Scm model for RhodeCode
20 Scm model for RhodeCode
22 """
21 """
23
22
24 import os.path
23 import os.path
25 import traceback
24 import traceback
26 import logging
25 import logging
27 import io
26 import io
28
27
29 from sqlalchemy import func
28 from sqlalchemy import func
30 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
31
30
32 import rhodecode
31 import rhodecode
33 from rhodecode.lib.str_utils import safe_bytes
32 from rhodecode.lib.str_utils import safe_bytes
34 from rhodecode.lib.vcs import get_backend
33 from rhodecode.lib.vcs import get_backend
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib import helpers as h, rc_cache
37 from rhodecode.lib import helpers as h, rc_cache
39 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 HasUserGroupPermissionAny)
40 HasUserGroupPermissionAny)
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 from rhodecode.lib import hooks_utils
42 from rhodecode.lib import hooks_utils
44 from rhodecode.lib.utils import (
43 from rhodecode.lib.utils import (
45 get_filesystem_repos, make_db_config)
44 get_filesystem_repos, make_db_config)
46 from rhodecode.lib.str_utils import safe_str
45 from rhodecode.lib.str_utils import safe_str
47 from rhodecode.lib.system_info import get_system_info
46 from rhodecode.lib.system_info import get_system_info
48 from rhodecode.model import BaseModel
47 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
48 from rhodecode.model.db import (
50 or_, false,
49 or_, false,
51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
52 PullRequest, FileStore)
51 PullRequest, FileStore)
53 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
55
54
56 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
57
56
58
57
59 class UserTemp(object):
58 class UserTemp(object):
60 def __init__(self, user_id):
59 def __init__(self, user_id):
61 self.user_id = user_id
60 self.user_id = user_id
62
61
63 def __repr__(self):
62 def __repr__(self):
64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
65
64
66
65
67 class RepoTemp(object):
66 class RepoTemp(object):
68 def __init__(self, repo_id):
67 def __init__(self, repo_id):
69 self.repo_id = repo_id
68 self.repo_id = repo_id
70
69
71 def __repr__(self):
70 def __repr__(self):
72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
73
72
74
73
75 class SimpleCachedRepoList(object):
74 class SimpleCachedRepoList(object):
76 """
75 """
77 Lighter version of of iteration of repos without the scm initialisation,
76 Lighter version of of iteration of repos without the scm initialisation,
78 and with cache usage
77 and with cache usage
79 """
78 """
80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 self.db_repo_list = db_repo_list
80 self.db_repo_list = db_repo_list
82 self.repos_path = repos_path
81 self.repos_path = repos_path
83 self.order_by = order_by
82 self.order_by = order_by
84 self.reversed = (order_by or '').startswith('-')
83 self.reversed = (order_by or '').startswith('-')
85 if not perm_set:
84 if not perm_set:
86 perm_set = ['repository.read', 'repository.write',
85 perm_set = ['repository.read', 'repository.write',
87 'repository.admin']
86 'repository.admin']
88 self.perm_set = perm_set
87 self.perm_set = perm_set
89
88
90 def __len__(self):
89 def __len__(self):
91 return len(self.db_repo_list)
90 return len(self.db_repo_list)
92
91
93 def __repr__(self):
92 def __repr__(self):
94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
95
94
96 def __iter__(self):
95 def __iter__(self):
97 for dbr in self.db_repo_list:
96 for dbr in self.db_repo_list:
98 # check permission at this level
97 # check permission at this level
99 has_perm = HasRepoPermissionAny(*self.perm_set)(
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
100 dbr.repo_name, 'SimpleCachedRepoList check')
99 dbr.repo_name, 'SimpleCachedRepoList check')
101 if not has_perm:
100 if not has_perm:
102 continue
101 continue
103
102
104 tmp_d = {
103 tmp_d = {
105 'name': dbr.repo_name,
104 'name': dbr.repo_name,
106 'dbrepo': dbr.get_dict(),
105 'dbrepo': dbr.get_dict(),
107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
108 }
107 }
109 yield tmp_d
108 yield tmp_d
110
109
111
110
112 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
113
112
114 def __init__(
113 def __init__(
115 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
116 extra_kwargs=None):
115 extra_kwargs=None):
117 """
116 """
118 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
119 checking permission for them from perm_set var
118 checking permission for them from perm_set var
120
119
121 :param obj_list: list of db objects
120 :param obj_list: list of db objects
122 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
123 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
124 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
125 """
124 """
126 self.obj_list = obj_list
125 self.obj_list = obj_list
127 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
128 self.perm_set = perm_set
127 self.perm_set = perm_set
129 self.perm_checker = perm_checker(*self.perm_set)
128 self.perm_checker = perm_checker(*self.perm_set)
130 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
131
130
132 def __len__(self):
131 def __len__(self):
133 return len(self.obj_list)
132 return len(self.obj_list)
134
133
135 def __repr__(self):
134 def __repr__(self):
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
137
136
138 def __iter__(self):
137 def __iter__(self):
139 for db_obj in self.obj_list:
138 for db_obj in self.obj_list:
140 # check permission at this level
139 # check permission at this level
141 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
140 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
142 name = db_obj.__dict__.get(self.obj_attr, None)
141 name = db_obj.__dict__.get(self.obj_attr, None)
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
144 continue
143 continue
145
144
146 yield db_obj
145 yield db_obj
147
146
148
147
149 class RepoList(_PermCheckIterator):
148 class RepoList(_PermCheckIterator):
150
149
151 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152 if not perm_set:
151 if not perm_set:
153 perm_set = ['repository.read', 'repository.write', 'repository.admin']
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
154
153
155 super(RepoList, self).__init__(
154 super().__init__(
156 obj_list=db_repo_list,
155 obj_list=db_repo_list,
157 obj_attr='_repo_name', perm_set=perm_set,
156 obj_attr='_repo_name', perm_set=perm_set,
158 perm_checker=HasRepoPermissionAny,
157 perm_checker=HasRepoPermissionAny,
159 extra_kwargs=extra_kwargs)
158 extra_kwargs=extra_kwargs)
160
159
161
160
162 class RepoGroupList(_PermCheckIterator):
161 class RepoGroupList(_PermCheckIterator):
163
162
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 if not perm_set:
164 if not perm_set:
166 perm_set = ['group.read', 'group.write', 'group.admin']
165 perm_set = ['group.read', 'group.write', 'group.admin']
167
166
168 super(RepoGroupList, self).__init__(
167 super().__init__(
169 obj_list=db_repo_group_list,
168 obj_list=db_repo_group_list,
170 obj_attr='_group_name', perm_set=perm_set,
169 obj_attr='_group_name', perm_set=perm_set,
171 perm_checker=HasRepoGroupPermissionAny,
170 perm_checker=HasRepoGroupPermissionAny,
172 extra_kwargs=extra_kwargs)
171 extra_kwargs=extra_kwargs)
173
172
174
173
175 class UserGroupList(_PermCheckIterator):
174 class UserGroupList(_PermCheckIterator):
176
175
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 if not perm_set:
177 if not perm_set:
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180
179
181 super(UserGroupList, self).__init__(
180 super().__init__(
182 obj_list=db_user_group_list,
181 obj_list=db_user_group_list,
183 obj_attr='users_group_name', perm_set=perm_set,
182 obj_attr='users_group_name', perm_set=perm_set,
184 perm_checker=HasUserGroupPermissionAny,
183 perm_checker=HasUserGroupPermissionAny,
185 extra_kwargs=extra_kwargs)
184 extra_kwargs=extra_kwargs)
186
185
187
186
188 class ScmModel(BaseModel):
187 class ScmModel(BaseModel):
189 """
188 """
190 Generic Scm Model
189 Generic Scm Model
191 """
190 """
192
191
193 @LazyProperty
192 @LazyProperty
194 def repos_path(self):
193 def repos_path(self):
195 """
194 """
196 Gets the repositories root path from database
195 Gets the repositories root path from database
197 """
196 """
198
197
199 settings_model = VcsSettingsModel(sa=self.sa)
198 settings_model = VcsSettingsModel(sa=self.sa)
200 return settings_model.get_repos_location()
199 return settings_model.get_repos_location()
201
200
202 def repo_scan(self, repos_path=None):
201 def repo_scan(self, repos_path=None):
203 """
202 """
204 Listing of repositories in given path. This path should not be a
203 Listing of repositories in given path. This path should not be a
205 repository itself. Return a dictionary of repository objects
204 repository itself. Return a dictionary of repository objects
206
205
207 :param repos_path: path to directory containing repositories
206 :param repos_path: path to directory containing repositories
208 """
207 """
209
208
210 if repos_path is None:
209 if repos_path is None:
211 repos_path = self.repos_path
210 repos_path = self.repos_path
212
211
213 log.info('scanning for repositories in %s', repos_path)
212 log.info('scanning for repositories in %s', repos_path)
214
213
215 config = make_db_config()
214 config = make_db_config()
216 config.set('extensions', 'largefiles', '')
215 config.set('extensions', 'largefiles', '')
217 repos = {}
216 repos = {}
218
217
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 # name need to be decomposed and put back together using the /
219 # name need to be decomposed and put back together using the /
221 # since this is internal storage separator for rhodecode
220 # since this is internal storage separator for rhodecode
222 name = Repository.normalize_repo_name(name)
221 name = Repository.normalize_repo_name(name)
223
222
224 try:
223 try:
225 if name in repos:
224 if name in repos:
226 raise RepositoryError('Duplicate repository name %s '
225 raise RepositoryError('Duplicate repository name %s '
227 'found in %s' % (name, path))
226 'found in %s' % (name, path))
228 elif path[0] in rhodecode.BACKENDS:
227 elif path[0] in rhodecode.BACKENDS:
229 backend = get_backend(path[0])
228 backend = get_backend(path[0])
230 repos[name] = backend(path[1], config=config,
229 repos[name] = backend(path[1], config=config,
231 with_wire={"cache": False})
230 with_wire={"cache": False})
232 except OSError:
231 except OSError:
233 continue
232 continue
234 except RepositoryError:
233 except RepositoryError:
235 log.exception('Failed to create a repo')
234 log.exception('Failed to create a repo')
236 continue
235 continue
237
236
238 log.debug('found %s paths with repositories', len(repos))
237 log.debug('found %s paths with repositories', len(repos))
239 return repos
238 return repos
240
239
241 def get_repos(self, all_repos=None, sort_key=None):
240 def get_repos(self, all_repos=None, sort_key=None):
242 """
241 """
243 Get all repositories from db and for each repo create it's
242 Get all repositories from db and for each repo create it's
244 backend instance and fill that backed with information from database
243 backend instance and fill that backed with information from database
245
244
246 :param all_repos: list of repository names as strings
245 :param all_repos: list of repository names as strings
247 give specific repositories list, good for filtering
246 give specific repositories list, good for filtering
248
247
249 :param sort_key: initial sorting of repositories
248 :param sort_key: initial sorting of repositories
250 """
249 """
251 if all_repos is None:
250 if all_repos is None:
252 all_repos = self.sa.query(Repository)\
251 all_repos = self.sa.query(Repository)\
253 .filter(Repository.group_id == None)\
252 .filter(Repository.group_id == None)\
254 .order_by(func.lower(Repository.repo_name)).all()
253 .order_by(func.lower(Repository.repo_name)).all()
255 repo_iter = SimpleCachedRepoList(
254 repo_iter = SimpleCachedRepoList(
256 all_repos, repos_path=self.repos_path, order_by=sort_key)
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
257 return repo_iter
256 return repo_iter
258
257
259 def get_repo_groups(self, all_groups=None):
258 def get_repo_groups(self, all_groups=None):
260 if all_groups is None:
259 if all_groups is None:
261 all_groups = RepoGroup.query()\
260 all_groups = RepoGroup.query()\
262 .filter(RepoGroup.group_parent_id == None).all()
261 .filter(RepoGroup.group_parent_id == None).all()
263 return [x for x in RepoGroupList(all_groups)]
262 return [x for x in RepoGroupList(all_groups)]
264
263
265 def mark_for_invalidation(self, repo_name, delete=False):
264 def mark_for_invalidation(self, repo_name, delete=False):
266 """
265 """
267 Mark caches of this repo invalid in the database. `delete` flag
266 Mark caches of this repo invalid in the database. `delete` flag
268 removes the cache entries
267 removes the cache entries
269
268
270 :param repo_name: the repo_name for which caches should be marked
269 :param repo_name: the repo_name for which caches should be marked
271 invalid, or deleted
270 invalid, or deleted
272 :param delete: delete the entry keys instead of setting bool
271 :param delete: delete the entry keys instead of setting bool
273 flag on them, and also purge caches used by the dogpile
272 flag on them, and also purge caches used by the dogpile
274 """
273 """
275 repo = Repository.get_by_repo_name(repo_name)
274 repo = Repository.get_by_repo_name(repo_name)
276
275
277 if repo:
276 if repo:
278 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
279 repo_id=repo.repo_id)
278 repo_id=repo.repo_id)
280 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
281
280
282 repo_id = repo.repo_id
281 repo_id = repo.repo_id
283 config = repo._config
282 config = repo._config
284 config.set('extensions', 'largefiles', '')
283 config.set('extensions', 'largefiles', '')
285 repo.update_commit_cache(config=config, cs_cache=None)
284 repo.update_commit_cache(config=config, cs_cache=None)
286 if delete:
285 if delete:
287 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
286 cache_namespace_uid = f'cache_repo.{repo_id}'
288 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
287 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
289
288
290 def toggle_following_repo(self, follow_repo_id, user_id):
289 def toggle_following_repo(self, follow_repo_id, user_id):
291
290
292 f = self.sa.query(UserFollowing)\
291 f = self.sa.query(UserFollowing)\
293 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
294 .filter(UserFollowing.user_id == user_id).scalar()
293 .filter(UserFollowing.user_id == user_id).scalar()
295
294
296 if f is not None:
295 if f is not None:
297 try:
296 try:
298 self.sa.delete(f)
297 self.sa.delete(f)
299 return
298 return
300 except Exception:
299 except Exception:
301 log.error(traceback.format_exc())
300 log.error(traceback.format_exc())
302 raise
301 raise
303
302
304 try:
303 try:
305 f = UserFollowing()
304 f = UserFollowing()
306 f.user_id = user_id
305 f.user_id = user_id
307 f.follows_repo_id = follow_repo_id
306 f.follows_repo_id = follow_repo_id
308 self.sa.add(f)
307 self.sa.add(f)
309 except Exception:
308 except Exception:
310 log.error(traceback.format_exc())
309 log.error(traceback.format_exc())
311 raise
310 raise
312
311
313 def toggle_following_user(self, follow_user_id, user_id):
312 def toggle_following_user(self, follow_user_id, user_id):
314 f = self.sa.query(UserFollowing)\
313 f = self.sa.query(UserFollowing)\
315 .filter(UserFollowing.follows_user_id == follow_user_id)\
314 .filter(UserFollowing.follows_user_id == follow_user_id)\
316 .filter(UserFollowing.user_id == user_id).scalar()
315 .filter(UserFollowing.user_id == user_id).scalar()
317
316
318 if f is not None:
317 if f is not None:
319 try:
318 try:
320 self.sa.delete(f)
319 self.sa.delete(f)
321 return
320 return
322 except Exception:
321 except Exception:
323 log.error(traceback.format_exc())
322 log.error(traceback.format_exc())
324 raise
323 raise
325
324
326 try:
325 try:
327 f = UserFollowing()
326 f = UserFollowing()
328 f.user_id = user_id
327 f.user_id = user_id
329 f.follows_user_id = follow_user_id
328 f.follows_user_id = follow_user_id
330 self.sa.add(f)
329 self.sa.add(f)
331 except Exception:
330 except Exception:
332 log.error(traceback.format_exc())
331 log.error(traceback.format_exc())
333 raise
332 raise
334
333
335 def is_following_repo(self, repo_name, user_id, cache=False):
334 def is_following_repo(self, repo_name, user_id, cache=False):
336 r = self.sa.query(Repository)\
335 r = self.sa.query(Repository)\
337 .filter(Repository.repo_name == repo_name).scalar()
336 .filter(Repository.repo_name == repo_name).scalar()
338
337
339 f = self.sa.query(UserFollowing)\
338 f = self.sa.query(UserFollowing)\
340 .filter(UserFollowing.follows_repository == r)\
339 .filter(UserFollowing.follows_repository == r)\
341 .filter(UserFollowing.user_id == user_id).scalar()
340 .filter(UserFollowing.user_id == user_id).scalar()
342
341
343 return f is not None
342 return f is not None
344
343
345 def is_following_user(self, username, user_id, cache=False):
344 def is_following_user(self, username, user_id, cache=False):
346 u = User.get_by_username(username)
345 u = User.get_by_username(username)
347
346
348 f = self.sa.query(UserFollowing)\
347 f = self.sa.query(UserFollowing)\
349 .filter(UserFollowing.follows_user == u)\
348 .filter(UserFollowing.follows_user == u)\
350 .filter(UserFollowing.user_id == user_id).scalar()
349 .filter(UserFollowing.user_id == user_id).scalar()
351
350
352 return f is not None
351 return f is not None
353
352
354 def get_followers(self, repo):
353 def get_followers(self, repo):
355 repo = self._get_repo(repo)
354 repo = self._get_repo(repo)
356
355
357 return self.sa.query(UserFollowing)\
356 return self.sa.query(UserFollowing)\
358 .filter(UserFollowing.follows_repository == repo).count()
357 .filter(UserFollowing.follows_repository == repo).count()
359
358
360 def get_forks(self, repo):
359 def get_forks(self, repo):
361 repo = self._get_repo(repo)
360 repo = self._get_repo(repo)
362 return self.sa.query(Repository)\
361 return self.sa.query(Repository)\
363 .filter(Repository.fork == repo).count()
362 .filter(Repository.fork == repo).count()
364
363
365 def get_pull_requests(self, repo):
364 def get_pull_requests(self, repo):
366 repo = self._get_repo(repo)
365 repo = self._get_repo(repo)
367 return self.sa.query(PullRequest)\
366 return self.sa.query(PullRequest)\
368 .filter(PullRequest.target_repo == repo)\
367 .filter(PullRequest.target_repo == repo)\
369 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
368 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
370
369
371 def get_artifacts(self, repo):
370 def get_artifacts(self, repo):
372 repo = self._get_repo(repo)
371 repo = self._get_repo(repo)
373 return self.sa.query(FileStore)\
372 return self.sa.query(FileStore)\
374 .filter(FileStore.repo == repo)\
373 .filter(FileStore.repo == repo)\
375 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
374 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
376
375
377 def mark_as_fork(self, repo, fork, user):
376 def mark_as_fork(self, repo, fork, user):
378 repo = self._get_repo(repo)
377 repo = self._get_repo(repo)
379 fork = self._get_repo(fork)
378 fork = self._get_repo(fork)
380 if fork and repo.repo_id == fork.repo_id:
379 if fork and repo.repo_id == fork.repo_id:
381 raise Exception("Cannot set repository as fork of itself")
380 raise Exception("Cannot set repository as fork of itself")
382
381
383 if fork and repo.repo_type != fork.repo_type:
382 if fork and repo.repo_type != fork.repo_type:
384 raise RepositoryError(
383 raise RepositoryError(
385 "Cannot set repository as fork of repository with other type")
384 "Cannot set repository as fork of repository with other type")
386
385
387 repo.fork = fork
386 repo.fork = fork
388 self.sa.add(repo)
387 self.sa.add(repo)
389 return repo
388 return repo
390
389
391 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
392 dbrepo = self._get_repo(repo)
391 dbrepo = self._get_repo(repo)
393 remote_uri = remote_uri or dbrepo.clone_uri
392 remote_uri = remote_uri or dbrepo.clone_uri
394 if not remote_uri:
393 if not remote_uri:
395 raise Exception("This repository doesn't have a clone uri")
394 raise Exception("This repository doesn't have a clone uri")
396
395
397 repo = dbrepo.scm_instance(cache=False)
396 repo = dbrepo.scm_instance(cache=False)
398 repo.config.clear_section('hooks')
397 repo.config.clear_section('hooks')
399
398
400 try:
399 try:
401 # NOTE(marcink): add extra validation so we skip invalid urls
400 # NOTE(marcink): add extra validation so we skip invalid urls
402 # this is due this tasks can be executed via scheduler without
401 # this is due this tasks can be executed via scheduler without
403 # proper validation of remote_uri
402 # proper validation of remote_uri
404 if validate_uri:
403 if validate_uri:
405 config = make_db_config(clear_session=False)
404 config = make_db_config(clear_session=False)
406 url_validator(remote_uri, dbrepo.repo_type, config)
405 url_validator(remote_uri, dbrepo.repo_type, config)
407 except InvalidCloneUrl:
406 except InvalidCloneUrl:
408 raise
407 raise
409
408
410 repo_name = dbrepo.repo_name
409 repo_name = dbrepo.repo_name
411 try:
410 try:
412 # TODO: we need to make sure those operations call proper hooks !
411 # TODO: we need to make sure those operations call proper hooks !
413 repo.fetch(remote_uri)
412 repo.fetch(remote_uri)
414
413
415 self.mark_for_invalidation(repo_name)
414 self.mark_for_invalidation(repo_name)
416 except Exception:
415 except Exception:
417 log.error(traceback.format_exc())
416 log.error(traceback.format_exc())
418 raise
417 raise
419
418
420 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
421 dbrepo = self._get_repo(repo)
420 dbrepo = self._get_repo(repo)
422 remote_uri = remote_uri or dbrepo.push_uri
421 remote_uri = remote_uri or dbrepo.push_uri
423 if not remote_uri:
422 if not remote_uri:
424 raise Exception("This repository doesn't have a clone uri")
423 raise Exception("This repository doesn't have a clone uri")
425
424
426 repo = dbrepo.scm_instance(cache=False)
425 repo = dbrepo.scm_instance(cache=False)
427 repo.config.clear_section('hooks')
426 repo.config.clear_section('hooks')
428
427
429 try:
428 try:
430 # NOTE(marcink): add extra validation so we skip invalid urls
429 # NOTE(marcink): add extra validation so we skip invalid urls
431 # this is due this tasks can be executed via scheduler without
430 # this is due this tasks can be executed via scheduler without
432 # proper validation of remote_uri
431 # proper validation of remote_uri
433 if validate_uri:
432 if validate_uri:
434 config = make_db_config(clear_session=False)
433 config = make_db_config(clear_session=False)
435 url_validator(remote_uri, dbrepo.repo_type, config)
434 url_validator(remote_uri, dbrepo.repo_type, config)
436 except InvalidCloneUrl:
435 except InvalidCloneUrl:
437 raise
436 raise
438
437
439 try:
438 try:
440 repo.push(remote_uri)
439 repo.push(remote_uri)
441 except Exception:
440 except Exception:
442 log.error(traceback.format_exc())
441 log.error(traceback.format_exc())
443 raise
442 raise
444
443
445 def commit_change(self, repo, repo_name, commit, user, author, message,
444 def commit_change(self, repo, repo_name, commit, user, author, message,
446 content: bytes, f_path: bytes):
445 content: bytes, f_path: bytes):
447 """
446 """
448 Commits changes
447 Commits changes
449 """
448 """
450 user = self._get_user(user)
449 user = self._get_user(user)
451
450
452 # message and author needs to be unicode
451 # message and author needs to be unicode
453 # proper backend should then translate that into required type
452 # proper backend should then translate that into required type
454 message = safe_str(message)
453 message = safe_str(message)
455 author = safe_str(author)
454 author = safe_str(author)
456 imc = repo.in_memory_commit
455 imc = repo.in_memory_commit
457 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
456 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
458 try:
457 try:
459 # TODO: handle pre-push action !
458 # TODO: handle pre-push action !
460 tip = imc.commit(
459 tip = imc.commit(
461 message=message, author=author, parents=[commit],
460 message=message, author=author, parents=[commit],
462 branch=commit.branch)
461 branch=commit.branch)
463 except Exception as e:
462 except Exception as e:
464 log.error(traceback.format_exc())
463 log.error(traceback.format_exc())
465 raise IMCCommitError(str(e))
464 raise IMCCommitError(str(e))
466 finally:
465 finally:
467 # always clear caches, if commit fails we want fresh object also
466 # always clear caches, if commit fails we want fresh object also
468 self.mark_for_invalidation(repo_name)
467 self.mark_for_invalidation(repo_name)
469
468
470 # We trigger the post-push action
469 # We trigger the post-push action
471 hooks_utils.trigger_post_push_hook(
470 hooks_utils.trigger_post_push_hook(
472 username=user.username, action='push_local', hook_type='post_push',
471 username=user.username, action='push_local', hook_type='post_push',
473 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
472 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
474 return tip
473 return tip
475
474
476 def _sanitize_path(self, f_path: bytes):
475 def _sanitize_path(self, f_path: bytes):
477 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
478 raise NonRelativePathError(b'%b is not an relative path' % f_path)
477 raise NonRelativePathError(b'%b is not an relative path' % f_path)
479 if f_path:
478 if f_path:
480 f_path = os.path.normpath(f_path)
479 f_path = os.path.normpath(f_path)
481 return f_path
480 return f_path
482
481
483 def get_dirnode_metadata(self, request, commit, dir_node):
482 def get_dirnode_metadata(self, request, commit, dir_node):
484 if not dir_node.is_dir():
483 if not dir_node.is_dir():
485 return []
484 return []
486
485
487 data = []
486 data = []
488 for node in dir_node:
487 for node in dir_node:
489 if not node.is_file():
488 if not node.is_file():
490 # we skip file-nodes
489 # we skip file-nodes
491 continue
490 continue
492
491
493 last_commit = node.last_commit
492 last_commit = node.last_commit
494 last_commit_date = last_commit.date
493 last_commit_date = last_commit.date
495 data.append({
494 data.append({
496 'name': node.name,
495 'name': node.name,
497 'size': h.format_byte_size_binary(node.size),
496 'size': h.format_byte_size_binary(node.size),
498 'modified_at': h.format_date(last_commit_date),
497 'modified_at': h.format_date(last_commit_date),
499 'modified_ts': last_commit_date.isoformat(),
498 'modified_ts': last_commit_date.isoformat(),
500 'revision': last_commit.revision,
499 'revision': last_commit.revision,
501 'short_id': last_commit.short_id,
500 'short_id': last_commit.short_id,
502 'message': h.escape(last_commit.message),
501 'message': h.escape(last_commit.message),
503 'author': h.escape(last_commit.author),
502 'author': h.escape(last_commit.author),
504 'user_profile': h.gravatar_with_user(
503 'user_profile': h.gravatar_with_user(
505 request, last_commit.author),
504 request, last_commit.author),
506 })
505 })
507
506
508 return data
507 return data
509
508
510 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
509 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
511 extended_info=False, content=False, max_file_bytes=None):
510 extended_info=False, content=False, max_file_bytes=None):
512 """
511 """
513 recursive walk in root dir and return a set of all path in that dir
512 recursive walk in root dir and return a set of all path in that dir
514 based on repository walk function
513 based on repository walk function
515
514
516 :param repo_name: name of repository
515 :param repo_name: name of repository
517 :param commit_id: commit id for which to list nodes
516 :param commit_id: commit id for which to list nodes
518 :param root_path: root path to list
517 :param root_path: root path to list
519 :param flat: return as a list, if False returns a dict with description
518 :param flat: return as a list, if False returns a dict with description
520 :param extended_info: show additional info such as md5, binary, size etc
519 :param extended_info: show additional info such as md5, binary, size etc
521 :param content: add nodes content to the return data
520 :param content: add nodes content to the return data
522 :param max_file_bytes: will not return file contents over this limit
521 :param max_file_bytes: will not return file contents over this limit
523
522
524 """
523 """
525 _files = list()
524 _files = list()
526 _dirs = list()
525 _dirs = list()
527
526
528 try:
527 try:
529 _repo = self._get_repo(repo_name)
528 _repo = self._get_repo(repo_name)
530 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
529 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
531 root_path = root_path.lstrip('/')
530 root_path = root_path.lstrip('/')
532
531
533 # get RootNode, inject pre-load options before walking
532 # get RootNode, inject pre-load options before walking
534 top_node = commit.get_node(root_path)
533 top_node = commit.get_node(root_path)
535 extended_info_pre_load = []
534 extended_info_pre_load = []
536 if extended_info:
535 if extended_info:
537 extended_info_pre_load += ['md5']
536 extended_info_pre_load += ['md5']
538 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
537 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
539
538
540 for __, dirs, files in commit.walk(top_node):
539 for __, dirs, files in commit.walk(top_node):
541
540
542 for f in files:
541 for f in files:
543 _content = None
542 _content = None
544 _data = f_name = f.str_path
543 _data = f_name = f.str_path
545
544
546 if not flat:
545 if not flat:
547 _data = {
546 _data = {
548 "name": h.escape(f_name),
547 "name": h.escape(f_name),
549 "type": "file",
548 "type": "file",
550 }
549 }
551 if extended_info:
550 if extended_info:
552 _data.update({
551 _data.update({
553 "md5": f.md5,
552 "md5": f.md5,
554 "binary": f.is_binary,
553 "binary": f.is_binary,
555 "size": f.size,
554 "size": f.size,
556 "extension": f.extension,
555 "extension": f.extension,
557 "mimetype": f.mimetype,
556 "mimetype": f.mimetype,
558 "lines": f.lines()[0]
557 "lines": f.lines()[0]
559 })
558 })
560
559
561 if content:
560 if content:
562 over_size_limit = (max_file_bytes is not None
561 over_size_limit = (max_file_bytes is not None
563 and f.size > max_file_bytes)
562 and f.size > max_file_bytes)
564 full_content = None
563 full_content = None
565 if not f.is_binary and not over_size_limit:
564 if not f.is_binary and not over_size_limit:
566 full_content = f.str_content
565 full_content = f.str_content
567
566
568 _data.update({
567 _data.update({
569 "content": full_content,
568 "content": full_content,
570 })
569 })
571 _files.append(_data)
570 _files.append(_data)
572
571
573 for d in dirs:
572 for d in dirs:
574 _data = d_name = d.str_path
573 _data = d_name = d.str_path
575 if not flat:
574 if not flat:
576 _data = {
575 _data = {
577 "name": h.escape(d_name),
576 "name": h.escape(d_name),
578 "type": "dir",
577 "type": "dir",
579 }
578 }
580 if extended_info:
579 if extended_info:
581 _data.update({
580 _data.update({
582 "md5": "",
581 "md5": "",
583 "binary": False,
582 "binary": False,
584 "size": 0,
583 "size": 0,
585 "extension": "",
584 "extension": "",
586 })
585 })
587 if content:
586 if content:
588 _data.update({
587 _data.update({
589 "content": None
588 "content": None
590 })
589 })
591 _dirs.append(_data)
590 _dirs.append(_data)
592 except RepositoryError:
591 except RepositoryError:
593 log.exception("Exception in get_nodes")
592 log.exception("Exception in get_nodes")
594 raise
593 raise
595
594
596 return _dirs, _files
595 return _dirs, _files
597
596
598 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
599 """
598 """
600 Generate files for quick filter in files view
599 Generate files for quick filter in files view
601 """
600 """
602
601
603 _files = list()
602 _files = list()
604 _dirs = list()
603 _dirs = list()
605 try:
604 try:
606 _repo = self._get_repo(repo_name)
605 _repo = self._get_repo(repo_name)
607 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
608 root_path = root_path.lstrip('/')
607 root_path = root_path.lstrip('/')
609 for __, dirs, files in commit.walk(root_path):
608 for __, dirs, files in commit.walk(root_path):
610
609
611 for f in files:
610 for f in files:
612
611
613 _data = {
612 _data = {
614 "name": h.escape(f.str_path),
613 "name": h.escape(f.str_path),
615 "type": "file",
614 "type": "file",
616 }
615 }
617
616
618 _files.append(_data)
617 _files.append(_data)
619
618
620 for d in dirs:
619 for d in dirs:
621
620
622 _data = {
621 _data = {
623 "name": h.escape(d.str_path),
622 "name": h.escape(d.str_path),
624 "type": "dir",
623 "type": "dir",
625 }
624 }
626
625
627 _dirs.append(_data)
626 _dirs.append(_data)
628 except RepositoryError:
627 except RepositoryError:
629 log.exception("Exception in get_quick_filter_nodes")
628 log.exception("Exception in get_quick_filter_nodes")
630 raise
629 raise
631
630
632 return _dirs, _files
631 return _dirs, _files
633
632
634 def get_node(self, repo_name, commit_id, file_path,
633 def get_node(self, repo_name, commit_id, file_path,
635 extended_info=False, content=False, max_file_bytes=None, cache=True):
634 extended_info=False, content=False, max_file_bytes=None, cache=True):
636 """
635 """
637 retrieve single node from commit
636 retrieve single node from commit
638 """
637 """
639
638
640 try:
639 try:
641
640
642 _repo = self._get_repo(repo_name)
641 _repo = self._get_repo(repo_name)
643 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
642 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
644
643
645 file_node = commit.get_node(file_path)
644 file_node = commit.get_node(file_path)
646 if file_node.is_dir():
645 if file_node.is_dir():
647 raise RepositoryError('The given path is a directory')
646 raise RepositoryError('The given path is a directory')
648
647
649 _content = None
648 _content = None
650 f_name = file_node.str_path
649 f_name = file_node.str_path
651
650
652 file_data = {
651 file_data = {
653 "name": h.escape(f_name),
652 "name": h.escape(f_name),
654 "type": "file",
653 "type": "file",
655 }
654 }
656
655
657 if extended_info:
656 if extended_info:
658 file_data.update({
657 file_data.update({
659 "extension": file_node.extension,
658 "extension": file_node.extension,
660 "mimetype": file_node.mimetype,
659 "mimetype": file_node.mimetype,
661 })
660 })
662
661
663 if cache:
662 if cache:
664 md5 = file_node.md5
663 md5 = file_node.md5
665 is_binary = file_node.is_binary
664 is_binary = file_node.is_binary
666 size = file_node.size
665 size = file_node.size
667 else:
666 else:
668 is_binary, md5, size, _content = file_node.metadata_uncached()
667 is_binary, md5, size, _content = file_node.metadata_uncached()
669
668
670 file_data.update({
669 file_data.update({
671 "md5": md5,
670 "md5": md5,
672 "binary": is_binary,
671 "binary": is_binary,
673 "size": size,
672 "size": size,
674 })
673 })
675
674
676 if content and cache:
675 if content and cache:
677 # get content + cache
676 # get content + cache
678 size = file_node.size
677 size = file_node.size
679 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
678 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
680 full_content = None
679 full_content = None
681 all_lines = 0
680 all_lines = 0
682 if not file_node.is_binary and not over_size_limit:
681 if not file_node.is_binary and not over_size_limit:
683 full_content = safe_str(file_node.content)
682 full_content = safe_str(file_node.content)
684 all_lines, empty_lines = file_node.count_lines(full_content)
683 all_lines, empty_lines = file_node.count_lines(full_content)
685
684
686 file_data.update({
685 file_data.update({
687 "content": full_content,
686 "content": full_content,
688 "lines": all_lines
687 "lines": all_lines
689 })
688 })
690 elif content:
689 elif content:
691 # get content *without* cache
690 # get content *without* cache
692 if _content is None:
691 if _content is None:
693 is_binary, md5, size, _content = file_node.metadata_uncached()
692 is_binary, md5, size, _content = file_node.metadata_uncached()
694
693
695 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
694 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
696 full_content = None
695 full_content = None
697 all_lines = 0
696 all_lines = 0
698 if not is_binary and not over_size_limit:
697 if not is_binary and not over_size_limit:
699 full_content = safe_str(_content)
698 full_content = safe_str(_content)
700 all_lines, empty_lines = file_node.count_lines(full_content)
699 all_lines, empty_lines = file_node.count_lines(full_content)
701
700
702 file_data.update({
701 file_data.update({
703 "content": full_content,
702 "content": full_content,
704 "lines": all_lines
703 "lines": all_lines
705 })
704 })
706
705
707 except RepositoryError:
706 except RepositoryError:
708 log.exception("Exception in get_node")
707 log.exception("Exception in get_node")
709 raise
708 raise
710
709
711 return file_data
710 return file_data
712
711
713 def get_fts_data(self, repo_name, commit_id, root_path='/'):
712 def get_fts_data(self, repo_name, commit_id, root_path='/'):
714 """
713 """
715 Fetch node tree for usage in full text search
714 Fetch node tree for usage in full text search
716 """
715 """
717
716
718 tree_info = list()
717 tree_info = list()
719
718
720 try:
719 try:
721 _repo = self._get_repo(repo_name)
720 _repo = self._get_repo(repo_name)
722 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
721 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
723 root_path = root_path.lstrip('/')
722 root_path = root_path.lstrip('/')
724 top_node = commit.get_node(root_path)
723 top_node = commit.get_node(root_path)
725 top_node.default_pre_load = []
724 top_node.default_pre_load = []
726
725
727 for __, dirs, files in commit.walk(top_node):
726 for __, dirs, files in commit.walk(top_node):
728
727
729 for f in files:
728 for f in files:
730 is_binary, md5, size, _content = f.metadata_uncached()
729 is_binary, md5, size, _content = f.metadata_uncached()
731 _data = {
730 _data = {
732 "name": f.str_path,
731 "name": f.str_path,
733 "md5": md5,
732 "md5": md5,
734 "extension": f.extension,
733 "extension": f.extension,
735 "binary": is_binary,
734 "binary": is_binary,
736 "size": size
735 "size": size
737 }
736 }
738
737
739 tree_info.append(_data)
738 tree_info.append(_data)
740
739
741 except RepositoryError:
740 except RepositoryError:
742 log.exception("Exception in get_nodes")
741 log.exception("Exception in get_nodes")
743 raise
742 raise
744
743
745 return tree_info
744 return tree_info
746
745
747 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
746 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
748 author=None, trigger_push_hook=True):
747 author=None, trigger_push_hook=True):
749 """
748 """
750 Commits given multiple nodes into repo
749 Commits given multiple nodes into repo
751
750
752 :param user: RhodeCode User object or user_id, the commiter
751 :param user: RhodeCode User object or user_id, the commiter
753 :param repo: RhodeCode Repository object
752 :param repo: RhodeCode Repository object
754 :param message: commit message
753 :param message: commit message
755 :param nodes: mapping {filename:{'content':content},...}
754 :param nodes: mapping {filename:{'content':content},...}
756 :param parent_commit: parent commit, can be empty than it's
755 :param parent_commit: parent commit, can be empty than it's
757 initial commit
756 initial commit
758 :param author: author of commit, cna be different that commiter
757 :param author: author of commit, cna be different that commiter
759 only for git
758 only for git
760 :param trigger_push_hook: trigger push hooks
759 :param trigger_push_hook: trigger push hooks
761
760
762 :returns: new committed commit
761 :returns: new committed commit
763 """
762 """
764
763
765 user = self._get_user(user)
764 user = self._get_user(user)
766 scm_instance = repo.scm_instance(cache=False)
765 scm_instance = repo.scm_instance(cache=False)
767
766
768 message = safe_str(message)
767 message = safe_str(message)
769 commiter = user.full_contact
768 commiter = user.full_contact
770 author = safe_str(author) if author else commiter
769 author = safe_str(author) if author else commiter
771
770
772 imc = scm_instance.in_memory_commit
771 imc = scm_instance.in_memory_commit
773
772
774 if not parent_commit:
773 if not parent_commit:
775 parent_commit = EmptyCommit(alias=scm_instance.alias)
774 parent_commit = EmptyCommit(alias=scm_instance.alias)
776
775
777 if isinstance(parent_commit, EmptyCommit):
776 if isinstance(parent_commit, EmptyCommit):
778 # EmptyCommit means we're editing empty repository
777 # EmptyCommit means we're editing empty repository
779 parents = None
778 parents = None
780 else:
779 else:
781 parents = [parent_commit]
780 parents = [parent_commit]
782
781
783 upload_file_types = (io.BytesIO, io.BufferedRandom)
782 upload_file_types = (io.BytesIO, io.BufferedRandom)
784 processed_nodes = []
783 processed_nodes = []
785 for filename, content_dict in nodes.items():
784 for filename, content_dict in nodes.items():
786 if not isinstance(filename, bytes):
785 if not isinstance(filename, bytes):
787 raise ValueError(f'filename key in nodes needs to be bytes , or {upload_file_types}')
786 raise ValueError(f'filename key in nodes needs to be bytes , or {upload_file_types}')
788 content = content_dict['content']
787 content = content_dict['content']
789 if not isinstance(content, upload_file_types + (bytes,)):
788 if not isinstance(content, upload_file_types + (bytes,)):
790 raise ValueError('content key value in nodes needs to be bytes')
789 raise ValueError('content key value in nodes needs to be bytes')
791
790
792 for f_path in nodes:
791 for f_path in nodes:
793 f_path = self._sanitize_path(f_path)
792 f_path = self._sanitize_path(f_path)
794 content = nodes[f_path]['content']
793 content = nodes[f_path]['content']
795
794
796 # decoding here will force that we have proper encoded values
795 # decoding here will force that we have proper encoded values
797 # in any other case this will throw exceptions and deny commit
796 # in any other case this will throw exceptions and deny commit
798
797
799 if isinstance(content, bytes):
798 if isinstance(content, bytes):
800 pass
799 pass
801 elif isinstance(content, upload_file_types):
800 elif isinstance(content, upload_file_types):
802 content = content.read()
801 content = content.read()
803 else:
802 else:
804 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
803 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
805 processed_nodes.append((f_path, content))
804 processed_nodes.append((f_path, content))
806
805
807 # add multiple nodes
806 # add multiple nodes
808 for path, content in processed_nodes:
807 for path, content in processed_nodes:
809 imc.add(FileNode(path, content=content))
808 imc.add(FileNode(path, content=content))
810
809
811 # TODO: handle pre push scenario
810 # TODO: handle pre push scenario
812 tip = imc.commit(message=message,
811 tip = imc.commit(message=message,
813 author=author,
812 author=author,
814 parents=parents,
813 parents=parents,
815 branch=parent_commit.branch)
814 branch=parent_commit.branch)
816
815
817 self.mark_for_invalidation(repo.repo_name)
816 self.mark_for_invalidation(repo.repo_name)
818 if trigger_push_hook:
817 if trigger_push_hook:
819 hooks_utils.trigger_post_push_hook(
818 hooks_utils.trigger_post_push_hook(
820 username=user.username, action='push_local',
819 username=user.username, action='push_local',
821 repo_name=repo.repo_name, repo_type=scm_instance.alias,
820 repo_name=repo.repo_name, repo_type=scm_instance.alias,
822 hook_type='post_push',
821 hook_type='post_push',
823 commit_ids=[tip.raw_id])
822 commit_ids=[tip.raw_id])
824 return tip
823 return tip
825
824
826 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
825 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
827 author=None, trigger_push_hook=True):
826 author=None, trigger_push_hook=True):
828 user = self._get_user(user)
827 user = self._get_user(user)
829 scm_instance = repo.scm_instance(cache=False)
828 scm_instance = repo.scm_instance(cache=False)
830
829
831 message = safe_str(message)
830 message = safe_str(message)
832 commiter = user.full_contact
831 commiter = user.full_contact
833 author = safe_str(author) if author else commiter
832 author = safe_str(author) if author else commiter
834
833
835 imc = scm_instance.in_memory_commit
834 imc = scm_instance.in_memory_commit
836
835
837 if not parent_commit:
836 if not parent_commit:
838 parent_commit = EmptyCommit(alias=scm_instance.alias)
837 parent_commit = EmptyCommit(alias=scm_instance.alias)
839
838
840 if isinstance(parent_commit, EmptyCommit):
839 if isinstance(parent_commit, EmptyCommit):
841 # EmptyCommit means we we're editing empty repository
840 # EmptyCommit means we we're editing empty repository
842 parents = None
841 parents = None
843 else:
842 else:
844 parents = [parent_commit]
843 parents = [parent_commit]
845
844
846 # add multiple nodes
845 # add multiple nodes
847 for _filename, data in nodes.items():
846 for _filename, data in nodes.items():
848 # new filename, can be renamed from the old one, also sanitaze
847 # new filename, can be renamed from the old one, also sanitaze
849 # the path for any hack around relative paths like ../../ etc.
848 # the path for any hack around relative paths like ../../ etc.
850 filename = self._sanitize_path(data['filename'])
849 filename = self._sanitize_path(data['filename'])
851 old_filename = self._sanitize_path(_filename)
850 old_filename = self._sanitize_path(_filename)
852 content = data['content']
851 content = data['content']
853 file_mode = data.get('mode')
852 file_mode = data.get('mode')
854 filenode = FileNode(old_filename, content=content, mode=file_mode)
853 filenode = FileNode(old_filename, content=content, mode=file_mode)
855 op = data['op']
854 op = data['op']
856 if op == 'add':
855 if op == 'add':
857 imc.add(filenode)
856 imc.add(filenode)
858 elif op == 'del':
857 elif op == 'del':
859 imc.remove(filenode)
858 imc.remove(filenode)
860 elif op == 'mod':
859 elif op == 'mod':
861 if filename != old_filename:
860 if filename != old_filename:
862 # TODO: handle renames more efficient, needs vcs lib changes
861 # TODO: handle renames more efficient, needs vcs lib changes
863 imc.remove(filenode)
862 imc.remove(filenode)
864 imc.add(FileNode(filename, content=content, mode=file_mode))
863 imc.add(FileNode(filename, content=content, mode=file_mode))
865 else:
864 else:
866 imc.change(filenode)
865 imc.change(filenode)
867
866
868 try:
867 try:
869 # TODO: handle pre push scenario commit changes
868 # TODO: handle pre push scenario commit changes
870 tip = imc.commit(message=message,
869 tip = imc.commit(message=message,
871 author=author,
870 author=author,
872 parents=parents,
871 parents=parents,
873 branch=parent_commit.branch)
872 branch=parent_commit.branch)
874 except NodeNotChangedError:
873 except NodeNotChangedError:
875 raise
874 raise
876 except Exception as e:
875 except Exception as e:
877 log.exception("Unexpected exception during call to imc.commit")
876 log.exception("Unexpected exception during call to imc.commit")
878 raise IMCCommitError(str(e))
877 raise IMCCommitError(str(e))
879 finally:
878 finally:
880 # always clear caches, if commit fails we want fresh object also
879 # always clear caches, if commit fails we want fresh object also
881 self.mark_for_invalidation(repo.repo_name)
880 self.mark_for_invalidation(repo.repo_name)
882
881
883 if trigger_push_hook:
882 if trigger_push_hook:
884 hooks_utils.trigger_post_push_hook(
883 hooks_utils.trigger_post_push_hook(
885 username=user.username, action='push_local', hook_type='post_push',
884 username=user.username, action='push_local', hook_type='post_push',
886 repo_name=repo.repo_name, repo_type=scm_instance.alias,
885 repo_name=repo.repo_name, repo_type=scm_instance.alias,
887 commit_ids=[tip.raw_id])
886 commit_ids=[tip.raw_id])
888
887
889 return tip
888 return tip
890
889
891 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
890 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
892 author=None, trigger_push_hook=True):
891 author=None, trigger_push_hook=True):
893 """
892 """
894 Deletes given multiple nodes into `repo`
893 Deletes given multiple nodes into `repo`
895
894
896 :param user: RhodeCode User object or user_id, the committer
895 :param user: RhodeCode User object or user_id, the committer
897 :param repo: RhodeCode Repository object
896 :param repo: RhodeCode Repository object
898 :param message: commit message
897 :param message: commit message
899 :param nodes: mapping {filename:{'content':content},...}
898 :param nodes: mapping {filename:{'content':content},...}
900 :param parent_commit: parent commit, can be empty than it's initial
899 :param parent_commit: parent commit, can be empty than it's initial
901 commit
900 commit
902 :param author: author of commit, cna be different that commiter only
901 :param author: author of commit, cna be different that commiter only
903 for git
902 for git
904 :param trigger_push_hook: trigger push hooks
903 :param trigger_push_hook: trigger push hooks
905
904
906 :returns: new commit after deletion
905 :returns: new commit after deletion
907 """
906 """
908
907
909 user = self._get_user(user)
908 user = self._get_user(user)
910 scm_instance = repo.scm_instance(cache=False)
909 scm_instance = repo.scm_instance(cache=False)
911
910
912 processed_nodes = []
911 processed_nodes = []
913 for f_path in nodes:
912 for f_path in nodes:
914 f_path = self._sanitize_path(f_path)
913 f_path = self._sanitize_path(f_path)
915 # content can be empty but for compatibility it allows same dicts
914 # content can be empty but for compatibility it allows same dicts
916 # structure as add_nodes
915 # structure as add_nodes
917 content = nodes[f_path].get('content')
916 content = nodes[f_path].get('content')
918 processed_nodes.append((safe_bytes(f_path), content))
917 processed_nodes.append((safe_bytes(f_path), content))
919
918
920 message = safe_str(message)
919 message = safe_str(message)
921 commiter = user.full_contact
920 commiter = user.full_contact
922 author = safe_str(author) if author else commiter
921 author = safe_str(author) if author else commiter
923
922
924 imc = scm_instance.in_memory_commit
923 imc = scm_instance.in_memory_commit
925
924
926 if not parent_commit:
925 if not parent_commit:
927 parent_commit = EmptyCommit(alias=scm_instance.alias)
926 parent_commit = EmptyCommit(alias=scm_instance.alias)
928
927
929 if isinstance(parent_commit, EmptyCommit):
928 if isinstance(parent_commit, EmptyCommit):
930 # EmptyCommit means we we're editing empty repository
929 # EmptyCommit means we we're editing empty repository
931 parents = None
930 parents = None
932 else:
931 else:
933 parents = [parent_commit]
932 parents = [parent_commit]
934 # add multiple nodes
933 # add multiple nodes
935 for path, content in processed_nodes:
934 for path, content in processed_nodes:
936 imc.remove(FileNode(path, content=content))
935 imc.remove(FileNode(path, content=content))
937
936
938 # TODO: handle pre push scenario
937 # TODO: handle pre push scenario
939 tip = imc.commit(message=message,
938 tip = imc.commit(message=message,
940 author=author,
939 author=author,
941 parents=parents,
940 parents=parents,
942 branch=parent_commit.branch)
941 branch=parent_commit.branch)
943
942
944 self.mark_for_invalidation(repo.repo_name)
943 self.mark_for_invalidation(repo.repo_name)
945 if trigger_push_hook:
944 if trigger_push_hook:
946 hooks_utils.trigger_post_push_hook(
945 hooks_utils.trigger_post_push_hook(
947 username=user.username, action='push_local', hook_type='post_push',
946 username=user.username, action='push_local', hook_type='post_push',
948 repo_name=repo.repo_name, repo_type=scm_instance.alias,
947 repo_name=repo.repo_name, repo_type=scm_instance.alias,
949 commit_ids=[tip.raw_id])
948 commit_ids=[tip.raw_id])
950 return tip
949 return tip
951
950
952 def strip(self, repo, commit_id, branch):
951 def strip(self, repo, commit_id, branch):
953 scm_instance = repo.scm_instance(cache=False)
952 scm_instance = repo.scm_instance(cache=False)
954 scm_instance.config.clear_section('hooks')
953 scm_instance.config.clear_section('hooks')
955 scm_instance.strip(commit_id, branch)
954 scm_instance.strip(commit_id, branch)
956 self.mark_for_invalidation(repo.repo_name)
955 self.mark_for_invalidation(repo.repo_name)
957
956
958 def get_unread_journal(self):
957 def get_unread_journal(self):
959 return self.sa.query(UserLog).count()
958 return self.sa.query(UserLog).count()
960
959
961 @classmethod
960 @classmethod
962 def backend_landing_ref(cls, repo_type):
961 def backend_landing_ref(cls, repo_type):
963 """
962 """
964 Return a default landing ref based on a repository type.
963 Return a default landing ref based on a repository type.
965 """
964 """
966
965
967 landing_ref = {
966 landing_ref = {
968 'hg': ('branch:default', 'default'),
967 'hg': ('branch:default', 'default'),
969 'git': ('branch:master', 'master'),
968 'git': ('branch:master', 'master'),
970 'svn': ('rev:tip', 'latest tip'),
969 'svn': ('rev:tip', 'latest tip'),
971 'default': ('rev:tip', 'latest tip'),
970 'default': ('rev:tip', 'latest tip'),
972 }
971 }
973
972
974 return landing_ref.get(repo_type) or landing_ref['default']
973 return landing_ref.get(repo_type) or landing_ref['default']
975
974
976 def get_repo_landing_revs(self, translator, repo=None):
975 def get_repo_landing_revs(self, translator, repo=None):
977 """
976 """
978 Generates select option with tags branches and bookmarks (for hg only)
977 Generates select option with tags branches and bookmarks (for hg only)
979 grouped by type
978 grouped by type
980
979
981 :param repo:
980 :param repo:
982 """
981 """
983 from rhodecode.lib.vcs.backends.git import GitRepository
982 from rhodecode.lib.vcs.backends.git import GitRepository
984
983
985 _ = translator
984 _ = translator
986 repo = self._get_repo(repo)
985 repo = self._get_repo(repo)
987
986
988 if repo:
987 if repo:
989 repo_type = repo.repo_type
988 repo_type = repo.repo_type
990 else:
989 else:
991 repo_type = 'default'
990 repo_type = 'default'
992
991
993 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
992 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
994
993
995 default_ref_options = [
994 default_ref_options = [
996 [default_landing_ref, landing_ref_lbl]
995 [default_landing_ref, landing_ref_lbl]
997 ]
996 ]
998 default_choices = [
997 default_choices = [
999 default_landing_ref
998 default_landing_ref
1000 ]
999 ]
1001
1000
1002 if not repo:
1001 if not repo:
1003 # presented at NEW repo creation
1002 # presented at NEW repo creation
1004 return default_choices, default_ref_options
1003 return default_choices, default_ref_options
1005
1004
1006 repo = repo.scm_instance()
1005 repo = repo.scm_instance()
1007
1006
1008 ref_options = [(default_landing_ref, landing_ref_lbl)]
1007 ref_options = [(default_landing_ref, landing_ref_lbl)]
1009 choices = [default_landing_ref]
1008 choices = [default_landing_ref]
1010
1009
1011 # branches
1010 # branches
1012 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1011 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1013 if not branch_group:
1012 if not branch_group:
1014 # new repo, or without maybe a branch?
1013 # new repo, or without maybe a branch?
1015 branch_group = default_ref_options
1014 branch_group = default_ref_options
1016
1015
1017 branches_group = (branch_group, _("Branches"))
1016 branches_group = (branch_group, _("Branches"))
1018 ref_options.append(branches_group)
1017 ref_options.append(branches_group)
1019 choices.extend([x[0] for x in branches_group[0]])
1018 choices.extend([x[0] for x in branches_group[0]])
1020
1019
1021 # bookmarks for HG
1020 # bookmarks for HG
1022 if repo.alias == 'hg':
1021 if repo.alias == 'hg':
1023 bookmarks_group = (
1022 bookmarks_group = (
1024 [(f'book:{safe_str(b)}', safe_str(b))
1023 [(f'book:{safe_str(b)}', safe_str(b))
1025 for b in repo.bookmarks],
1024 for b in repo.bookmarks],
1026 _("Bookmarks"))
1025 _("Bookmarks"))
1027 ref_options.append(bookmarks_group)
1026 ref_options.append(bookmarks_group)
1028 choices.extend([x[0] for x in bookmarks_group[0]])
1027 choices.extend([x[0] for x in bookmarks_group[0]])
1029
1028
1030 # tags
1029 # tags
1031 tags_group = (
1030 tags_group = (
1032 [(f'tag:{safe_str(t)}', safe_str(t))
1031 [(f'tag:{safe_str(t)}', safe_str(t))
1033 for t in repo.tags],
1032 for t in repo.tags],
1034 _("Tags"))
1033 _("Tags"))
1035 ref_options.append(tags_group)
1034 ref_options.append(tags_group)
1036 choices.extend([x[0] for x in tags_group[0]])
1035 choices.extend([x[0] for x in tags_group[0]])
1037
1036
1038 return choices, ref_options
1037 return choices, ref_options
1039
1038
1040 def get_server_info(self, environ=None):
1039 def get_server_info(self, environ=None):
1041 server_info = get_system_info(environ)
1040 server_info = get_system_info(environ)
1042 return server_info
1041 return server_info
@@ -1,1046 +1,1046 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 users model for RhodeCode
20 users model for RhodeCode
21 """
21 """
22
22
23 import logging
23 import logging
24 import traceback
24 import traceback
25 import datetime
25 import datetime
26 import ipaddress
26 import ipaddress
27
27
28 from pyramid.threadlocal import get_current_request
28 from pyramid.threadlocal import get_current_request
29 from sqlalchemy.exc import DatabaseError
29 from sqlalchemy.exc import DatabaseError
30
30
31 from rhodecode import events
31 from rhodecode import events
32 from rhodecode.lib.user_log_filter import user_log_filter
32 from rhodecode.lib.user_log_filter import user_log_filter
33 from rhodecode.lib.utils2 import (
33 from rhodecode.lib.utils2 import (
34 get_current_rhodecode_user, action_logger_generic,
34 get_current_rhodecode_user, action_logger_generic,
35 AttributeDict, str2bool)
35 AttributeDict, str2bool)
36 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.str_utils import safe_str
37 from rhodecode.lib.exceptions import (
37 from rhodecode.lib.exceptions import (
38 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
38 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
39 UserOwnsUserGroupsException, NotAllowedToCreateUserError,
39 UserOwnsUserGroupsException, NotAllowedToCreateUserError,
40 UserOwnsPullRequestsException, UserOwnsArtifactsException)
40 UserOwnsPullRequestsException, UserOwnsArtifactsException)
41 from rhodecode.lib.caching_query import FromCache
41 from rhodecode.lib.caching_query import FromCache
42 from rhodecode.model import BaseModel
42 from rhodecode.model import BaseModel
43 from rhodecode.model.db import (
43 from rhodecode.model.db import (
44 _hash_key, func, true, false, or_, joinedload, User, UserToPerm,
44 _hash_key, func, true, false, or_, joinedload, User, UserToPerm,
45 UserEmailMap, UserIpMap, UserLog)
45 UserEmailMap, UserIpMap, UserLog)
46 from rhodecode.model.meta import Session
46 from rhodecode.model.meta import Session
47 from rhodecode.model.auth_token import AuthTokenModel
47 from rhodecode.model.auth_token import AuthTokenModel
48 from rhodecode.model.repo_group import RepoGroupModel
48 from rhodecode.model.repo_group import RepoGroupModel
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class UserModel(BaseModel):
53 class UserModel(BaseModel):
54 cls = User
54 cls = User
55
55
56 def get(self, user_id, cache=False):
56 def get(self, user_id, cache=False):
57 user = self.sa.query(User)
57 user = self.sa.query(User)
58 if cache:
58 if cache:
59 user = user.options(
59 user = user.options(
60 FromCache("sql_cache_short", f"get_user_{user_id}"))
60 FromCache("sql_cache_short", f"get_user_{user_id}"))
61 return user.get(user_id)
61 return user.get(user_id)
62
62
63 def get_user(self, user):
63 def get_user(self, user):
64 return self._get_user(user)
64 return self._get_user(user)
65
65
66 def _serialize_user(self, user):
66 def _serialize_user(self, user):
67 import rhodecode.lib.helpers as h
67 import rhodecode.lib.helpers as h
68
68
69 return {
69 return {
70 'id': user.user_id,
70 'id': user.user_id,
71 'first_name': user.first_name,
71 'first_name': user.first_name,
72 'last_name': user.last_name,
72 'last_name': user.last_name,
73 'username': user.username,
73 'username': user.username,
74 'email': user.email,
74 'email': user.email,
75 'icon_link': h.gravatar_url(user.email, 30),
75 'icon_link': h.gravatar_url(user.email, 30),
76 'profile_link': h.link_to_user(user),
76 'profile_link': h.link_to_user(user),
77 'value_display': h.escape(h.person(user)),
77 'value_display': h.escape(h.person(user)),
78 'value': user.username,
78 'value': user.username,
79 'value_type': 'user',
79 'value_type': 'user',
80 'active': user.active,
80 'active': user.active,
81 }
81 }
82
82
83 def get_users(self, name_contains=None, limit=20, only_active=True):
83 def get_users(self, name_contains=None, limit=20, only_active=True):
84
84
85 query = self.sa.query(User)
85 query = self.sa.query(User)
86 if only_active:
86 if only_active:
87 query = query.filter(User.active == true())
87 query = query.filter(User.active == true())
88
88
89 if name_contains:
89 if name_contains:
90 ilike_expression = u'%{}%'.format(safe_str(name_contains))
90 ilike_expression = f'%{safe_str(name_contains)}%'
91 query = query.filter(
91 query = query.filter(
92 or_(
92 or_(
93 User.name.ilike(ilike_expression),
93 User.name.ilike(ilike_expression),
94 User.lastname.ilike(ilike_expression),
94 User.lastname.ilike(ilike_expression),
95 User.username.ilike(ilike_expression)
95 User.username.ilike(ilike_expression)
96 )
96 )
97 )
97 )
98 # sort by len to have top most matches first
98 # sort by len to have top most matches first
99 query = query.order_by(func.length(User.username))\
99 query = query.order_by(func.length(User.username))\
100 .order_by(User.username)
100 .order_by(User.username)
101 query = query.limit(limit)
101 query = query.limit(limit)
102
102
103 users = query.all()
103 users = query.all()
104
104
105 _users = [
105 _users = [
106 self._serialize_user(user) for user in users
106 self._serialize_user(user) for user in users
107 ]
107 ]
108 return _users
108 return _users
109
109
110 def get_by_username(self, username, cache=False, case_insensitive=False):
110 def get_by_username(self, username, cache=False, case_insensitive=False):
111
111
112 if case_insensitive:
112 if case_insensitive:
113 user = self.sa.query(User).filter(User.username.ilike(username))
113 user = self.sa.query(User).filter(User.username.ilike(username))
114 else:
114 else:
115 user = self.sa.query(User)\
115 user = self.sa.query(User)\
116 .filter(User.username == username)
116 .filter(User.username == username)
117 if cache:
117 if cache:
118 name_key = _hash_key(username)
118 name_key = _hash_key(username)
119 user = user.options(
119 user = user.options(
120 FromCache("sql_cache_short", f"get_user_{name_key}"))
120 FromCache("sql_cache_short", f"get_user_{name_key}"))
121 return user.scalar()
121 return user.scalar()
122
122
123 def get_by_email(self, email, cache=False, case_insensitive=False):
123 def get_by_email(self, email, cache=False, case_insensitive=False):
124 return User.get_by_email(email, case_insensitive, cache)
124 return User.get_by_email(email, case_insensitive, cache)
125
125
126 def get_by_auth_token(self, auth_token, cache=False):
126 def get_by_auth_token(self, auth_token, cache=False):
127 return User.get_by_auth_token(auth_token, cache)
127 return User.get_by_auth_token(auth_token, cache)
128
128
129 def get_active_user_count(self, cache=False):
129 def get_active_user_count(self, cache=False):
130 qry = User.query().filter(
130 qry = User.query().filter(
131 User.active == true()).filter(
131 User.active == true()).filter(
132 User.username != User.DEFAULT_USER)
132 User.username != User.DEFAULT_USER)
133 if cache:
133 if cache:
134 qry = qry.options(
134 qry = qry.options(
135 FromCache("sql_cache_short", "get_active_users"))
135 FromCache("sql_cache_short", "get_active_users"))
136 return qry.count()
136 return qry.count()
137
137
138 def create(self, form_data, cur_user=None):
138 def create(self, form_data, cur_user=None):
139 if not cur_user:
139 if not cur_user:
140 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
140 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
141
141
142 user_data = {
142 user_data = {
143 'username': form_data['username'],
143 'username': form_data['username'],
144 'password': form_data['password'],
144 'password': form_data['password'],
145 'email': form_data['email'],
145 'email': form_data['email'],
146 'firstname': form_data['firstname'],
146 'firstname': form_data['firstname'],
147 'lastname': form_data['lastname'],
147 'lastname': form_data['lastname'],
148 'active': form_data['active'],
148 'active': form_data['active'],
149 'extern_type': form_data['extern_type'],
149 'extern_type': form_data['extern_type'],
150 'extern_name': form_data['extern_name'],
150 'extern_name': form_data['extern_name'],
151 'admin': False,
151 'admin': False,
152 'cur_user': cur_user
152 'cur_user': cur_user
153 }
153 }
154
154
155 if 'create_repo_group' in form_data:
155 if 'create_repo_group' in form_data:
156 user_data['create_repo_group'] = str2bool(
156 user_data['create_repo_group'] = str2bool(
157 form_data.get('create_repo_group'))
157 form_data.get('create_repo_group'))
158
158
159 try:
159 try:
160 if form_data.get('password_change'):
160 if form_data.get('password_change'):
161 user_data['force_password_change'] = True
161 user_data['force_password_change'] = True
162 return UserModel().create_or_update(**user_data)
162 return UserModel().create_or_update(**user_data)
163 except Exception:
163 except Exception:
164 log.error(traceback.format_exc())
164 log.error(traceback.format_exc())
165 raise
165 raise
166
166
167 def update_user(self, user, skip_attrs=None, **kwargs):
167 def update_user(self, user, skip_attrs=None, **kwargs):
168 from rhodecode.lib.auth import get_crypt_password
168 from rhodecode.lib.auth import get_crypt_password
169
169
170 user = self._get_user(user)
170 user = self._get_user(user)
171 if user.username == User.DEFAULT_USER:
171 if user.username == User.DEFAULT_USER:
172 raise DefaultUserException(
172 raise DefaultUserException(
173 "You can't edit this user (`%(username)s`) since it's "
173 "You can't edit this user (`%(username)s`) since it's "
174 "crucial for entire application" % {
174 "crucial for entire application" % {
175 'username': user.username})
175 'username': user.username})
176
176
177 # first store only defaults
177 # first store only defaults
178 user_attrs = {
178 user_attrs = {
179 'updating_user_id': user.user_id,
179 'updating_user_id': user.user_id,
180 'username': user.username,
180 'username': user.username,
181 'password': user.password,
181 'password': user.password,
182 'email': user.email,
182 'email': user.email,
183 'firstname': user.name,
183 'firstname': user.name,
184 'lastname': user.lastname,
184 'lastname': user.lastname,
185 'description': user.description,
185 'description': user.description,
186 'active': user.active,
186 'active': user.active,
187 'admin': user.admin,
187 'admin': user.admin,
188 'extern_name': user.extern_name,
188 'extern_name': user.extern_name,
189 'extern_type': user.extern_type,
189 'extern_type': user.extern_type,
190 'language': user.user_data.get('language')
190 'language': user.user_data.get('language')
191 }
191 }
192
192
193 # in case there's new_password, that comes from form, use it to
193 # in case there's new_password, that comes from form, use it to
194 # store password
194 # store password
195 if kwargs.get('new_password'):
195 if kwargs.get('new_password'):
196 kwargs['password'] = kwargs['new_password']
196 kwargs['password'] = kwargs['new_password']
197
197
198 # cleanups, my_account password change form
198 # cleanups, my_account password change form
199 kwargs.pop('current_password', None)
199 kwargs.pop('current_password', None)
200 kwargs.pop('new_password', None)
200 kwargs.pop('new_password', None)
201
201
202 # cleanups, user edit password change form
202 # cleanups, user edit password change form
203 kwargs.pop('password_confirmation', None)
203 kwargs.pop('password_confirmation', None)
204 kwargs.pop('password_change', None)
204 kwargs.pop('password_change', None)
205
205
206 # create repo group on user creation
206 # create repo group on user creation
207 kwargs.pop('create_repo_group', None)
207 kwargs.pop('create_repo_group', None)
208
208
209 # legacy forms send name, which is the firstname
209 # legacy forms send name, which is the firstname
210 firstname = kwargs.pop('name', None)
210 firstname = kwargs.pop('name', None)
211 if firstname:
211 if firstname:
212 kwargs['firstname'] = firstname
212 kwargs['firstname'] = firstname
213
213
214 for k, v in kwargs.items():
214 for k, v in kwargs.items():
215 # skip if we don't want to update this
215 # skip if we don't want to update this
216 if skip_attrs and k in skip_attrs:
216 if skip_attrs and k in skip_attrs:
217 continue
217 continue
218
218
219 user_attrs[k] = v
219 user_attrs[k] = v
220
220
221 try:
221 try:
222 return self.create_or_update(**user_attrs)
222 return self.create_or_update(**user_attrs)
223 except Exception:
223 except Exception:
224 log.error(traceback.format_exc())
224 log.error(traceback.format_exc())
225 raise
225 raise
226
226
227 def create_or_update(
227 def create_or_update(
228 self, username, password, email, firstname='', lastname='',
228 self, username, password, email, firstname='', lastname='',
229 active=True, admin=False, extern_type=None, extern_name=None,
229 active=True, admin=False, extern_type=None, extern_name=None,
230 cur_user=None, plugin=None, force_password_change=False,
230 cur_user=None, plugin=None, force_password_change=False,
231 allow_to_create_user=True, create_repo_group=None,
231 allow_to_create_user=True, create_repo_group=None,
232 updating_user_id=None, language=None, description='',
232 updating_user_id=None, language=None, description='',
233 strict_creation_check=True):
233 strict_creation_check=True):
234 """
234 """
235 Creates a new instance if not found, or updates current one
235 Creates a new instance if not found, or updates current one
236
236
237 :param username:
237 :param username:
238 :param password:
238 :param password:
239 :param email:
239 :param email:
240 :param firstname:
240 :param firstname:
241 :param lastname:
241 :param lastname:
242 :param active:
242 :param active:
243 :param admin:
243 :param admin:
244 :param extern_type:
244 :param extern_type:
245 :param extern_name:
245 :param extern_name:
246 :param cur_user:
246 :param cur_user:
247 :param plugin: optional plugin this method was called from
247 :param plugin: optional plugin this method was called from
248 :param force_password_change: toggles new or existing user flag
248 :param force_password_change: toggles new or existing user flag
249 for password change
249 for password change
250 :param allow_to_create_user: Defines if the method can actually create
250 :param allow_to_create_user: Defines if the method can actually create
251 new users
251 new users
252 :param create_repo_group: Defines if the method should also
252 :param create_repo_group: Defines if the method should also
253 create an repo group with user name, and owner
253 create an repo group with user name, and owner
254 :param updating_user_id: if we set it up this is the user we want to
254 :param updating_user_id: if we set it up this is the user we want to
255 update this allows to editing username.
255 update this allows to editing username.
256 :param language: language of user from interface.
256 :param language: language of user from interface.
257 :param description: user description
257 :param description: user description
258 :param strict_creation_check: checks for allowed creation license wise etc.
258 :param strict_creation_check: checks for allowed creation license wise etc.
259
259
260 :returns: new User object with injected `is_new_user` attribute.
260 :returns: new User object with injected `is_new_user` attribute.
261 """
261 """
262
262
263 if not cur_user:
263 if not cur_user:
264 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
264 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
265
265
266 from rhodecode.lib.auth import (
266 from rhodecode.lib.auth import (
267 get_crypt_password, check_password)
267 get_crypt_password, check_password)
268 from rhodecode.lib import hooks_base
268 from rhodecode.lib import hooks_base
269
269
270 def _password_change(new_user, password):
270 def _password_change(new_user, password):
271 old_password = new_user.password or ''
271 old_password = new_user.password or ''
272 # empty password
272 # empty password
273 if not old_password:
273 if not old_password:
274 return False
274 return False
275
275
276 # password check is only needed for RhodeCode internal auth calls
276 # password check is only needed for RhodeCode internal auth calls
277 # in case it's a plugin we don't care
277 # in case it's a plugin we don't care
278 if not plugin:
278 if not plugin:
279
279
280 # first check if we gave crypted password back, and if it
280 # first check if we gave crypted password back, and if it
281 # matches it's not password change
281 # matches it's not password change
282 if new_user.password == password:
282 if new_user.password == password:
283 return False
283 return False
284
284
285 password_match = check_password(password, old_password)
285 password_match = check_password(password, old_password)
286 if not password_match:
286 if not password_match:
287 return True
287 return True
288
288
289 return False
289 return False
290
290
291 # read settings on default personal repo group creation
291 # read settings on default personal repo group creation
292 if create_repo_group is None:
292 if create_repo_group is None:
293 default_create_repo_group = RepoGroupModel()\
293 default_create_repo_group = RepoGroupModel()\
294 .get_default_create_personal_repo_group()
294 .get_default_create_personal_repo_group()
295 create_repo_group = default_create_repo_group
295 create_repo_group = default_create_repo_group
296
296
297 user_data = {
297 user_data = {
298 'username': username,
298 'username': username,
299 'password': password,
299 'password': password,
300 'email': email,
300 'email': email,
301 'firstname': firstname,
301 'firstname': firstname,
302 'lastname': lastname,
302 'lastname': lastname,
303 'active': active,
303 'active': active,
304 'admin': admin
304 'admin': admin
305 }
305 }
306
306
307 if updating_user_id:
307 if updating_user_id:
308 log.debug('Checking for existing account in RhodeCode '
308 log.debug('Checking for existing account in RhodeCode '
309 'database with user_id `%s` ', updating_user_id)
309 'database with user_id `%s` ', updating_user_id)
310 user = User.get(updating_user_id)
310 user = User.get(updating_user_id)
311 else:
311 else:
312 log.debug('Checking for existing account in RhodeCode '
312 log.debug('Checking for existing account in RhodeCode '
313 'database with username `%s` ', username)
313 'database with username `%s` ', username)
314 user = User.get_by_username(username, case_insensitive=True)
314 user = User.get_by_username(username, case_insensitive=True)
315
315
316 if user is None:
316 if user is None:
317 # we check internal flag if this method is actually allowed to
317 # we check internal flag if this method is actually allowed to
318 # create new user
318 # create new user
319 if not allow_to_create_user:
319 if not allow_to_create_user:
320 msg = ('Method wants to create new user, but it is not '
320 msg = ('Method wants to create new user, but it is not '
321 'allowed to do so')
321 'allowed to do so')
322 log.warning(msg)
322 log.warning(msg)
323 raise NotAllowedToCreateUserError(msg)
323 raise NotAllowedToCreateUserError(msg)
324
324
325 log.debug('Creating new user %s', username)
325 log.debug('Creating new user %s', username)
326
326
327 # only if we create user that is active
327 # only if we create user that is active
328 new_active_user = active
328 new_active_user = active
329 if new_active_user and strict_creation_check:
329 if new_active_user and strict_creation_check:
330 # raises UserCreationError if it's not allowed for any reason to
330 # raises UserCreationError if it's not allowed for any reason to
331 # create new active user, this also executes pre-create hooks
331 # create new active user, this also executes pre-create hooks
332 hooks_base.check_allowed_create_user(user_data, cur_user, strict_check=True)
332 hooks_base.check_allowed_create_user(user_data, cur_user, strict_check=True)
333 events.trigger(events.UserPreCreate(user_data))
333 events.trigger(events.UserPreCreate(user_data))
334 new_user = User()
334 new_user = User()
335 edit = False
335 edit = False
336 else:
336 else:
337 log.debug('updating user `%s`', username)
337 log.debug('updating user `%s`', username)
338 events.trigger(events.UserPreUpdate(user, user_data))
338 events.trigger(events.UserPreUpdate(user, user_data))
339 new_user = user
339 new_user = user
340 edit = True
340 edit = True
341
341
342 # we're not allowed to edit default user
342 # we're not allowed to edit default user
343 if user.username == User.DEFAULT_USER:
343 if user.username == User.DEFAULT_USER:
344 raise DefaultUserException(
344 raise DefaultUserException(
345 "You can't edit this user (`%(username)s`) since it's "
345 "You can't edit this user (`%(username)s`) since it's "
346 "crucial for entire application"
346 "crucial for entire application"
347 % {'username': user.username})
347 % {'username': user.username})
348
348
349 # inject special attribute that will tell us if User is new or old
349 # inject special attribute that will tell us if User is new or old
350 new_user.is_new_user = not edit
350 new_user.is_new_user = not edit
351 # for users that didn's specify auth type, we use RhodeCode built in
351 # for users that didn's specify auth type, we use RhodeCode built in
352 from rhodecode.authentication.plugins import auth_rhodecode
352 from rhodecode.authentication.plugins import auth_rhodecode
353 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid
353 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid
354 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid
354 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid
355
355
356 try:
356 try:
357 new_user.username = username
357 new_user.username = username
358 new_user.admin = admin
358 new_user.admin = admin
359 new_user.email = email
359 new_user.email = email
360 new_user.active = active
360 new_user.active = active
361 new_user.extern_name = safe_str(extern_name)
361 new_user.extern_name = safe_str(extern_name)
362 new_user.extern_type = safe_str(extern_type)
362 new_user.extern_type = safe_str(extern_type)
363 new_user.name = firstname
363 new_user.name = firstname
364 new_user.lastname = lastname
364 new_user.lastname = lastname
365 new_user.description = description
365 new_user.description = description
366
366
367 # set password only if creating an user or password is changed
367 # set password only if creating an user or password is changed
368 if not edit or _password_change(new_user, password):
368 if not edit or _password_change(new_user, password):
369 reason = 'new password' if edit else 'new user'
369 reason = 'new password' if edit else 'new user'
370 log.debug('Updating password reason=>%s', reason)
370 log.debug('Updating password reason=>%s', reason)
371 new_user.password = get_crypt_password(password) if password else None
371 new_user.password = get_crypt_password(password) if password else None
372
372
373 if force_password_change:
373 if force_password_change:
374 new_user.update_userdata(force_password_change=True)
374 new_user.update_userdata(force_password_change=True)
375 if language:
375 if language:
376 new_user.update_userdata(language=language)
376 new_user.update_userdata(language=language)
377 new_user.update_userdata(notification_status=True)
377 new_user.update_userdata(notification_status=True)
378
378
379 self.sa.add(new_user)
379 self.sa.add(new_user)
380
380
381 if not edit and create_repo_group:
381 if not edit and create_repo_group:
382 RepoGroupModel().create_personal_repo_group(
382 RepoGroupModel().create_personal_repo_group(
383 new_user, commit_early=False)
383 new_user, commit_early=False)
384
384
385 if not edit:
385 if not edit:
386 # add the RSS token
386 # add the RSS token
387 self.add_auth_token(
387 self.add_auth_token(
388 user=username, lifetime_minutes=-1,
388 user=username, lifetime_minutes=-1,
389 role=self.auth_token_role.ROLE_FEED,
389 role=self.auth_token_role.ROLE_FEED,
390 description=u'Generated feed token')
390 description='Generated feed token')
391
391
392 kwargs = new_user.get_dict()
392 kwargs = new_user.get_dict()
393 # backward compat, require api_keys present
393 # backward compat, require api_keys present
394 kwargs['api_keys'] = kwargs['auth_tokens']
394 kwargs['api_keys'] = kwargs['auth_tokens']
395 hooks_base.create_user(created_by=cur_user, **kwargs)
395 hooks_base.create_user(created_by=cur_user, **kwargs)
396 events.trigger(events.UserPostCreate(user_data))
396 events.trigger(events.UserPostCreate(user_data))
397 return new_user
397 return new_user
398 except (DatabaseError,):
398 except (DatabaseError,):
399 log.error(traceback.format_exc())
399 log.error(traceback.format_exc())
400 raise
400 raise
401
401
402 def create_registration(self, form_data,
402 def create_registration(self, form_data,
403 extern_name='rhodecode', extern_type='rhodecode'):
403 extern_name='rhodecode', extern_type='rhodecode'):
404 from rhodecode.model.notification import NotificationModel
404 from rhodecode.model.notification import NotificationModel
405 from rhodecode.model.notification import EmailNotificationModel
405 from rhodecode.model.notification import EmailNotificationModel
406
406
407 try:
407 try:
408 form_data['admin'] = False
408 form_data['admin'] = False
409 form_data['extern_name'] = extern_name
409 form_data['extern_name'] = extern_name
410 form_data['extern_type'] = extern_type
410 form_data['extern_type'] = extern_type
411 new_user = self.create(form_data)
411 new_user = self.create(form_data)
412
412
413 self.sa.add(new_user)
413 self.sa.add(new_user)
414 self.sa.flush()
414 self.sa.flush()
415
415
416 user_data = new_user.get_dict()
416 user_data = new_user.get_dict()
417 user_data.update({
417 user_data.update({
418 'first_name': user_data.get('firstname'),
418 'first_name': user_data.get('firstname'),
419 'last_name': user_data.get('lastname'),
419 'last_name': user_data.get('lastname'),
420 })
420 })
421 kwargs = {
421 kwargs = {
422 # use SQLALCHEMY safe dump of user data
422 # use SQLALCHEMY safe dump of user data
423 'user': AttributeDict(user_data),
423 'user': AttributeDict(user_data),
424 'date': datetime.datetime.now()
424 'date': datetime.datetime.now()
425 }
425 }
426 notification_type = EmailNotificationModel.TYPE_REGISTRATION
426 notification_type = EmailNotificationModel.TYPE_REGISTRATION
427
427
428 # create notification objects, and emails
428 # create notification objects, and emails
429 NotificationModel().create(
429 NotificationModel().create(
430 created_by=new_user,
430 created_by=new_user,
431 notification_subject='', # Filled in based on the notification_type
431 notification_subject='', # Filled in based on the notification_type
432 notification_body='', # Filled in based on the notification_type
432 notification_body='', # Filled in based on the notification_type
433 notification_type=notification_type,
433 notification_type=notification_type,
434 recipients=None, # all admins
434 recipients=None, # all admins
435 email_kwargs=kwargs,
435 email_kwargs=kwargs,
436 )
436 )
437
437
438 return new_user
438 return new_user
439 except Exception:
439 except Exception:
440 log.error(traceback.format_exc())
440 log.error(traceback.format_exc())
441 raise
441 raise
442
442
443 def _handle_user_repos(self, username, repositories, handle_user,
443 def _handle_user_repos(self, username, repositories, handle_user,
444 handle_mode=None):
444 handle_mode=None):
445
445
446 left_overs = True
446 left_overs = True
447
447
448 from rhodecode.model.repo import RepoModel
448 from rhodecode.model.repo import RepoModel
449
449
450 if handle_mode == 'detach':
450 if handle_mode == 'detach':
451 for obj in repositories:
451 for obj in repositories:
452 obj.user = handle_user
452 obj.user = handle_user
453 # set description we know why we super admin now owns
453 # set description we know why we super admin now owns
454 # additional repositories that were orphaned !
454 # additional repositories that were orphaned !
455 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
455 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
456 self.sa.add(obj)
456 self.sa.add(obj)
457 left_overs = False
457 left_overs = False
458 elif handle_mode == 'delete':
458 elif handle_mode == 'delete':
459 for obj in repositories:
459 for obj in repositories:
460 RepoModel().delete(obj, forks='detach')
460 RepoModel().delete(obj, forks='detach')
461 left_overs = False
461 left_overs = False
462
462
463 # if nothing is done we have left overs left
463 # if nothing is done we have left overs left
464 return left_overs
464 return left_overs
465
465
466 def _handle_user_repo_groups(self, username, repository_groups, handle_user,
466 def _handle_user_repo_groups(self, username, repository_groups, handle_user,
467 handle_mode=None):
467 handle_mode=None):
468
468
469 left_overs = True
469 left_overs = True
470
470
471 from rhodecode.model.repo_group import RepoGroupModel
471 from rhodecode.model.repo_group import RepoGroupModel
472
472
473 if handle_mode == 'detach':
473 if handle_mode == 'detach':
474 for r in repository_groups:
474 for r in repository_groups:
475 r.user = handle_user
475 r.user = handle_user
476 # set description we know why we super admin now owns
476 # set description we know why we super admin now owns
477 # additional repositories that were orphaned !
477 # additional repositories that were orphaned !
478 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
478 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
479 r.personal = False
479 r.personal = False
480 self.sa.add(r)
480 self.sa.add(r)
481 left_overs = False
481 left_overs = False
482 elif handle_mode == 'delete':
482 elif handle_mode == 'delete':
483 for r in repository_groups:
483 for r in repository_groups:
484 RepoGroupModel().delete(r)
484 RepoGroupModel().delete(r)
485 left_overs = False
485 left_overs = False
486
486
487 # if nothing is done we have left overs left
487 # if nothing is done we have left overs left
488 return left_overs
488 return left_overs
489
489
490 def _handle_user_user_groups(self, username, user_groups, handle_user,
490 def _handle_user_user_groups(self, username, user_groups, handle_user,
491 handle_mode=None):
491 handle_mode=None):
492
492
493 left_overs = True
493 left_overs = True
494
494
495 from rhodecode.model.user_group import UserGroupModel
495 from rhodecode.model.user_group import UserGroupModel
496
496
497 if handle_mode == 'detach':
497 if handle_mode == 'detach':
498 for r in user_groups:
498 for r in user_groups:
499 for user_user_group_to_perm in r.user_user_group_to_perm:
499 for user_user_group_to_perm in r.user_user_group_to_perm:
500 if user_user_group_to_perm.user.username == username:
500 if user_user_group_to_perm.user.username == username:
501 user_user_group_to_perm.user = handle_user
501 user_user_group_to_perm.user = handle_user
502 r.user = handle_user
502 r.user = handle_user
503 # set description we know why we super admin now owns
503 # set description we know why we super admin now owns
504 # additional repositories that were orphaned !
504 # additional repositories that were orphaned !
505 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
505 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
506 self.sa.add(r)
506 self.sa.add(r)
507 left_overs = False
507 left_overs = False
508 elif handle_mode == 'delete':
508 elif handle_mode == 'delete':
509 for r in user_groups:
509 for r in user_groups:
510 UserGroupModel().delete(r)
510 UserGroupModel().delete(r)
511 left_overs = False
511 left_overs = False
512
512
513 # if nothing is done we have left overs left
513 # if nothing is done we have left overs left
514 return left_overs
514 return left_overs
515
515
516 def _handle_user_pull_requests(self, username, pull_requests, handle_user,
516 def _handle_user_pull_requests(self, username, pull_requests, handle_user,
517 handle_mode=None):
517 handle_mode=None):
518 left_overs = True
518 left_overs = True
519
519
520 from rhodecode.model.pull_request import PullRequestModel
520 from rhodecode.model.pull_request import PullRequestModel
521
521
522 if handle_mode == 'detach':
522 if handle_mode == 'detach':
523 for pr in pull_requests:
523 for pr in pull_requests:
524 pr.user_id = handle_user.user_id
524 pr.user_id = handle_user.user_id
525 # set description we know why we super admin now owns
525 # set description we know why we super admin now owns
526 # additional repositories that were orphaned !
526 # additional repositories that were orphaned !
527 pr.description += ' \n::detached pull requests from deleted user: %s' % (username,)
527 pr.description += ' \n::detached pull requests from deleted user: %s' % (username,)
528 self.sa.add(pr)
528 self.sa.add(pr)
529 left_overs = False
529 left_overs = False
530 elif handle_mode == 'delete':
530 elif handle_mode == 'delete':
531 for pr in pull_requests:
531 for pr in pull_requests:
532 PullRequestModel().delete(pr)
532 PullRequestModel().delete(pr)
533
533
534 left_overs = False
534 left_overs = False
535
535
536 # if nothing is done we have leftovers left
536 # if nothing is done we have leftovers left
537 return left_overs
537 return left_overs
538
538
539 def _handle_user_artifacts(self, username, artifacts, handle_user,
539 def _handle_user_artifacts(self, username, artifacts, handle_user,
540 handle_mode=None):
540 handle_mode=None):
541
541
542 left_overs = True
542 left_overs = True
543
543
544 if handle_mode == 'detach':
544 if handle_mode == 'detach':
545 for a in artifacts:
545 for a in artifacts:
546 a.upload_user = handle_user
546 a.upload_user = handle_user
547 # set description we know why we super admin now owns
547 # set description we know why we super admin now owns
548 # additional artifacts that were orphaned !
548 # additional artifacts that were orphaned !
549 a.file_description += ' \n::detached artifact from deleted user: %s' % (username,)
549 a.file_description += ' \n::detached artifact from deleted user: %s' % (username,)
550 self.sa.add(a)
550 self.sa.add(a)
551 left_overs = False
551 left_overs = False
552 elif handle_mode == 'delete':
552 elif handle_mode == 'delete':
553 from rhodecode.apps.file_store import utils as store_utils
553 from rhodecode.apps.file_store import utils as store_utils
554 request = get_current_request()
554 request = get_current_request()
555 storage = store_utils.get_file_storage(request.registry.settings)
555 storage = store_utils.get_file_storage(request.registry.settings)
556 for a in artifacts:
556 for a in artifacts:
557 file_uid = a.file_uid
557 file_uid = a.file_uid
558 storage.delete(file_uid)
558 storage.delete(file_uid)
559 self.sa.delete(a)
559 self.sa.delete(a)
560
560
561 left_overs = False
561 left_overs = False
562
562
563 # if nothing is done we have left overs left
563 # if nothing is done we have left overs left
564 return left_overs
564 return left_overs
565
565
566 def delete(self, user, cur_user=None, handle_repos=None,
566 def delete(self, user, cur_user=None, handle_repos=None,
567 handle_repo_groups=None, handle_user_groups=None,
567 handle_repo_groups=None, handle_user_groups=None,
568 handle_pull_requests=None, handle_artifacts=None, handle_new_owner=None):
568 handle_pull_requests=None, handle_artifacts=None, handle_new_owner=None):
569 from rhodecode.lib import hooks_base
569 from rhodecode.lib import hooks_base
570
570
571 if not cur_user:
571 if not cur_user:
572 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
572 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
573
573
574 user = self._get_user(user)
574 user = self._get_user(user)
575
575
576 try:
576 try:
577 if user.username == User.DEFAULT_USER:
577 if user.username == User.DEFAULT_USER:
578 raise DefaultUserException(
578 raise DefaultUserException(
579 "You can't remove this user since it's"
579 "You can't remove this user since it's"
580 " crucial for entire application")
580 " crucial for entire application")
581 handle_user = handle_new_owner or self.cls.get_first_super_admin()
581 handle_user = handle_new_owner or self.cls.get_first_super_admin()
582 log.debug('New detached objects owner %s', handle_user)
582 log.debug('New detached objects owner %s', handle_user)
583
583
584 left_overs = self._handle_user_repos(
584 left_overs = self._handle_user_repos(
585 user.username, user.repositories, handle_user, handle_repos)
585 user.username, user.repositories, handle_user, handle_repos)
586 if left_overs and user.repositories:
586 if left_overs and user.repositories:
587 repos = [x.repo_name for x in user.repositories]
587 repos = [x.repo_name for x in user.repositories]
588 raise UserOwnsReposException(
588 raise UserOwnsReposException(
589 'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
589 'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
590 'removed. Switch owners or remove those repositories:%(list_repos)s'
590 'removed. Switch owners or remove those repositories:%(list_repos)s'
591 % {'username': user.username, 'len_repos': len(repos),
591 % {'username': user.username, 'len_repos': len(repos),
592 'list_repos': ', '.join(repos)})
592 'list_repos': ', '.join(repos)})
593
593
594 left_overs = self._handle_user_repo_groups(
594 left_overs = self._handle_user_repo_groups(
595 user.username, user.repository_groups, handle_user, handle_repo_groups)
595 user.username, user.repository_groups, handle_user, handle_repo_groups)
596 if left_overs and user.repository_groups:
596 if left_overs and user.repository_groups:
597 repo_groups = [x.group_name for x in user.repository_groups]
597 repo_groups = [x.group_name for x in user.repository_groups]
598 raise UserOwnsRepoGroupsException(
598 raise UserOwnsRepoGroupsException(
599 'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
599 'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
600 'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
600 'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
601 % {'username': user.username, 'len_repo_groups': len(repo_groups),
601 % {'username': user.username, 'len_repo_groups': len(repo_groups),
602 'list_repo_groups': ', '.join(repo_groups)})
602 'list_repo_groups': ', '.join(repo_groups)})
603
603
604 left_overs = self._handle_user_user_groups(
604 left_overs = self._handle_user_user_groups(
605 user.username, user.user_groups, handle_user, handle_user_groups)
605 user.username, user.user_groups, handle_user, handle_user_groups)
606 if left_overs and user.user_groups:
606 if left_overs and user.user_groups:
607 user_groups = [x.users_group_name for x in user.user_groups]
607 user_groups = [x.users_group_name for x in user.user_groups]
608 raise UserOwnsUserGroupsException(
608 raise UserOwnsUserGroupsException(
609 'user "%s" still owns %s user groups and cannot be '
609 'user "%s" still owns %s user groups and cannot be '
610 'removed. Switch owners or remove those user groups:%s'
610 'removed. Switch owners or remove those user groups:%s'
611 % (user.username, len(user_groups), ', '.join(user_groups)))
611 % (user.username, len(user_groups), ', '.join(user_groups)))
612
612
613 left_overs = self._handle_user_pull_requests(
613 left_overs = self._handle_user_pull_requests(
614 user.username, user.user_pull_requests, handle_user, handle_pull_requests)
614 user.username, user.user_pull_requests, handle_user, handle_pull_requests)
615 if left_overs and user.user_pull_requests:
615 if left_overs and user.user_pull_requests:
616 pull_requests = [f'!{x.pull_request_id}' for x in user.user_pull_requests]
616 pull_requests = [f'!{x.pull_request_id}' for x in user.user_pull_requests]
617 raise UserOwnsPullRequestsException(
617 raise UserOwnsPullRequestsException(
618 'user "%s" still owns %s pull requests and cannot be '
618 'user "%s" still owns %s pull requests and cannot be '
619 'removed. Switch owners or remove those pull requests:%s'
619 'removed. Switch owners or remove those pull requests:%s'
620 % (user.username, len(pull_requests), ', '.join(pull_requests)))
620 % (user.username, len(pull_requests), ', '.join(pull_requests)))
621
621
622 left_overs = self._handle_user_artifacts(
622 left_overs = self._handle_user_artifacts(
623 user.username, user.artifacts, handle_user, handle_artifacts)
623 user.username, user.artifacts, handle_user, handle_artifacts)
624 if left_overs and user.artifacts:
624 if left_overs and user.artifacts:
625 artifacts = [x.file_uid for x in user.artifacts]
625 artifacts = [x.file_uid for x in user.artifacts]
626 raise UserOwnsArtifactsException(
626 raise UserOwnsArtifactsException(
627 'user "%s" still owns %s artifacts and cannot be '
627 'user "%s" still owns %s artifacts and cannot be '
628 'removed. Switch owners or remove those artifacts:%s'
628 'removed. Switch owners or remove those artifacts:%s'
629 % (user.username, len(artifacts), ', '.join(artifacts)))
629 % (user.username, len(artifacts), ', '.join(artifacts)))
630
630
631 user_data = user.get_dict() # fetch user data before expire
631 user_data = user.get_dict() # fetch user data before expire
632
632
633 # we might change the user data with detach/delete, make sure
633 # we might change the user data with detach/delete, make sure
634 # the object is marked as expired before actually deleting !
634 # the object is marked as expired before actually deleting !
635 self.sa.expire(user)
635 self.sa.expire(user)
636 self.sa.delete(user)
636 self.sa.delete(user)
637
637
638 hooks_base.delete_user(deleted_by=cur_user, **user_data)
638 hooks_base.delete_user(deleted_by=cur_user, **user_data)
639 except Exception:
639 except Exception:
640 log.error(traceback.format_exc())
640 log.error(traceback.format_exc())
641 raise
641 raise
642
642
643 def reset_password_link(self, data, pwd_reset_url):
643 def reset_password_link(self, data, pwd_reset_url):
644 from rhodecode.lib.celerylib import tasks, run_task
644 from rhodecode.lib.celerylib import tasks, run_task
645 from rhodecode.model.notification import EmailNotificationModel
645 from rhodecode.model.notification import EmailNotificationModel
646 user_email = data['email']
646 user_email = data['email']
647 try:
647 try:
648 user = User.get_by_email(user_email)
648 user = User.get_by_email(user_email)
649 if user:
649 if user:
650 log.debug('password reset user found %s', user)
650 log.debug('password reset user found %s', user)
651
651
652 email_kwargs = {
652 email_kwargs = {
653 'password_reset_url': pwd_reset_url,
653 'password_reset_url': pwd_reset_url,
654 'user': user,
654 'user': user,
655 'email': user_email,
655 'email': user_email,
656 'date': datetime.datetime.now(),
656 'date': datetime.datetime.now(),
657 'first_admin_email': User.get_first_super_admin().email
657 'first_admin_email': User.get_first_super_admin().email
658 }
658 }
659
659
660 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
660 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
661 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
661 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
662
662
663 recipients = [user_email]
663 recipients = [user_email]
664
664
665 action_logger_generic(
665 action_logger_generic(
666 'sending password reset email to user: {}'.format(
666 'sending password reset email to user: {}'.format(
667 user), namespace='security.password_reset')
667 user), namespace='security.password_reset')
668
668
669 run_task(tasks.send_email, recipients, subject,
669 run_task(tasks.send_email, recipients, subject,
670 email_body_plaintext, email_body)
670 email_body_plaintext, email_body)
671
671
672 else:
672 else:
673 log.debug("password reset email %s not found", user_email)
673 log.debug("password reset email %s not found", user_email)
674 except Exception:
674 except Exception:
675 log.error(traceback.format_exc())
675 log.error(traceback.format_exc())
676 return False
676 return False
677
677
678 return True
678 return True
679
679
680 def reset_password(self, data):
680 def reset_password(self, data):
681 from rhodecode.lib.celerylib import tasks, run_task
681 from rhodecode.lib.celerylib import tasks, run_task
682 from rhodecode.model.notification import EmailNotificationModel
682 from rhodecode.model.notification import EmailNotificationModel
683 from rhodecode.lib import auth
683 from rhodecode.lib import auth
684 user_email = data['email']
684 user_email = data['email']
685 pre_db = True
685 pre_db = True
686 try:
686 try:
687 user = User.get_by_email(user_email)
687 user = User.get_by_email(user_email)
688 new_passwd = auth.PasswordGenerator().gen_password(
688 new_passwd = auth.PasswordGenerator().gen_password(
689 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
689 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
690 if user:
690 if user:
691 user.password = auth.get_crypt_password(new_passwd)
691 user.password = auth.get_crypt_password(new_passwd)
692 # also force this user to reset his password !
692 # also force this user to reset his password !
693 user.update_userdata(force_password_change=True)
693 user.update_userdata(force_password_change=True)
694
694
695 Session().add(user)
695 Session().add(user)
696
696
697 # now delete the token in question
697 # now delete the token in question
698 UserApiKeys = AuthTokenModel.cls
698 UserApiKeys = AuthTokenModel.cls
699 UserApiKeys().query().filter(
699 UserApiKeys().query().filter(
700 UserApiKeys.api_key == data['token']).delete()
700 UserApiKeys.api_key == data['token']).delete()
701
701
702 Session().commit()
702 Session().commit()
703 log.info('successfully reset password for `%s`', user_email)
703 log.info('successfully reset password for `%s`', user_email)
704
704
705 if new_passwd is None:
705 if new_passwd is None:
706 raise Exception('unable to generate new password')
706 raise Exception('unable to generate new password')
707
707
708 pre_db = False
708 pre_db = False
709
709
710 email_kwargs = {
710 email_kwargs = {
711 'new_password': new_passwd,
711 'new_password': new_passwd,
712 'user': user,
712 'user': user,
713 'email': user_email,
713 'email': user_email,
714 'date': datetime.datetime.now(),
714 'date': datetime.datetime.now(),
715 'first_admin_email': User.get_first_super_admin().email
715 'first_admin_email': User.get_first_super_admin().email
716 }
716 }
717
717
718 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
718 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
719 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
719 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
720 **email_kwargs)
720 **email_kwargs)
721
721
722 recipients = [user_email]
722 recipients = [user_email]
723
723
724 action_logger_generic(
724 action_logger_generic(
725 'sent new password to user: {} with email: {}'.format(
725 'sent new password to user: {} with email: {}'.format(
726 user, user_email), namespace='security.password_reset')
726 user, user_email), namespace='security.password_reset')
727
727
728 run_task(tasks.send_email, recipients, subject,
728 run_task(tasks.send_email, recipients, subject,
729 email_body_plaintext, email_body)
729 email_body_plaintext, email_body)
730
730
731 except Exception:
731 except Exception:
732 log.error('Failed to update user password')
732 log.error('Failed to update user password')
733 log.error(traceback.format_exc())
733 log.error(traceback.format_exc())
734 if pre_db:
734 if pre_db:
735 # we rollback only if local db stuff fails. If it goes into
735 # we rollback only if local db stuff fails. If it goes into
736 # run_task, we're pass rollback state this wouldn't work then
736 # run_task, we're pass rollback state this wouldn't work then
737 Session().rollback()
737 Session().rollback()
738
738
739 return True
739 return True
740
740
741 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
741 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
742 """
742 """
743 Fetches auth_user by user_id,or api_key if present.
743 Fetches auth_user by user_id,or api_key if present.
744 Fills auth_user attributes with those taken from database.
744 Fills auth_user attributes with those taken from database.
745 Additionally set's is_authenitated if lookup fails
745 Additionally set's is_authenitated if lookup fails
746 present in database
746 present in database
747
747
748 :param auth_user: instance of user to set attributes
748 :param auth_user: instance of user to set attributes
749 :param user_id: user id to fetch by
749 :param user_id: user id to fetch by
750 :param api_key: api key to fetch by
750 :param api_key: api key to fetch by
751 :param username: username to fetch by
751 :param username: username to fetch by
752 """
752 """
753 def token_obfuscate(token):
753 def token_obfuscate(token):
754 if token:
754 if token:
755 return token[:4] + "****"
755 return token[:4] + "****"
756
756
757 if user_id is None and api_key is None and username is None:
757 if user_id is None and api_key is None and username is None:
758 raise Exception('You need to pass user_id, api_key or username')
758 raise Exception('You need to pass user_id, api_key or username')
759
759
760 log.debug(
760 log.debug(
761 'AuthUser: fill data execution based on: '
761 'AuthUser: fill data execution based on: '
762 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
762 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
763 try:
763 try:
764 dbuser = None
764 dbuser = None
765 if user_id:
765 if user_id:
766 dbuser = self.get(user_id)
766 dbuser = self.get(user_id)
767 elif api_key:
767 elif api_key:
768 dbuser = self.get_by_auth_token(api_key)
768 dbuser = self.get_by_auth_token(api_key)
769 elif username:
769 elif username:
770 dbuser = self.get_by_username(username)
770 dbuser = self.get_by_username(username)
771
771
772 if not dbuser:
772 if not dbuser:
773 log.warning(
773 log.warning(
774 'Unable to lookup user by id:%s api_key:%s username:%s',
774 'Unable to lookup user by id:%s api_key:%s username:%s',
775 user_id, token_obfuscate(api_key), username)
775 user_id, token_obfuscate(api_key), username)
776 return False
776 return False
777 if not dbuser.active:
777 if not dbuser.active:
778 log.debug('User `%s:%s` is inactive, skipping fill data',
778 log.debug('User `%s:%s` is inactive, skipping fill data',
779 username, user_id)
779 username, user_id)
780 return False
780 return False
781
781
782 log.debug('AuthUser: filling found user:%s data', dbuser)
782 log.debug('AuthUser: filling found user:%s data', dbuser)
783
783
784 attrs = {
784 attrs = {
785 'user_id': dbuser.user_id,
785 'user_id': dbuser.user_id,
786 'username': dbuser.username,
786 'username': dbuser.username,
787 'name': dbuser.name,
787 'name': dbuser.name,
788 'first_name': dbuser.first_name,
788 'first_name': dbuser.first_name,
789 'firstname': dbuser.firstname,
789 'firstname': dbuser.firstname,
790 'last_name': dbuser.last_name,
790 'last_name': dbuser.last_name,
791 'lastname': dbuser.lastname,
791 'lastname': dbuser.lastname,
792 'admin': dbuser.admin,
792 'admin': dbuser.admin,
793 'active': dbuser.active,
793 'active': dbuser.active,
794
794
795 'email': dbuser.email,
795 'email': dbuser.email,
796 'emails': dbuser.emails_cached(),
796 'emails': dbuser.emails_cached(),
797 'short_contact': dbuser.short_contact,
797 'short_contact': dbuser.short_contact,
798 'full_contact': dbuser.full_contact,
798 'full_contact': dbuser.full_contact,
799 'full_name': dbuser.full_name,
799 'full_name': dbuser.full_name,
800 'full_name_or_username': dbuser.full_name_or_username,
800 'full_name_or_username': dbuser.full_name_or_username,
801
801
802 '_api_key': dbuser._api_key,
802 '_api_key': dbuser._api_key,
803 '_user_data': dbuser._user_data,
803 '_user_data': dbuser._user_data,
804
804
805 'created_on': dbuser.created_on,
805 'created_on': dbuser.created_on,
806 'extern_name': dbuser.extern_name,
806 'extern_name': dbuser.extern_name,
807 'extern_type': dbuser.extern_type,
807 'extern_type': dbuser.extern_type,
808
808
809 'inherit_default_permissions': dbuser.inherit_default_permissions,
809 'inherit_default_permissions': dbuser.inherit_default_permissions,
810
810
811 'language': dbuser.language,
811 'language': dbuser.language,
812 'last_activity': dbuser.last_activity,
812 'last_activity': dbuser.last_activity,
813 'last_login': dbuser.last_login,
813 'last_login': dbuser.last_login,
814 'password': dbuser.password,
814 'password': dbuser.password,
815 }
815 }
816 auth_user.__dict__.update(attrs)
816 auth_user.__dict__.update(attrs)
817 except Exception:
817 except Exception:
818 log.error(traceback.format_exc())
818 log.error(traceback.format_exc())
819 auth_user.is_authenticated = False
819 auth_user.is_authenticated = False
820 return False
820 return False
821
821
822 return True
822 return True
823
823
824 def has_perm(self, user, perm):
824 def has_perm(self, user, perm):
825 perm = self._get_perm(perm)
825 perm = self._get_perm(perm)
826 user = self._get_user(user)
826 user = self._get_user(user)
827
827
828 return UserToPerm.query().filter(UserToPerm.user == user)\
828 return UserToPerm.query().filter(UserToPerm.user == user)\
829 .filter(UserToPerm.permission == perm).scalar() is not None
829 .filter(UserToPerm.permission == perm).scalar() is not None
830
830
831 def grant_perm(self, user, perm):
831 def grant_perm(self, user, perm):
832 """
832 """
833 Grant user global permissions
833 Grant user global permissions
834
834
835 :param user:
835 :param user:
836 :param perm:
836 :param perm:
837 """
837 """
838 user = self._get_user(user)
838 user = self._get_user(user)
839 perm = self._get_perm(perm)
839 perm = self._get_perm(perm)
840 # if this permission is already granted skip it
840 # if this permission is already granted skip it
841 _perm = UserToPerm.query()\
841 _perm = UserToPerm.query()\
842 .filter(UserToPerm.user == user)\
842 .filter(UserToPerm.user == user)\
843 .filter(UserToPerm.permission == perm)\
843 .filter(UserToPerm.permission == perm)\
844 .scalar()
844 .scalar()
845 if _perm:
845 if _perm:
846 return
846 return
847 new = UserToPerm()
847 new = UserToPerm()
848 new.user = user
848 new.user = user
849 new.permission = perm
849 new.permission = perm
850 self.sa.add(new)
850 self.sa.add(new)
851 return new
851 return new
852
852
853 def revoke_perm(self, user, perm):
853 def revoke_perm(self, user, perm):
854 """
854 """
855 Revoke users global permissions
855 Revoke users global permissions
856
856
857 :param user:
857 :param user:
858 :param perm:
858 :param perm:
859 """
859 """
860 user = self._get_user(user)
860 user = self._get_user(user)
861 perm = self._get_perm(perm)
861 perm = self._get_perm(perm)
862
862
863 obj = UserToPerm.query()\
863 obj = UserToPerm.query()\
864 .filter(UserToPerm.user == user)\
864 .filter(UserToPerm.user == user)\
865 .filter(UserToPerm.permission == perm)\
865 .filter(UserToPerm.permission == perm)\
866 .scalar()
866 .scalar()
867 if obj:
867 if obj:
868 self.sa.delete(obj)
868 self.sa.delete(obj)
869
869
870 def add_extra_email(self, user, email):
870 def add_extra_email(self, user, email):
871 """
871 """
872 Adds email address to UserEmailMap
872 Adds email address to UserEmailMap
873
873
874 :param user:
874 :param user:
875 :param email:
875 :param email:
876 """
876 """
877
877
878 user = self._get_user(user)
878 user = self._get_user(user)
879
879
880 obj = UserEmailMap()
880 obj = UserEmailMap()
881 obj.user = user
881 obj.user = user
882 obj.email = email
882 obj.email = email
883 self.sa.add(obj)
883 self.sa.add(obj)
884 return obj
884 return obj
885
885
886 def delete_extra_email(self, user, email_id):
886 def delete_extra_email(self, user, email_id):
887 """
887 """
888 Removes email address from UserEmailMap
888 Removes email address from UserEmailMap
889
889
890 :param user:
890 :param user:
891 :param email_id:
891 :param email_id:
892 """
892 """
893 user = self._get_user(user)
893 user = self._get_user(user)
894 obj = UserEmailMap.query().get(email_id)
894 obj = UserEmailMap.query().get(email_id)
895 if obj and obj.user_id == user.user_id:
895 if obj and obj.user_id == user.user_id:
896 self.sa.delete(obj)
896 self.sa.delete(obj)
897
897
898 def parse_ip_range(self, ip_range):
898 def parse_ip_range(self, ip_range):
899 ip_list = []
899 ip_list = []
900
900
901 def make_unique(value):
901 def make_unique(value):
902 seen = []
902 seen = []
903 return [c for c in value if not (c in seen or seen.append(c))]
903 return [c for c in value if not (c in seen or seen.append(c))]
904
904
905 # firsts split by commas
905 # firsts split by commas
906 for ip_range in ip_range.split(','):
906 for ip_range in ip_range.split(','):
907 if not ip_range:
907 if not ip_range:
908 continue
908 continue
909 ip_range = ip_range.strip()
909 ip_range = ip_range.strip()
910 if '-' in ip_range:
910 if '-' in ip_range:
911 start_ip, end_ip = ip_range.split('-', 1)
911 start_ip, end_ip = ip_range.split('-', 1)
912 start_ip = ipaddress.ip_address(safe_str(start_ip.strip()))
912 start_ip = ipaddress.ip_address(safe_str(start_ip.strip()))
913 end_ip = ipaddress.ip_address(safe_str(end_ip.strip()))
913 end_ip = ipaddress.ip_address(safe_str(end_ip.strip()))
914 parsed_ip_range = []
914 parsed_ip_range = []
915
915
916 for index in range(int(start_ip), int(end_ip) + 1):
916 for index in range(int(start_ip), int(end_ip) + 1):
917 new_ip = ipaddress.ip_address(index)
917 new_ip = ipaddress.ip_address(index)
918 parsed_ip_range.append(str(new_ip))
918 parsed_ip_range.append(str(new_ip))
919 ip_list.extend(parsed_ip_range)
919 ip_list.extend(parsed_ip_range)
920 else:
920 else:
921 ip_list.append(ip_range)
921 ip_list.append(ip_range)
922
922
923 return make_unique(ip_list)
923 return make_unique(ip_list)
924
924
925 def add_extra_ip(self, user, ip, description=None):
925 def add_extra_ip(self, user, ip, description=None):
926 """
926 """
927 Adds ip address to UserIpMap
927 Adds ip address to UserIpMap
928
928
929 :param user:
929 :param user:
930 :param ip:
930 :param ip:
931 """
931 """
932
932
933 user = self._get_user(user)
933 user = self._get_user(user)
934 obj = UserIpMap()
934 obj = UserIpMap()
935 obj.user = user
935 obj.user = user
936 obj.ip_addr = ip
936 obj.ip_addr = ip
937 obj.description = description
937 obj.description = description
938 self.sa.add(obj)
938 self.sa.add(obj)
939 return obj
939 return obj
940
940
941 auth_token_role = AuthTokenModel.cls
941 auth_token_role = AuthTokenModel.cls
942
942
943 def add_auth_token(self, user, lifetime_minutes, role, description='',
943 def add_auth_token(self, user, lifetime_minutes, role, description='',
944 scope_callback=None):
944 scope_callback=None):
945 """
945 """
946 Add AuthToken for user.
946 Add AuthToken for user.
947
947
948 :param user: username/user_id
948 :param user: username/user_id
949 :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit
949 :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit
950 :param role: one of AuthTokenModel.cls.ROLE_*
950 :param role: one of AuthTokenModel.cls.ROLE_*
951 :param description: optional string description
951 :param description: optional string description
952 """
952 """
953
953
954 token = AuthTokenModel().create(
954 token = AuthTokenModel().create(
955 user, description, lifetime_minutes, role)
955 user, description, lifetime_minutes, role)
956 if scope_callback and callable(scope_callback):
956 if scope_callback and callable(scope_callback):
957 # call the callback if we provide, used to attach scope for EE edition
957 # call the callback if we provide, used to attach scope for EE edition
958 scope_callback(token)
958 scope_callback(token)
959 return token
959 return token
960
960
961 def delete_extra_ip(self, user, ip_id):
961 def delete_extra_ip(self, user, ip_id):
962 """
962 """
963 Removes ip address from UserIpMap
963 Removes ip address from UserIpMap
964
964
965 :param user:
965 :param user:
966 :param ip_id:
966 :param ip_id:
967 """
967 """
968 user = self._get_user(user)
968 user = self._get_user(user)
969 obj = UserIpMap.query().get(ip_id)
969 obj = UserIpMap.query().get(ip_id)
970 if obj and obj.user_id == user.user_id:
970 if obj and obj.user_id == user.user_id:
971 self.sa.delete(obj)
971 self.sa.delete(obj)
972
972
973 def get_accounts_in_creation_order(self, current_user=None):
973 def get_accounts_in_creation_order(self, current_user=None):
974 """
974 """
975 Get accounts in order of creation for deactivation for license limits
975 Get accounts in order of creation for deactivation for license limits
976
976
977 pick currently logged in user, and append to the list in position 0
977 pick currently logged in user, and append to the list in position 0
978 pick all super-admins in order of creation date and add it to the list
978 pick all super-admins in order of creation date and add it to the list
979 pick all other accounts in order of creation and add it to the list.
979 pick all other accounts in order of creation and add it to the list.
980
980
981 Based on that list, the last accounts can be disabled as they are
981 Based on that list, the last accounts can be disabled as they are
982 created at the end and don't include any of the super admins as well
982 created at the end and don't include any of the super admins as well
983 as the current user.
983 as the current user.
984
984
985 :param current_user: optionally current user running this operation
985 :param current_user: optionally current user running this operation
986 """
986 """
987
987
988 if not current_user:
988 if not current_user:
989 current_user = get_current_rhodecode_user()
989 current_user = get_current_rhodecode_user()
990 active_super_admins = [
990 active_super_admins = [
991 x.user_id for x in User.query()
991 x.user_id for x in User.query()
992 .filter(User.user_id != current_user.user_id)
992 .filter(User.user_id != current_user.user_id)
993 .filter(User.active == true())
993 .filter(User.active == true())
994 .filter(User.admin == true())
994 .filter(User.admin == true())
995 .order_by(User.created_on.asc())]
995 .order_by(User.created_on.asc())]
996
996
997 active_regular_users = [
997 active_regular_users = [
998 x.user_id for x in User.query()
998 x.user_id for x in User.query()
999 .filter(User.user_id != current_user.user_id)
999 .filter(User.user_id != current_user.user_id)
1000 .filter(User.active == true())
1000 .filter(User.active == true())
1001 .filter(User.admin == false())
1001 .filter(User.admin == false())
1002 .order_by(User.created_on.asc())]
1002 .order_by(User.created_on.asc())]
1003
1003
1004 list_of_accounts = [current_user.user_id]
1004 list_of_accounts = [current_user.user_id]
1005 list_of_accounts += active_super_admins
1005 list_of_accounts += active_super_admins
1006 list_of_accounts += active_regular_users
1006 list_of_accounts += active_regular_users
1007
1007
1008 return list_of_accounts
1008 return list_of_accounts
1009
1009
    def deactivate_last_users(self, expected_users, current_user=None):
        """
        Deactivate accounts that are over the license limits.
        Algorithm of which accounts to disabled is based on the formula:

        Get current user, then super admins in creation order, then regular
        active users in creation order.

        Using that list we mark all accounts from the end of it as inactive.
        This way we block only latest created accounts.

        :param expected_users: number of accounts allowed to stay active; we
            deactivate everything past that count in the ordered list
        :param current_user: optionally the user performing this operation,
            defaults to the currently logged in user
        """

        list_of_accounts = self.get_accounts_in_creation_order(
            current_user=current_user)

        # position 0 of the list is the current user itself, hence the +1
        # offset: everything past the first `expected_users` entries after
        # the current user gets disabled
        for acc_id in list_of_accounts[expected_users + 1:]:
            user = User.get(acc_id)
            log.info('Deactivating account %s for license unlock', user)
            user.active = False
            Session().add(user)
            # NOTE(review): commit happens per-user inside the loop, so each
            # deactivation is persisted independently of later failures
            Session().commit()

        return
1036
1036
1037 def get_user_log(self, user, filter_term):
1037 def get_user_log(self, user, filter_term):
1038 user_log = UserLog.query()\
1038 user_log = UserLog.query()\
1039 .filter(or_(UserLog.user_id == user.user_id,
1039 .filter(or_(UserLog.user_id == user.user_id,
1040 UserLog.username == user.username))\
1040 UserLog.username == user.username))\
1041 .options(joinedload(UserLog.user))\
1041 .options(joinedload(UserLog.user))\
1042 .options(joinedload(UserLog.repository))\
1042 .options(joinedload(UserLog.repository))\
1043 .order_by(UserLog.action_date.desc())
1043 .order_by(UserLog.action_date.desc())
1044
1044
1045 user_log = user_log_filter(user_log, filter_term)
1045 user_log = user_log_filter(user_log, filter_term)
1046 return user_log
1046 return user_log
@@ -1,1115 +1,1115 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Set of generic validators
20 Set of generic validators
21 """
21 """
22
22
23
23
24 import os
24 import os
25 import re
25 import re
26 import logging
26 import logging
27 import collections
27 import collections
28
28
29 import formencode
29 import formencode
30 import ipaddress
30 import ipaddress
31 from formencode.validators import (
31 from formencode.validators import (
32 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set,
32 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set,
33 NotEmpty, IPAddress, CIDR, String, FancyValidator
33 NotEmpty, IPAddress, CIDR, String, FancyValidator
34 )
34 )
35
35
36 from sqlalchemy.sql.expression import true
36 from sqlalchemy.sql.expression import true
37 from sqlalchemy.util import OrderedSet
37 from sqlalchemy.util import OrderedSet
38
38
39 from rhodecode.authentication import (
39 from rhodecode.authentication import (
40 legacy_plugin_prefix, _import_legacy_plugin)
40 legacy_plugin_prefix, _import_legacy_plugin)
41 from rhodecode.authentication.base import loadplugin
41 from rhodecode.authentication.base import loadplugin
42 from rhodecode.apps._base import ADMIN_PREFIX
42 from rhodecode.apps._base import ADMIN_PREFIX
43 from rhodecode.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny
43 from rhodecode.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny
44 from rhodecode.lib.utils import repo_name_slug, make_db_config
44 from rhodecode.lib.utils import repo_name_slug, make_db_config
45 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
45 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
46 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.str_utils import safe_str
47 from rhodecode.lib.hash_utils import md5_safe
47 from rhodecode.lib.hash_utils import md5_safe
48 from rhodecode.lib.vcs.backends.git.repository import GitRepository
48 from rhodecode.lib.vcs.backends.git.repository import GitRepository
49 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
49 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
50 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
50 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
51 from rhodecode.model.db import (
51 from rhodecode.model.db import (
52 RepoGroup, Repository, UserGroup, User, ChangesetStatus, Gist)
52 RepoGroup, Repository, UserGroup, User, ChangesetStatus, Gist)
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54
54
55 # silence warnings and pylint
55 # silence warnings and pylint
56 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \
56 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \
57 NotEmpty, IPAddress, CIDR, String, FancyValidator
57 NotEmpty, IPAddress, CIDR, String, FancyValidator
58
58
59 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
60
60
61
61
62 class _Missing(object):
62 class _Missing(object):
63 pass
63 pass
64
64
65
65
66 Missing = _Missing()
66 Missing = _Missing()
67
67
68
68
def M(self, key, state, **kwargs):
    """
    Return the message string registered under ``key`` on a validator;
    passed kw params are used to substitute %(named)s placeholders inside
    the translated message strings.

    :param self: formencode validator instance providing ``message``
    :param key: message key to look up
    :param state: formencode state object passed through to the lookup
    :param kwargs: substitution values for the message template
    """
    return self.message(key, state, **kwargs)
82
82
83
83
def UniqueList(localizer, convert=None):
    """
    Factory for a formencode validator that coerces its input into a list
    with duplicates removed (first occurrence wins).

    :param localizer: translation function used for error messages
    :param convert: optional callable applied to every item after
        de-duplication
    """
    _ = localizer

    class _validator(formencode.FancyValidator):
        """
        Unique List !
        """
        accept_iterator = True

        messages = {
            'empty': _('Value cannot be an empty list'),
            'missing_value': _('Value cannot be an empty list'),
        }

        def _convert_to_python(self, value, state):
            # normalize the incoming value into a plain list first; the
            # previous set/tuple branches were identical and are merged
            if isinstance(value, (list, set, tuple)):
                items = list(value)
            elif value is None:
                items = []
            else:
                items = [value]

            # drop duplicates keeping first-seen order; dict.fromkeys is
            # O(n) vs the old list-membership scan which was O(n^2).
            # Items are expected to be hashable (form values are strings).
            ret_val = list(dict.fromkeys(items))

            if convert:
                ret_val = list(map(convert, ret_val))
            return ret_val

        def empty_value(self, value):
            return []

    return _validator
123
123
124
124
def UniqueListFromString(localizer):
    """
    Factory for a validator like ``UniqueList`` that additionally accepts a
    comma separated string, splitting it into a list first.

    :param localizer: translation function used for error messages
    """
    _ = localizer

    class _validator(UniqueList(localizer)):
        def _convert_to_python(self, value, state):
            # comma separated string -> list, then normal UniqueList handling
            if isinstance(value, str):
                value = aslist(value, ',')
            return super()._convert_to_python(value, state)
    return _validator
134
134
135
135
def ValidSvnPattern(localizer, section, repo_name=None):
    """
    Factory for a validator that rejects an SVN pattern that is already
    stored for the given ui section (optionally scoped to one repository).

    :param localizer: translation function used for error messages
    :param section: ui section to look up existing patterns in
    :param repo_name: optional repository scope for the settings model
    """
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'pattern_exists': _('Pattern already exists'),
        }

        def _validate_python(self, value, state):
            if not value:
                return

            model = VcsSettingsModel(repo=repo_name)
            existing = model.get_svn_patterns(section=section)
            # reject the value if any stored pattern matches it exactly
            if any(entry.value == value for entry in existing):
                msg = M(self, 'pattern_exists', state)
                raise formencode.Invalid(msg, value, state)
    return _validator
154
154
155
155
def ValidUsername(localizer, edit=False, old_data=None):
    """
    Factory for a validator that checks a username: not a reserved system
    name, not already taken (unless unchanged during an edit), and matching
    the allowed character set.

    :param localizer: translation function used for error messages
    :param edit: True when validating an edit of an existing user
    :param old_data: dict with ``user_id`` of the user being edited
    """
    _ = localizer
    old_data = old_data or {}

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'username_exists': _('Username "%(username)s" already exists'),
            'system_invalid_username':
                _('Username "%(username)s" is forbidden'),
            'invalid_username':
                _('Username may only contain alphanumeric characters '
                  'underscores, periods or dashes and must begin with '
                  'alphanumeric character or underscore')
        }

        def _validate_python(self, value, state):
            # reserved account names can never be used
            if value in ('default', 'new_user'):
                msg = M(self, 'system_invalid_username', state, username=value)
                raise formencode.Invalid(msg, value, state)

            # uniqueness: skip the check when editing and the name is kept
            old_un = None
            if edit:
                old_un = User.get(old_data.get('user_id')).username

            if not edit or old_un != value:
                if User.get_by_username(value, case_insensitive=True):
                    msg = M(self, 'username_exists', state, username=value)
                    raise formencode.Invalid(msg, value, state)

            # allowed characters: a word character first, then word
            # characters, dashes or periods, at most 255 chars in total
            if re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value) is None:
                msg = M(self, 'invalid_username', state)
                raise formencode.Invalid(msg, value, state)
    return _validator
190
190
191
191
def ValidRepoUser(localizer, allow_disabled=False):
    """
    Factory for a validator that requires the value to be an existing
    username; disabled accounts are rejected unless ``allow_disabled``.

    :param localizer: translation function used for error messages
    :param allow_disabled: accept inactive accounts when True
    """
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'invalid_username': _('Username %(username)s is not valid'),
            'disabled_username': _('Username %(username)s is disabled')
        }

        def _validate_python(self, value, state):
            try:
                user = User.query().filter(User.username == value).one()
            except Exception:
                # .one() raised: no such user (or unexpected duplicates)
                msg = M(self, 'invalid_username', state, username=value)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'username': msg}
                )

            if user and not allow_disabled and not user.active:
                msg = M(self, 'disabled_username', state, username=value)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'username': msg}
                )
    return _validator
215
215
216
216
def ValidUserGroup(localizer, edit=False, old_data=None):
    """
    Factory for a validator that checks a user group name: not reserved,
    unique (unless unchanged during an edit), and matching the allowed
    character set.

    :param localizer: translation function used for error messages
    :param edit: True when validating an edit of an existing group
    :param old_data: dict with ``users_group_id`` of the group being edited
    """
    _ = localizer
    old_data = old_data or {}

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'invalid_group': _('Invalid user group name'),
            'group_exist': _('User group `%(usergroup)s` already exists'),
            'invalid_usergroup_name':
                _('user group name may only contain alphanumeric '
                  'characters underscores, periods or dashes and must begin '
                  'with alphanumeric character')
        }

        def _validate_python(self, value, state):
            def _fail(msg):
                # all failures report against the users_group_name field
                raise formencode.Invalid(
                    msg, value, state, error_dict={'users_group_name': msg}
                )

            # 'default' is a reserved group name
            if value == 'default':
                _fail(M(self, 'invalid_group', state))

            # uniqueness: skip when editing and the name stays the same
            old_ugname = None
            if edit:
                old_id = old_data.get('users_group_id')
                old_ugname = UserGroup.get(old_id).users_group_name

            if not edit or old_ugname != value:
                if UserGroup.get_by_group_name(value, case_insensitive=True):
                    _fail(M(self, 'group_exist', state, usergroup=value))

            # allowed characters: alphanumeric first, then alphanumeric,
            # dashes, underscores or periods (at least two chars in total)
            if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None:
                _fail(M(self, 'invalid_usergroup_name', state))
    return _validator
258
258
259
259
260 def ValidRepoGroup(localizer, edit=False, old_data=None, can_create_in_root=False):
260 def ValidRepoGroup(localizer, edit=False, old_data=None, can_create_in_root=False):
261 _ = localizer
261 _ = localizer
262 old_data = old_data or {}
262 old_data = old_data or {}
263
263
264 class _validator(formencode.validators.FancyValidator):
264 class _validator(formencode.validators.FancyValidator):
265 messages = {
265 messages = {
266 'group_parent_id': _('Cannot assign this group as parent'),
266 'group_parent_id': _('Cannot assign this group as parent'),
267 'group_exists': _('Group "%(group_name)s" already exists'),
267 'group_exists': _('Group "%(group_name)s" already exists'),
268 'repo_exists': _('Repository with name "%(group_name)s" '
268 'repo_exists': _('Repository with name "%(group_name)s" '
269 'already exists'),
269 'already exists'),
270 'permission_denied': _("no permission to store repository group"
270 'permission_denied': _("no permission to store repository group"
271 "in this location"),
271 "in this location"),
272 'permission_denied_root': _(
272 'permission_denied_root': _(
273 "no permission to store repository group "
273 "no permission to store repository group "
274 "in root location")
274 "in root location")
275 }
275 }
276
276
277 def _convert_to_python(self, value, state):
277 def _convert_to_python(self, value, state):
278 group_name = repo_name_slug(value.get('group_name', ''))
278 group_name = repo_name_slug(value.get('group_name', ''))
279 group_parent_id = safe_int(value.get('group_parent_id'))
279 group_parent_id = safe_int(value.get('group_parent_id'))
280 gr = RepoGroup.get(group_parent_id)
280 gr = RepoGroup.get(group_parent_id)
281 if gr:
281 if gr:
282 parent_group_path = gr.full_path
282 parent_group_path = gr.full_path
283 # value needs to be aware of group name in order to check
283 # value needs to be aware of group name in order to check
284 # db key This is an actual just the name to store in the
284 # db key This is an actual just the name to store in the
285 # database
285 # database
286 group_name_full = (
286 group_name_full = (
287 parent_group_path + RepoGroup.url_sep() + group_name)
287 parent_group_path + RepoGroup.url_sep() + group_name)
288 else:
288 else:
289 group_name_full = group_name
289 group_name_full = group_name
290
290
291 value['group_name'] = group_name
291 value['group_name'] = group_name
292 value['group_name_full'] = group_name_full
292 value['group_name_full'] = group_name_full
293 value['group_parent_id'] = group_parent_id
293 value['group_parent_id'] = group_parent_id
294 return value
294 return value
295
295
296 def _validate_python(self, value, state):
296 def _validate_python(self, value, state):
297
297
298 old_group_name = None
298 old_group_name = None
299 group_name = value.get('group_name')
299 group_name = value.get('group_name')
300 group_name_full = value.get('group_name_full')
300 group_name_full = value.get('group_name_full')
301 group_parent_id = safe_int(value.get('group_parent_id'))
301 group_parent_id = safe_int(value.get('group_parent_id'))
302 if group_parent_id == -1:
302 if group_parent_id == -1:
303 group_parent_id = None
303 group_parent_id = None
304
304
305 group_obj = RepoGroup.get(old_data.get('group_id'))
305 group_obj = RepoGroup.get(old_data.get('group_id'))
306 parent_group_changed = False
306 parent_group_changed = False
307 if edit:
307 if edit:
308 old_group_name = group_obj.group_name
308 old_group_name = group_obj.group_name
309 old_group_parent_id = group_obj.group_parent_id
309 old_group_parent_id = group_obj.group_parent_id
310
310
311 if group_parent_id != old_group_parent_id:
311 if group_parent_id != old_group_parent_id:
312 parent_group_changed = True
312 parent_group_changed = True
313
313
314 # TODO: mikhail: the following if statement is not reached
314 # TODO: mikhail: the following if statement is not reached
315 # since group_parent_id's OneOf validation fails before.
315 # since group_parent_id's OneOf validation fails before.
316 # Can be removed.
316 # Can be removed.
317
317
318 # check against setting a parent of self
318 # check against setting a parent of self
319 parent_of_self = (
319 parent_of_self = (
320 old_data['group_id'] == group_parent_id
320 old_data['group_id'] == group_parent_id
321 if group_parent_id else False
321 if group_parent_id else False
322 )
322 )
323 if parent_of_self:
323 if parent_of_self:
324 msg = M(self, 'group_parent_id', state)
324 msg = M(self, 'group_parent_id', state)
325 raise formencode.Invalid(
325 raise formencode.Invalid(
326 msg, value, state, error_dict={'group_parent_id': msg}
326 msg, value, state, error_dict={'group_parent_id': msg}
327 )
327 )
328
328
329 # group we're moving current group inside
329 # group we're moving current group inside
330 child_group = None
330 child_group = None
331 if group_parent_id:
331 if group_parent_id:
332 child_group = RepoGroup.query().filter(
332 child_group = RepoGroup.query().filter(
333 RepoGroup.group_id == group_parent_id).scalar()
333 RepoGroup.group_id == group_parent_id).scalar()
334
334
335 # do a special check that we cannot move a group to one of
335 # do a special check that we cannot move a group to one of
336 # it's children
336 # it's children
337 if edit and child_group:
337 if edit and child_group:
338 parents = [x.group_id for x in child_group.parents]
338 parents = [x.group_id for x in child_group.parents]
339 move_to_children = old_data['group_id'] in parents
339 move_to_children = old_data['group_id'] in parents
340 if move_to_children:
340 if move_to_children:
341 msg = M(self, 'group_parent_id', state)
341 msg = M(self, 'group_parent_id', state)
342 raise formencode.Invalid(
342 raise formencode.Invalid(
343 msg, value, state, error_dict={'group_parent_id': msg})
343 msg, value, state, error_dict={'group_parent_id': msg})
344
344
345 # Check if we have permission to store in the parent.
345 # Check if we have permission to store in the parent.
346 # Only check if the parent group changed.
346 # Only check if the parent group changed.
347 if parent_group_changed:
347 if parent_group_changed:
348 if child_group is None:
348 if child_group is None:
349 if not can_create_in_root:
349 if not can_create_in_root:
350 msg = M(self, 'permission_denied_root', state)
350 msg = M(self, 'permission_denied_root', state)
351 raise formencode.Invalid(
351 raise formencode.Invalid(
352 msg, value, state,
352 msg, value, state,
353 error_dict={'group_parent_id': msg})
353 error_dict={'group_parent_id': msg})
354 else:
354 else:
355 valid = HasRepoGroupPermissionAny('group.admin')
355 valid = HasRepoGroupPermissionAny('group.admin')
356 forbidden = not valid(
356 forbidden = not valid(
357 child_group.group_name, 'can create group validator')
357 child_group.group_name, 'can create group validator')
358 if forbidden:
358 if forbidden:
359 msg = M(self, 'permission_denied', state)
359 msg = M(self, 'permission_denied', state)
360 raise formencode.Invalid(
360 raise formencode.Invalid(
361 msg, value, state,
361 msg, value, state,
362 error_dict={'group_parent_id': msg})
362 error_dict={'group_parent_id': msg})
363
363
364 # if we change the name or it's new group, check for existing names
364 # if we change the name or it's new group, check for existing names
365 # or repositories with the same name
365 # or repositories with the same name
366 if old_group_name != group_name_full or not edit:
366 if old_group_name != group_name_full or not edit:
367 # check group
367 # check group
368 gr = RepoGroup.get_by_group_name(group_name_full)
368 gr = RepoGroup.get_by_group_name(group_name_full)
369 if gr:
369 if gr:
370 msg = M(self, 'group_exists', state, group_name=group_name)
370 msg = M(self, 'group_exists', state, group_name=group_name)
371 raise formencode.Invalid(
371 raise formencode.Invalid(
372 msg, value, state, error_dict={'group_name': msg})
372 msg, value, state, error_dict={'group_name': msg})
373
373
374 # check for same repo
374 # check for same repo
375 repo = Repository.get_by_repo_name(group_name_full)
375 repo = Repository.get_by_repo_name(group_name_full)
376 if repo:
376 if repo:
377 msg = M(self, 'repo_exists', state, group_name=group_name)
377 msg = M(self, 'repo_exists', state, group_name=group_name)
378 raise formencode.Invalid(
378 raise formencode.Invalid(
379 msg, value, state, error_dict={'group_name': msg})
379 msg, value, state, error_dict={'group_name': msg})
380 return _validator
380 return _validator
381
381
382
382
def ValidPassword(localizer):
    """Validator factory: rejects passwords containing non-ascii characters."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'invalid_password':
                _('Invalid characters (non-ascii) in password')
        }

        def _validate_python(self, value, state):
            # empty/None passwords pass through untouched; only a
            # non-empty value with non-ascii characters is rejected
            if not value:
                return
            if value.isascii():
                return
            msg = M(self, 'invalid_password', state)
            raise formencode.Invalid(msg, value, state)
    return _validator
397
397
398
398
def ValidPasswordsMatch(
        localizer, passwd='new_password',
        passwd_confirmation='password_confirmation'):
    """Validator factory: ensures the password and its confirmation field
    carry the same value; on mismatch both fields are flagged."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'password_mismatch': _('Passwords do not match'),
        }

        def _validate_python(self, value, state):
            # prefer a plain `password` field if present, otherwise fall
            # back to the configured primary field name
            entered = value.get('password') or value.get(passwd)
            confirmed = value[passwd_confirmation]
            if entered == confirmed:
                return
            msg = M(self, 'password_mismatch', state)
            raise formencode.Invalid(
                msg, value, state,
                error_dict={passwd: msg, passwd_confirmation: msg}
            )
    return _validator
419
419
420
420
def ValidAuth(localizer):
    """Validator factory for the login form.

    Authenticates the submitted username/password pair through the
    configured auth plugins and raises ``formencode.Invalid`` with
    per-field errors on failure.
    """
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'invalid_password': _('invalid password'),
            'invalid_username': _('invalid user name'),
            'disabled_account': _('Your account is disabled')
        }

        def _validate_python(self, value, state):
            # local import, presumably to avoid an import cycle at module
            # load time -- confirm before moving to the top of the file
            from rhodecode.authentication.base import authenticate, HTTP_TYPE

            password = value['password']
            username = value['username']

            if not authenticate(username, password, '', HTTP_TYPE,
                                skip_missing=True):
                user = User.get_by_username(username)
                if user and not user.active:
                    # the user exists but the account was deactivated
                    log.warning('user %s is disabled', username)
                    msg = M(self, 'disabled_account', state)
                    raise formencode.Invalid(
                        msg, value, state, error_dict={'username': msg}
                    )
                else:
                    # unknown user or wrong password; both fields are
                    # flagged so the response doesn't reveal which one
                    log.warning('user `%s` failed to authenticate', username)
                    msg = M(self, 'invalid_username', state)
                    msg2 = M(self, 'invalid_password', state)
                    raise formencode.Invalid(
                        msg, value, state,
                        error_dict={'username': msg, 'password': msg2}
                    )
    return _validator
455
455
456
456
def ValidRepoName(localizer, edit=False, old_data=None):
    """Validator factory for repository names.

    ``_convert_to_python`` slugifies the submitted name and derives the
    full group-prefixed name stored in the database;
    ``_validate_python`` rejects reserved names and collisions with
    existing repositories or repository groups.

    :param edit: True when validating an edit of an existing repository;
        collision checks then only run if the name actually changed.
    :param old_data: previous repository data, used to detect renames.
    """
    old_data = old_data or {}
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'invalid_repo_name':
                _('Repository name %(repo)s is disallowed'),
            # top level
            'repository_exists': _('Repository with name %(repo)s '
                                   'already exists'),
            'group_exists': _('Repository group with name "%(repo)s" '
                              'already exists'),
            # inside a group
            'repository_in_group_exists': _('Repository with name %(repo)s '
                                            'exists in group "%(group)s"'),
            'group_in_group_exists': _(
                'Repository group with name "%(repo)s" '
                'exists in group "%(group)s"'),
        }

        def _convert_to_python(self, value, state):
            # normalize the plain name into a slug, then compute the
            # group-prefixed form used as the database key
            repo_name = repo_name_slug(value.get('repo_name', ''))
            repo_group = value.get('repo_group')
            if repo_group:
                gr = RepoGroup.get(repo_group)
                group_path = gr.full_path
                group_name = gr.group_name
                # value needs to be aware of group name in order to check
                # db key This is an actual just the name to store in the
                # database
                repo_name_full = group_path + RepoGroup.url_sep() + repo_name
            else:
                group_name = group_path = ''
                repo_name_full = repo_name

            value['repo_name'] = repo_name
            value['repo_name_full'] = repo_name_full
            value['group_path'] = group_path
            value['group_name'] = group_name
            return value

        def _validate_python(self, value, state):

            repo_name = value.get('repo_name')
            repo_name_full = value.get('repo_name_full')
            group_path = value.get('group_path')
            group_name = value.get('group_name')

            # reserved names: the admin url prefix and the empty name
            if repo_name in [ADMIN_PREFIX, '']:
                msg = M(self, 'invalid_repo_name', state, repo=repo_name)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'repo_name': msg})

            # collision checks only apply on creation or an actual rename
            rename = old_data.get('repo_name') != repo_name_full
            create = not edit
            if rename or create:

                if group_path:
                    # inside a group: check against both repositories and
                    # nested repository groups
                    if Repository.get_by_repo_name(repo_name_full):
                        msg = M(self, 'repository_in_group_exists', state,
                                repo=repo_name, group=group_name)
                        raise formencode.Invalid(
                            msg, value, state, error_dict={'repo_name': msg})
                    if RepoGroup.get_by_group_name(repo_name_full):
                        msg = M(self, 'group_in_group_exists', state,
                                repo=repo_name, group=group_name)
                        raise formencode.Invalid(
                            msg, value, state, error_dict={'repo_name': msg})
                else:
                    # top level: same checks against the root namespace
                    if RepoGroup.get_by_group_name(repo_name_full):
                        msg = M(self, 'group_exists', state, repo=repo_name)
                        raise formencode.Invalid(
                            msg, value, state, error_dict={'repo_name': msg})

                    if Repository.get_by_repo_name(repo_name_full):
                        msg = M(
                            self, 'repository_exists', state, repo=repo_name)
                        raise formencode.Invalid(
                            msg, value, state, error_dict={'repo_name': msg})
            return value
    return _validator
539
539
540
540
def ValidForkName(localizer, *args, **kwargs):
    """Validator factory for fork names.

    A fork name obeys exactly the same rules as any repository name,
    so this simply delegates to :func:`ValidRepoName`.
    """
    _ = localizer
    return ValidRepoName(localizer, *args, **kwargs)
545
545
546
546
def SlugifyName(localizer):
    """Validator factory that normalizes the incoming value into a
    repository-name slug via ``repo_name_slug``."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):

        def _convert_to_python(self, value, state):
            # the conversion step does all the work ...
            return repo_name_slug(value)

        def _validate_python(self, value, state):
            # ... and validation is intentionally a no-op
            pass
    return _validator
558
558
559
559
def CannotHaveGitSuffix(localizer):
    """Validator factory rejecting repository names that end in ``.git``."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'has_git_suffix':
                _('Repository name cannot end with .git'),
        }

        def _convert_to_python(self, value, state):
            # no normalization needed; pass the value through unchanged
            return value

        def _validate_python(self, value, state):
            # empty/None values are acceptable; only a trailing `.git`
            # on a non-empty name is rejected
            if not value:
                return
            if not value.endswith('.git'):
                return
            msg = M(self, 'has_git_suffix', state)
            raise formencode.Invalid(
                msg, value, state, error_dict={'repo_name': msg})
    return _validator
579
579
580
580
def ValidCloneUri(localizer):
    """Validator factory checking ``clone_uri`` against the repository type.

    The url is probed with the matching backend (Mercurial/Git/Subversion);
    unsupported url schemes yield a field error listing the allowed
    prefixes, any other probe failure yields a generic field error.
    """
    _ = localizer

    class InvalidCloneUrl(Exception):
        # the scheme prefixes that would have been accepted; filled in by
        # url_handler and reported back to the user in the error message
        allowed_prefixes = ()

    def url_handler(repo_type, url):
        # Probe `url` with the backend matching `repo_type`. Raises
        # InvalidCloneUrl for unsupported schemes; backend check_url calls
        # raise their own errors for unreachable urls / bad credentials.
        config = make_db_config(clear_session=False)
        if repo_type == 'hg':
            allowed_prefixes = ('http', 'svn+http', 'git+http')

            if 'http' in url[:4]:
                # initially check if it's at least the proper URL
                # or does it pass basic auth
                MercurialRepository.check_url(url, config)
            elif 'svn+http' in url[:8]:  # svn->hg import
                SubversionRepository.check_url(url, config)
            elif 'git+http' in url[:8]:  # git->hg import
                raise NotImplementedError()
            else:
                exc = InvalidCloneUrl('Clone from URI %s not allowed. '
                                      'Allowed url must start with one of %s'
                                      % (url, ','.join(allowed_prefixes)))
                exc.allowed_prefixes = allowed_prefixes
                raise exc

        elif repo_type == 'git':
            allowed_prefixes = ('http', 'svn+http', 'hg+http')
            if 'http' in url[:4]:
                # initially check if it's at least the proper URL
                # or does it pass basic auth
                GitRepository.check_url(url, config)
            elif 'svn+http' in url[:8]:  # svn->git import
                raise NotImplementedError()
            elif 'hg+http' in url[:8]:  # hg->git import
                raise NotImplementedError()
            else:
                exc = InvalidCloneUrl('Clone from URI %s not allowed. '
                                      'Allowed url must start with one of %s'
                                      % (url, ','.join(allowed_prefixes)))
                exc.allowed_prefixes = allowed_prefixes
                raise exc

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'clone_uri': _('invalid clone url or credentials for %(rtype)s repository'),
            'invalid_clone_uri': _(
                'Invalid clone url, provide a valid clone '
                'url starting with one of %(allowed_prefixes)s')
        }

        def _validate_python(self, value, state):
            repo_type = value.get('repo_type')
            url = value.get('clone_uri')

            # an empty clone url is valid (no remote clone requested)
            if url:
                try:
                    url_handler(repo_type, url)
                except InvalidCloneUrl as e:
                    # unsupported scheme: tell the user what is allowed
                    log.warning(e)
                    msg = M(self, 'invalid_clone_uri', state, rtype=repo_type,
                            allowed_prefixes=','.join(e.allowed_prefixes))
                    raise formencode.Invalid(msg, value, state,
                                             error_dict={'clone_uri': msg})
                except Exception:
                    # unreachable host, bad credentials, etc. -> generic error
                    log.exception('Url validation failed')
                    msg = M(self, 'clone_uri', state, rtype=repo_type)
                    raise formencode.Invalid(msg, value, state,
                                             error_dict={'clone_uri': msg})
    return _validator
651
651
652
652
def ValidForkType(localizer, old_data=None):
    """Validator factory ensuring a fork keeps its parent's repository type.

    :param old_data: dict carrying the parent repository's type under the
        ``repo_type`` key; the submitted value must match it exactly.
    """
    _ = localizer
    old_data = old_data or {}

    class _validator(formencode.validators.FancyValidator):
        messages = {
            # message wording fixed ("have" -> "has")
            'invalid_fork_type': _('Fork has to be the same type as parent')
        }

        def _validate_python(self, value, state):
            # `value` is the submitted repo type (e.g. 'hg'/'git');
            # it must be identical to the parent's stored type
            if old_data['repo_type'] != value:
                msg = M(self, 'invalid_fork_type', state)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'repo_type': msg}
                )
    return _validator
669
669
670
670
def CanWriteGroup(localizer, old_data=None):
    """Validator factory checking the user may create/store a repository in
    the selected repository group (or in the root location).

    The value is the target group id; ``-1``/``"-1"`` means root.
    Permission is only re-checked when the group actually changed (or on a
    brand-new object), so users who lost write access keep existing repos.
    """
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'permission_denied': _(
                "You do not have the permission "
                "to create repositories in this group."),
            'permission_denied_root': _(
                "You do not have the permission to store repositories in "
                "the root location.")
        }

        def _convert_to_python(self, value, state):
            # root location
            if value in [-1, "-1"]:
                return None
            return value

        def _validate_python(self, value, state):
            gr = RepoGroup.get(value)
            gr_name = gr.group_name if gr else None  # None means ROOT location
            # create repositories with write permission on group is set to true
            create_on_write = HasPermissionAny(
                'hg.create.write_on_repogroup.true')()
            group_admin = HasRepoGroupPermissionAny('group.admin')(
                gr_name, 'can write into group validator')
            group_write = HasRepoGroupPermissionAny('group.write')(
                gr_name, 'can write into group validator')
            # admin always allowed; write only when the global
            # write-on-repogroup setting is enabled
            forbidden = not (group_admin or (group_write and create_on_write))
            can_create_repos = HasPermissionAny(
                'hg.admin', 'hg.create.repository')
            gid = (old_data['repo_group'].get('group_id')
                   if (old_data and 'repo_group' in old_data) else None)
            value_changed = gid != safe_int(value)
            new = not old_data
            # do check if we changed the value, there's a case that someone got
            # revoked write permissions to a repository, he still created, we
            # don't need to check permission if he didn't change the value of
            # groups in form box
            if value_changed or new:
                # parent group need to be existing
                if gr and forbidden:
                    msg = M(self, 'permission_denied', state)
                    raise formencode.Invalid(
                        msg, value, state, error_dict={'repo_type': msg}
                    )
                # check if we can write to root location !
                elif gr is None and not can_create_repos():
                    msg = M(self, 'permission_denied_root', state)
                    raise formencode.Invalid(
                        msg, value, state, error_dict={'repo_type': msg}
                    )
    return _validator
725
725
726
726
def ValidPerms(localizer, type_='repo'):
    """Validator factory translating raw permission form fields into
    ``perm_additions`` / ``perm_updates`` / ``perm_deletions`` lists.

    Form fields handled:
      * ``perm_new_member_{key}_{pos}`` -- newly added members,
      * ``perm_del_member_{key}_{pos}`` -- removed members,
      * ``u_perm_{id}`` / ``g_perm_{id}`` -- radio-button state updates
        for existing user / user-group entries.

    :param type_: one of ``'repo'``, ``'repo_group'``, ``'user_group'``;
        selects the "none" permission used for the default user on
        private repositories.
    """
    _ = localizer
    if type_ == 'repo_group':
        EMPTY_PERM = 'group.none'
    elif type_ == 'repo':
        EMPTY_PERM = 'repository.none'
    elif type_ == 'user_group':
        EMPTY_PERM = 'usergroup.none'

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'perm_new_member_name':
                _('This username or user group name is not valid')
        }

        def _convert_to_python(self, value, state):
            perm_updates = OrderedSet()
            perm_additions = OrderedSet()
            perm_deletions = OrderedSet()
            # build a list of permission to update/delete and new permission

            # Read the perm_new_member/perm_del_member attributes and group
            # them by their positional IDs
            new_perms_group = collections.defaultdict(dict)
            del_perms_group = collections.defaultdict(dict)
            for k, v in list(value.copy().items()):
                if k.startswith('perm_del_member'):
                    # delete from org storage so we don't process that later
                    del value[k]
                    # part is `id`, `type`
                    _type, part = k.split('perm_del_member_')
                    args = part.split('_')
                    if len(args) == 2:
                        _key, pos = args
                        del_perms_group[pos][_key] = v
                if k.startswith('perm_new_member'):
                    # delete from org storage so we don't process that later
                    del value[k]
                    # part is `id`, `type`, `perm`
                    _type, part = k.split('perm_new_member_')
                    args = part.split('_')
                    if len(args) == 2:
                        _key, pos = args
                        new_perms_group[pos][_key] = v

            # store the deletes
            for k in sorted(del_perms_group.keys()):
                perm_dict = del_perms_group[k]
                del_member = perm_dict.get('id')
                del_type = perm_dict.get('type')
                if del_member and del_type:
                    perm_deletions.add(
                        (del_member, None, del_type))

            # store additions in order of how they were added in web form
            for k in sorted(new_perms_group.keys()):
                perm_dict = new_perms_group[k]
                new_member = perm_dict.get('id')
                new_type = perm_dict.get('type')
                new_perm = perm_dict.get('perm')
                if new_member and new_perm and new_type:
                    perm_additions.add(
                        (new_member, new_perm, new_type))

            # get updates of permissions
            # (read the existing radio button states)
            default_user_id = User.get_default_user_id()

            for k, update_value in list(value.items()):
                if k.startswith('u_perm_') or k.startswith('g_perm_'):
                    obj_type = k[0]
                    obj_id = k[7:]
                    update_type = {'u': 'user',
                                   'g': 'user_group'}[obj_type]

                    if obj_type == 'u' and safe_int(obj_id) == default_user_id:
                        if str2bool(value.get('repo_private')):
                            # prevent from updating default user permissions
                            # when this repository is marked as private
                            update_value = EMPTY_PERM

                    perm_updates.add(
                        (obj_id, update_value, update_type))

            value['perm_additions'] = []  # propagated later
            value['perm_updates'] = list(perm_updates)
            value['perm_deletions'] = list(perm_deletions)

            updates_map = dict(
                (x[0], (x[1], x[2])) for x in value['perm_updates'])
            # make sure Additions don't override updates.
            for member_id, perm, member_type in list(perm_additions):
                if member_id in updates_map:
                    perm = updates_map[member_id][0]
                value['perm_additions'].append((member_id, perm, member_type))

                # on new entries validate users they exist and they are active !
                # this leaves feedback to the form
                try:
                    if member_type == 'user':
                        User.query()\
                            .filter(User.active == true())\
                            .filter(User.user_id == member_id).one()
                    if member_type == 'user_group':
                        UserGroup.query()\
                            .filter(UserGroup.users_group_active == true())\
                            .filter(UserGroup.users_group_id == member_id)\
                            .one()

                except Exception:
                    log.exception('Updated permission failed: org_exc:')
                    # BUGFIX: was M(self, 'perm_new_member_type', ...) which
                    # is not a defined message key and raised KeyError here
                    # instead of reporting the form error.
                    msg = M(self, 'perm_new_member_name', state)
                    raise formencode.Invalid(
                        msg, value, state, error_dict={
                            'perm_new_member_name': msg}
                    )
            return value
    return _validator
845
845
846
846
def ValidPath(localizer):
    """Factory for a validator that requires the value to be an existing directory."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        # localized error messages, keyed for M()
        messages = {
            'invalid_path': _('This is not a valid path')
        }

        def _validate_python(self, value, state):
            # accept only paths that exist as directories on this host
            if os.path.isdir(value):
                return
            msg = M(self, 'invalid_path', state)
            raise formencode.Invalid(
                msg, value, state, error_dict={'paths_root_path': msg})
    return _validator
862
862
863
863
def UniqSystemEmail(localizer, old_data=None):
    """Factory for a validator rejecting e-mail addresses already taken by another user."""
    _ = localizer
    old_data = old_data or {}

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'email_taken': _('This e-mail address is already taken')
        }

        def _convert_to_python(self, value, state):
            # e-mail comparison is case-insensitive; normalize before validating
            return value.lower()

        def _validate_python(self, value, state):
            previous_email = (old_data.get('email') or '').lower()
            if previous_email == value:
                # keeping the same address on an existing account is always fine
                return
            existing = User.get_by_email(value, case_insensitive=True)
            if existing:
                msg = M(self, 'email_taken', state)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'email': msg})
    return _validator
885
885
886
886
def ValidSystemEmail(localizer):
    """Factory for a validator requiring the e-mail to belong to an existing user."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'non_existing_email': _('e-mail "%(email)s" does not exist.')
        }

        def _convert_to_python(self, value, state):
            # lookups below are case-insensitive; store the canonical lower form
            return value.lower()

        def _validate_python(self, value, state):
            found = User.get_by_email(value, case_insensitive=True)
            if found is None:
                msg = M(self, 'non_existing_email', state, email=value)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'email': msg})
    return _validator
906
906
907
907
def NotReviewedRevisions(localizer, repo_id):
    """Factory for a validator rejecting revisions that are already reviewed
    or already belong to a pull request in the repository ``repo_id``."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'rev_already_reviewed':
                _('Revisions %(revs)s are already part of pull request '
                  'or have set status'),
        }

        def _validate_python(self, value, state):
            # An existing ChangesetStatus row for a revision means it is
            # either attached to a pull request or already carries a status.
            statuses = (
                ChangesetStatus.query()
                .filter(ChangesetStatus.revision.in_(value))
                .filter(ChangesetStatus.repo_id == repo_id)
                .all())

            offending = []
            for entry in statuses:
                short_rev = entry.revision[:12]
                if entry.pull_request_id:
                    offending.append(('pull_req', short_rev))
                elif entry.status:
                    offending.append(('status', short_rev))

            if offending:
                revs = ','.join(rev for _kind, rev in offending)
                msg = M(self, 'rev_already_reviewed', state, revs=revs)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'revisions': revs})
    return _validator
939
939
940
940
def ValidIp(localizer):
    """Factory for a CIDR validator that normalizes input to a canonical
    IPv4/IPv6 network string (e.g. ``1.2.3.0/24``)."""
    _ = localizer

    class _validator(CIDR):
        messages = {
            'badFormat': _('Please enter a valid IPv4 or IpV6 address'),
            'illegalBits': _(
                'The network size (bits) must be within the range '
                'of 0-32 (not %(bits)r)'),
        }

        def to_python(self, value, state):
            # Override the base conversion so the stored value is the
            # canonical network representation rather than the raw input.
            cleaned = safe_str(super().to_python(value, state).strip())
            network = ipaddress.ip_network(address=cleaned, strict=False)
            return str(network)

        def _validate_python(self, value, state):
            candidate = safe_str(value.strip())
            try:
                # ip_network raises ValueError when the address is
                # neither valid IPv4 nor IPv6
                ipaddress.ip_network(candidate, strict=False)
            except ValueError:
                raise formencode.Invalid(self.message('badFormat', state),
                                         value, state)
    return _validator
968
968
969
969
def FieldKey(localizer):
    """Factory for a validator restricting custom-field key names to
    letters, digits, underscore and dash."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'badFormat': _(
                'Key name can only consist of letters, '
                'underscore, dash or numbers'),
        }

        def _validate_python(self, value, state):
            # Use \Z (absolute end of string) instead of $: with '$',
            # re.match(r'[a-zA-Z0-9_-]+$', 'abc\n') would incorrectly
            # accept a key containing a trailing newline.
            if not re.match(r'[a-zA-Z0-9_-]+\Z', value):
                raise formencode.Invalid(self.message('badFormat', state),
                                         value, state)
    return _validator
985
985
986
986
def ValidAuthPlugins(localizer):
    """Factory for a validator of the ordered list of enabled authentication
    plugin IDs. Each ID must resolve to a loadable plugin, and two plugins
    may not export the same name."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'import_duplicate': _(
                'Plugins %(loaded)s and %(next_to_load)s '
                'both export the same name'),
            'missing_includeme': _(
                'The plugin "%(plugin_id)s" is missing an includeme '
                'function.'),
            'import_error': _(
                'Can not load plugin "%(plugin_id)s"'),
            'no_plugin': _(
                'No plugin available with ID "%(plugin_id)s"'),
        }

        def _convert_to_python(self, value, state):
            # filter empty values submitted by the form
            return [s for s in value if s not in [None, '']]

        def _validate_legacy_plugin_id(self, plugin_id, value, state):
            """
            Validates that the plugin import works. It also checks that the
            plugin has an includeme attribute.
            """
            try:
                plugin = _import_legacy_plugin(plugin_id)
            except Exception:
                # lazy %-args instead of eager .format(); log.exception
                # records the traceback of the failed import
                log.exception(
                    'Exception during import of auth legacy plugin "%s"',
                    plugin_id)
                msg = M(self, 'import_error', state, plugin_id=plugin_id)
                raise formencode.Invalid(msg, value, state)

            if not hasattr(plugin, 'includeme'):
                msg = M(self, 'missing_includeme', state, plugin_id=plugin_id)
                raise formencode.Invalid(msg, value, state)

            return plugin

        def _validate_plugin_id(self, plugin_id, value, state):
            """
            Plugins are already imported during app start up. Therefore this
            validation only retrieves the plugin from the plugin registry and
            if it returns something not None everything is OK.
            """
            plugin = loadplugin(plugin_id)

            if plugin is None:
                msg = M(self, 'no_plugin', state, plugin_id=plugin_id)
                raise formencode.Invalid(msg, value, state)

            return plugin

        def _validate_python(self, value, state):
            unique_names = {}
            for plugin_id in value:

                # Validate legacy or normal plugin.
                if plugin_id.startswith(legacy_plugin_prefix):
                    plugin = self._validate_legacy_plugin_id(
                        plugin_id, value, state)
                else:
                    plugin = self._validate_plugin_id(plugin_id, value, state)

                # Only allow unique plugin names.
                if plugin.name in unique_names:
                    msg = M(self, 'import_duplicate', state,
                            loaded=unique_names[plugin.name],
                            next_to_load=plugin)
                    raise formencode.Invalid(msg, value, state)
                unique_names[plugin.name] = plugin
    return _validator
1061
1061
1062
1062
def ValidPattern(localizer):
    """Factory for a validator that extracts issue-tracker pattern entries
    from flat ``new_pattern_*`` form fields into ``value['patterns']`` and
    collects requested deletions into ``value['delete_patterns']``."""
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'bad_format': _('Url must start with http or /'),
        }

        def _convert_to_python(self, value, state):
            patterns = []

            prefix = 'new_pattern'
            # iterate over a snapshot since `value` is mutated below
            for name, v in list(value.items()):
                pattern_name = '_'.join((prefix, 'pattern'))
                if name.startswith(pattern_name):
                    # suffix after 'new_pattern_pattern_' identifies one form row
                    new_item_id = name[len(pattern_name)+1:]

                    def _field(name):
                        # full form-field name for this row, e.g. 'new_pattern_url_<id>'
                        return '{}_{}_{}'.format(prefix, name, new_item_id)

                    values = {
                        'issuetracker_pat': value.get(_field('pattern')),
                        'issuetracker_url': value.get(_field('url')),
                        'issuetracker_pref': value.get(_field('prefix')),
                        'issuetracker_desc': value.get(_field('description'))
                    }
                    # stable settings-key suffix derived from the pattern text
                    new_uid = md5_safe(values['issuetracker_pat'])

                    has_required_fields = (
                        values['issuetracker_pat']
                        and values['issuetracker_url'])

                    if has_required_fields:
                        # validate url that it starts with http or /
                        # otherwise it can lead to JS injections,
                        # e.g. javascript:<malicious code>
                        if not values['issuetracker_url'].startswith(('http', '/')):
                            raise formencode.Invalid(
                                self.message('bad_format', state),
                                value, state)

                        # NOTE(review): rows missing pattern or url appear to be
                        # silently dropped (settings only built inside this
                        # branch) — confirm that is the intended behavior.
                        settings = [
                            ('_'.join((key, new_uid)), values[key], 'unicode')
                            for key in values]
                        patterns.append(settings)

            value['patterns'] = patterns
            # 'uid' may arrive as a scalar or a list; normalize to a list
            delete_patterns = value.get('uid') or []
            if not isinstance(delete_patterns, (list, tuple)):
                delete_patterns = [delete_patterns]
            value['delete_patterns'] = delete_patterns
            return value
    return _validator
General Comments 0
You need to be logged in to leave comments. Login now