modernize: python3 updates
super-admin
r5096:a0018795 default

The requested changes are too big and content was truncated.

@@ -1,71 +1,69 @@
-
-
# Copyright (C) 2015-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import atexit
import logging
import os
import signal
import sys

import rhodecode

log = logging.getLogger(__name__)

cache_keys_by_pid = set()


def sigHandler(signo, frame):
    """
    Signals trigger sys.exit, so there is a single handler to clean up the code.
    """
    if rhodecode.is_test:
        return

    sys.exit(0)


def free_cache_keys(*args):
    from rhodecode.model.db import CacheKey, Session

    if rhodecode.is_test:
        return

    ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER')
    if ssh_cmd:
        return

    if cache_keys_by_pid:
        try:
            for cache_proc in set(cache_keys_by_pid):
-                like_expression = '{}%'.format(cache_proc)
+                like_expression = f'{cache_proc}%'
                qry = CacheKey.query().filter(CacheKey.cache_key.like(like_expression))
                count = qry.count()
                log.info('Clearing %s: %s cache keys, total: %s', cache_proc, len(cache_keys_by_pid), count)
                qry.delete(synchronize_session='fetch')
                cache_keys_by_pid.remove(cache_proc)
            Session().commit()
        except Exception:
            log.exception('Failed to clear keys, exiting gracefully')

atexit.register(free_cache_keys)

signal.signal(signal.SIGTERM, sigHandler)
signal.signal(signal.SIGINT, sigHandler)
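
A usage sketch for the module above (not part of the commit): worker code registers a per-process key prefix in cache_keys_by_pid, and free_cache_keys() later deletes every CacheKey row matching that prefix via the SQL LIKE query shown above. The 'proc:...' prefix format mirrors InvalidationContext.proc_key later in this diff; the module path is assumed from the `from . import region_meta, cache_key_meta` import in the next file.

    import rhodecode
    from rhodecode.lib.rc_cache import cache_key_meta

    # register this process' prefix; InvalidationContext does the same internally
    proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
    cache_key_meta.cache_keys_by_pid.add(f'proc:{proc_id}')

    # at interpreter shutdown (atexit), or on SIGTERM/SIGINT routed through
    # sys.exit(0), free_cache_keys() deletes all CacheKey rows LIKE 'proc:<id>%'
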
@@ -1,405 +1,405 @@
# Copyright (C) 2015-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import functools
import logging
import os
import threading
import time

import decorator
from dogpile.cache import CacheRegion

import rhodecode
from rhodecode.lib.hash_utils import sha1
from rhodecode.lib.str_utils import safe_bytes
from rhodecode.lib.type_utils import str2bool

from . import region_meta, cache_key_meta

log = logging.getLogger(__name__)


def isCython(func):
    """
    Private helper that checks if a function is a cython function.
    """
    return func.__class__.__name__ == 'cython_function_or_method'


class RhodeCodeCacheRegion(CacheRegion):

    def __repr__(self):
        return f'{self.__class__}(name={self.name})'

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
57 """
57 """
58 Custom conditional decorator, that will not touch any dogpile internals if
58 Custom conditional decorator, that will not touch any dogpile internals if
59 condition isn't meet. This works a bit different from should_cache_fn
59 condition isn't meet. This works a bit different from should_cache_fn
60 And it's faster in cases we don't ever want to compute cached values
60 And it's faster in cases we don't ever want to compute cached values
61 """
61 """
        expiration_time_is_callable = callable(expiration_time)
        if not namespace:
            namespace = getattr(self, '_default_namespace', None)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):

            if not condition:
                log.debug('Calling un-cached method:%s', user_func.__name__)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                return result

            key = func_key_generator(*arg, **kw)

            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached method:`%s`', user_func.__name__)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                key = key_generator(*arg, **kw)
                return self.get(key)

            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator


def make_region(*arg, **kw):
    return RhodeCodeCacheRegion(*arg, **kw)


def get_default_cache_settings(settings, prefixes=None):
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                val = settings[key]
                if isinstance(val, str):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings


def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    return sha1(safe_bytes("_".join(map(str, args))))


def custom_key_generator(backend, namespace, fn):
    func_name = fn.__name__

    def generate_key(*args):
        backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        namespace_pref = namespace or 'default_namespace'
        arg_key = compute_key_from_params(*args)
        final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"

        return final_key

    return generate_key


def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    def wrapper(namespace, fn):
        return custom_key_generator(backend, namespace, fn)
    return wrapper


def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
    from .backends import FileNamespaceBackend
    from . import async_creation_runner

    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        reg_keys = list(region_meta.dogpile_cache_regions.keys())
-        raise EnvironmentError(f'Region `{region_name}` not in configured: {reg_keys}.')
+        raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')

    region_uid_name = f'{region_name}:{region_namespace}'

    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        if not region_namespace:
            raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')

        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist

        expiration_time = region_obj.expiration_time

        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        namespace_cache_dir = cache_dir

        # we default the namespace_cache_dir to our default cache dir.
        # however, if this backend is configured with a filename= param, we prioritize that,
        # so all caches within that particular region, even namespaced ones, end up in the same path
        if region_obj.actual_backend.filename:
            namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)

        if not os.path.isdir(namespace_cache_dir):
            os.makedirs(namespace_cache_dir)
        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )

        namespace_filename = os.path.join(
            namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    region_obj._default_namespace = region_namespace
    if use_async_runner:
        region_obj.async_creation_runner = async_creation_runner
    return region_obj


def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str):
    from . import CLEAR_DELETE, CLEAR_INVALIDATE

    if not isinstance(cache_region, RhodeCodeCacheRegion):
        cache_region = get_or_create_region(cache_region, cache_namespace_uid)
    log.debug('clearing cache region: %s with method=%s', cache_region, method)

    num_affected_keys = None

    if method == CLEAR_INVALIDATE:
        # NOTE: The CacheRegion.invalidate() method’s default mode of
        # operation is to set a timestamp local to this CacheRegion in this Python process only.
        # It does not impact other Python processes or regions, as the timestamp is only stored locally in memory.
        cache_region.invalidate(hard=True)

    if method == CLEAR_DELETE:
        cache_keys = cache_region.backend.list_keys(prefix=cache_namespace_uid)
        num_affected_keys = len(cache_keys)
        if num_affected_keys:
            cache_region.delete_multi(cache_keys)

    return num_affected_keys


class ActiveRegionCache(object):
    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        return False


class FreshRegionCache(object):
    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        return True


class InvalidationContext(object):
277 """
277 """
278 usage::
278 usage::
279
279
280 from rhodecode.lib import rc_cache
280 from rhodecode.lib import rc_cache
281
281
282 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
282 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
283 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
283 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
284
284
285 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
285 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
286 def heavy_compute(cache_name, param1, param2):
286 def heavy_compute(cache_name, param1, param2):
287 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
287 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
288
288
289 # invalidation namespace is shared namespace key for all process caches
289 # invalidation namespace is shared namespace key for all process caches
290 # we use it to send a global signal
290 # we use it to send a global signal
291 invalidation_namespace = 'repo_cache:1'
291 invalidation_namespace = 'repo_cache:1'
292
292
293 inv_context_manager = rc_cache.InvalidationContext(
293 inv_context_manager = rc_cache.InvalidationContext(
294 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
294 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
295 with inv_context_manager as invalidation_context:
295 with inv_context_manager as invalidation_context:
296 args = ('one', 'two')
296 args = ('one', 'two')
297 # re-compute and store cache if we get invalidate signal
297 # re-compute and store cache if we get invalidate signal
298 if invalidation_context.should_invalidate():
298 if invalidation_context.should_invalidate():
299 result = heavy_compute.refresh(*args)
299 result = heavy_compute.refresh(*args)
300 else:
300 else:
301 result = heavy_compute(*args)
301 result = heavy_compute(*args)
302
302
303 compute_time = inv_context_manager.compute_time
303 compute_time = inv_context_manager.compute_time
304 log.debug('result computed in %.4fs', compute_time)
304 log.debug('result computed in %.4fs', compute_time)
305
305
306 # To send global invalidation signal, simply run
306 # To send global invalidation signal, simply run
307 CacheKey.set_invalidate(invalidation_namespace)
307 CacheKey.set_invalidate(invalidation_namespace)
308
308
309 """
309 """

    def __repr__(self):
        return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'

    def __init__(self, uid, invalidation_namespace='',
                 raise_exception=False, thread_scoped=None):
        self.uid = uid
        self.invalidation_namespace = invalidation_namespace
        self.raise_exception = raise_exception
        self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
        self.thread_id = 'global'

        if thread_scoped is None:
            # if we set "default" we can override this via .ini settings
            thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')

        # Append the thread id to the cache key if this invalidation context
        # should be scoped to the current thread.
        if thread_scoped is True:
            self.thread_id = threading.current_thread().ident

        self.cache_key = compute_key_from_params(uid)
        self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
            self.proc_id, self.thread_id, self.cache_key)
-        self.proc_key = 'proc:{}'.format(self.proc_id)
+        self.proc_key = f'proc:{self.proc_id}'
        self.compute_time = 0

    def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
        from rhodecode.model.db import CacheKey

        invalidation_namespace = invalidation_namespace or self.invalidation_namespace
        # fetch all cache keys for this namespace and convert them to a map, to check if we
        # have a specific cache_key object registered. We do this because we want to have
        # a consistent cache_state_uid for all newly registered objects
        cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
        cache_obj = cache_obj_map.get(self.cache_key)
        log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)

        if not cache_obj:
            new_cache_args = invalidation_namespace
            first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
            cache_state_uid = None
            if first_cache_obj:
                cache_state_uid = first_cache_obj.cache_state_uid
            cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
                                 cache_state_uid=cache_state_uid)
            cache_key_meta.cache_keys_by_pid.add(self.proc_key)

        return cache_obj

    def __enter__(self):
        """
        Test if the current object is valid, and return a CacheRegion function
        that does invalidation and calculation
        """
        log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
        # register or get a new key based on uid
        self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
        cache_data = self.cache_obj.get_dict()
        self._start_time = time.time()
        if self.cache_obj.cache_active:
            # means our cache obj exists and is marked as active, i.e. its
            # cache is not outdated; we return ActiveRegionCache
            self.skip_cache_active_change = True

            return ActiveRegionCache(context=self, cache_data=cache_data)

        # the key either does not exist or is set to False; we return
        # the real invalidator, which re-computes the value. We additionally set
        # the flag to actually update the database objects
        self.skip_cache_active_change = False
        return FreshRegionCache(context=self, cache_data=cache_data)

    def __exit__(self, exc_type, exc_val, exc_tb):
        from rhodecode.model.db import IntegrityError, Session

        # save compute time
        self.compute_time = time.time() - self._start_time

        if self.skip_cache_active_change:
            return

        try:
            self.cache_obj.cache_active = True
            Session().add(self.cache_obj)
            Session().commit()
        except IntegrityError:
            # if we catch an integrity error, it means this object was already
            # inserted; the assumption is that this is really an edge race-condition
            # case, and it's safe to skip it
            Session().rollback()
        except Exception:
            log.exception('Failed to commit on cache key update')
            Session().rollback()
            if self.raise_exception:
                raise
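
A usage sketch for the caching utilities above (not part of the commit); the region name 'cache_perms' follows the InvalidationContext docstring, while the namespace uid and the decorated function are placeholders:

    from rhodecode.lib import rc_cache

    cache_namespace_uid = 'cache_user_auth.1'  # placeholder namespace uid
    region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
    def compute_perms(user_id):
        # stand-in for an expensive permission calculation
        return {'user_id': user_id, 'perms': 'read'}

    compute_perms(1)             # computed once, then served from the region cache
    compute_perms.refresh(1)     # force re-computation and store the fresh value
    compute_perms.invalidate(1)  # drop the cached entry for these arguments
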
@@ -1,107 +1,105 @@
-
-
# Copyright (C) 2016-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import sys
import logging

import click

from rhodecode.lib.pyramid_utils import bootstrap
from rhodecode.model.db import Session, User, Repository
from rhodecode.model.user import UserModel
from rhodecode.apps.file_store import utils as store_utils

log = logging.getLogger(__name__)


@click.command()
@click.argument('ini_path', type=click.Path(exists=True))
@click.option(
    '--filename',
    required=True,
    help='Filename for artifact.')
@click.option(
    '--file-path',
    required=True,
    type=click.Path(exists=True, dir_okay=False, readable=True),
    help='Path to a file to be added as artifact')
@click.option(
    '--repo-id',
    required=True,
    type=int,
    help='ID of repository to add this artifact to.')
@click.option(
    '--user-id',
    default=None,
    type=int,
    help='User ID for creator of artifact. '
         'Default would be first super admin.')
@click.option(
    '--description',
    default=None,
    type=str,
    help='Add description to this artifact')
def main(ini_path, filename, file_path, repo_id, user_id, description):
    return command(ini_path, filename, file_path, repo_id, user_id, description)


def command(ini_path, filename, file_path, repo_id, user_id, description):
    with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env:
        try:
            from rc_ee.api.views.store_api import _store_file
        except ImportError:
            click.secho('ERROR: Unable to import store_api. '
                        'store_api is only available in EE edition of RhodeCode',
                        fg='red')
            sys.exit(-1)

        request = env['request']

        repo = Repository.get(repo_id)
        if not repo:
-            click.secho('ERROR: Unable to find repository with id `{}`'.format(repo_id),
+            click.secho(f'ERROR: Unable to find repository with id `{repo_id}`',
                        fg='red')
            sys.exit(-1)

        # if no user is given, or it's the "DEFAULT" user (id 1), we pick the first super-admin
        if user_id is not None and user_id != 1:
            db_user = User.get(user_id)
        else:
            db_user = User.get_first_super_admin()

        if not db_user:
-            click.secho('ERROR: Unable to find user with id/username `{}`'.format(user_id),
+            click.secho(f'ERROR: Unable to find user with id/username `{user_id}`',
                        fg='red')
            sys.exit(-1)

        auth_user = db_user.AuthUser(ip_addr='127.0.0.1')

        storage = store_utils.get_file_storage(request.registry.settings)

        with open(file_path, 'rb') as f:
-            click.secho('Adding new artifact from path: `{}`'.format(file_path),
+            click.secho(f'Adding new artifact from path: `{file_path}`',
                        fg='green')

            file_data = _store_file(
                storage, auth_user, filename, content=None, check_acl=True,
                file_obj=f, description=description,
                scope_repo_id=repo.repo_id)
-            click.secho('File Data: {}'.format(file_data),
+            click.secho(f'File Data: {file_data}',
                        fg='green')
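
An invocation sketch for the artifact script above (not part of the commit); every path and id is a placeholder, and user_id=None falls back to the first super-admin:

    command(
        ini_path='/etc/rhodecode/rhodecode.ini',
        filename='build-output.tar.gz',
        file_path='/tmp/build-output.tar.gz',
        repo_id=1,
        user_id=None,
        description='CI build artifact',
    )
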
@@ -1,59 +1,57 @@
-
-
# Copyright (C) 2016-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import click

from rhodecode.lib.pyramid_utils import bootstrap
import pyramid.paster

# imports, used in ipython shell
import os
import sys
import time
import shutil
import datetime
from rhodecode.model.db import *

welcome_banner = """Welcome to RhodeCode iShell.
Type `exit` to exit the shell.
iShell is an interactive shell to interact directly with the
internal RhodeCode APIs. You can rescue your lost password,
or reset some user/system settings.
"""


@click.command()
@click.argument('ini_path', type=click.Path(exists=True))
def main(ini_path):
    pyramid.paster.setup_logging(ini_path)

    with bootstrap(ini_path) as env:

        try:
            from IPython import embed
            from traitlets.config import Config
            cfg = Config()
            cfg.InteractiveShellEmbed.confirm_exit = False
            embed(config=cfg, banner1=welcome_banner)
        except ImportError:
            print('ipython installation required for ishell')
            sys.exit(-1)
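
The click entry point above can also be driven programmatically (not part of the commit; the ini path is a placeholder, and standalone_mode=False keeps click from calling sys.exit):

    main(['/etc/rhodecode/rhodecode.ini'], standalone_mode=False)
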
@@ -1,125 +1,123 @@
-
-
# Copyright (C) 2016-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import logging

import click
import pyramid.paster

from rhodecode.lib.pyramid_utils import bootstrap, get_app_config
from rhodecode.lib.db_manage import DbManage
from rhodecode.lib.utils2 import get_encryption_key
from rhodecode.model.db import Session


log = logging.getLogger(__name__)


@click.command()
@click.argument('ini_path', type=click.Path(exists=True))
@click.option(
    '--force-yes/--force-no', default=None,
    help="Force yes/no to every question")
@click.option(
    '--user',
    default=None,
    help='Initial super-admin username')
@click.option(
    '--email',
    default=None,
    help='Initial super-admin email address.')
@click.option(
    '--password',
    default=None,
    help='Initial super-admin password. Minimum 6 chars.')
@click.option(
    '--api-key',
    help='Initial API key for the admin user')
@click.option(
    '--repos',
    default=None,
    help='Absolute path to storage location. This is storage for all '
         'existing and future repositories, and repository groups.')
@click.option(
    '--public-access/--no-public-access',
    default=None,
    help='Enable public access on this installation. '
         'Default is public access enabled.')
@click.option(
    '--skip-existing-db',
    default=False,
    is_flag=True,
    help='Do not destroy and re-initialize the database if it already exists.')
@click.option(
    '--apply-license-key',
    default=False,
    is_flag=True,
    help='Get the license key from a license file or ENV and apply it during DB creation.')
def main(ini_path, force_yes, user, email, password, api_key, repos,
         public_access, skip_existing_db, apply_license_key):
    return command(ini_path, force_yes, user, email, password, api_key,
                   repos, public_access, skip_existing_db, apply_license_key)


def command(ini_path, force_yes, user, email, password, api_key, repos,
            public_access, skip_existing_db, apply_license_key):
    # mapping of old parameters to new CLI from click
    options = dict(
        username=user,
        email=email,
        password=password,
        api_key=api_key,
        repos_location=repos,
        force_ask=force_yes,
        public_access=public_access
    )
    pyramid.paster.setup_logging(ini_path)

    config = get_app_config(ini_path)

    db_uri = config['sqlalchemy.db1.url']
    enc_key = get_encryption_key(config)
    dbmanage = DbManage(log_sql=True, dbconf=db_uri, root='.',
                        tests=False, cli_args=options, enc_key=enc_key)
    if skip_existing_db and dbmanage.db_exists():
        return

    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    opts = dbmanage.config_prompt(None)
    dbmanage.create_settings(opts)
    dbmanage.create_default_user()
    dbmanage.create_admin_and_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    if apply_license_key:
        try:
            from rc_license.models import apply_trial_license_if_missing
            apply_trial_license_if_missing(force=True)
        except ImportError:
            pass

    Session().commit()

    with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env:
        msg = 'Successfully initialized database, schema and default data.'
        print()
        print('*' * len(msg))
        print(msg.upper())
        print('*' * len(msg))
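
An invocation sketch for the setup script above (not part of the commit); every value is a placeholder:

    command(
        '/etc/rhodecode/rhodecode.ini',
        force_yes=True,
        user='admin',
        email='admin@example.com',
        password='secret1234',
        api_key=None,
        repos='/var/opt/rhodecode_repo_store',
        public_access=True,
        skip_existing_db=True,   # return early if the schema already exists
        apply_license_key=False,
    )
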
@@ -1,56 +1,54 @@
-
-
# Copyright (C) 2016-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging

import click
import pyramid.paster

from rhodecode.lib.pyramid_utils import bootstrap
from rhodecode.lib.db_manage import DbManage
from rhodecode.lib.utils2 import safe_int, get_encryption_key

log = logging.getLogger(__name__)


@click.command()
@click.argument('ini_path', type=click.Path(exists=True))
@click.option('--force-yes/--force-no', default=None,
              help="Force yes/no to every question")
@click.option('--force-version', default=None,
              help="Force upgrade from version")
def main(ini_path, force_yes, force_version):
    return command(ini_path, force_yes, force_version)


def command(ini_path, force_yes, force_version):
    pyramid.paster.setup_logging(ini_path)

    with bootstrap(ini_path, env={'RC_CMD_UPGRADE_DB': '1'}) as env:
        config = env['registry'].settings
        db_uri = config['sqlalchemy.db1.url']
        enc_key = get_encryption_key(config)
        options = {}
        if force_yes is not None:
            options['force_ask'] = force_yes
        dbmanage = DbManage(
            log_sql=True, dbconf=db_uri, root='.', tests=False,
            cli_args=options, enc_key=enc_key)
        dbmanage.upgrade(version=safe_int(force_version))
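
An invocation sketch for the upgrade script above (not part of the commit; the ini path is a placeholder):

    command('/etc/rhodecode/rhodecode.ini', force_yes=True, force_version=None)
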
@@ -1,193 +1,191 @@
-
-
# Copyright (C) 2017-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import logging

log = logging.getLogger(__name__)


class MaintenanceTask(object):
    human_name = 'undefined'

    def __init__(self, db_repo):
        self.db_repo = db_repo

    def run(self):
        """Execute task and return task human value"""
        raise NotImplementedError()


class GitGC(MaintenanceTask):
    human_name = 'GIT Garbage collect'

    def _count_objects(self, repo):
        stdout, stderr = repo.run_git_command(
            ['count-objects', '-v'], fail_on_stderr=False)

        errors = ' '
        objects = ' '.join(stdout.splitlines())

        if stderr:
            errors = '\nSTD ERR:' + '\n'.join(stderr.splitlines())
        return objects + errors

    def run(self):
        output = []
        instance = self.db_repo.scm_instance()

        objects_before = self._count_objects(instance)

        log.debug('GIT objects:%s', objects_before)
        cmd = ['gc', '--aggressive']
        stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)

        out = 'executed {}'.format(' '.join(cmd))
        output.append(out)

        out = ''
        if stderr:
            out += ''.join(stderr.splitlines())

        if stdout:
            out += ''.join(stdout.splitlines())

        if out:
            output.append(out)
72
70
73 objects_after = self._count_objects(instance)
71 objects_after = self._count_objects(instance)
74 log.debug('GIT objects:%s', objects_after)
72 log.debug('GIT objects:%s', objects_after)
75 output.append('objects before :' + objects_before)
73 output.append('objects before :' + objects_before)
76 output.append('objects after :' + objects_after)
74 output.append('objects after :' + objects_after)
77
75
78 return '\n'.join(output)
76 return '\n'.join(output)
79
77
80
78
81 class GitFSCK(MaintenanceTask):
79 class GitFSCK(MaintenanceTask):
82 human_name = 'GIT FSCK'
80 human_name = 'GIT FSCK'
83
81
84 def run(self):
82 def run(self):
85 output = []
83 output = []
86 instance = self.db_repo.scm_instance()
84 instance = self.db_repo.scm_instance()
87
85
88 cmd = ['fsck', '--full']
86 cmd = ['fsck', '--full']
89 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
87 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
90
88
91 out = 'executed {}'.format(' '.join(cmd))
89 out = 'executed {}'.format(' '.join(cmd))
92 output.append(out)
90 output.append(out)
93
91
94 out = ''
92 out = ''
95 if stderr:
93 if stderr:
96 out += ''.join(stderr.splitlines())
94 out += ''.join(stderr.splitlines())
97
95
98 if stdout:
96 if stdout:
99 out += ''.join(stdout.splitlines())
97 out += ''.join(stdout.splitlines())
100
98
101 if out:
99 if out:
102 output.append(out)
100 output.append(out)
103
101
104 return '\n'.join(output)
102 return '\n'.join(output)
105
103
106
104
107 class GitRepack(MaintenanceTask):
105 class GitRepack(MaintenanceTask):
108 human_name = 'GIT Repack'
106 human_name = 'GIT Repack'
109
107
110 def run(self):
108 def run(self):
111 output = []
109 output = []
112 instance = self.db_repo.scm_instance()
110 instance = self.db_repo.scm_instance()
113 cmd = ['repack', '-a', '-d',
111 cmd = ['repack', '-a', '-d',
114 '--window-memory', '10m', '--max-pack-size', '100m']
112 '--window-memory', '10m', '--max-pack-size', '100m']
115 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
113 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
116
114
117 out = 'executed {}'.format(' '.join(cmd))
115 out = 'executed {}'.format(' '.join(cmd))
118 output.append(out)
116 output.append(out)
119 out = ''
117 out = ''
120
118
121 if stderr:
119 if stderr:
122 out += ''.join(stderr.splitlines())
120 out += ''.join(stderr.splitlines())
123
121
124 if stdout:
122 if stdout:
125 out += ''.join(stdout.splitlines())
123 out += ''.join(stdout.splitlines())
126
124
127 if out:
125 if out:
128 output.append(out)
126 output.append(out)
129
127
130 return '\n'.join(output)
128 return '\n'.join(output)
131
129
132
130
133 class HGVerify(MaintenanceTask):
131 class HGVerify(MaintenanceTask):
134 human_name = 'HG Verify repo'
132 human_name = 'HG Verify repo'
135
133
136 def run(self):
134 def run(self):
137 instance = self.db_repo.scm_instance()
135 instance = self.db_repo.scm_instance()
138 res = instance.verify()
136 res = instance.verify()
139 return res
137 return res
140
138
141
139
142 class HGUpdateCaches(MaintenanceTask):
140 class HGUpdateCaches(MaintenanceTask):
143 human_name = 'HG update caches'
141 human_name = 'HG update caches'
144
142
145 def run(self):
143 def run(self):
146 instance = self.db_repo.scm_instance()
144 instance = self.db_repo.scm_instance()
147 res = instance.hg_update_cache()
145 res = instance.hg_update_cache()
148 return res
146 return res
149
147
150
148
151 class HGRebuildFnCaches(MaintenanceTask):
149 class HGRebuildFnCaches(MaintenanceTask):
152 human_name = 'HG rebuild fn caches'
150 human_name = 'HG rebuild fn caches'
153
151
154 def run(self):
152 def run(self):
155 instance = self.db_repo.scm_instance()
153 instance = self.db_repo.scm_instance()
156 res = instance.hg_rebuild_fn_cache()
154 res = instance.hg_rebuild_fn_cache()
157 return res
155 return res
158
156
159
157
160 class SVNVerify(MaintenanceTask):
158 class SVNVerify(MaintenanceTask):
161 human_name = 'SVN Verify repo'
159 human_name = 'SVN Verify repo'
162
160
163 def run(self):
161 def run(self):
164 instance = self.db_repo.scm_instance()
162 instance = self.db_repo.scm_instance()
165 res = instance.verify()
163 res = instance.verify()
166 return res
164 return res
167
165
168
166
169 class RepoMaintenance(object):
167 class RepoMaintenance(object):
170 """
168 """
171 Performs maintenance of a repository based on its type
169 Performs maintenance of a repository based on its type
172 """
170 """
173 tasks = {
171 tasks = {
174 'hg': [HGVerify, HGUpdateCaches, HGRebuildFnCaches],
172 'hg': [HGVerify, HGUpdateCaches, HGRebuildFnCaches],
175 'git': [GitFSCK, GitGC, GitRepack],
173 'git': [GitFSCK, GitGC, GitRepack],
176 'svn': [SVNVerify],
174 'svn': [SVNVerify],
177 }
175 }
178
176
179 def get_tasks_for_repo(self, db_repo):
177 def get_tasks_for_repo(self, db_repo):
180 """
178 """
181 fetches human names of tasks pending for execution for given type of repo
179 fetches human names of tasks pending for execution for given type of repo
182 """
180 """
183 tasks = []
181 tasks = []
184 for task in self.tasks[db_repo.repo_type]:
182 for task in self.tasks[db_repo.repo_type]:
185 tasks.append(task.human_name)
183 tasks.append(task.human_name)
186 return tasks
184 return tasks
187
185
188 def execute(self, db_repo):
186 def execute(self, db_repo):
189 executed_tasks = []
187 executed_tasks = []
190 for task in self.tasks[db_repo.repo_type]:
188 for task in self.tasks[db_repo.repo_type]:
191 output = task.human_name + ':\n' + task(db_repo).run() + '\n--\n'
189 output = task.human_name + ':\n' + task(db_repo).run() + '\n--\n'
192 executed_tasks.append(output)
190 executed_tasks.append(output)
193 return executed_tasks
191 return executed_tasks
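A sketch of extending this scheme with a new task; GitPrune and its arguments are illustrative, not part of this changeset, but they rely only on the run_git_command API shown above:

class GitPrune(MaintenanceTask):
    # hypothetical task following the pattern of the tasks above
    human_name = 'GIT Prune'

    def run(self):
        instance = self.db_repo.scm_instance()
        stdout, stderr = instance.run_git_command(
            ['prune', '--expire', '2.weeks.ago'], fail_on_stderr=False)
        out = ''.join(stderr.splitlines()) + ''.join(stdout.splitlines())
        return 'executed prune\n' + out

# wiring it in is a matter of extending the class-level task mapping:
RepoMaintenance.tasks['git'].append(GitPrune)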
@@ -1,109 +1,107 b''
1
2
3 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 from uuid import uuid4
19 from uuid import uuid4
22 import pyramid.testing
20 import pyramid.testing
23 from pyramid.decorator import reify
21 from pyramid.decorator import reify
24 from pyramid.request import Request as _Request
22 from pyramid.request import Request as _Request
25 from rhodecode.lib.type_utils import StrictAttributeDict
23 from rhodecode.lib.type_utils import StrictAttributeDict
26
24
27
25
28 class TemplateArgs(StrictAttributeDict):
26 class TemplateArgs(StrictAttributeDict):
29 pass
27 pass
30
28
31
29
32 # Base class with dummy methods, for testing / CLI scripts
30 # Base class with dummy methods, for testing / CLI scripts
33 class RequestBase(object):
31 class RequestBase(object):
34 _req_id_bucket = list()
32 _req_id_bucket = list()
35 _call_context = TemplateArgs()
33 _call_context = TemplateArgs()
36 _call_context.visual = TemplateArgs()
34 _call_context.visual = TemplateArgs()
37 _call_context.visual.show_sha_length = 12
35 _call_context.visual.show_sha_length = 12
38 _call_context.visual.show_revision_number = True
36 _call_context.visual.show_revision_number = True
39
37
40 @reify
38 @reify
41 def req_id(self):
39 def req_id(self):
42 return str(uuid4())
40 return str(uuid4())
43
41
44 @property
42 @property
45 def req_id_bucket(self):
43 def req_id_bucket(self):
46 return self._req_id_bucket
44 return self._req_id_bucket
47
45
48 def req_id_records_init(self):
46 def req_id_records_init(self):
49 self._req_id_bucket = list()
47 self._req_id_bucket = list()
50
48
51 def translate(self, *args, **kwargs):
49 def translate(self, *args, **kwargs):
52 raise NotImplementedError()
50 raise NotImplementedError()
53
51
54 def plularize(self, *args, **kwargs):
52 def plularize(self, *args, **kwargs):
55 raise NotImplementedError()
53 raise NotImplementedError()
56
54
57 def get_partial_renderer(self, tmpl_name):
55 def get_partial_renderer(self, tmpl_name):
58 raise NotImplementedError()
56 raise NotImplementedError()
59
57
60 @property
58 @property
61 def call_context(self):
59 def call_context(self):
62 return self._call_context
60 return self._call_context
63
61
64 def set_call_context(self, new_context):
62 def set_call_context(self, new_context):
65 self._call_context = new_context
63 self._call_context = new_context
66
64
67
65
68 # for thin, non-web contexts (CLI scripts etc.)
66 # for thin, non-web contexts (CLI scripts etc.)
69 class ThinRequest(RequestBase, pyramid.testing.DummyRequest):
67 class ThinRequest(RequestBase, pyramid.testing.DummyRequest):
70
68
71 def translate(self, msg):
69 def translate(self, msg):
72 return msg
70 return msg
73
71
74 def plularize(self, singular, plural, n):
72 def plularize(self, singular, plural, n):
75 return singular
73 return singular
76
74
77 def get_partial_renderer(self, tmpl_name):
75 def get_partial_renderer(self, tmpl_name):
78 from rhodecode.lib.partial_renderer import get_partial_renderer
76 from rhodecode.lib.partial_renderer import get_partial_renderer
79 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
77 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
80
78
81
79
82 # for real web-based requests
80 # for real web-based requests
83 class RealRequest(RequestBase, _Request):
81 class RealRequest(RequestBase, _Request):
84 def get_partial_renderer(self, tmpl_name):
82 def get_partial_renderer(self, tmpl_name):
85 from rhodecode.lib.partial_renderer import get_partial_renderer
83 from rhodecode.lib.partial_renderer import get_partial_renderer
86 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
84 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
87
85
88 def request_count(self):
86 def request_count(self):
89 from rhodecode.lib.request_counter import get_request_counter
87 from rhodecode.lib.request_counter import get_request_counter
90 return get_request_counter()
88 return get_request_counter()
91
89
92 def plularize(self, *args, **kwargs):
90 def plularize(self, *args, **kwargs):
93 return self.localizer.pluralize(*args, **kwargs)
91 return self.localizer.pluralize(*args, **kwargs)
94
92
95 def translate(self, *args, **kwargs):
93 def translate(self, *args, **kwargs):
96 localizer = self.localizer
94 localizer = self.localizer
97 from rhodecode.translation import _ as tsf
95 from rhodecode.translation import _ as tsf
98
96
99 def auto_translate(*_args, **_kwargs):
97 def auto_translate(*_args, **_kwargs):
100 return localizer.translate(tsf(*_args, **_kwargs))
98 return localizer.translate(tsf(*_args, **_kwargs))
101
99
102 return auto_translate(*args, **kwargs)
100 return auto_translate(*args, **kwargs)
103
101
104
102
105 class Request(RealRequest):
103 class Request(RealRequest):
106 """
104 """
107 This is the main request object used in web-context
105 This is the main request object used in web-context
108 """
106 """
109 pass
107 pass
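For CLI scripts and tests, ThinRequest stands in for a real request; a small usage sketch relying only on the classes above:

req = ThinRequest()
req.req_id                                  # per-instance uuid4, cached by @reify
req.translate('pull request')               # identity translation in the thin variant
req.plularize('commit', 'commits', 5)       # always returns the singular form here
req.call_context.visual.show_sha_length     # 12, from the class-level defaults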
@@ -1,27 +1,25 b''
1
2
3 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 counter = 0
19 counter = 0
22
20
23
21
24 def get_request_counter():
22 def get_request_counter():
25 global counter
23 global counter
26 counter += 1
24 counter += 1
27 return counter
25 return counter
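Note the counter is a plain module global: it is per-process, and the increment is not atomic under threads. A guarded variant would be a minimal sketch like the following (the lock is an assumption, not part of the module):

import threading

_counter_lock = threading.Lock()
counter = 0


def get_request_counter():
    global counter
    with _counter_lock:  # serialize increments across worker threads
        counter += 1
        return counter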
@@ -1,49 +1,49 b''
1 from rhodecode.lib._vendor.statsd import client_from_config
1 from rhodecode.lib._vendor.statsd import client_from_config
2
2
3
3
4 class StatsdClientNotInitialised(Exception):
4 class StatsdClientNotInitialised(Exception):
5 pass
5 pass
6
6
7
7
8 class _Singleton(type):
8 class _Singleton(type):
9 """A metaclass that creates a Singleton base class when called."""
9 """A metaclass that creates a Singleton base class when called."""
10
10
11 _instances = {}
11 _instances = {}
12
12
13 def __call__(cls, *args, **kwargs):
13 def __call__(cls, *args, **kwargs):
14 if cls not in cls._instances:
14 if cls not in cls._instances:
15 cls._instances[cls] = super(_Singleton, cls).__call__(*args, **kwargs)
15 cls._instances[cls] = super().__call__(*args, **kwargs)
16 return cls._instances[cls]
16 return cls._instances[cls]
17
17
18
18
19 class Singleton(_Singleton("SingletonMeta", (object,), {})):
19 class Singleton(_Singleton("SingletonMeta", (object,), {})):
20 pass
20 pass
21
21
22
22
23 class StatsdClientClass(Singleton):
23 class StatsdClientClass(Singleton):
24 setup_run = False
24 setup_run = False
25 statsd_client = None
25 statsd_client = None
26 statsd = None
26 statsd = None
27
27
28 def __getattribute__(self, name):
28 def __getattribute__(self, name):
29
29
30 if name.startswith("statsd"):
30 if name.startswith("statsd"):
31 if self.setup_run:
31 if self.setup_run:
32 return super(StatsdClientClass, self).__getattribute__(name)
32 return super().__getattribute__(name)
33 else:
33 else:
34 return None
34 return None
35 #raise StatsdClientNotInitialised("requested key was %s" % name)
35 #raise StatsdClientNotInitialised("requested key was %s" % name)
36
36
37 return super(StatsdClientClass, self).__getattribute__(name)
37 return super().__getattribute__(name)
38
38
39 def setup(self, settings):
39 def setup(self, settings):
40 """
40 """
41 Initialize the client
41 Initialize the client
42 """
42 """
43 statsd = client_from_config(settings)
43 statsd = client_from_config(settings)
44 self.statsd = statsd
44 self.statsd = statsd
45 self.statsd_client = statsd
45 self.statsd_client = statsd
46 self.setup_run = True
46 self.setup_run = True
47
47
48
48
49 StatsdClient = StatsdClientClass()
49 StatsdClient = StatsdClientClass()
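A usage sketch: thanks to the __getattribute__ guard, any statsd-prefixed attribute reads as None until setup() has run, so call sites can guard with a plain truthiness check. The settings keys and the incr() call are assumptions about the vendored client:

settings = {'statsd.enabled': 'true', 'statsd.statsd_host': '127.0.0.1'}  # illustrative keys
StatsdClient.setup(settings)
if StatsdClient.statsd:                      # reads as None until setup() has run
    StatsdClient.statsd.incr('rc.requests')  # assuming the usual statsd incr() API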
@@ -1,170 +1,169 b''
1
2 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 import typing
19 import typing
21 import base64
20 import base64
22 import logging
21 import logging
23 from unidecode import unidecode
22 from unidecode import unidecode
24
23
25 import rhodecode
24 import rhodecode
26 from rhodecode.lib.type_utils import aslist
25 from rhodecode.lib.type_utils import aslist
27
26
28
27
29 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
30
29
31
30
32 def safe_int(val, default=None) -> int:
31 def safe_int(val, default=None) -> int:
33 """
32 """
34 Returns int() of val; if val is not convertible to int, returns default
33 Returns int() of val; if val is not convertible to int, returns default
35 instead
34 instead
36
35
37 :param val:
36 :param val:
38 :param default:
37 :param default:
39 """
38 """
40
39
41 try:
40 try:
42 val = int(val)
41 val = int(val)
43 except (ValueError, TypeError):
42 except (ValueError, TypeError):
44 val = default
43 val = default
45
44
46 return val
45 return val
47
46
48
47
49 def safe_float(val, default=None) -> float:
48 def safe_float(val, default=None) -> float:
50 """
49 """
51 Returns float() of val; if val is not convertible to float, returns default
50 Returns float() of val; if val is not convertible to float, returns default
52 instead
51 instead
53
52
54 :param val:
53 :param val:
55 :param default:
54 :param default:
56 """
55 """
57
56
58 try:
57 try:
59 val = float(val)
58 val = float(val)
60 except (ValueError, TypeError):
59 except (ValueError, TypeError):
61 val = default
60 val = default
62
61
63 return val
62 return val
64
63
65
64
66 def base64_to_str(text) -> str:
65 def base64_to_str(text) -> str:
67 return safe_str(base64.encodebytes(safe_bytes(text))).strip()
66 return safe_str(base64.encodebytes(safe_bytes(text))).strip()
68
67
69
68
70 def get_default_encodings() -> typing.List[str]:
69 def get_default_encodings() -> list[str]:
71 return aslist(rhodecode.CONFIG.get('default_encoding', 'utf8'), sep=',')
70 return aslist(rhodecode.CONFIG.get('default_encoding', 'utf8'), sep=',')
72
71
73
72
74 DEFAULT_ENCODINGS = get_default_encodings()
73 DEFAULT_ENCODINGS = get_default_encodings()
75
74
76
75
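The typing.List[str] -> list[str] change above follows PEP 585: from Python 3.9 the builtin collection types are subscriptable, so typing imports are no longer needed for simple annotations. An illustrative example (split_encodings is a hypothetical helper):

# py3.9+: builtin generics, no "from typing import List" required
def split_encodings(raw: str) -> list[str]:
    return [enc.strip() for enc in raw.split(',') if enc.strip()]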
77 def safe_str(str_, to_encoding=None) -> str:
76 def safe_str(str_, to_encoding=None) -> str:
78 """
77 """
79 safe str function. Does a few tricks to turn str_ into a string
78 safe str function. Does a few tricks to turn str_ into a string
80
79
81 :param str_: str to encode
80 :param str_: str to encode
82 :param to_encoding: encode to this type UTF8 default
81 :param to_encoding: encode to this type UTF8 default
83 """
82 """
84 if isinstance(str_, str):
83 if isinstance(str_, str):
85 return str_
84 return str_
86
85
87 # if it's bytes cast to str
86 # if it's bytes cast to str
88 if not isinstance(str_, bytes):
87 if not isinstance(str_, bytes):
89 return str(str_)
88 return str(str_)
90
89
91 to_encoding = to_encoding or DEFAULT_ENCODINGS
90 to_encoding = to_encoding or DEFAULT_ENCODINGS
92 if not isinstance(to_encoding, (list, tuple)):
91 if not isinstance(to_encoding, (list, tuple)):
93 to_encoding = [to_encoding]
92 to_encoding = [to_encoding]
94
93
95 for enc in to_encoding:
94 for enc in to_encoding:
96 try:
95 try:
97 return str(str_, enc)
96 return str(str_, enc)
98 except UnicodeDecodeError:
97 except UnicodeDecodeError:
99 pass
98 pass
100
99
101 return str(str_, to_encoding[0], 'replace')
100 return str(str_, to_encoding[0], 'replace')
102
101
103
102
104 def safe_bytes(str_, from_encoding=None) -> bytes:
103 def safe_bytes(str_, from_encoding=None) -> bytes:
105 """
104 """
106 safe bytes function. Does a few tricks to turn str_ into a bytes string:
105 safe bytes function. Does a few tricks to turn str_ into a bytes string:
107
106
108 :param str_: string to decode
107 :param str_: string to decode
109 :param from_encoding: encode from this type UTF8 default
108 :param from_encoding: encode from this type UTF8 default
110 """
109 """
111 if isinstance(str_, bytes):
110 if isinstance(str_, bytes):
112 return str_
111 return str_
113
112
114 if not isinstance(str_, str):
113 if not isinstance(str_, str):
115 raise ValueError(f'safe_bytes cannot convert other types than str: got: {type(str_)}')
114 raise ValueError(f'safe_bytes cannot convert other types than str: got: {type(str_)}')
116
115
117 from_encoding = from_encoding or get_default_encodings()
116 from_encoding = from_encoding or get_default_encodings()
118 if not isinstance(from_encoding, (list, tuple)):
117 if not isinstance(from_encoding, (list, tuple)):
119 from_encoding = [from_encoding]
118 from_encoding = [from_encoding]
120
119
121 for enc in from_encoding:
120 for enc in from_encoding:
122 try:
121 try:
123 return str_.encode(enc)
122 return str_.encode(enc)
124 except UnicodeEncodeError:  # str.encode() raises encode, not decode, errors
123 except UnicodeEncodeError:  # str.encode() raises encode, not decode, errors
125 pass
124 pass
126
125
127 return str_.encode(from_encoding[0], 'replace')
126 return str_.encode(from_encoding[0], 'replace')
128
127
129
128
130 def ascii_bytes(str_, allow_bytes=False) -> bytes:
129 def ascii_bytes(str_, allow_bytes=False) -> bytes:
131 """
130 """
132 Simple conversion from str to bytes, with assumption that str_ is pure ASCII.
131 Simple conversion from str to bytes, with assumption that str_ is pure ASCII.
133 Fails with UnicodeError on invalid input.
132 Fails with UnicodeError on invalid input.
134 This should be used where encoding and "safe" ambiguity should be avoided.
133 This should be used where encoding and "safe" ambiguity should be avoided.
135 Use it where strings have already been encoded in other ways but are still
134 Use it where strings have already been encoded in other ways but are still
136 unicode strings - for example to hex, base64, json, urlencoding - or are
135 unicode strings - for example to hex, base64, json, urlencoding - or are
137 known to be identifiers.
136 known to be identifiers.
138 """
137 """
139 if allow_bytes and isinstance(str_, bytes):
138 if allow_bytes and isinstance(str_, bytes):
140 return str_
139 return str_
141
140
142 if not isinstance(str_, str):
141 if not isinstance(str_, str):
143 raise ValueError(f'ascii_bytes cannot convert other types than str: got: {type(str_)}')
142 raise ValueError(f'ascii_bytes cannot convert other types than str: got: {type(str_)}')
144 return str_.encode('ascii')
143 return str_.encode('ascii')
145
144
146
145
147 def ascii_str(str_) -> str:
146 def ascii_str(str_) -> str:
148 """
147 """
149 Simple conversion from bytes to str, with assumption that str_ is pure ASCII.
148 Simple conversion from bytes to str, with assumption that str_ is pure ASCII.
150 Fails with UnicodeError on invalid input.
149 Fails with UnicodeError on invalid input.
151 This should be used where encoding and "safe" ambiguity should be avoided.
150 This should be used where encoding and "safe" ambiguity should be avoided.
152 Use it where strings are encoded but are also known to be ASCII, and where
151 Use it where strings are encoded but are also known to be ASCII, and where
153 a unicode string is wanted without caring about encoding - for example hex,
152 a unicode string is wanted without caring about encoding - for example hex,
154 base64, urlencoding, or known identifiers.
153 base64, urlencoding, or known identifiers.
155 """
154 """
156
155
157 if not isinstance(str_, bytes):
156 if not isinstance(str_, bytes):
158 raise ValueError(f'ascii_str cannot convert other types than bytes: got: {type(str_)}')
157 raise ValueError(f'ascii_str cannot convert other types than bytes: got: {type(str_)}')
159 return str_.decode('ascii')
158 return str_.decode('ascii')
160
159
161
160
162 def convert_special_chars(str_) -> str:
161 def convert_special_chars(str_) -> str:
163 """
162 """
164 tries to replace non-ascii letters with their ascii representation eg::
163 tries to replace non-ascii letters with their ascii representation eg::
165
164
166 `żołw` converts into `zolw`
165 `żołw` converts into `zolw`
167 """
166 """
168 value = safe_str(str_)
167 value = safe_str(str_)
169 converted_value = unidecode(value)
168 converted_value = unidecode(value)
170 return converted_value
169 return converted_value
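A round-trip sketch of the helpers above, assuming the default utf8 encoding configuration:

assert safe_bytes('żółw') == b'\xc5\xbc\xc3\xb3\xc5\x82w'   # str -> utf8 bytes
assert safe_str(b'\xc5\xbc\xc3\xb3\xc5\x82w') == 'żółw'     # utf8 bytes -> str
assert convert_special_chars('żołw') == 'zolw'              # unidecode transliteration
assert ascii_str(b'deadbeef') == 'deadbeef'                 # strict: ASCII bytes only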
@@ -1,34 +1,33 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20
19
21 def html(info):
20 def html(info):
22 """
21 """
23 Custom string as html content_type renderer for pyramid
22 Custom string as html content_type renderer for pyramid
24 """
23 """
25 def _render(value, system):
24 def _render(value, system):
26 request = system.get('request')
25 request = system.get('request')
27 if request is not None:
26 if request is not None:
28 response = request.response
27 response = request.response
29 ct = response.content_type
28 ct = response.content_type
30 if ct == response.default_content_type:
29 if ct == response.default_content_type:
31 response.content_type = 'text/html'
30 response.content_type = 'text/html'
32 return value
31 return value
33
32
34 return _render
33 return _render
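A sketch of wiring this renderer factory into a Pyramid Configurator; the renderer name and the view are arbitrary examples, not part of this changeset:

from pyramid.config import Configurator
from pyramid.view import view_config

config = Configurator()
config.add_renderer('html_str', html)   # html(info) returns the _render callable

@view_config(route_name='raw_page', renderer='html_str')  # illustrative view
def raw_page(request):
    # the returned string passes through unchanged; content_type becomes text/html
    return '<h1>hello</h1>'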
@@ -1,848 +1,846 b''
1
2
3 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21
19
22 import os
20 import os
23 import sys
21 import sys
24 import time
22 import time
25 import platform
23 import platform
26 import collections
24 import collections
27 import psutil
25 import psutil
28 from functools import wraps
26 from functools import wraps
29
27
30 import pkg_resources
28 import pkg_resources
31 import logging
29 import logging
32 import resource
30 import resource
33
31
34 import configparser
32 import configparser
35
33
36 from rhodecode.lib.str_utils import safe_str
34 from rhodecode.lib.str_utils import safe_str
37
35
38 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
39
37
40
38
41 _NA = 'NOT AVAILABLE'
39 _NA = 'NOT AVAILABLE'
42 _NA_FLOAT = 0.0
40 _NA_FLOAT = 0.0
43
41
44 STATE_OK = 'ok'
42 STATE_OK = 'ok'
45 STATE_ERR = 'error'
43 STATE_ERR = 'error'
46 STATE_WARN = 'warning'
44 STATE_WARN = 'warning'
47
45
48 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
46 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
49
47
50
48
51 registered_helpers = {}
49 registered_helpers = {}
52
50
53
51
54 def register_sysinfo(func):
52 def register_sysinfo(func):
55 """
53 """
56 @register_sysinfo
54 @register_sysinfo
57 def db_check():
55 def db_check():
58 pass
56 pass
59
57
60 db_check == registered_helpers['db_check']
58 db_check == registered_helpers['db_check']
61 """
59 """
62 global registered_helpers
60 global registered_helpers
63 registered_helpers[func.__name__] = func
61 registered_helpers[func.__name__] = func
64
62
65 @wraps(func)
63 @wraps(func)
66 def _wrapper(*args, **kwargs):
64 def _wrapper(*args, **kwargs):
67 return func(*args, **kwargs)
65 return func(*args, **kwargs)
68 return _wrapper
66 return _wrapper
69
67
70
68
71 # HELPERS
69 # HELPERS
72 def percentage(part: (int, float), whole: (int, float)):
70 def percentage(part: (int, float), whole: (int, float)):
73 whole = float(whole)
71 whole = float(whole)
74 if whole > 0:
72 if whole > 0:
75 return round(100 * float(part) / whole, 1)
73 return round(100 * float(part) / whole, 1)
76 return 0.0
74 return 0.0
77
75
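For example:

assert percentage(512, 2048) == 25.0   # rounded to one decimal place
assert percentage(5, 0) == 0.0         # zero denominator yields 0.0, not an error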
78
76
79 def get_storage_size(storage_path):
77 def get_storage_size(storage_path):
80 sizes = []
78 sizes = []
81 for file_ in os.listdir(storage_path):
79 for file_ in os.listdir(storage_path):
82 storage_file = os.path.join(storage_path, file_)
80 storage_file = os.path.join(storage_path, file_)
83 if os.path.isfile(storage_file):
81 if os.path.isfile(storage_file):
84 try:
82 try:
85 sizes.append(os.path.getsize(storage_file))
83 sizes.append(os.path.getsize(storage_file))
86 except OSError:
84 except OSError:
87 log.exception('Failed to get size of storage file %s', storage_file)
85 log.exception('Failed to get size of storage file %s', storage_file)
88 pass
86 pass
89
87
90 return sum(sizes)
88 return sum(sizes)
91
89
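In the spirit of this modernization pass, the same walk could use os.scandir (Python 3.5+), which fetches file metadata in a single pass; an equivalent sketch, not present in the codebase:

def get_storage_size_scandir(storage_path):
    # same result as get_storage_size above, one stat per directory entry
    total = 0
    with os.scandir(storage_path) as entries:
        for entry in entries:
            try:
                if entry.is_file():
                    total += entry.stat().st_size
            except OSError:
                log.exception('Failed to get size of storage file %s', entry.path)
    return total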
92
90
93 def get_resource(resource_type):
91 def get_resource(resource_type):
94 try:
92 try:
95 return resource.getrlimit(resource_type)
93 return resource.getrlimit(resource_type)
96 except Exception:
94 except Exception:
97 return 'NOT_SUPPORTED'
95 return 'NOT_SUPPORTED'
98
96
99
97
100 def get_cert_path(ini_path):
98 def get_cert_path(ini_path):
101 default = '/etc/ssl/certs/ca-certificates.crt'
99 default = '/etc/ssl/certs/ca-certificates.crt'
102 control_ca_bundle = os.path.join(
100 control_ca_bundle = os.path.join(
103 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
101 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
104 '.rccontrol-profile/etc/ca-bundle.crt')
102 '.rccontrol-profile/etc/ca-bundle.crt')
105 if os.path.isfile(control_ca_bundle):
103 if os.path.isfile(control_ca_bundle):
106 default = control_ca_bundle
104 default = control_ca_bundle
107
105
108 return default
106 return default
109
107
110
108
111 class SysInfoRes(object):
109 class SysInfoRes(object):
112 def __init__(self, value, state=None, human_value=None):
110 def __init__(self, value, state=None, human_value=None):
113 self.value = value
111 self.value = value
114 self.state = state or STATE_OK_DEFAULT
112 self.state = state or STATE_OK_DEFAULT
115 self.human_value = human_value or value
113 self.human_value = human_value or value
116
114
117 def __json__(self):
115 def __json__(self):
118 return {
116 return {
119 'value': self.value,
117 'value': self.value,
120 'state': self.state,
118 'state': self.state,
121 'human_value': self.human_value,
119 'human_value': self.human_value,
122 }
120 }
123
121
124 def get_value(self):
122 def get_value(self):
125 return self.__json__()
123 return self.__json__()
126
124
127 def __str__(self):
125 def __str__(self):
128 return '<SysInfoRes({})>'.format(self.__json__())
126 return f'<SysInfoRes({self.__json__()})>'
129
127
130
128
131 class SysInfo(object):
129 class SysInfo(object):
132
130
133 def __init__(self, func_name, **kwargs):
131 def __init__(self, func_name, **kwargs):
134 self.function_name = func_name
132 self.function_name = func_name
135 self.value = _NA
133 self.value = _NA
136 self.state = None
134 self.state = None
137 self.kwargs = kwargs or {}
135 self.kwargs = kwargs or {}
138
136
139 def __call__(self):
137 def __call__(self):
140 computed = self.compute(**self.kwargs)
138 computed = self.compute(**self.kwargs)
141 if not isinstance(computed, SysInfoRes):
139 if not isinstance(computed, SysInfoRes):
142 raise ValueError(
140 raise ValueError(
143 'computed value for {} is not instance of '
141 'computed value for {} is not instance of '
144 '{}, got {} instead'.format(
142 '{}, got {} instead'.format(
145 self.function_name, SysInfoRes, type(computed)))
143 self.function_name, SysInfoRes, type(computed)))
146 return computed.__json__()
144 return computed.__json__()
147
145
148 def __str__(self):
146 def __str__(self):
149 return '<SysInfo({})>'.format(self.function_name)
147 return f'<SysInfo({self.function_name})>'
150
148
151 def compute(self, **kwargs):
149 def compute(self, **kwargs):
152 return self.function_name(**kwargs)
150 return self.function_name(**kwargs)
153
151
154
152
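The decorator and the wrapper compose as follows; python_info is defined just below, and the dict shape matches SysInfoRes.__json__:

res = SysInfo(python_info)()                 # compute, validate, serialize
# -> {'value': {...}, 'state': {'message': '', 'type': 'ok'}, 'human_value': {...}}
raw = registered_helpers['python_info']()    # the bare helper returns a SysInfoRes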
155 # SysInfo functions
153 # SysInfo functions
156 @register_sysinfo
154 @register_sysinfo
157 def python_info():
155 def python_info():
158 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
156 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
159 executable=sys.executable)
157 executable=sys.executable)
160 return SysInfoRes(value=value)
158 return SysInfoRes(value=value)
161
159
162
160
163 @register_sysinfo
161 @register_sysinfo
164 def py_modules():
162 def py_modules():
165 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
163 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
166 for p in pkg_resources.working_set])
164 for p in pkg_resources.working_set])
167
165
168 value = sorted(mods.items(), key=lambda k: k[0].lower())
166 value = sorted(mods.items(), key=lambda k: k[0].lower())
169 return SysInfoRes(value=value)
167 return SysInfoRes(value=value)
170
168
171
169
172 @register_sysinfo
170 @register_sysinfo
173 def platform_type():
171 def platform_type():
174 from rhodecode.lib.utils import generate_platform_uuid
172 from rhodecode.lib.utils import generate_platform_uuid
175
173
176 value = dict(
174 value = dict(
177 name=safe_str(platform.platform()),
175 name=safe_str(platform.platform()),
178 uuid=generate_platform_uuid()
176 uuid=generate_platform_uuid()
179 )
177 )
180 return SysInfoRes(value=value)
178 return SysInfoRes(value=value)
181
179
182
180
183 @register_sysinfo
181 @register_sysinfo
184 def locale_info():
182 def locale_info():
185 import locale
183 import locale
186
184
187 def safe_get_locale(locale_name):
185 def safe_get_locale(locale_name):
188 try:
186 try:
189 return locale.getlocale(locale_name)
187 return locale.getlocale(locale_name)
190 except TypeError:
188 except TypeError:
191 return f'FAILED_LOCALE_GET:{locale_name}'
189 return f'FAILED_LOCALE_GET:{locale_name}'
192
190
193 value = dict(
191 value = dict(
194 locale_default=locale.getlocale(),
192 locale_default=locale.getlocale(),
195 locale_lc_all=safe_get_locale(locale.LC_ALL),
193 locale_lc_all=safe_get_locale(locale.LC_ALL),
196 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
194 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
197 lang_env=os.environ.get('LANG'),
195 lang_env=os.environ.get('LANG'),
198 lc_all_env=os.environ.get('LC_ALL'),
196 lc_all_env=os.environ.get('LC_ALL'),
199 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
197 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
200 )
198 )
201 human_value = \
199 human_value = \
202 f"LANG: {value['lang_env']}, \
200 f"LANG: {value['lang_env']}, \
203 locale LC_ALL: {value['locale_lc_all']}, \
201 locale LC_ALL: {value['locale_lc_all']}, \
204 locale LC_CTYPE: {value['locale_lc_ctype']}, \
202 locale LC_CTYPE: {value['locale_lc_ctype']}, \
205 Default locales: {value['locale_default']}"
203 Default locales: {value['locale_default']}"
206
204
207 return SysInfoRes(value=value, human_value=human_value)
205 return SysInfoRes(value=value, human_value=human_value)
208
206
209
207
210 @register_sysinfo
208 @register_sysinfo
211 def ulimit_info():
209 def ulimit_info():
212 data = collections.OrderedDict([
210 data = collections.OrderedDict([
213 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
211 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
214 ('file size', get_resource(resource.RLIMIT_FSIZE)),
212 ('file size', get_resource(resource.RLIMIT_FSIZE)),
215 ('stack size', get_resource(resource.RLIMIT_STACK)),
213 ('stack size', get_resource(resource.RLIMIT_STACK)),
216 ('core file size', get_resource(resource.RLIMIT_CORE)),
214 ('core file size', get_resource(resource.RLIMIT_CORE)),
217 ('address space size', get_resource(resource.RLIMIT_AS)),
215 ('address space size', get_resource(resource.RLIMIT_AS)),
218 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
216 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
219 ('heap size', get_resource(resource.RLIMIT_DATA)),
217 ('heap size', get_resource(resource.RLIMIT_DATA)),
220 ('rss size', get_resource(resource.RLIMIT_RSS)),
218 ('rss size', get_resource(resource.RLIMIT_RSS)),
221 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
219 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
222 ('open files', get_resource(resource.RLIMIT_NOFILE)),
220 ('open files', get_resource(resource.RLIMIT_NOFILE)),
223 ])
221 ])
224
222
225 text = ', '.join(f'{k}:{v}' for k, v in data.items())
223 text = ', '.join(f'{k}:{v}' for k, v in data.items())
226
224
227 value = {
225 value = {
228 'limits': data,
226 'limits': data,
229 'text': text,
227 'text': text,
230 }
228 }
231 return SysInfoRes(value=value)
229 return SysInfoRes(value=value)
232
230
233
231
234 @register_sysinfo
232 @register_sysinfo
235 def uptime():
233 def uptime():
236 from rhodecode.lib.helpers import age, time_to_datetime
234 from rhodecode.lib.helpers import age, time_to_datetime
237 from rhodecode.translation import TranslationString
235 from rhodecode.translation import TranslationString
238
236
239 value = dict(boot_time=0, uptime=0, text='')
237 value = dict(boot_time=0, uptime=0, text='')
240 state = STATE_OK_DEFAULT
238 state = STATE_OK_DEFAULT
241
239
242 boot_time = psutil.boot_time()
240 boot_time = psutil.boot_time()
243 value['boot_time'] = boot_time
241 value['boot_time'] = boot_time
244 value['uptime'] = time.time() - boot_time
242 value['uptime'] = time.time() - boot_time
245
243
246 date_or_age = age(time_to_datetime(boot_time))
244 date_or_age = age(time_to_datetime(boot_time))
247 if isinstance(date_or_age, TranslationString):
245 if isinstance(date_or_age, TranslationString):
248 date_or_age = date_or_age.interpolate()
246 date_or_age = date_or_age.interpolate()
249
247
250 human_value = value.copy()
248 human_value = value.copy()
251 human_value['boot_time'] = time_to_datetime(boot_time)
249 human_value['boot_time'] = time_to_datetime(boot_time)
252 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
250 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
253
251
254 human_value['text'] = 'Server started {}'.format(date_or_age)
252 human_value['text'] = f'Server started {date_or_age}'
255 return SysInfoRes(value=value, human_value=human_value)
253 return SysInfoRes(value=value, human_value=human_value)
256
254
257
255
258 @register_sysinfo
256 @register_sysinfo
259 def memory():
257 def memory():
260 from rhodecode.lib.helpers import format_byte_size_binary
258 from rhodecode.lib.helpers import format_byte_size_binary
261 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
259 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
262 percent_used=0, free=0, inactive=0, active=0, shared=0,
260 percent_used=0, free=0, inactive=0, active=0, shared=0,
263 total=0, buffers=0, text='')
261 total=0, buffers=0, text='')
264
262
265 state = STATE_OK_DEFAULT
263 state = STATE_OK_DEFAULT
266
264
267 value.update(dict(psutil.virtual_memory()._asdict()))
265 value.update(dict(psutil.virtual_memory()._asdict()))
268 value['used_real'] = value['total'] - value['available']
266 value['used_real'] = value['total'] - value['available']
269 value['percent_used'] = psutil._common.usage_percent(
267 value['percent_used'] = psutil._common.usage_percent(
270 value['used_real'], value['total'], 1)
268 value['used_real'], value['total'], 1)
271
269
272 human_value = value.copy()
270 human_value = value.copy()
273 human_value['text'] = '%s/%s, %s%% used' % (
271 human_value['text'] = '{}/{}, {}% used'.format(
274 format_byte_size_binary(value['used_real']),
272 format_byte_size_binary(value['used_real']),
275 format_byte_size_binary(value['total']),
273 format_byte_size_binary(value['total']),
276 value['percent_used'],)
274 value['percent_used'])
277
275
278 keys = list(value.keys())[::]
276 keys = list(value.keys())[::]
279 keys.pop(keys.index('percent'))
277 keys.pop(keys.index('percent'))
280 keys.pop(keys.index('percent_used'))
278 keys.pop(keys.index('percent_used'))
281 keys.pop(keys.index('text'))
279 keys.pop(keys.index('text'))
282 for k in keys:
280 for k in keys:
283 human_value[k] = format_byte_size_binary(value[k])
281 human_value[k] = format_byte_size_binary(value[k])
284
282
285 if state['type'] == STATE_OK and value['percent_used'] > 90:
283 if state['type'] == STATE_OK and value['percent_used'] > 90:
286 msg = 'Critical: your available RAM memory is very low.'
284 msg = 'Critical: your available RAM memory is very low.'
287 state = {'message': msg, 'type': STATE_ERR}
285 state = {'message': msg, 'type': STATE_ERR}
288
286
289 elif state['type'] == STATE_OK and value['percent_used'] > 70:
287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
290 msg = 'Warning: your available RAM memory is running low.'
288 msg = 'Warning: your available RAM memory is running low.'
291 state = {'message': msg, 'type': STATE_WARN}
289 state = {'message': msg, 'type': STATE_WARN}
292
290
293 return SysInfoRes(value=value, state=state, human_value=human_value)
291 return SysInfoRes(value=value, state=state, human_value=human_value)
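The same 70/90 threshold ladder recurs in the disk and inode checks below; a shared helper would be a small sketch like this (illustrative, not present in the codebase):

def usage_state(percent, subject, warn=70, err=90):
    # maps a usage percentage onto the STATE_* convention used above
    if percent > err:
        return {'message': f'Critical: your {subject} is very low.', 'type': STATE_ERR}
    if percent > warn:
        return {'message': f'Warning: your {subject} is running low.', 'type': STATE_WARN}
    return STATE_OK_DEFAULT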
294
292
295
293
296 @register_sysinfo
294 @register_sysinfo
297 def machine_load():
295 def machine_load():
298 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
296 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
299 state = STATE_OK_DEFAULT
297 state = STATE_OK_DEFAULT
300
298
301 # load averages
299 # load averages
302 if hasattr(psutil.os, 'getloadavg'):
300 if hasattr(psutil.os, 'getloadavg'):
303 value.update(dict(
301 value.update(dict(
304 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
302 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
305 ))
303 ))
306
304
307 human_value = value.copy()
305 human_value = value.copy()
308 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
306 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
309 value['1_min'], value['5_min'], value['15_min'])
307 value['1_min'], value['5_min'], value['15_min'])
310
308
311 if state['type'] == STATE_OK and value['15_min'] > 5.0:
309 if state['type'] == STATE_OK and value['15_min'] > 5.0:
312 msg = 'Warning: your machine load is very high.'
310 msg = 'Warning: your machine load is very high.'
313 state = {'message': msg, 'type': STATE_WARN}
311 state = {'message': msg, 'type': STATE_WARN}
314
312
315 return SysInfoRes(value=value, state=state, human_value=human_value)
313 return SysInfoRes(value=value, state=state, human_value=human_value)
316
314
317
315
318 @register_sysinfo
316 @register_sysinfo
319 def cpu():
317 def cpu():
320 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
318 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
321 state = STATE_OK_DEFAULT
319 state = STATE_OK_DEFAULT
322
320
323 value['cpu'] = psutil.cpu_percent(0.5)
321 value['cpu'] = psutil.cpu_percent(0.5)
324 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
322 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
325 value['cpu_count'] = psutil.cpu_count()
323 value['cpu_count'] = psutil.cpu_count()
326
324
327 human_value = value.copy()
325 human_value = value.copy()
328 human_value['text'] = '{} cores at {} %'.format(
326 human_value['text'] = '{} cores at {} %'.format(
329 value['cpu_count'], value['cpu'])
327 value['cpu_count'], value['cpu'])
330
328
331 return SysInfoRes(value=value, state=state, human_value=human_value)
329 return SysInfoRes(value=value, state=state, human_value=human_value)
332
330
333
331
334 @register_sysinfo
332 @register_sysinfo
335 def storage():
333 def storage():
336 from rhodecode.lib.helpers import format_byte_size_binary
334 from rhodecode.lib.helpers import format_byte_size_binary
337 from rhodecode.model.settings import VcsSettingsModel
335 from rhodecode.model.settings import VcsSettingsModel
338 path = VcsSettingsModel().get_repos_location()
336 path = VcsSettingsModel().get_repos_location()
339
337
340 value = dict(percent=0, used=0, total=0, path=path, text='')
338 value = dict(percent=0, used=0, total=0, path=path, text='')
341 state = STATE_OK_DEFAULT
339 state = STATE_OK_DEFAULT
342
340
343 try:
341 try:
344 value.update(dict(psutil.disk_usage(path)._asdict()))
342 value.update(dict(psutil.disk_usage(path)._asdict()))
345 except Exception as e:
343 except Exception as e:
346 log.exception('Failed to fetch disk info')
344 log.exception('Failed to fetch disk info')
347 state = {'message': str(e), 'type': STATE_ERR}
345 state = {'message': str(e), 'type': STATE_ERR}
348
346
349 human_value = value.copy()
347 human_value = value.copy()
350 human_value['used'] = format_byte_size_binary(value['used'])
348 human_value['used'] = format_byte_size_binary(value['used'])
351 human_value['total'] = format_byte_size_binary(value['total'])
349 human_value['total'] = format_byte_size_binary(value['total'])
352 human_value['text'] = "{}/{}, {}% used".format(
350 human_value['text'] = "{}/{}, {}% used".format(
353 format_byte_size_binary(value['used']),
351 format_byte_size_binary(value['used']),
354 format_byte_size_binary(value['total']),
352 format_byte_size_binary(value['total']),
355 value['percent'])
353 value['percent'])
356
354
357 if state['type'] == STATE_OK and value['percent'] > 90:
355 if state['type'] == STATE_OK and value['percent'] > 90:
358 msg = 'Critical: your disk space is very low.'
356 msg = 'Critical: your disk space is very low.'
359 state = {'message': msg, 'type': STATE_ERR}
357 state = {'message': msg, 'type': STATE_ERR}
360
358
361 elif state['type'] == STATE_OK and value['percent'] > 70:
359 elif state['type'] == STATE_OK and value['percent'] > 70:
362 msg = 'Warning: your disk space is running low.'
360 msg = 'Warning: your disk space is running low.'
363 state = {'message': msg, 'type': STATE_WARN}
361 state = {'message': msg, 'type': STATE_WARN}
364
362
365 return SysInfoRes(value=value, state=state, human_value=human_value)
363 return SysInfoRes(value=value, state=state, human_value=human_value)
366
364
367
365
368 @register_sysinfo
366 @register_sysinfo
369 def storage_inodes():
367 def storage_inodes():
370 from rhodecode.model.settings import VcsSettingsModel
368 from rhodecode.model.settings import VcsSettingsModel
371 path = VcsSettingsModel().get_repos_location()
369 path = VcsSettingsModel().get_repos_location()
372
370
373 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
371 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
374 state = STATE_OK_DEFAULT
372 state = STATE_OK_DEFAULT
375
373
376 try:
374 try:
377 i_stat = os.statvfs(path)
375 i_stat = os.statvfs(path)
378 value['free'] = i_stat.f_ffree
376 value['free'] = i_stat.f_ffree
379 value['used'] = i_stat.f_files-i_stat.f_favail
377 value['used'] = i_stat.f_files-i_stat.f_favail
380 value['total'] = i_stat.f_files
378 value['total'] = i_stat.f_files
381 value['percent'] = percentage(value['used'], value['total'])
379 value['percent'] = percentage(value['used'], value['total'])
382 except Exception as e:
380 except Exception as e:
383 log.exception('Failed to fetch disk inodes info')
381 log.exception('Failed to fetch disk inodes info')
384 state = {'message': str(e), 'type': STATE_ERR}
382 state = {'message': str(e), 'type': STATE_ERR}
385
383
386 human_value = value.copy()
384 human_value = value.copy()
387 human_value['text'] = "{}/{}, {}% used".format(
385 human_value['text'] = "{}/{}, {}% used".format(
388 value['used'], value['total'], value['percent'])
386 value['used'], value['total'], value['percent'])
389
387
390 if state['type'] == STATE_OK and value['percent'] > 90:
388 if state['type'] == STATE_OK and value['percent'] > 90:
391 msg = 'Critical: your disk free inodes are very low.'
389 msg = 'Critical: your disk free inodes are very low.'
392 state = {'message': msg, 'type': STATE_ERR}
390 state = {'message': msg, 'type': STATE_ERR}
393
391
394 elif state['type'] == STATE_OK and value['percent'] > 70:
392 elif state['type'] == STATE_OK and value['percent'] > 70:
395 msg = 'Warning: your disk free inodes are running low.'
393 msg = 'Warning: your disk free inodes are running low.'
396 state = {'message': msg, 'type': STATE_WARN}
394 state = {'message': msg, 'type': STATE_WARN}
397
395
398 return SysInfoRes(value=value, state=state, human_value=human_value)
396 return SysInfoRes(value=value, state=state, human_value=human_value)
399
397
400
398
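A note on the inode arithmetic in storage_inodes above: f_ffree (free for root) feeds 'free' while 'used' is derived from f_favail (free for unprivileged processes), so used + free need not equal total. A minimal POSIX-only sketch of the same calculation:

import os

def inode_usage(path):
    # f_files: total inodes, f_ffree: free (root), f_favail: free (non-root)
    st = os.statvfs(path)
    used = st.f_files - st.f_favail
    percent = 100.0 * used / st.f_files if st.f_files else 0.0
    return {'total': st.f_files, 'free': st.f_ffree,
            'used': used, 'percent': round(percent, 1)}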
401 @register_sysinfo
399 @register_sysinfo
402 def storage_archives():
400 def storage_archives():
403 import rhodecode
401 import rhodecode
404 from rhodecode.lib.utils import safe_str
402 from rhodecode.lib.utils import safe_str
405 from rhodecode.lib.helpers import format_byte_size_binary
403 from rhodecode.lib.helpers import format_byte_size_binary
406
404
407 msg = 'Archive cache storage is controlled by ' \
405 msg = 'Archive cache storage is controlled by ' \
408 'archive_cache.store_dir=/path/to/cache option in the .ini file'
406 'archive_cache.store_dir=/path/to/cache option in the .ini file'
409 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
407 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
410
408
411 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
409 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
412 state = STATE_OK_DEFAULT
410 state = STATE_OK_DEFAULT
413 try:
411 try:
414 items_count = 0
412 items_count = 0
415 used = 0
413 used = 0
416 for root, dirs, files in os.walk(path):
414 for root, dirs, files in os.walk(path):
417 if root == path:
415 if root == path:
418 items_count = len(dirs)
416 items_count = len(dirs)
419
417
420 for f in files:
418 for f in files:
421 try:
419 try:
422 used += os.path.getsize(os.path.join(root, f))
420 used += os.path.getsize(os.path.join(root, f))
423 except OSError:
421 except OSError:
424 pass
422 pass
425 value.update({
423 value.update({
426 'percent': 100,
424 'percent': 100,
427 'used': used,
425 'used': used,
428 'total': used,
426 'total': used,
429 'items': items_count
427 'items': items_count
430 })
428 })
431
429
432 except Exception as e:
430 except Exception as e:
433 log.exception('failed to fetch archive cache storage')
431 log.exception('failed to fetch archive cache storage')
434 state = {'message': str(e), 'type': STATE_ERR}
432 state = {'message': str(e), 'type': STATE_ERR}
435
433
436 human_value = value.copy()
434 human_value = value.copy()
437 human_value['used'] = format_byte_size_binary(value['used'])
435 human_value['used'] = format_byte_size_binary(value['used'])
438 human_value['total'] = format_byte_size_binary(value['total'])
436 human_value['total'] = format_byte_size_binary(value['total'])
439 human_value['text'] = "{} ({} items)".format(
437 human_value['text'] = "{} ({} items)".format(
440 human_value['used'], value['items'])
438 human_value['used'], value['items'])
441
439
442 return SysInfoRes(value=value, state=state, human_value=human_value)
440 return SysInfoRes(value=value, state=state, human_value=human_value)
443
441
444
442
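storage_archives above and storage_gist below share the same os.walk accounting; a hedged sketch of it as a standalone helper:

import os

def dir_usage(path):
    # Top-level directory count becomes `items`; file sizes are summed,
    # skipping entries that disappear mid-walk, as the OSError guard
    # above does.
    items, used = 0, 0
    for root, dirs, files in os.walk(path):
        if root == path:
            items = len(dirs)
        for name in files:
            try:
                used += os.path.getsize(os.path.join(root, name))
            except OSError:
                pass
    return {'items': items, 'used': used}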
445 @register_sysinfo
443 @register_sysinfo
446 def storage_gist():
444 def storage_gist():
447 from rhodecode.model.gist import GIST_STORE_LOC
445 from rhodecode.model.gist import GIST_STORE_LOC
448 from rhodecode.model.settings import VcsSettingsModel
446 from rhodecode.model.settings import VcsSettingsModel
449 from rhodecode.lib.utils import safe_str
447 from rhodecode.lib.utils import safe_str
450 from rhodecode.lib.helpers import format_byte_size_binary
448 from rhodecode.lib.helpers import format_byte_size_binary
451 path = safe_str(os.path.join(
449 path = safe_str(os.path.join(
452 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
450 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
453
451
454 # gist storage
452 # gist storage
455 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
453 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
456 state = STATE_OK_DEFAULT
454 state = STATE_OK_DEFAULT
457
455
458 try:
456 try:
459 items_count = 0
457 items_count = 0
460 used = 0
458 used = 0
461 for root, dirs, files in os.walk(path):
459 for root, dirs, files in os.walk(path):
462 if root == path:
460 if root == path:
463 items_count = len(dirs)
461 items_count = len(dirs)
464
462
465 for f in files:
463 for f in files:
466 try:
464 try:
467 used += os.path.getsize(os.path.join(root, f))
465 used += os.path.getsize(os.path.join(root, f))
468 except OSError:
466 except OSError:
469 pass
467 pass
470 value.update({
468 value.update({
471 'percent': 100,
469 'percent': 100,
472 'used': used,
470 'used': used,
473 'total': used,
471 'total': used,
474 'items': items_count
472 'items': items_count
475 })
473 })
476 except Exception as e:
474 except Exception as e:
477 log.exception('failed to fetch gist storage items')
475 log.exception('failed to fetch gist storage items')
478 state = {'message': str(e), 'type': STATE_ERR}
476 state = {'message': str(e), 'type': STATE_ERR}
479
477
480 human_value = value.copy()
478 human_value = value.copy()
481 human_value['used'] = format_byte_size_binary(value['used'])
479 human_value['used'] = format_byte_size_binary(value['used'])
482 human_value['total'] = format_byte_size_binary(value['total'])
480 human_value['total'] = format_byte_size_binary(value['total'])
483 human_value['text'] = "{} ({} items)".format(
481 human_value['text'] = "{} ({} items)".format(
484 human_value['used'], value['items'])
482 human_value['used'], value['items'])
485
483
486 return SysInfoRes(value=value, state=state, human_value=human_value)
484 return SysInfoRes(value=value, state=state, human_value=human_value)
487
485
488
486
489 @register_sysinfo
487 @register_sysinfo
490 def storage_temp():
488 def storage_temp():
491 import tempfile
489 import tempfile
492 from rhodecode.lib.helpers import format_byte_size_binary
490 from rhodecode.lib.helpers import format_byte_size_binary
493
491
494 path = tempfile.gettempdir()
492 path = tempfile.gettempdir()
495 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
493 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
496 state = STATE_OK_DEFAULT
494 state = STATE_OK_DEFAULT
497
495
498 if not psutil:
496 if not psutil:
499 return SysInfoRes(value=value, state=state)
497 return SysInfoRes(value=value, state=state)
500
498
501 try:
499 try:
502 value.update(dict(psutil.disk_usage(path)._asdict()))
500 value.update(dict(psutil.disk_usage(path)._asdict()))
503 except Exception as e:
501 except Exception as e:
504 log.exception('Failed to fetch temp dir info')
502 log.exception('Failed to fetch temp dir info')
505 state = {'message': str(e), 'type': STATE_ERR}
503 state = {'message': str(e), 'type': STATE_ERR}
506
504
507 human_value = value.copy()
505 human_value = value.copy()
508 human_value['used'] = format_byte_size_binary(value['used'])
506 human_value['used'] = format_byte_size_binary(value['used'])
509 human_value['total'] = format_byte_size_binary(value['total'])
507 human_value['total'] = format_byte_size_binary(value['total'])
510 human_value['text'] = "{}/{}, {}% used".format(
508 human_value['text'] = "{}/{}, {}% used".format(
511 format_byte_size_binary(value['used']),
509 format_byte_size_binary(value['used']),
512 format_byte_size_binary(value['total']),
510 format_byte_size_binary(value['total']),
513 value['percent'])
511 value['percent'])
514
512
515 return SysInfoRes(value=value, state=state, human_value=human_value)
513 return SysInfoRes(value=value, state=state, human_value=human_value)
516
514
517
515
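storage_temp leans on psutil.disk_usage, whose named tuple supplies total/used/free/percent via _asdict() directly; for example, assuming psutil is available:

import tempfile
import psutil  # optional dependency, guarded by the `if not psutil` check above

usage = psutil.disk_usage(tempfile.gettempdir())
# sdiskusage(total=..., used=..., free=..., percent=...)
print(dict(usage._asdict()))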
518 @register_sysinfo
516 @register_sysinfo
519 def search_info():
517 def search_info():
520 import rhodecode
518 import rhodecode
521 from rhodecode.lib.index import searcher_from_config
519 from rhodecode.lib.index import searcher_from_config
522
520
523 backend = rhodecode.CONFIG.get('search.module', '')
521 backend = rhodecode.CONFIG.get('search.module', '')
524 location = rhodecode.CONFIG.get('search.location', '')
522 location = rhodecode.CONFIG.get('search.location', '')
525
523
526 try:
524 try:
527 searcher = searcher_from_config(rhodecode.CONFIG)
525 searcher = searcher_from_config(rhodecode.CONFIG)
528 searcher = searcher.__class__.__name__
526 searcher = searcher.__class__.__name__
529 except Exception:
527 except Exception:
530 searcher = None
528 searcher = None
531
529
532 value = dict(
530 value = dict(
533 backend=backend, searcher=searcher, location=location, text='')
531 backend=backend, searcher=searcher, location=location, text='')
534 state = STATE_OK_DEFAULT
532 state = STATE_OK_DEFAULT
535
533
536 human_value = value.copy()
534 human_value = value.copy()
537 human_value['text'] = "backend:`{}`".format(human_value['backend'])
535 human_value['text'] = "backend:`{}`".format(human_value['backend'])
538
536
539 return SysInfoRes(value=value, state=state, human_value=human_value)
537 return SysInfoRes(value=value, state=state, human_value=human_value)
540
538
541
539
542 @register_sysinfo
540 @register_sysinfo
543 def git_info():
541 def git_info():
544 from rhodecode.lib.vcs.backends import git
542 from rhodecode.lib.vcs.backends import git
545 state = STATE_OK_DEFAULT
543 state = STATE_OK_DEFAULT
546 value = human_value = ''
544 value = human_value = ''
547 try:
545 try:
548 value = git.discover_git_version(raise_on_exc=True)
546 value = git.discover_git_version(raise_on_exc=True)
549 human_value = 'version reported from VCSServer: {}'.format(value)
547 human_value = f'version reported from VCSServer: {value}'
550 except Exception as e:
548 except Exception as e:
551 state = {'message': str(e), 'type': STATE_ERR}
549 state = {'message': str(e), 'type': STATE_ERR}
552
550
553 return SysInfoRes(value=value, state=state, human_value=human_value)
551 return SysInfoRes(value=value, state=state, human_value=human_value)
554
552
555
553
556 @register_sysinfo
554 @register_sysinfo
557 def hg_info():
555 def hg_info():
558 from rhodecode.lib.vcs.backends import hg
556 from rhodecode.lib.vcs.backends import hg
559 state = STATE_OK_DEFAULT
557 state = STATE_OK_DEFAULT
560 value = human_value = ''
558 value = human_value = ''
561 try:
559 try:
562 value = hg.discover_hg_version(raise_on_exc=True)
560 value = hg.discover_hg_version(raise_on_exc=True)
563 human_value = 'version reported from VCSServer: {}'.format(value)
561 human_value = f'version reported from VCSServer: {value}'
564 except Exception as e:
562 except Exception as e:
565 state = {'message': str(e), 'type': STATE_ERR}
563 state = {'message': str(e), 'type': STATE_ERR}
566 return SysInfoRes(value=value, state=state, human_value=human_value)
564 return SysInfoRes(value=value, state=state, human_value=human_value)
567
565
568
566
569 @register_sysinfo
567 @register_sysinfo
570 def svn_info():
568 def svn_info():
571 from rhodecode.lib.vcs.backends import svn
569 from rhodecode.lib.vcs.backends import svn
572 state = STATE_OK_DEFAULT
570 state = STATE_OK_DEFAULT
573 value = human_value = ''
571 value = human_value = ''
574 try:
572 try:
575 value = svn.discover_svn_version(raise_on_exc=True)
573 value = svn.discover_svn_version(raise_on_exc=True)
576 human_value = 'version reported from VCSServer: {}'.format(value)
574 human_value = f'version reported from VCSServer: {value}'
577 except Exception as e:
575 except Exception as e:
578 state = {'message': str(e), 'type': STATE_ERR}
576 state = {'message': str(e), 'type': STATE_ERR}
579 return SysInfoRes(value=value, state=state, human_value=human_value)
577 return SysInfoRes(value=value, state=state, human_value=human_value)
580
578
581
579
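git_info, hg_info and svn_info above differ only in the backend module they probe; a sketch of the shared shape (the generic wrapper itself is hypothetical, not part of this change):

def backend_version_info(discover, state_ok):
    # `discover` is one of the discover_*_version callables above;
    # `state_ok` stands in for STATE_OK_DEFAULT, 'error' for STATE_ERR.
    value = human_value = ''
    state = state_ok
    try:
        value = discover(raise_on_exc=True)
        human_value = f'version reported from VCSServer: {value}'
    except Exception as e:
        state = {'message': str(e), 'type': 'error'}
    return value, state, human_value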
582 @register_sysinfo
580 @register_sysinfo
583 def vcs_backends():
581 def vcs_backends():
584 import rhodecode
582 import rhodecode
585 value = rhodecode.CONFIG.get('vcs.backends')
583 value = rhodecode.CONFIG.get('vcs.backends')
586 human_value = 'Enabled backends in order: {}'.format(','.join(value))
584 human_value = 'Enabled backends in order: {}'.format(','.join(value))
587 return SysInfoRes(value=value, human_value=human_value)
585 return SysInfoRes(value=value, human_value=human_value)
588
586
589
587
590 @register_sysinfo
588 @register_sysinfo
591 def vcs_server():
589 def vcs_server():
592 import rhodecode
590 import rhodecode
593 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
591 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
594
592
595 server_url = rhodecode.CONFIG.get('vcs.server')
593 server_url = rhodecode.CONFIG.get('vcs.server')
596 enabled = rhodecode.CONFIG.get('vcs.server.enable')
594 enabled = rhodecode.CONFIG.get('vcs.server.enable')
597 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
595 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
598 state = STATE_OK_DEFAULT
596 state = STATE_OK_DEFAULT
599 version = None
597 version = None
600 workers = 0
598 workers = 0
601
599
602 try:
600 try:
603 data = get_vcsserver_service_data()
601 data = get_vcsserver_service_data()
604 if data and 'version' in data:
602 if data and 'version' in data:
605 version = data['version']
603 version = data['version']
606
604
607 if data and 'config' in data:
605 if data and 'config' in data:
608 conf = data['config']
606 conf = data['config']
609 workers = conf.get('workers', 'NOT AVAILABLE')
607 workers = conf.get('workers', 'NOT AVAILABLE')
610
608
611 connection = 'connected'
609 connection = 'connected'
612 except Exception as e:
610 except Exception as e:
613 connection = 'failed'
611 connection = 'failed'
614 state = {'message': str(e), 'type': STATE_ERR}
612 state = {'message': str(e), 'type': STATE_ERR}
615
613
616 value = dict(
614 value = dict(
617 url=server_url,
615 url=server_url,
618 enabled=enabled,
616 enabled=enabled,
619 protocol=protocol,
617 protocol=protocol,
620 connection=connection,
618 connection=connection,
621 version=version,
619 version=version,
622 text='',
620 text='',
623 )
621 )
624
622
625 human_value = value.copy()
623 human_value = value.copy()
626 human_value['text'] = \
624 human_value['text'] = \
627 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
625 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
628 url=server_url, ver=version, workers=workers, mode=protocol,
626 url=server_url, ver=version, workers=workers, mode=protocol,
629 conn=connection)
627 conn=connection)
630
628
631 return SysInfoRes(value=value, state=state, human_value=human_value)
629 return SysInfoRes(value=value, state=state, human_value=human_value)
632
630
633
631
634 @register_sysinfo
632 @register_sysinfo
635 def vcs_server_config():
633 def vcs_server_config():
636 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
634 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
637 state = STATE_OK_DEFAULT
635 state = STATE_OK_DEFAULT
638
636
639 value = {}
637 value = {}
640 try:
638 try:
641 data = get_vcsserver_service_data()
639 data = get_vcsserver_service_data()
642 value = data['app_config']
640 value = data['app_config']
643 except Exception as e:
641 except Exception as e:
644 state = {'message': str(e), 'type': STATE_ERR}
642 state = {'message': str(e), 'type': STATE_ERR}
645
643
646 human_value = value.copy()
644 human_value = value.copy()
647 human_value['text'] = 'VCS Server config'
645 human_value['text'] = 'VCS Server config'
648
646
649 return SysInfoRes(value=value, state=state, human_value=human_value)
647 return SysInfoRes(value=value, state=state, human_value=human_value)
650
648
651
649
652 @register_sysinfo
650 @register_sysinfo
653 def rhodecode_app_info():
651 def rhodecode_app_info():
654 import rhodecode
652 import rhodecode
655 edition = rhodecode.CONFIG.get('rhodecode.edition')
653 edition = rhodecode.CONFIG.get('rhodecode.edition')
656
654
657 value = dict(
655 value = dict(
658 rhodecode_version=rhodecode.__version__,
656 rhodecode_version=rhodecode.__version__,
659 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
657 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
660 text=''
658 text=''
661 )
659 )
662 human_value = value.copy()
660 human_value = value.copy()
663 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
661 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
664 edition=edition, ver=value['rhodecode_version']
662 edition=edition, ver=value['rhodecode_version']
665 )
663 )
666 return SysInfoRes(value=value, human_value=human_value)
664 return SysInfoRes(value=value, human_value=human_value)
667
665
668
666
669 @register_sysinfo
667 @register_sysinfo
670 def rhodecode_config():
668 def rhodecode_config():
671 import rhodecode
669 import rhodecode
672 path = rhodecode.CONFIG.get('__file__')
670 path = rhodecode.CONFIG.get('__file__')
673 rhodecode_ini_safe = rhodecode.CONFIG.copy()
671 rhodecode_ini_safe = rhodecode.CONFIG.copy()
674 cert_path = get_cert_path(path)
672 cert_path = get_cert_path(path)
675
673
676 try:
674 try:
677 config = configparser.ConfigParser()
675 config = configparser.ConfigParser()
678 config.read(path)
676 config.read(path)
679 parsed_ini = config
677 parsed_ini = config
680 if parsed_ini.has_section('server:main'):
678 if parsed_ini.has_section('server:main'):
681 parsed_ini = dict(parsed_ini.items('server:main'))
679 parsed_ini = dict(parsed_ini.items('server:main'))
682 except Exception:
680 except Exception:
683 log.exception('Failed to read .ini file for display')
681 log.exception('Failed to read .ini file for display')
684 parsed_ini = {}
682 parsed_ini = {}
685
683
686 rhodecode_ini_safe['server:main'] = parsed_ini
684 rhodecode_ini_safe['server:main'] = parsed_ini
687
685
688 blacklist = [
686 blacklist = [
689 'rhodecode_license_key',
687 'rhodecode_license_key',
690 'routes.map',
688 'routes.map',
691 'sqlalchemy.db1.url',
689 'sqlalchemy.db1.url',
692 'channelstream.secret',
690 'channelstream.secret',
693 'beaker.session.secret',
691 'beaker.session.secret',
694 'rhodecode.encrypted_values.secret',
692 'rhodecode.encrypted_values.secret',
695 'rhodecode_auth_github_consumer_key',
693 'rhodecode_auth_github_consumer_key',
696 'rhodecode_auth_github_consumer_secret',
694 'rhodecode_auth_github_consumer_secret',
697 'rhodecode_auth_google_consumer_key',
695 'rhodecode_auth_google_consumer_key',
698 'rhodecode_auth_google_consumer_secret',
696 'rhodecode_auth_google_consumer_secret',
699 'rhodecode_auth_bitbucket_consumer_secret',
697 'rhodecode_auth_bitbucket_consumer_secret',
700 'rhodecode_auth_bitbucket_consumer_key',
698 'rhodecode_auth_bitbucket_consumer_key',
701 'rhodecode_auth_twitter_consumer_secret',
699 'rhodecode_auth_twitter_consumer_secret',
702 'rhodecode_auth_twitter_consumer_key',
700 'rhodecode_auth_twitter_consumer_key',
703
701
704 'rhodecode_auth_twitter_secret',
702 'rhodecode_auth_twitter_secret',
705 'rhodecode_auth_github_secret',
703 'rhodecode_auth_github_secret',
706 'rhodecode_auth_google_secret',
704 'rhodecode_auth_google_secret',
707 'rhodecode_auth_bitbucket_secret',
705 'rhodecode_auth_bitbucket_secret',
708
706
709 'appenlight.api_key',
707 'appenlight.api_key',
710 ('app_conf', 'sqlalchemy.db1.url')
708 ('app_conf', 'sqlalchemy.db1.url')
711 ]
709 ]
712 for k in blacklist:
710 for k in blacklist:
713 if isinstance(k, tuple):
711 if isinstance(k, tuple):
714 section, key = k
712 section, key = k
715 if section in rhodecode_ini_safe:
713 if section in rhodecode_ini_safe:
716 rhodecode_ini_safe[section] = '**OBFUSCATED**'
714 rhodecode_ini_safe[section] = '**OBFUSCATED**'
717 else:
715 else:
718 rhodecode_ini_safe.pop(k, None)
716 rhodecode_ini_safe.pop(k, None)
719
717
720 # TODO: maybe put some CONFIG checks here ?
718 # TODO: maybe put some CONFIG checks here ?
721 return SysInfoRes(value={'config': rhodecode_ini_safe,
719 return SysInfoRes(value={'config': rhodecode_ini_safe,
722 'path': path, 'cert_path': cert_path})
720 'path': path, 'cert_path': cert_path})
723
721
724
722
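Worth noting in the blacklist loop above: plain keys are popped, while a (section, key) tuple replaces the entire section with '**OBFUSCATED**', not just the listed key. A hypothetical key-level variant, for contrast:

def obfuscate(config, entries):
    # Pop plain keys; for (section, key) tuples mask only that key,
    # unlike the loop above, which masks the whole section.
    safe = dict(config)
    for entry in entries:
        if isinstance(entry, tuple):
            section, key = entry
            if isinstance(safe.get(section), dict) and key in safe[section]:
                safe[section] = {**safe[section], key: '**OBFUSCATED**'}
        else:
            safe.pop(entry, None)
    return safe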
725 @register_sysinfo
723 @register_sysinfo
726 def database_info():
724 def database_info():
727 import rhodecode
725 import rhodecode
728 from sqlalchemy.engine import url as engine_url
726 from sqlalchemy.engine import url as engine_url
729 from rhodecode.model import meta
727 from rhodecode.model import meta
730 from rhodecode.model.meta import Session
728 from rhodecode.model.meta import Session
731 from rhodecode.model.db import DbMigrateVersion
729 from rhodecode.model.db import DbMigrateVersion
732
730
733 state = STATE_OK_DEFAULT
731 state = STATE_OK_DEFAULT
734
732
735 db_migrate = DbMigrateVersion.query().filter(
733 db_migrate = DbMigrateVersion.query().filter(
736 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
734 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
737
735
738 db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
736 db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
739
737
740 try:
738 try:
741 engine = meta.get_engine()
739 engine = meta.get_engine()
742 db_server_info = engine.dialect._get_server_version_info(
740 db_server_info = engine.dialect._get_server_version_info(
743 Session.connection(bind=engine))
741 Session.connection(bind=engine))
744 db_version = '.'.join(map(str, db_server_info))
742 db_version = '.'.join(map(str, db_server_info))
745 except Exception:
743 except Exception:
746 log.exception('failed to fetch db version')
744 log.exception('failed to fetch db version')
747 db_version = 'UNKNOWN'
745 db_version = 'UNKNOWN'
748
746
749 db_info = dict(
747 db_info = dict(
750 migrate_version=db_migrate.version,
748 migrate_version=db_migrate.version,
751 type=db_url_obj.get_backend_name(),
749 type=db_url_obj.get_backend_name(),
752 version=db_version,
750 version=db_version,
753 url=repr(db_url_obj)
751 url=repr(db_url_obj)
754 )
752 )
755 current_version = db_migrate.version
753 current_version = db_migrate.version
756 expected_version = rhodecode.__dbversion__
754 expected_version = rhodecode.__dbversion__
757 if state['type'] == STATE_OK and current_version != expected_version:
755 if state['type'] == STATE_OK and current_version != expected_version:
758 msg = 'Critical: database schema mismatch, ' \
756 msg = 'Critical: database schema mismatch, ' \
759 'expected version {}, got {}. ' \
757 'expected version {}, got {}. ' \
760 'Please run migrations on your database.'.format(
758 'Please run migrations on your database.'.format(
761 expected_version, current_version)
759 expected_version, current_version)
762 state = {'message': msg, 'type': STATE_ERR}
760 state = {'message': msg, 'type': STATE_ERR}
763
761
764 human_value = db_info.copy()
762 human_value = db_info.copy()
765 human_value['url'] = "{} @ migration version: {}".format(
763 human_value['url'] = "{} @ migration version: {}".format(
766 db_info['url'], db_info['migrate_version'])
764 db_info['url'], db_info['migrate_version'])
767 human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
765 human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
768 return SysInfoRes(value=db_info, state=state, human_value=human_value)
766 return SysInfoRes(value=db_info, state=state, human_value=human_value)
769
767
770
768
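database_info stores repr(db_url_obj) rather than the raw URL string; SQLAlchemy's URL repr masks the password (behaviour as observed on 1.3/1.4, hedged for other versions):

from sqlalchemy.engine import url as engine_url

db_url = engine_url.make_url('postgresql://rc_user:s3cret@localhost/rhodecode')
print(str(db_url))   # password in the clear: ...rc_user:s3cret@...
print(repr(db_url))  # password masked:       ...rc_user:***@...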
771 @register_sysinfo
769 @register_sysinfo
772 def server_info(environ):
770 def server_info(environ):
773 import rhodecode
771 import rhodecode
774 from rhodecode.lib.base import get_server_ip_addr, get_server_port
772 from rhodecode.lib.base import get_server_ip_addr, get_server_port
775
773
776 value = {
774 value = {
777 'server_ip': '%s:%s' % (
775 'server_ip': '{}:{}'.format(
778 get_server_ip_addr(environ, log_errors=False),
776 get_server_ip_addr(environ, log_errors=False),
779 get_server_port(environ)
777 get_server_port(environ)
780 ),
778 ),
781 'server_id': rhodecode.CONFIG.get('instance_id'),
779 'server_id': rhodecode.CONFIG.get('instance_id'),
782 }
780 }
783 return SysInfoRes(value=value)
781 return SysInfoRes(value=value)
784
782
785
783
786 @register_sysinfo
784 @register_sysinfo
787 def usage_info():
785 def usage_info():
788 from rhodecode.model.db import User, Repository
786 from rhodecode.model.db import User, Repository
789 value = {
787 value = {
790 'users': User.query().count(),
788 'users': User.query().count(),
791 'users_active': User.query().filter(User.active == True).count(),
789 'users_active': User.query().filter(User.active == True).count(),
792 'repositories': Repository.query().count(),
790 'repositories': Repository.query().count(),
793 'repository_types': {
791 'repository_types': {
794 'hg': Repository.query().filter(
792 'hg': Repository.query().filter(
795 Repository.repo_type == 'hg').count(),
793 Repository.repo_type == 'hg').count(),
796 'git': Repository.query().filter(
794 'git': Repository.query().filter(
797 Repository.repo_type == 'git').count(),
795 Repository.repo_type == 'git').count(),
798 'svn': Repository.query().filter(
796 'svn': Repository.query().filter(
799 Repository.repo_type == 'svn').count(),
797 Repository.repo_type == 'svn').count(),
800 },
798 },
801 }
799 }
802 return SysInfoRes(value=value)
800 return SysInfoRes(value=value)
803
801
804
802
805 def get_system_info(environ):
803 def get_system_info(environ):
806 environ = environ or {}
804 environ = environ or {}
807 return {
805 return {
808 'rhodecode_app': SysInfo(rhodecode_app_info)(),
806 'rhodecode_app': SysInfo(rhodecode_app_info)(),
809 'rhodecode_config': SysInfo(rhodecode_config)(),
807 'rhodecode_config': SysInfo(rhodecode_config)(),
810 'rhodecode_usage': SysInfo(usage_info)(),
808 'rhodecode_usage': SysInfo(usage_info)(),
811 'python': SysInfo(python_info)(),
809 'python': SysInfo(python_info)(),
812 'py_modules': SysInfo(py_modules)(),
810 'py_modules': SysInfo(py_modules)(),
813
811
814 'platform': SysInfo(platform_type)(),
812 'platform': SysInfo(platform_type)(),
815 'locale': SysInfo(locale_info)(),
813 'locale': SysInfo(locale_info)(),
816 'server': SysInfo(server_info, environ=environ)(),
814 'server': SysInfo(server_info, environ=environ)(),
817 'database': SysInfo(database_info)(),
815 'database': SysInfo(database_info)(),
818 'ulimit': SysInfo(ulimit_info)(),
816 'ulimit': SysInfo(ulimit_info)(),
819 'storage': SysInfo(storage)(),
817 'storage': SysInfo(storage)(),
820 'storage_inodes': SysInfo(storage_inodes)(),
818 'storage_inodes': SysInfo(storage_inodes)(),
821 'storage_archive': SysInfo(storage_archives)(),
819 'storage_archive': SysInfo(storage_archives)(),
822 'storage_gist': SysInfo(storage_gist)(),
820 'storage_gist': SysInfo(storage_gist)(),
823 'storage_temp': SysInfo(storage_temp)(),
821 'storage_temp': SysInfo(storage_temp)(),
824
822
825 'search': SysInfo(search_info)(),
823 'search': SysInfo(search_info)(),
826
824
827 'uptime': SysInfo(uptime)(),
825 'uptime': SysInfo(uptime)(),
828 'load': SysInfo(machine_load)(),
826 'load': SysInfo(machine_load)(),
829 'cpu': SysInfo(cpu)(),
827 'cpu': SysInfo(cpu)(),
830 'memory': SysInfo(memory)(),
828 'memory': SysInfo(memory)(),
831
829
832 'vcs_backends': SysInfo(vcs_backends)(),
830 'vcs_backends': SysInfo(vcs_backends)(),
833 'vcs_server': SysInfo(vcs_server)(),
831 'vcs_server': SysInfo(vcs_server)(),
834
832
835 'vcs_server_config': SysInfo(vcs_server_config)(),
833 'vcs_server_config': SysInfo(vcs_server_config)(),
836
834
837 'git': SysInfo(git_info)(),
835 'git': SysInfo(git_info)(),
838 'hg': SysInfo(hg_info)(),
836 'hg': SysInfo(hg_info)(),
839 'svn': SysInfo(svn_info)(),
837 'svn': SysInfo(svn_info)(),
840 }
838 }
841
839
842
840
843 def load_system_info(key):
841 def load_system_info(key):
844 """
842 """
845 get_sys_info('vcs_server')
843 get_sys_info('vcs_server')
846 get_sys_info('database')
844 get_sys_info('database')
847 """
845 """
848 return SysInfo(registered_helpers[key])()
846 return SysInfo(registered_helpers[key])()
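Taken together: get_system_info assembles the full report, load_system_info serves one registered key. A usage sketch, assuming the keys registered above:

# One registered check:
db_res = load_system_info('database')

# The full report; environ may be a WSGI environ or an empty dict:
report = get_system_info({})
print(sorted(report))   # ['cpu', 'database', 'git', 'hg', 'load', ...]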
@@ -1,93 +1,91 @@
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 import logging
19 import logging
22
20
23 log = logging.getLogger(__name__)
21 log = logging.getLogger(__name__)
24
22
25
23
26 def str2bool(str_):
24 def str2bool(str_):
27 """
25 """
28 Returns a True/False value from the given string; it tries to translate
26 Returns a True/False value from the given string; it tries to translate
29 the string into a boolean.
27 the string into a boolean.
30
28
31 :param str_: string value to translate into boolean
29 :param str_: string value to translate into boolean
32 :rtype: boolean
30 :rtype: boolean
33 :returns: boolean from given string
31 :returns: boolean from given string
34 """
32 """
35 if str_ is None:
33 if str_ is None:
36 return False
34 return False
37 if str_ in (True, False):
35 if str_ in (True, False):
38 return str_
36 return str_
39 str_ = str(str_).strip().lower()
37 str_ = str(str_).strip().lower()
40 return str_ in ('t', 'true', 'y', 'yes', 'on', '1')
38 return str_ in ('t', 'true', 'y', 'yes', 'on', '1')
41
39
42
40
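For reference, str2bool above behaves like this:

assert str2bool('Yes') is True      # stripped, lowercased, then matched
assert str2bool(' on ') is True
assert str2bool('0') is False
assert str2bool(None) is False
assert str2bool(True) is True       # booleans pass through unchanged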
43 def aslist(obj, sep=None, strip=True) -> list:
41 def aslist(obj, sep=None, strip=True) -> list:
44 """
42 """
45 Returns the given string split by sep as a list
43 Returns the given string split by sep as a list
46
44
47 :param obj:
45 :param obj:
48 :param sep:
46 :param sep:
49 :param strip:
47 :param strip:
50 """
48 """
51 if isinstance(obj, str):
49 if isinstance(obj, str):
52 if obj in ['', ""]:
50 if obj in ['', ""]:
53 return []
51 return []
54
52
55 lst = obj.split(sep)
53 lst = obj.split(sep)
56 if strip:
54 if strip:
57 lst = [v.strip() for v in lst]
55 lst = [v.strip() for v in lst]
58 return lst
56 return lst
59 elif isinstance(obj, (list, tuple)):
57 elif isinstance(obj, (list, tuple)):
60 return obj
58 return obj
61 elif obj is None:
59 elif obj is None:
62 return []
60 return []
63 else:
61 else:
64 return [obj]
62 return [obj]
65
63
66
64
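Likewise, aslist above accepts strings, sequences, None and scalars (note the -> list annotation is loose: lists and tuples are returned unchanged):

assert aslist('hg, git, svn', sep=',') == ['hg', 'git', 'svn']
assert aslist('') == []
assert aslist(None) == []
assert aslist(('a', 'b')) == ('a', 'b')   # sequences returned unchanged
assert aslist(42) == [42]                 # scalars wrapped in a list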
67 class AttributeDictBase(dict):
65 class AttributeDictBase(dict):
68 def __getstate__(self):
66 def __getstate__(self):
69 odict = self.__dict__ # get attribute dictionary
67 odict = self.__dict__ # get attribute dictionary
70 return odict
68 return odict
71
69
72 def __setstate__(self, dict):
70 def __setstate__(self, dict):
73 self.__dict__ = dict
71 self.__dict__ = dict
74
72
75 __setattr__ = dict.__setitem__
73 __setattr__ = dict.__setitem__
76 __delattr__ = dict.__delitem__
74 __delattr__ = dict.__delitem__
77
75
78
76
79 class StrictAttributeDict(AttributeDictBase):
77 class StrictAttributeDict(AttributeDictBase):
80 """
78 """
81 Strict version of AttributeDict which raises an AttributeError when the
79 Strict version of AttributeDict which raises an AttributeError when the
82 requested attribute is not set
80 requested attribute is not set
83 """
81 """
84 def __getattr__(self, attr):
82 def __getattr__(self, attr):
85 try:
83 try:
86 return self[attr]
84 return self[attr]
87 except KeyError:
85 except KeyError:
88 raise AttributeError(f'{self.__class__} object has no attribute {attr}')
86 raise AttributeError(f'{self.__class__} object has no attribute {attr}')
89
87
90
88
91 class AttributeDict(AttributeDictBase):
89 class AttributeDict(AttributeDictBase):
92 def __getattr__(self, attr):
90 def __getattr__(self, attr):
93 return self.get(attr, None)
91 return self.get(attr, None)
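A usage sketch for the attribute dicts above; attribute access maps to item access, and the lenient and strict variants differ only on missing keys:

d = AttributeDict(name='rhodecode', active=True)
assert d.name == 'rhodecode'
assert d.missing is None        # lenient: unset keys read as None
d.port = 5000                   # __setattr__ is dict.__setitem__
assert d['port'] == 5000

s = StrictAttributeDict(name='rhodecode')
try:
    s.missing
except AttributeError:
    pass                        # strict: unset keys raise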
@@ -1,128 +1,127 @@
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 import logging
19 import logging
21
20
22 from whoosh.qparser.default import QueryParser, query
21 from whoosh.qparser.default import QueryParser, query
23 from whoosh.qparser.dateparse import DateParserPlugin
22 from whoosh.qparser.dateparse import DateParserPlugin
24 from whoosh.fields import (TEXT, Schema, DATETIME, KEYWORD)
23 from whoosh.fields import (TEXT, Schema, DATETIME, KEYWORD)
25 from sqlalchemy.sql.expression import or_, and_, not_, func
24 from sqlalchemy.sql.expression import or_, and_, not_, func
26
25
27 from rhodecode.model.db import UserLog
26 from rhodecode.model.db import UserLog
28 from rhodecode.lib.utils2 import remove_prefix, remove_suffix
27 from rhodecode.lib.utils2 import remove_prefix, remove_suffix
29 from rhodecode.lib.str_utils import safe_str
28 from rhodecode.lib.str_utils import safe_str
30
29
31
30
32 # JOURNAL SCHEMA used only to generate queries in the journal. We use the
31 # JOURNAL SCHEMA used only to generate queries in the journal. We use the
33 # whoosh query language to build SQL queries and filter journal entries
32 # whoosh query language to build SQL queries and filter journal entries
34 AUDIT_LOG_SCHEMA = Schema(
33 AUDIT_LOG_SCHEMA = Schema(
35 username=KEYWORD(),
34 username=KEYWORD(),
36 repository=KEYWORD(),
35 repository=KEYWORD(),
37
36
38 date=DATETIME(),
37 date=DATETIME(),
39 action=TEXT(),
38 action=TEXT(),
40 ip=TEXT(),
39 ip=TEXT(),
41 )
40 )
42
41
43 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
44
43
45
44
46 def user_log_filter(user_log, search_term):
45 def user_log_filter(user_log, search_term):
47 """
46 """
48 Filters sqlalchemy user_log based on search_term with whoosh Query language
47 Filters sqlalchemy user_log based on search_term with whoosh Query language
49 http://packages.python.org/Whoosh/querylang.html
48 http://packages.python.org/Whoosh/querylang.html
50
49
51 :param user_log:
50 :param user_log:
52 :param search_term:
51 :param search_term:
53 """
52 """
54 log.debug('Initial search term: %r', search_term)
53 log.debug('Initial search term: %r', search_term)
55 qry = None
54 qry = None
56 if search_term:
55 if search_term:
57 qp = QueryParser('repository', schema=AUDIT_LOG_SCHEMA)
56 qp = QueryParser('repository', schema=AUDIT_LOG_SCHEMA)
58 qp.add_plugin(DateParserPlugin())
57 qp.add_plugin(DateParserPlugin())
59 qry = qp.parse(safe_str(search_term))
58 qry = qp.parse(safe_str(search_term))
60 log.debug('Filtering using parsed query %r', qry)
59 log.debug('Filtering using parsed query %r', qry)
61
60
62 def wildcard_handler(col, wc_term):
61 def wildcard_handler(col, wc_term):
63 if wc_term.startswith('*') and not wc_term.endswith('*'):
62 if wc_term.startswith('*') and not wc_term.endswith('*'):
64 # postfix == endswith
63 # postfix == endswith
65 wc_term = remove_prefix(wc_term, prefix='*')
64 wc_term = remove_prefix(wc_term, prefix='*')
66 return func.lower(col).endswith(wc_term)
65 return func.lower(col).endswith(wc_term)
67 elif wc_term.startswith('*') and wc_term.endswith('*'):
66 elif wc_term.startswith('*') and wc_term.endswith('*'):
68 # wildcard == ilike
67 # wildcard == ilike
69 wc_term = remove_prefix(wc_term, prefix='*')
68 wc_term = remove_prefix(wc_term, prefix='*')
70 wc_term = remove_suffix(wc_term, suffix='*')
69 wc_term = remove_suffix(wc_term, suffix='*')
71 return func.lower(col).contains(wc_term)
70 return func.lower(col).contains(wc_term)
72
71
73 def get_filterion(field, val, term):
72 def get_filterion(field, val, term):
74
73
75 if field == 'repository':
74 if field == 'repository':
76 field = getattr(UserLog, 'repository_name')
75 field = getattr(UserLog, 'repository_name')
77 elif field == 'ip':
76 elif field == 'ip':
78 field = getattr(UserLog, 'user_ip')
77 field = getattr(UserLog, 'user_ip')
79 elif field == 'date':
78 elif field == 'date':
80 field = getattr(UserLog, 'action_date')
79 field = getattr(UserLog, 'action_date')
81 elif field == 'username':
80 elif field == 'username':
82 field = getattr(UserLog, 'username')
81 field = getattr(UserLog, 'username')
83 else:
82 else:
84 field = getattr(UserLog, field)
83 field = getattr(UserLog, field)
85 log.debug('filter field: %s val=>%s', field, val)
84 log.debug('filter field: %s val=>%s', field, val)
86
85
87 # sql filtering
86 # sql filtering
88 if isinstance(term, query.Wildcard):
87 if isinstance(term, query.Wildcard):
89 return wildcard_handler(field, val)
88 return wildcard_handler(field, val)
90 elif isinstance(term, query.Prefix):
89 elif isinstance(term, query.Prefix):
91 return func.lower(field).startswith(func.lower(val))
90 return func.lower(field).startswith(func.lower(val))
92 elif isinstance(term, query.DateRange):
91 elif isinstance(term, query.DateRange):
93 return and_(field >= val[0], field <= val[1])
92 return and_(field >= val[0], field <= val[1])
94 elif isinstance(term, query.Not):
93 elif isinstance(term, query.Not):
95 return not_(field == val)
94 return not_(field == val)
96 return func.lower(field) == func.lower(val)
95 return func.lower(field) == func.lower(val)
97
96
98 if isinstance(qry, (query.And, query.Not, query.Term, query.Prefix,
97 if isinstance(qry, (query.And, query.Not, query.Term, query.Prefix,
99 query.Wildcard, query.DateRange)):
98 query.Wildcard, query.DateRange)):
100 if not isinstance(qry, query.And):
99 if not isinstance(qry, query.And):
101 qry = [qry]
100 qry = [qry]
102
101
103 for term in qry:
102 for term in qry:
104 if isinstance(term, query.Not):
103 if isinstance(term, query.Not):
105 not_term = [z for z in term.leaves()][0]
104 not_term = [z for z in term.leaves()][0]
106 field = not_term.fieldname
105 field = not_term.fieldname
107 val = not_term.text
106 val = not_term.text
108 elif isinstance(term, query.DateRange):
107 elif isinstance(term, query.DateRange):
109 field = term.fieldname
108 field = term.fieldname
110 val = [term.startdate, term.enddate]
109 val = [term.startdate, term.enddate]
111 elif isinstance(term, query.NullQuery.__class__):
110 elif isinstance(term, query.NullQuery.__class__):
112 field = ''
111 field = ''
113 val = ''
112 val = ''
114 else:
113 else:
115 field = term.fieldname
114 field = term.fieldname
116 val = term.text
115 val = term.text
117 if field:
116 if field:
118 user_log = user_log.filter(get_filterion(field, val, term))
117 user_log = user_log.filter(get_filterion(field, val, term))
119 elif isinstance(qry, query.Or):
118 elif isinstance(qry, query.Or):
120 filters = []
119 filters = []
121 for term in qry:
120 for term in qry:
122 field = term.fieldname
121 field = term.fieldname
123 val = (term.text if not isinstance(term, query.DateRange)
122 val = (term.text if not isinstance(term, query.DateRange)
124 else [term.startdate, term.enddate])
123 else [term.startdate, term.enddate])
125 filters.append(get_filterion(field, val, term))
124 filters.append(get_filterion(field, val, term))
126 user_log = user_log.filter(or_(*filters))
125 user_log = user_log.filter(or_(*filters))
127
126
128 return user_log
127 return user_log
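A hedged usage sketch for user_log_filter above: a whoosh search term parses into Term/Wildcard/DateRange nodes, which get_filterion maps onto SQLAlchemy filters (field names per AUDIT_LOG_SCHEMA; the exact date syntax depends on DateParserPlugin):

from rhodecode.model.db import UserLog

base_qry = UserLog.query()
filtered = user_log_filter(
    base_qry,
    'username:admin repository:*rhodecode* date:[20230101 to 20231231]')
# username -> case-insensitive equality, the starred repository term ->
# contains()/ilike, the date range -> and_() between action_date bounds.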
@@ -1,264 +1,261 @@
1
2
3 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 import os
19 import os
22 import re
20 import re
23 import time
21 import time
24 import datetime
22 import datetime
25 import dateutil
23 import dateutil
26 import pickle
24 import pickle
27
25
28 from rhodecode.model.db import DbSession, Session
26 from rhodecode.model.db import DbSession, Session
29
27
30
28
31 class CleanupCommand(Exception):
29 class CleanupCommand(Exception):
32 pass
30 pass
33
31
34
32
35 class BaseAuthSessions(object):
33 class BaseAuthSessions(object):
36 SESSION_TYPE = None
34 SESSION_TYPE = None
37 NOT_AVAILABLE = 'NOT AVAILABLE'
35 NOT_AVAILABLE = 'NOT AVAILABLE'
38
36
39 def __init__(self, config):
37 def __init__(self, config):
40 session_conf = {}
38 session_conf = {}
41 for k, v in list(config.items()):
39 for k, v in list(config.items()):
42 if k.startswith('beaker.session'):
40 if k.startswith('beaker.session'):
43 session_conf[k] = v
41 session_conf[k] = v
44 self.config = session_conf
42 self.config = session_conf
45
43
46 def get_count(self):
44 def get_count(self):
47 raise NotImplementedError
45 raise NotImplementedError
48
46
49 def get_expired_count(self, older_than_seconds=None):
47 def get_expired_count(self, older_than_seconds=None):
50 raise NotImplementedError
48 raise NotImplementedError
51
49
52 def clean_sessions(self, older_than_seconds=None):
50 def clean_sessions(self, older_than_seconds=None):
53 raise NotImplementedError
51 raise NotImplementedError
54
52
55 def _seconds_to_date(self, seconds):
53 def _seconds_to_date(self, seconds):
56 return datetime.datetime.utcnow() - dateutil.relativedelta.relativedelta(
54 return datetime.datetime.utcnow() - dateutil.relativedelta.relativedelta(
57 seconds=seconds)
55 seconds=seconds)
58
56
59
57
60 class DbAuthSessions(BaseAuthSessions):
58 class DbAuthSessions(BaseAuthSessions):
61 SESSION_TYPE = 'ext:database'
59 SESSION_TYPE = 'ext:database'
62
60
63 def get_count(self):
61 def get_count(self):
64 return DbSession.query().count()
62 return DbSession.query().count()
65
63
66 def get_expired_count(self, older_than_seconds=None):
64 def get_expired_count(self, older_than_seconds=None):
67 expiry_date = self._seconds_to_date(older_than_seconds)
65 expiry_date = self._seconds_to_date(older_than_seconds)
68 return DbSession.query().filter(DbSession.accessed < expiry_date).count()
66 return DbSession.query().filter(DbSession.accessed < expiry_date).count()
69
67
70 def clean_sessions(self, older_than_seconds=None):
68 def clean_sessions(self, older_than_seconds=None):
71 expiry_date = self._seconds_to_date(older_than_seconds)
69 expiry_date = self._seconds_to_date(older_than_seconds)
72 to_remove = DbSession.query().filter(DbSession.accessed < expiry_date).count()
70 to_remove = DbSession.query().filter(DbSession.accessed < expiry_date).count()
73 DbSession.query().filter(DbSession.accessed < expiry_date).delete()
71 DbSession.query().filter(DbSession.accessed < expiry_date).delete()
74 Session().commit()
72 Session().commit()
75 return to_remove
73 return to_remove
76
74
77
75
78 class FileAuthSessions(BaseAuthSessions):
76 class FileAuthSessions(BaseAuthSessions):
79 SESSION_TYPE = 'file sessions'
77 SESSION_TYPE = 'file sessions'
80
78
81 def _get_sessions_dir(self):
79 def _get_sessions_dir(self):
82 data_dir = self.config.get('beaker.session.data_dir')
80 data_dir = self.config.get('beaker.session.data_dir')
83 return data_dir
81 return data_dir
84
82
85 def _count_on_filesystem(self, path, older_than=0, callback=None):
83 def _count_on_filesystem(self, path, older_than=0, callback=None):
86 value = dict(percent=0, used=0, total=0, items=0, callbacks=0,
84 value = dict(percent=0, used=0, total=0, items=0, callbacks=0,
87 path=path, text='')
85 path=path, text='')
88 items_count = 0
86 items_count = 0
89 used = 0
87 used = 0
90 callbacks = 0
88 callbacks = 0
91 cur_time = time.time()
89 cur_time = time.time()
92 for root, dirs, files in os.walk(path):
90 for root, dirs, files in os.walk(path):
93 for f in files:
91 for f in files:
94 final_path = os.path.join(root, f)
92 final_path = os.path.join(root, f)
95 try:
93 try:
96 mtime = os.stat(final_path).st_mtime
94 mtime = os.stat(final_path).st_mtime
97 if (cur_time - mtime) > older_than:
95 if (cur_time - mtime) > older_than:
98 items_count += 1
96 items_count += 1
99 if callback:
97 if callback:
100 callback_res = callback(final_path)
98 callback_res = callback(final_path)
101 callbacks += 1
99 callbacks += 1
102 else:
100 else:
103 used += os.path.getsize(final_path)
101 used += os.path.getsize(final_path)
104 except OSError:
102 except OSError:
105 pass
103 pass
106 value.update({
104 value.update({
107 'percent': 100,
105 'percent': 100,
108 'used': used,
106 'used': used,
109 'total': used,
107 'total': used,
110 'items': items_count,
108 'items': items_count,
111 'callbacks': callbacks
109 'callbacks': callbacks
112 })
110 })
113 return value
111 return value
114
112
115 def get_count(self):
113 def get_count(self):
116 try:
114 try:
117 sessions_dir = self._get_sessions_dir()
115 sessions_dir = self._get_sessions_dir()
118 items_count = self._count_on_filesystem(sessions_dir)['items']
116 items_count = self._count_on_filesystem(sessions_dir)['items']
119 except Exception:
117 except Exception:
120 items_count = self.NOT_AVAILABLE
118 items_count = self.NOT_AVAILABLE
121 return items_count
119 return items_count
122
120
123 def get_expired_count(self, older_than_seconds=0):
121 def get_expired_count(self, older_than_seconds=0):
124 try:
122 try:
125 sessions_dir = self._get_sessions_dir()
123 sessions_dir = self._get_sessions_dir()
126 items_count = self._count_on_filesystem(
124 items_count = self._count_on_filesystem(
127 sessions_dir, older_than=older_than_seconds)['items']
125 sessions_dir, older_than=older_than_seconds)['items']
128 except Exception:
126 except Exception:
129 items_count = self.NOT_AVAILABLE
127 items_count = self.NOT_AVAILABLE
130 return items_count
128 return items_count
131
129
132 def clean_sessions(self, older_than_seconds=0):
130 def clean_sessions(self, older_than_seconds=0):
133 # find . -mtime +60 -exec rm {} \;
131 # find . -mtime +60 -exec rm {} \;
134
132
135 sessions_dir = self._get_sessions_dir()
133 sessions_dir = self._get_sessions_dir()
136
134
137 def remove_item(path):
135 def remove_item(path):
138 os.remove(path)
136 os.remove(path)
139
137
140 stats = self._count_on_filesystem(
138 stats = self._count_on_filesystem(
141 sessions_dir, older_than=older_than_seconds,
139 sessions_dir, older_than=older_than_seconds,
142 callback=remove_item)
140 callback=remove_item)
143 return stats['callbacks']
141 return stats['callbacks']
144
142
145
143
146 class MemcachedAuthSessions(BaseAuthSessions):
144 class MemcachedAuthSessions(BaseAuthSessions):
147 SESSION_TYPE = 'ext:memcached'
145 SESSION_TYPE = 'ext:memcached'
148 _key_regex = re.compile(r'ITEM (.*_session) \[(.*); (.*)\]')
146 _key_regex = re.compile(r'ITEM (.*_session) \[(.*); (.*)\]')
149
147
150 def _get_client(self):
148 def _get_client(self):
151 import memcache
149 import memcache
152 client = memcache.Client([self.config.get('beaker.session.url')])
150 client = memcache.Client([self.config.get('beaker.session.url')])
153 return client
151 return client
154
152
155 def _get_telnet_client(self, host, port):
153 def _get_telnet_client(self, host, port):
156 import telnetlib
154 import telnetlib
157 client = telnetlib.Telnet(host, port, None)
155 client = telnetlib.Telnet(host, port, None)
158 return client
156 return client
159
157
160 def _run_telnet_cmd(self, client, cmd):
158 def _run_telnet_cmd(self, client, cmd):
161 client.write("%s\n" % cmd)
159 client.write("%s\n" % cmd)
162 return client.read_until('END')
160 return client.read_until('END')
163
161
164 def key_details(self, client, slab_ids, limit=100):
162 def key_details(self, client, slab_ids, limit=100):
165 """ Return a list of tuples containing keys and details """
163 """ Return a list of tuples containing keys and details """
166 cmd = 'stats cachedump %s %s'
164 cmd = 'stats cachedump %s %s'
167 for slab_id in slab_ids:
165 for slab_id in slab_ids:
168 for key in self._key_regex.finditer(
166 yield from self._key_regex.finditer(
169 self._run_telnet_cmd(client, cmd % (slab_id, limit))):
167 self._run_telnet_cmd(client, cmd % (slab_id, limit)))
170 yield key
171
168
172 def get_count(self):
169 def get_count(self):
173 client = self._get_client()
170 client = self._get_client()
174 count = self.NOT_AVAILABLE
171 count = self.NOT_AVAILABLE
175 try:
172 try:
176 slabs = []
173 slabs = []
177 for server, slabs_data in client.get_slabs():
174 for server, slabs_data in client.get_slabs():
178 slabs.extend(list(slabs_data.keys()))
175 slabs.extend(list(slabs_data.keys()))
179
176
180 host, port = client.servers[0].address
177 host, port = client.servers[0].address
181 telnet_client = self._get_telnet_client(host, port)
178 telnet_client = self._get_telnet_client(host, port)
182 keys = self.key_details(telnet_client, slabs)
179 keys = self.key_details(telnet_client, slabs)
183 count = 0
180 count = 0
184 for _k in keys:
181 for _k in keys:
185 count += 1
182 count += 1
186 except Exception:
183 except Exception:
187 return count
184 return count
188
185
189 return count
186 return count
190
187
191 def get_expired_count(self, older_than_seconds=None):
188 def get_expired_count(self, older_than_seconds=None):
192 return self.NOT_AVAILABLE
189 return self.NOT_AVAILABLE
193
190
194 def clean_sessions(self, older_than_seconds=None):
191 def clean_sessions(self, older_than_seconds=None):
195 raise CleanupCommand('Cleanup for this session type not yet available')
192 raise CleanupCommand('Cleanup for this session type not yet available')
196
193
197
194
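One Python 3 caveat this modernization pass leaves in place: telnetlib's write and read_until operate on bytes, while _run_telnet_cmd above still passes str. A hedged byte-safe variant:

def run_telnet_cmd(client, cmd):
    # telnetlib expects bytes on Python 3; encode the command and the
    # END terminator, decode the reply for the regex scan above.
    client.write(f'{cmd}\n'.encode('ascii'))
    return client.read_until(b'END').decode('ascii', errors='replace')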
198 class RedisAuthSessions(BaseAuthSessions):
195 class RedisAuthSessions(BaseAuthSessions):
199 SESSION_TYPE = 'ext:redis'
196 SESSION_TYPE = 'ext:redis'
200
197
201 def _get_client(self):
198 def _get_client(self):
202 import redis
199 import redis
203 args = {
200 args = {
204 'socket_timeout': 60,
201 'socket_timeout': 60,
205 'url': self.config.get('beaker.session.url')
202 'url': self.config.get('beaker.session.url')
206 }
203 }
207
204
208 client = redis.StrictRedis.from_url(**args)
205 client = redis.StrictRedis.from_url(**args)
209 return client
206 return client
210
207
211 def get_count(self):
208 def get_count(self):
212 client = self._get_client()
209 client = self._get_client()
213 return len(client.keys('beaker_cache:*'))
210 return len(client.keys('beaker_cache:*'))
214
211
215 def get_expired_count(self, older_than_seconds=None):
212 def get_expired_count(self, older_than_seconds=None):
216 expiry_date = self._seconds_to_date(older_than_seconds)
213 expiry_date = self._seconds_to_date(older_than_seconds)
217 return self.NOT_AVAILABLE
214 return self.NOT_AVAILABLE
218
215
219 def clean_sessions(self, older_than_seconds=None):
216 def clean_sessions(self, older_than_seconds=None):
220 client = self._get_client()
217 client = self._get_client()
221 expiry_time = time.time() - older_than_seconds
218 expiry_time = time.time() - older_than_seconds
222 deleted_keys = 0
219 deleted_keys = 0
223 for key in client.keys('beaker_cache:*'):
220 for key in client.keys('beaker_cache:*'):
224 data = client.get(key)
221 data = client.get(key)
225 if data:
222 if data:
226 json_data = pickle.loads(data)
223 json_data = pickle.loads(data)
227 try:
224 try:
228 accessed_time = json_data['_accessed_time']
225 accessed_time = json_data['_accessed_time']
229 except KeyError:
226 except KeyError:
230 accessed_time = 0
227 accessed_time = 0
231 if accessed_time < expiry_time:
228 if accessed_time < expiry_time:
232 client.delete(key)
229 client.delete(key)
233 deleted_keys += 1
230 deleted_keys += 1
234
231
235 return deleted_keys
232 return deleted_keys
236
233
237
234
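Two observations on RedisAuthSessions above: get_expired_count computes expiry_date only to discard it, and clean_sessions calls client.keys(), which blocks Redis on large keyspaces. A hedged sketch of the same sweep over the non-blocking SCAN iterator:

import pickle
import time

def clean_redis_sessions(client, older_than_seconds):
    expiry_time = time.time() - older_than_seconds
    deleted = 0
    # SCAN pages through the keyspace instead of one blocking KEYS call
    for key in client.scan_iter(match='beaker_cache:*'):
        data = client.get(key)
        if not data:
            continue
        session = pickle.loads(data)
        if session.get('_accessed_time', 0) < expiry_time:
            client.delete(key)
            deleted += 1
    return deleted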
238 class MemoryAuthSessions(BaseAuthSessions):
235 class MemoryAuthSessions(BaseAuthSessions):
239 SESSION_TYPE = 'memory'
236 SESSION_TYPE = 'memory'
240
237
241 def get_count(self):
238 def get_count(self):
242 return self.NOT_AVAILABLE
239 return self.NOT_AVAILABLE
243
240
244 def get_expired_count(self, older_than_seconds=None):
241 def get_expired_count(self, older_than_seconds=None):
245 return self.NOT_AVAILABLE
242 return self.NOT_AVAILABLE
246
243
247 def clean_sessions(self, older_than_seconds=None):
244 def clean_sessions(self, older_than_seconds=None):
248 raise CleanupCommand('Cleanup for this session type not yet available')
245 raise CleanupCommand('Cleanup for this session type not yet available')
249
246
250
247
251 def get_session_handler(session_type):
248 def get_session_handler(session_type):
252 types = {
249 types = {
253 'file': FileAuthSessions,
250 'file': FileAuthSessions,
254 'ext:memcached': MemcachedAuthSessions,
251 'ext:memcached': MemcachedAuthSessions,
255 'ext:redis': RedisAuthSessions,
252 'ext:redis': RedisAuthSessions,
256 'ext:database': DbAuthSessions,
253 'ext:database': DbAuthSessions,
257 'memory': MemoryAuthSessions
254 'memory': MemoryAuthSessions
258 }
255 }
259
256
260 try:
257 try:
261 return types[session_type]
258 return types[session_type]
262 except KeyError:
259 except KeyError:
263 raise ValueError(
260 raise ValueError(
264 'This type {} is not supported'.format(session_type))
261 f'This type {session_type} is not supported')
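Usage sketch for the dispatch above (assumes the handler classes share the
BaseAuthSessions(config) constructor and a beaker-style config dict)::

    session_type = config.get('beaker.session.type', 'file')
    handler_cls = get_session_handler(session_type)  # e.g. FileAuthSessions
    handler = handler_cls(config)
    removed = handler.clean_sessions(older_than_seconds=30 * 24 * 60 * 60)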
@@ -1,809 +1,807 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 """
19 """
21 Utilities library for RhodeCode
20 Utilities library for RhodeCode
22 """
21 """
23
22
24 import datetime
23 import datetime
25 import decorator
24 import decorator
26 import logging
25 import logging
27 import os
26 import os
28 import re
27 import re
29 import sys
28 import sys
30 import shutil
29 import shutil
31 import socket
30 import socket
32 import tempfile
31 import tempfile
33 import traceback
32 import traceback
34 import tarfile
33 import tarfile
35 import warnings
34 import warnings
36 from os.path import join as jn
35 from os.path import join as jn
37
36
38 import paste
37 import paste
39 import pkg_resources
38 import pkg_resources
40 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
39 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
41
40
42 from mako import exceptions
41 from mako import exceptions
43
42
44 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
43 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
45 from rhodecode.lib.str_utils import safe_bytes, safe_str
44 from rhodecode.lib.str_utils import safe_bytes, safe_str
46 from rhodecode.lib.vcs.backends.base import Config
45 from rhodecode.lib.vcs.backends.base import Config
47 from rhodecode.lib.vcs.exceptions import VCSError
46 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
47 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 from rhodecode.lib.ext_json import sjson as json
48 from rhodecode.lib.ext_json import sjson as json
50 from rhodecode.model import meta
49 from rhodecode.model import meta
51 from rhodecode.model.db import (
50 from rhodecode.model.db import (
52 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
51 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
53 from rhodecode.model.meta import Session
52 from rhodecode.model.meta import Session
54
53
55
54
56 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
57
56
58 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
57 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
59
58
60 # String which contains characters that are not allowed in slug names for
59 # String which contains characters that are not allowed in slug names for
61 # repositories or repository groups. It is properly escaped to use it in
60 # repositories or repository groups. It is properly escaped to use it in
62 # regular expressions.
61 # regular expressions.
63 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
62 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
64
63
65 # Regex that matches forbidden characters in repo/group slugs.
64 # Regex that matches forbidden characters in repo/group slugs.
66 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
65 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
67
66
68 # Regex that matches allowed characters in repo/group slugs.
67 # Regex that matches allowed characters in repo/group slugs.
69 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
68 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
70
69
71 # Regex that matches whole repo/group slugs.
70 # Regex that matches whole repo/group slugs.
72 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
71 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
73
72
74 _license_cache = None
73 _license_cache = None
75
74
76
75
77 def repo_name_slug(value):
76 def repo_name_slug(value):
78 """
77 """
79 Return slug of name of repository
78 Return slug of name of repository
80 This function is called on each creation/modification
79 This function is called on each creation/modification
81 of repository to prevent bad names in repo
80 of repository to prevent bad names in repo
82 """
81 """
83
82
84 replacement_char = '-'
83 replacement_char = '-'
85
84
86 slug = strip_tags(value)
85 slug = strip_tags(value)
87 slug = convert_accented_entities(slug)
86 slug = convert_accented_entities(slug)
88 slug = convert_misc_entities(slug)
87 slug = convert_misc_entities(slug)
89
88
90 slug = SLUG_BAD_CHAR_RE.sub('', slug)
89 slug = SLUG_BAD_CHAR_RE.sub('', slug)
91 slug = re.sub(r'[\s]+', '-', slug)
90 slug = re.sub(r'[\s]+', '-', slug)
92 slug = collapse(slug, replacement_char)
91 slug = collapse(slug, replacement_char)
93
92
94 return slug
93 return slug
95
94
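A quick illustration of the slug pipeline above; expected outputs follow from
the bad-char removal and whitespace collapsing rules::

    repo_name_slug('My Repo!')   # -> 'My-Repo' ('!' stripped, space -> '-')
    repo_name_slug('a   b c?')   # -> 'a-b-c'   (whitespace runs collapsed)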
96
95
97 #==============================================================================
96 #==============================================================================
98 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
97 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
99 #==============================================================================
98 #==============================================================================
100 def get_repo_slug(request):
99 def get_repo_slug(request):
101 _repo = ''
100 _repo = ''
102
101
103 if hasattr(request, 'db_repo_name'):
102 if hasattr(request, 'db_repo_name'):
104 # if our request has a db reference set, use it for the name; this
103 # if our request has a db reference set, use it for the name; this
105 # translates the example.com/_<id> into proper repo names
104 # translates the example.com/_<id> into proper repo names
106 _repo = request.db_repo_name
105 _repo = request.db_repo_name
107 elif getattr(request, 'matchdict', None):
106 elif getattr(request, 'matchdict', None):
108 # pyramid
107 # pyramid
109 _repo = request.matchdict.get('repo_name')
108 _repo = request.matchdict.get('repo_name')
110
109
111 if _repo:
110 if _repo:
112 _repo = _repo.rstrip('/')
111 _repo = _repo.rstrip('/')
113 return _repo
112 return _repo
114
113
115
114
116 def get_repo_group_slug(request):
115 def get_repo_group_slug(request):
117 _group = ''
116 _group = ''
118 if hasattr(request, 'db_repo_group'):
117 if hasattr(request, 'db_repo_group'):
119 # if our request has a db reference set, use it for the name; this
118 # if our request has a db reference set, use it for the name; this
120 # translates the example.com/_<id> into proper repo group names
119 # translates the example.com/_<id> into proper repo group names
121 _group = request.db_repo_group.group_name
120 _group = request.db_repo_group.group_name
122 elif getattr(request, 'matchdict', None):
121 elif getattr(request, 'matchdict', None):
123 # pyramid
122 # pyramid
124 _group = request.matchdict.get('repo_group_name')
123 _group = request.matchdict.get('repo_group_name')
125
124
126 if _group:
125 if _group:
127 _group = _group.rstrip('/')
126 _group = _group.rstrip('/')
128 return _group
127 return _group
129
128
130
129
131 def get_user_group_slug(request):
130 def get_user_group_slug(request):
132 _user_group = ''
131 _user_group = ''
133
132
134 if hasattr(request, 'db_user_group'):
133 if hasattr(request, 'db_user_group'):
135 _user_group = request.db_user_group.users_group_name
134 _user_group = request.db_user_group.users_group_name
136 elif getattr(request, 'matchdict', None):
135 elif getattr(request, 'matchdict', None):
137 # pyramid
136 # pyramid
138 _user_group = request.matchdict.get('user_group_id')
137 _user_group = request.matchdict.get('user_group_id')
139 _user_group_name = request.matchdict.get('user_group_name')
138 _user_group_name = request.matchdict.get('user_group_name')
140 try:
139 try:
141 if _user_group:
140 if _user_group:
142 _user_group = UserGroup.get(_user_group)
141 _user_group = UserGroup.get(_user_group)
143 elif _user_group_name:
142 elif _user_group_name:
144 _user_group = UserGroup.get_by_group_name(_user_group_name)
143 _user_group = UserGroup.get_by_group_name(_user_group_name)
145
144
146 if _user_group:
145 if _user_group:
147 _user_group = _user_group.users_group_name
146 _user_group = _user_group.users_group_name
148 except Exception:
147 except Exception:
149 log.exception('Failed to get user group by id and name')
148 log.exception('Failed to get user group by id and name')
150 # catch all failures here
149 # catch all failures here
151 return None
150 return None
152
151
153 return _user_group
152 return _user_group
154
153
155
154
156 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
155 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
157 """
156 """
158 Scans the given path for repos and yields (name, (type, path)) tuples
157 Scans the given path for repos and yields (name, (type, path)) tuples
159
158
160 :param path: path to scan for repositories
159 :param path: path to scan for repositories
161 :param recursive: recursive search and return names with subdirs in front
160 :param recursive: recursive search and return names with subdirs in front
162 """
161 """
163
162
164 # remove ending slash for better results
163 # remove ending slash for better results
165 path = path.rstrip(os.sep)
164 path = path.rstrip(os.sep)
166 log.debug('now scanning in %s location recursive:%s...', path, recursive)
165 log.debug('now scanning in %s location recursive:%s...', path, recursive)
167
166
168 def _get_repos(p):
167 def _get_repos(p):
169 dirpaths = get_dirpaths(p)
168 dirpaths = get_dirpaths(p)
170 if not _is_dir_writable(p):
169 if not _is_dir_writable(p):
171 log.warning('repo path without write access: %s', p)
170 log.warning('repo path without write access: %s', p)
172
171
173 for dirpath in dirpaths:
172 for dirpath in dirpaths:
174 if os.path.isfile(os.path.join(p, dirpath)):
173 if os.path.isfile(os.path.join(p, dirpath)):
175 continue
174 continue
176 cur_path = os.path.join(p, dirpath)
175 cur_path = os.path.join(p, dirpath)
177
176
178 # skip removed repos
177 # skip removed repos
179 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
178 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
180 continue
179 continue
181
180
182 # skip .<something> dirs
181 # skip .<something> dirs
183 if dirpath.startswith('.'):
182 if dirpath.startswith('.'):
184 continue
183 continue
185
184
186 try:
185 try:
187 scm_info = get_scm(cur_path)
186 scm_info = get_scm(cur_path)
188 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
187 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
189 except VCSError:
188 except VCSError:
190 if not recursive:
189 if not recursive:
191 continue
190 continue
192 # check if this dir contains other repos for recursive scan
191 # check if this dir contains other repos for recursive scan
193 rec_path = os.path.join(p, dirpath)
192 rec_path = os.path.join(p, dirpath)
194 if os.path.isdir(rec_path):
193 if os.path.isdir(rec_path):
195 for inner_scm in _get_repos(rec_path):
194 yield from _get_repos(rec_path)
196 yield inner_scm
197
195
198 return _get_repos(path)
196 return _get_repos(path)
199
197
200
198
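Usage sketch for the scanner above (hypothetical path); callers typically
collect the generated (name, (type, path)) pairs into a dict::

    repos = dict(get_filesystem_repos('/srv/repos', recursive=True))
    # e.g. {'group/my-repo': ('git', '/srv/repos/group/my-repo'), ...}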
201 def get_dirpaths(p: str) -> list:
199 def get_dirpaths(p: str) -> list:
202 try:
200 try:
203 # OS-independent way of checking if we have at least read-only
201 # OS-independent way of checking if we have at least read-only
204 # access or not.
202 # access or not.
205 dirpaths = os.listdir(p)
203 dirpaths = os.listdir(p)
206 except OSError:
204 except OSError:
207 log.warning('ignoring repo path without read access: %s', p)
205 log.warning('ignoring repo path without read access: %s', p)
208 return []
206 return []
209
207
210 # os.listdir has a tweak: If a unicode is passed into it, then it tries to
208 # os.listdir has a tweak: If a unicode is passed into it, then it tries to
211 # decode paths and suddenly returns unicode objects itself. The items it
209 # decode paths and suddenly returns unicode objects itself. The items it
212 # cannot decode are returned as strings and cause issues.
210 # cannot decode are returned as strings and cause issues.
213 #
211 #
214 # Those paths are ignored here until a solid solution for path handling has
212 # Those paths are ignored here until a solid solution for path handling has
215 # been built.
213 # been built.
216 expected_type = type(p)
214 expected_type = type(p)
217
215
218 def _has_correct_type(item):
216 def _has_correct_type(item):
219 if type(item) is not expected_type:
217 if type(item) is not expected_type:
220 log.error(
218 log.error(
221 "Ignoring path %s since it cannot be decoded into str.",
219 "Ignoring path %s since it cannot be decoded into str.",
222 # Using "repr" to make sure that we see the byte value in case
220 # Using "repr" to make sure that we see the byte value in case
223 # it ends up in a support request.
221 # it ends up in a support request.
224 repr(item))
222 repr(item))
225 return False
223 return False
226 return True
224 return True
227
225
228 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
229
227
230 return dirpaths
228 return dirpaths
231
229
232
230
233 def _is_dir_writable(path):
231 def _is_dir_writable(path):
234 """
232 """
235 Probe if `path` is writable.
233 Probe if `path` is writable.
236
234
237 Due to trouble on Cygwin / Windows, this is actually probing if it is
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
238 possible to create a file inside of `path`, stat does not produce reliable
236 possible to create a file inside of `path`, stat does not produce reliable
239 results in this case.
237 results in this case.
240 """
238 """
241 try:
239 try:
242 with tempfile.TemporaryFile(dir=path):
240 with tempfile.TemporaryFile(dir=path):
243 pass
241 pass
244 except OSError:
242 except OSError:
245 return False
243 return False
246 return True
244 return True
247
245
248
246
249 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
247 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
250 """
248 """
251 Returns True if the given path is a valid repository, False otherwise.
249 Returns True if the given path is a valid repository, False otherwise.
252 If the expect_scm param is given, also compare if the detected scm is
250 If the expect_scm param is given, also compare if the detected scm is
253 the same as expected. If explicit_scm is given, don't try to
251 the same as expected. If explicit_scm is given, don't try to
254 detect the scm, just use the given one to check if the repo is valid.
252 detect the scm, just use the given one to check if the repo is valid.
255
253
256 :param repo_name:
254 :param repo_name:
257 :param base_path:
255 :param base_path:
258 :param expect_scm:
256 :param expect_scm:
259 :param explicit_scm:
257 :param explicit_scm:
260 :param config:
258 :param config:
261
259
262 :return True: if given path is a valid repository
260 :return True: if given path is a valid repository
263 """
261 """
264 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
262 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
265 log.debug('Checking if `%s` is a valid path for repository. '
263 log.debug('Checking if `%s` is a valid path for repository. '
266 'Explicit type: %s', repo_name, explicit_scm)
264 'Explicit type: %s', repo_name, explicit_scm)
267
265
268 try:
266 try:
269 if explicit_scm:
267 if explicit_scm:
270 detected_scms = [get_scm_backend(explicit_scm)(
268 detected_scms = [get_scm_backend(explicit_scm)(
271 full_path, config=config).alias]
269 full_path, config=config).alias]
272 else:
270 else:
273 detected_scms = get_scm(full_path)
271 detected_scms = get_scm(full_path)
274
272
275 if expect_scm:
273 if expect_scm:
276 return detected_scms[0] == expect_scm
274 return detected_scms[0] == expect_scm
277 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
275 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
278 return True
276 return True
279 except VCSError:
277 except VCSError:
280 log.debug('path: %s is not a valid repo !', full_path)
278 log.debug('path: %s is not a valid repo !', full_path)
281 return False
279 return False
282
280
283
281
284 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
282 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
285 """
283 """
286 Returns True if given path is a repository group, False otherwise
284 Returns True if given path is a repository group, False otherwise
287
285
288 :param repo_group_name:
286 :param repo_group_name:
289 :param base_path:
287 :param base_path:
290 """
288 """
291 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
289 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
292 log.debug('Checking if `%s` is a valid path for repository group',
290 log.debug('Checking if `%s` is a valid path for repository group',
293 repo_group_name)
291 repo_group_name)
294
292
295 # check if it's not a repo
293 # check if it's not a repo
296 if is_valid_repo(repo_group_name, base_path):
294 if is_valid_repo(repo_group_name, base_path):
297 log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
295 log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
298 return False
296 return False
299
297
300 try:
298 try:
301 # we need to check bare git repos at higher level
299 # we need to check bare git repos at higher level
302 # since we might match branches/hooks/info/objects or possible
300 # since we might match branches/hooks/info/objects or possible
303 # other things inside bare git repo
301 # other things inside bare git repo
304 maybe_repo = os.path.dirname(full_path)
302 maybe_repo = os.path.dirname(full_path)
305 if maybe_repo == base_path:
303 if maybe_repo == base_path:
306 # skip root level repo check, we know root location CANNOT BE a repo group
304 # skip root level repo check, we know root location CANNOT BE a repo group
307 return False
305 return False
308
306
309 scm_ = get_scm(maybe_repo)
307 scm_ = get_scm(maybe_repo)
310 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
308 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
311 return False
309 return False
312 except VCSError:
310 except VCSError:
313 pass
311 pass
314
312
315 # check if it's a valid path
313 # check if it's a valid path
316 if skip_path_check or os.path.isdir(full_path):
314 if skip_path_check or os.path.isdir(full_path):
317 log.debug('path: %s is a valid repo group !', full_path)
315 log.debug('path: %s is a valid repo group !', full_path)
318 return True
316 return True
319
317
320 log.debug('path: %s is not a valid repo group !', full_path)
318 log.debug('path: %s is not a valid repo group !', full_path)
321 return False
319 return False
322
320
323
321
324 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
322 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
325 while True:
323 while True:
326 ok = input(prompt)  # plain input(); wrapping it in eval() was an unsafe 2to3 leftover
324 ok = input(prompt)  # plain input(); wrapping it in eval() was an unsafe 2to3 leftover
327 if ok.lower() in ('y', 'ye', 'yes'):
325 if ok.lower() in ('y', 'ye', 'yes'):
328 return True
326 return True
329 if ok.lower() in ('n', 'no', 'nop', 'nope'):
327 if ok.lower() in ('n', 'no', 'nop', 'nope'):
330 return False
328 return False
331 retries = retries - 1
329 retries = retries - 1
332 if retries < 0:
330 if retries < 0:
333 raise IOError
331 raise OSError
334 print(complaint)
332 print(complaint)
335
333
336 # propagated from mercurial documentation
334 # propagated from mercurial documentation
337 ui_sections = [
335 ui_sections = [
338 'alias', 'auth',
336 'alias', 'auth',
339 'decode/encode', 'defaults',
337 'decode/encode', 'defaults',
340 'diff', 'email',
338 'diff', 'email',
341 'extensions', 'format',
339 'extensions', 'format',
342 'merge-patterns', 'merge-tools',
340 'merge-patterns', 'merge-tools',
343 'hooks', 'http_proxy',
341 'hooks', 'http_proxy',
344 'smtp', 'patch',
342 'smtp', 'patch',
345 'paths', 'profiling',
343 'paths', 'profiling',
346 'server', 'trusted',
344 'server', 'trusted',
347 'ui', 'web', ]
345 'ui', 'web', ]
348
346
349
347
350 def config_data_from_db(clear_session=True, repo=None):
348 def config_data_from_db(clear_session=True, repo=None):
351 """
349 """
352 Read the configuration data from the database and return configuration
350 Read the configuration data from the database and return configuration
353 tuples.
351 tuples.
354 """
352 """
355 from rhodecode.model.settings import VcsSettingsModel
353 from rhodecode.model.settings import VcsSettingsModel
356
354
357 config = []
355 config = []
358
356
359 sa = meta.Session()
357 sa = meta.Session()
360 settings_model = VcsSettingsModel(repo=repo, sa=sa)
358 settings_model = VcsSettingsModel(repo=repo, sa=sa)
361
359
362 ui_settings = settings_model.get_ui_settings()
360 ui_settings = settings_model.get_ui_settings()
363
361
364 ui_data = []
362 ui_data = []
365 for setting in ui_settings:
363 for setting in ui_settings:
366 if setting.active:
364 if setting.active:
367 ui_data.append((setting.section, setting.key, setting.value))
365 ui_data.append((setting.section, setting.key, setting.value))
368 config.append((
366 config.append((
369 safe_str(setting.section), safe_str(setting.key),
367 safe_str(setting.section), safe_str(setting.key),
370 safe_str(setting.value)))
368 safe_str(setting.value)))
371 if setting.key == 'push_ssl':
369 if setting.key == 'push_ssl':
372 # force set push_ssl requirement to False, rhodecode
370 # force set push_ssl requirement to False, rhodecode
373 # handles that
371 # handles that
374 config.append((
372 config.append((
375 safe_str(setting.section), safe_str(setting.key), False))
373 safe_str(setting.section), safe_str(setting.key), False))
376 log.debug(
374 log.debug(
377 'settings ui from db@repo[%s]: %s',
375 'settings ui from db@repo[%s]: %s',
378 repo,
376 repo,
379 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
377 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
380 if clear_session:
378 if clear_session:
381 meta.Session.remove()
379 meta.Session.remove()
382
380
383 # TODO: mikhail: probably it makes no sense to re-read hooks information.
381 # TODO: mikhail: probably it makes no sense to re-read hooks information.
384 # It's already there and activated/deactivated
382 # It's already there and activated/deactivated
385 skip_entries = []
383 skip_entries = []
386 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
384 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
387 if 'pull' not in enabled_hook_classes:
385 if 'pull' not in enabled_hook_classes:
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
389 if 'push' not in enabled_hook_classes:
387 if 'push' not in enabled_hook_classes:
390 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
388 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
391 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
389 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
392 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
390 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
393
391
394 config = [entry for entry in config if entry[:2] not in skip_entries]
392 config = [entry for entry in config if entry[:2] not in skip_entries]
395
393
396 return config
394 return config
397
395
398
396
399 def make_db_config(clear_session=True, repo=None):
397 def make_db_config(clear_session=True, repo=None):
400 """
398 """
401 Create a :class:`Config` instance based on the values in the database.
399 Create a :class:`Config` instance based on the values in the database.
402 """
400 """
403 config = Config()
401 config = Config()
404 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
402 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
405 for section, option, value in config_data:
403 for section, option, value in config_data:
406 config.set(section, option, value)
404 config.set(section, option, value)
407 return config
405 return config
408
406
409
407
410 def get_enabled_hook_classes(ui_settings):
408 def get_enabled_hook_classes(ui_settings):
411 """
409 """
412 Return the enabled hook classes.
410 Return the enabled hook classes.
413
411
414 :param ui_settings: List of ui_settings as returned
412 :param ui_settings: List of ui_settings as returned
415 by :meth:`VcsSettingsModel.get_ui_settings`
413 by :meth:`VcsSettingsModel.get_ui_settings`
416
414
417 :return: a list with the enabled hook classes. The order is not guaranteed.
415 :return: a list with the enabled hook classes. The order is not guaranteed.
418 :rtype: list
416 :rtype: list
419 """
417 """
420 enabled_hooks = []
418 enabled_hooks = []
421 active_hook_keys = [
419 active_hook_keys = [
422 key for section, key, value, active in ui_settings
420 key for section, key, value, active in ui_settings
423 if section == 'hooks' and active]
421 if section == 'hooks' and active]
424
422
425 hook_names = {
423 hook_names = {
426 RhodeCodeUi.HOOK_PUSH: 'push',
424 RhodeCodeUi.HOOK_PUSH: 'push',
427 RhodeCodeUi.HOOK_PULL: 'pull',
425 RhodeCodeUi.HOOK_PULL: 'pull',
428 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
426 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
429 }
427 }
430
428
431 for key in active_hook_keys:
429 for key in active_hook_keys:
432 hook = hook_names.get(key)
430 hook = hook_names.get(key)
433 if hook:
431 if hook:
434 enabled_hooks.append(hook)
432 enabled_hooks.append(hook)
435
433
436 return enabled_hooks
434 return enabled_hooks
437
435
438
436
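Illustrative input/output for the hook mapping above, with ui_settings rows
shown as the (section, key, value, active) tuples the function unpacks::

    ui_settings = [
        ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
        ('hooks', RhodeCodeUi.HOOK_PULL, 'python:...', False),
    ]
    get_enabled_hook_classes(ui_settings)  # -> ['push']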
439 def set_rhodecode_config(config):
437 def set_rhodecode_config(config):
440 """
438 """
441 Updates pyramid config with new settings from database
439 Updates pyramid config with new settings from database
442
440
443 :param config:
441 :param config:
444 """
442 """
445 from rhodecode.model.settings import SettingsModel
443 from rhodecode.model.settings import SettingsModel
446 app_settings = SettingsModel().get_all_settings()
444 app_settings = SettingsModel().get_all_settings()
447
445
448 for k, v in list(app_settings.items()):
446 for k, v in list(app_settings.items()):
449 config[k] = v
447 config[k] = v
450
448
451
449
452 def get_rhodecode_realm():
450 def get_rhodecode_realm():
453 """
451 """
454 Return the rhodecode realm from database.
452 Return the rhodecode realm from database.
455 """
453 """
456 from rhodecode.model.settings import SettingsModel
454 from rhodecode.model.settings import SettingsModel
457 realm = SettingsModel().get_setting_by_name('realm')
455 realm = SettingsModel().get_setting_by_name('realm')
458 return safe_str(realm.app_settings_value)
456 return safe_str(realm.app_settings_value)
459
457
460
458
461 def get_rhodecode_base_path():
459 def get_rhodecode_base_path():
462 """
460 """
463 Returns the base path. The base path is the filesystem path which points
461 Returns the base path. The base path is the filesystem path which points
464 to the repository store.
462 to the repository store.
465 """
463 """
466
464
467 import rhodecode
465 import rhodecode
468 return rhodecode.CONFIG['default_base_path']
466 return rhodecode.CONFIG['default_base_path']
469
467
470
468
471 def map_groups(path):
469 def map_groups(path):
472 """
470 """
473 Given a full path to a repository, create all nested groups that this
471 Given a full path to a repository, create all nested groups that this
474 repo is inside. This function creates parent-child relationships between
472 repo is inside. This function creates parent-child relationships between
475 groups and creates default perms for all new groups.
473 groups and creates default perms for all new groups.
476
474
477 :param path: full path to repository
475 :param path: full path to repository
478 """
476 """
479 from rhodecode.model.repo_group import RepoGroupModel
477 from rhodecode.model.repo_group import RepoGroupModel
480 sa = meta.Session()
478 sa = meta.Session()
481 groups = path.split(Repository.NAME_SEP)
479 groups = path.split(Repository.NAME_SEP)
482 parent = None
480 parent = None
483 group = None
481 group = None
484
482
485 # last element is repo in nested groups structure
483 # last element is repo in nested groups structure
486 groups = groups[:-1]
484 groups = groups[:-1]
487 rgm = RepoGroupModel(sa)
485 rgm = RepoGroupModel(sa)
488 owner = User.get_first_super_admin()
486 owner = User.get_first_super_admin()
489 for lvl, group_name in enumerate(groups):
487 for lvl, group_name in enumerate(groups):
490 group_name = '/'.join(groups[:lvl] + [group_name])
488 group_name = '/'.join(groups[:lvl] + [group_name])
491 group = RepoGroup.get_by_group_name(group_name)
489 group = RepoGroup.get_by_group_name(group_name)
492 desc = f'{group_name} group'
490 desc = f'{group_name} group'
493
491
494 # skip folders that are now removed repos
492 # skip folders that are now removed repos
495 if REMOVED_REPO_PAT.match(group_name):
493 if REMOVED_REPO_PAT.match(group_name):
496 break
494 break
497
495
498 if group is None:
496 if group is None:
499 log.debug('creating group level: %s group_name: %s',
497 log.debug('creating group level: %s group_name: %s',
500 lvl, group_name)
498 lvl, group_name)
501 group = RepoGroup(group_name, parent)
499 group = RepoGroup(group_name, parent)
502 group.group_description = desc
500 group.group_description = desc
503 group.user = owner
501 group.user = owner
504 sa.add(group)
502 sa.add(group)
505 perm_obj = rgm._create_default_perms(group)
503 perm_obj = rgm._create_default_perms(group)
506 sa.add(perm_obj)
504 sa.add(perm_obj)
507 sa.flush()
505 sa.flush()
508
506
509 parent = group
507 parent = group
510 return group
508 return group
511
509
512
510
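For example (sketch; assumes a configured db session), mapping a nested repo
path creates each missing parent group with default perms::

    group = map_groups('projects/backend/my-repo')
    # ensures RepoGroups 'projects' and 'projects/backend' exist,
    # returns the innermost group ('projects/backend')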
513 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
511 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
514 """
512 """
515 Maps all repos given in initial_repo_list; non-existing repositories
513 Maps all repos given in initial_repo_list; non-existing repositories
516 are created. If remove_obsolete is True it also checks for db entries
514 are created. If remove_obsolete is True it also checks for db entries
517 that are not in initial_repo_list and removes them.
515 that are not in initial_repo_list and removes them.
518
516
519 :param initial_repo_list: list of repositories found by scanning methods
517 :param initial_repo_list: list of repositories found by scanning methods
520 :param remove_obsolete: check for obsolete entries in database
518 :param remove_obsolete: check for obsolete entries in database
521 """
519 """
522 from rhodecode.model.repo import RepoModel
520 from rhodecode.model.repo import RepoModel
523 from rhodecode.model.repo_group import RepoGroupModel
521 from rhodecode.model.repo_group import RepoGroupModel
524 from rhodecode.model.settings import SettingsModel
522 from rhodecode.model.settings import SettingsModel
525
523
526 sa = meta.Session()
524 sa = meta.Session()
527 repo_model = RepoModel()
525 repo_model = RepoModel()
528 user = User.get_first_super_admin()
526 user = User.get_first_super_admin()
529 added = []
527 added = []
530
528
531 # creation defaults
529 # creation defaults
532 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
530 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
533 enable_statistics = defs.get('repo_enable_statistics')
531 enable_statistics = defs.get('repo_enable_statistics')
534 enable_locking = defs.get('repo_enable_locking')
532 enable_locking = defs.get('repo_enable_locking')
535 enable_downloads = defs.get('repo_enable_downloads')
533 enable_downloads = defs.get('repo_enable_downloads')
536 private = defs.get('repo_private')
534 private = defs.get('repo_private')
537
535
538 for name, repo in list(initial_repo_list.items()):
536 for name, repo in list(initial_repo_list.items()):
539 group = map_groups(name)
537 group = map_groups(name)
540 str_name = safe_str(name)
538 str_name = safe_str(name)
541 db_repo = repo_model.get_by_repo_name(str_name)
539 db_repo = repo_model.get_by_repo_name(str_name)
542 # found repo that is on filesystem not in RhodeCode database
540 # found repo that is on filesystem not in RhodeCode database
543 if not db_repo:
541 if not db_repo:
544 log.info('repository %s not found, creating now', name)
542 log.info('repository %s not found, creating now', name)
545 added.append(name)
543 added.append(name)
546 desc = (repo.description
544 desc = (repo.description
547 if repo.description != 'unknown'
545 if repo.description != 'unknown'
548 else f'{name} repository')
546 else f'{name} repository')
549
547
550 db_repo = repo_model._create_repo(
548 db_repo = repo_model._create_repo(
551 repo_name=name,
549 repo_name=name,
552 repo_type=repo.alias,
550 repo_type=repo.alias,
553 description=desc,
551 description=desc,
554 repo_group=getattr(group, 'group_id', None),
552 repo_group=getattr(group, 'group_id', None),
555 owner=user,
553 owner=user,
556 enable_locking=enable_locking,
554 enable_locking=enable_locking,
557 enable_downloads=enable_downloads,
555 enable_downloads=enable_downloads,
558 enable_statistics=enable_statistics,
556 enable_statistics=enable_statistics,
559 private=private,
557 private=private,
560 state=Repository.STATE_CREATED
558 state=Repository.STATE_CREATED
561 )
559 )
562 sa.commit()
560 sa.commit()
563 # we added that repo just now; make sure we update the server info
561 # we added that repo just now; make sure we update the server info
564 if db_repo.repo_type == 'git':
562 if db_repo.repo_type == 'git':
565 git_repo = db_repo.scm_instance()
563 git_repo = db_repo.scm_instance()
566 # update repository server-info
564 # update repository server-info
567 log.debug('Running update server info')
565 log.debug('Running update server info')
568 git_repo._update_server_info()
566 git_repo._update_server_info()
569
567
570 db_repo.update_commit_cache()
568 db_repo.update_commit_cache()
571
569
572 config = db_repo._config
570 config = db_repo._config
573 config.set('extensions', 'largefiles', '')
571 config.set('extensions', 'largefiles', '')
574 repo = db_repo.scm_instance(config=config)
572 repo = db_repo.scm_instance(config=config)
575 repo.install_hooks()
573 repo.install_hooks()
576
574
577 removed = []
575 removed = []
578 if remove_obsolete:
576 if remove_obsolete:
579 # remove from database those repositories that are not in the filesystem
577 # remove from database those repositories that are not in the filesystem
580 for repo in sa.query(Repository).all():
578 for repo in sa.query(Repository).all():
581 if repo.repo_name not in list(initial_repo_list.keys()):
579 if repo.repo_name not in list(initial_repo_list.keys()):
582 log.debug("Removing non-existing repository found in db `%s`",
580 log.debug("Removing non-existing repository found in db `%s`",
583 repo.repo_name)
581 repo.repo_name)
584 try:
582 try:
585 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
583 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
586 sa.commit()
584 sa.commit()
587 removed.append(repo.repo_name)
585 removed.append(repo.repo_name)
588 except Exception:
586 except Exception:
589 # don't hold further removals on error
587 # don't hold further removals on error
590 log.error(traceback.format_exc())
588 log.error(traceback.format_exc())
591 sa.rollback()
589 sa.rollback()
592
590
593 def splitter(full_repo_name):
591 def splitter(full_repo_name):
594 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
592 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
595 gr_name = None
593 gr_name = None
596 if len(_parts) == 2:
594 if len(_parts) == 2:
597 gr_name = _parts[0]
595 gr_name = _parts[0]
598 return gr_name
596 return gr_name
599
597
600 initial_repo_group_list = [splitter(x) for x in
598 initial_repo_group_list = [splitter(x) for x in
601 list(initial_repo_list.keys()) if splitter(x)]
599 list(initial_repo_list.keys()) if splitter(x)]
602
600
603 # remove from database those repository groups that are not in the
601 # remove from database those repository groups that are not in the
604 # filesystem due to parent child relationships we need to delete them
602 # filesystem due to parent child relationships we need to delete them
605 # in a specific order of most nested first
603 # in a specific order of most nested first
606 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
604 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
607 def nested_sort(gr):
605 def nested_sort(gr):
608 return len(gr.split('/'))
606 return len(gr.split('/'))
609 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
607 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
610 if group_name not in initial_repo_group_list:
608 if group_name not in initial_repo_group_list:
611 repo_group = RepoGroup.get_by_group_name(group_name)
609 repo_group = RepoGroup.get_by_group_name(group_name)
612 if (repo_group.children.all() or
610 if (repo_group.children.all() or
613 not RepoGroupModel().check_exist_filesystem(
611 not RepoGroupModel().check_exist_filesystem(
614 group_name=group_name, exc_on_failure=False)):
612 group_name=group_name, exc_on_failure=False)):
615 continue
613 continue
616
614
617 log.info(
615 log.info(
618 'Removing non-existing repository group found in db `%s`',
616 'Removing non-existing repository group found in db `%s`',
619 group_name)
617 group_name)
620 try:
618 try:
621 RepoGroupModel(sa).delete(group_name, fs_remove=False)
619 RepoGroupModel(sa).delete(group_name, fs_remove=False)
622 sa.commit()
620 sa.commit()
623 removed.append(group_name)
621 removed.append(group_name)
624 except Exception:
622 except Exception:
625 # don't hold further removals on error
623 # don't hold further removals on error
626 log.exception(
624 log.exception(
627 'Unable to remove repository group `%s`',
625 'Unable to remove repository group `%s`',
628 group_name)
626 group_name)
629 sa.rollback()
627 sa.rollback()
630 raise
628 raise
631
629
632 return added, removed
630 return added, removed
633
631
634
632
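Typical invocation (sketch; ScmModel.repo_scan() is assumed here to supply
the {name: repo} mapping this function expects)::

    from rhodecode.model.scm import ScmModel
    added, removed = repo2db_mapper(ScmModel().repo_scan(), remove_obsolete=False)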
635 def load_rcextensions(root_path):
633 def load_rcextensions(root_path):
636 import rhodecode
634 import rhodecode
637 from rhodecode.config import conf
635 from rhodecode.config import conf
638
636
639 path = os.path.join(root_path)
637 path = os.path.join(root_path)
640 sys.path.append(path)
638 sys.path.append(path)
641
639
642 try:
640 try:
643 rcextensions = __import__('rcextensions')
641 rcextensions = __import__('rcextensions')
644 except ImportError:
642 except ImportError:
645 if os.path.isdir(os.path.join(path, 'rcextensions')):
643 if os.path.isdir(os.path.join(path, 'rcextensions')):
646 log.warning('Unable to load rcextensions from %s', path)
644 log.warning('Unable to load rcextensions from %s', path)
647 rcextensions = None
645 rcextensions = None
648
646
649 if rcextensions:
647 if rcextensions:
650 log.info('Loaded rcextensions from %s...', rcextensions)
648 log.info('Loaded rcextensions from %s...', rcextensions)
651 rhodecode.EXTENSIONS = rcextensions
649 rhodecode.EXTENSIONS = rcextensions
652
650
653 # Additional mappings that are not present in the pygments lexers
651 # Additional mappings that are not present in the pygments lexers
654 conf.LANGUAGES_EXTENSIONS_MAP.update(
652 conf.LANGUAGES_EXTENSIONS_MAP.update(
655 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
653 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
656
654
657
655
658 def get_custom_lexer(extension):
656 def get_custom_lexer(extension):
659 """
657 """
660 returns a custom lexer if it is defined in rcextensions module, or None
658 returns a custom lexer if it is defined in rcextensions module, or None
661 if there's no custom lexer defined
659 if there's no custom lexer defined
662 """
660 """
663 import rhodecode
661 import rhodecode
664 from pygments import lexers
662 from pygments import lexers
665
663
666 # custom override made by RhodeCode
664 # custom override made by RhodeCode
667 if extension in ['mako']:
665 if extension in ['mako']:
668 return lexers.get_lexer_by_name('html+mako')
666 return lexers.get_lexer_by_name('html+mako')
669
667
670 # check if we didn't define this extension as other lexer
668 # check if we didn't define this extension as other lexer
671 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
669 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
672 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
670 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
673 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
671 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
674 return lexers.get_lexer_by_name(_lexer_name)
672 return lexers.get_lexer_by_name(_lexer_name)
675
673
676
674
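Sketch of the rcextensions hook checked above (hypothetical mapping; values
must be valid pygments lexer names)::

    # in rcextensions/__init__.py
    EXTRA_LEXERS = {
        'tpl': 'html+mako',  # render .tpl files with the html+mako lexer
    }

    get_custom_lexer('tpl')   # -> pygments html+mako lexer
    get_custom_lexer('mako')  # built-in override, same lexer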
677 #==============================================================================
675 #==============================================================================
678 # TEST FUNCTIONS AND CREATORS
676 # TEST FUNCTIONS AND CREATORS
679 #==============================================================================
677 #==============================================================================
680 def create_test_index(repo_location, config):
678 def create_test_index(repo_location, config):
681 """
679 """
682 Makes default test index.
680 Makes default test index.
683 """
681 """
684 try:
682 try:
685 import rc_testdata
683 import rc_testdata
686 except ImportError:
684 except ImportError:
687 raise ImportError('Failed to import rc_testdata, '
685 raise ImportError('Failed to import rc_testdata, '
688 'please make sure this package is installed from requirements_test.txt')
686 'please make sure this package is installed from requirements_test.txt')
689 rc_testdata.extract_search_index(
687 rc_testdata.extract_search_index(
690 'vcs_search_index', os.path.dirname(config['search.location']))
688 'vcs_search_index', os.path.dirname(config['search.location']))
691
689
692
690
693 def create_test_directory(test_path):
691 def create_test_directory(test_path):
694 """
692 """
695 Create test directory if it doesn't exist.
693 Create test directory if it doesn't exist.
696 """
694 """
697 if not os.path.isdir(test_path):
695 if not os.path.isdir(test_path):
698 log.debug('Creating testdir %s', test_path)
696 log.debug('Creating testdir %s', test_path)
699 os.makedirs(test_path)
697 os.makedirs(test_path)
700
698
701
699
702 def create_test_database(test_path, config):
700 def create_test_database(test_path, config):
703 """
701 """
704 Makes a fresh database.
702 Makes a fresh database.
705 """
703 """
706 from rhodecode.lib.db_manage import DbManage
704 from rhodecode.lib.db_manage import DbManage
707 from rhodecode.lib.utils2 import get_encryption_key
705 from rhodecode.lib.utils2 import get_encryption_key
708
706
709 # PART ONE create db
707 # PART ONE create db
710 dbconf = config['sqlalchemy.db1.url']
708 dbconf = config['sqlalchemy.db1.url']
711 enc_key = get_encryption_key(config)
709 enc_key = get_encryption_key(config)
712
710
713 log.debug('making test db %s', dbconf)
711 log.debug('making test db %s', dbconf)
714
712
715 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
713 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
716 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
714 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
717 dbmanage.create_tables(override=True)
715 dbmanage.create_tables(override=True)
718 dbmanage.set_db_version()
716 dbmanage.set_db_version()
719 # for tests dynamically set new root paths based on generated content
717 # for tests dynamically set new root paths based on generated content
720 dbmanage.create_settings(dbmanage.config_prompt(test_path))
718 dbmanage.create_settings(dbmanage.config_prompt(test_path))
721 dbmanage.create_default_user()
719 dbmanage.create_default_user()
722 dbmanage.create_test_admin_and_users()
720 dbmanage.create_test_admin_and_users()
723 dbmanage.create_permissions()
721 dbmanage.create_permissions()
724 dbmanage.populate_default_permissions()
722 dbmanage.populate_default_permissions()
725 Session().commit()
723 Session().commit()
726
724
727
725
728 def create_test_repositories(test_path, config):
726 def create_test_repositories(test_path, config):
729 """
727 """
730 Creates test repositories in the temporary directory. Repositories are
728 Creates test repositories in the temporary directory. Repositories are
731 extracted from archives within the rc_testdata package.
729 extracted from archives within the rc_testdata package.
732 """
730 """
733 import rc_testdata
731 import rc_testdata
734 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
732 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
735
733
736 log.debug('making test vcs repositories')
734 log.debug('making test vcs repositories')
737
735
738 idx_path = config['search.location']
736 idx_path = config['search.location']
739 data_path = config['cache_dir']
737 data_path = config['cache_dir']
740
738
741 # clean index and data
739 # clean index and data
742 if idx_path and os.path.exists(idx_path):
740 if idx_path and os.path.exists(idx_path):
743 log.debug('remove %s', idx_path)
741 log.debug('remove %s', idx_path)
744 shutil.rmtree(idx_path)
742 shutil.rmtree(idx_path)
745
743
746 if data_path and os.path.exists(data_path):
744 if data_path and os.path.exists(data_path):
747 log.debug('remove %s', data_path)
745 log.debug('remove %s', data_path)
748 shutil.rmtree(data_path)
746 shutil.rmtree(data_path)
749
747
750 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
748 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
751 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
749 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
752
750
753 # Note: Subversion is in the process of being integrated with the system,
751 # Note: Subversion is in the process of being integrated with the system,
754 # until we have a properly packed version of the test svn repository, this
752 # until we have a properly packed version of the test svn repository, this
755 # tries to copy over the repo from a package "rc_testdata"
753 # tries to copy over the repo from a package "rc_testdata"
756 svn_repo_path = rc_testdata.get_svn_repo_archive()
754 svn_repo_path = rc_testdata.get_svn_repo_archive()
757 with tarfile.open(svn_repo_path) as tar:
755 with tarfile.open(svn_repo_path) as tar:
758 tar.extractall(jn(test_path, SVN_REPO))
756 tar.extractall(jn(test_path, SVN_REPO))
759
757
760
758
761 def password_changed(auth_user, session):
759 def password_changed(auth_user, session):
762 # Never report password change in case of default user or anonymous user.
760 # Never report password change in case of default user or anonymous user.
763 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
761 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
764 return False
762 return False
765
763
766 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
764 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
767 rhodecode_user = session.get('rhodecode_user', {})
765 rhodecode_user = session.get('rhodecode_user', {})
768 session_password_hash = rhodecode_user.get('password', '')
766 session_password_hash = rhodecode_user.get('password', '')
769 return password_hash != session_password_hash
767 return password_hash != session_password_hash
770
768
771
769
772 def read_opensource_licenses():
770 def read_opensource_licenses():
773 global _license_cache
771 global _license_cache
774
772
775 if not _license_cache:
773 if not _license_cache:
776 licenses = pkg_resources.resource_string(
774 licenses = pkg_resources.resource_string(
777 'rhodecode', 'config/licenses.json')
775 'rhodecode', 'config/licenses.json')
778 _license_cache = json.loads(licenses)
776 _license_cache = json.loads(licenses)
779
777
780 return _license_cache
778 return _license_cache
781
779
782
780
783 def generate_platform_uuid():
781 def generate_platform_uuid():
784 """
782 """
785 Generates platform UUID based on its name
783 Generates platform UUID based on its name
786 """
784 """
787 import platform
785 import platform
788
786
789 try:
787 try:
790 uuid_list = [platform.platform()]
788 uuid_list = [platform.platform()]
791 return sha256_safe(':'.join(uuid_list))
789 return sha256_safe(':'.join(uuid_list))
792 except Exception as e:
790 except Exception as e:
793 log.error('Failed to generate host uuid: %s', e)
791 log.error('Failed to generate host uuid: %s', e)
794 return 'UNDEFINED'
792 return 'UNDEFINED'
795
793
796
794
797 def send_test_email(recipients, email_body='TEST EMAIL'):
795 def send_test_email(recipients, email_body='TEST EMAIL'):
798 """
796 """
799 Simple code for generating test emails.
797 Simple code for generating test emails.
800 Usage::
798 Usage::
801
799
802 from rhodecode.lib import utils
800 from rhodecode.lib import utils
803 utils.send_test_email()
801 utils.send_test_email()
804 """
802 """
805 from rhodecode.lib.celerylib import tasks, run_task
803 from rhodecode.lib.celerylib import tasks, run_task
806
804
807 email_body_plaintext = email_body
805 email_body_plaintext = email_body
808 subject = 'SUBJECT FROM: {}'.format(socket.gethostname())
806 subject = f'SUBJECT FROM: {socket.gethostname()}'
809 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
807 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
@@ -1,989 +1,987 b''
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21
19
22 """
20 """
23 Some simple helper functions
21 Some simple helper functions
24 """
22 """
25
23
26 import collections
24 import collections
27 import datetime
25 import datetime
28 import dateutil.relativedelta
26 import dateutil.relativedelta
29 import logging
27 import logging
30 import re
28 import re
31 import sys
29 import sys
32 import time
30 import time
33 import urllib.request
31 import urllib.request
34 import urllib.parse
32 import urllib.parse
35 import urllib.error
33 import urllib.error
36 import urlobject
34 import urlobject
37 import uuid
35 import uuid
38 import getpass
36 import getpass
39 import socket
37 import socket
40 import errno
38 import errno
41 import random
39 import random
42 import functools
40 import functools
43 from contextlib import closing
41 from contextlib import closing
44
42
45 import pygments.lexers
43 import pygments.lexers
46 import sqlalchemy
44 import sqlalchemy
47 import sqlalchemy.event
45 import sqlalchemy.event
48 import sqlalchemy.engine.url
46 import sqlalchemy.engine.url
49 import sqlalchemy.exc
47 import sqlalchemy.exc
50 import sqlalchemy.sql
48 import sqlalchemy.sql
51 import webob
49 import webob
52 from pyramid.settings import asbool
50 from pyramid.settings import asbool
53
51
54 import rhodecode
52 import rhodecode
55 from rhodecode.translation import _, _pluralize
53 from rhodecode.translation import _, _pluralize
56 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
54 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
57 from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
55 from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
58 from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict
56 from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict
59
57
60
58
61 def __get_lem(extra_mapping=None):
59 def __get_lem(extra_mapping=None):
62 """
60 """
63 Get language extension map based on what's inside pygments lexers
61 Get language extension map based on what's inside pygments lexers
64 """
62 """
65 d = collections.defaultdict(list)
63 d = collections.defaultdict(list)
66
64
67 def __clean(s):
65 def __clean(s):
68 s = s.lstrip('*')
66 s = s.lstrip('*')
69 s = s.lstrip('.')
67 s = s.lstrip('.')
70
68
71 if s.find('[') != -1:
69 if s.find('[') != -1:
72 exts = []
70 exts = []
73 start, stop = s.find('['), s.find(']')
71 start, stop = s.find('['), s.find(']')
74
72
75 for suffix in s[start + 1:stop]:
73 for suffix in s[start + 1:stop]:
76 exts.append(s[:s.find('[')] + suffix)
74 exts.append(s[:s.find('[')] + suffix)
77 return [e.lower() for e in exts]
75 return [e.lower() for e in exts]
78 else:
76 else:
79 return [s.lower()]
77 return [s.lower()]
80
78
81 for lx, t in sorted(pygments.lexers.LEXERS.items()):
79 for lx, t in sorted(pygments.lexers.LEXERS.items()):
82 m = list(map(__clean, t[-2]))
80 m = list(map(__clean, t[-2]))
83 if m:
81 if m:
84 m = functools.reduce(lambda x, y: x + y, m)
82 m = functools.reduce(lambda x, y: x + y, m)
85 for ext in m:
83 for ext in m:
86 desc = lx.replace('Lexer', '')
84 desc = lx.replace('Lexer', '')
87 d[ext].append(desc)
85 d[ext].append(desc)
88
86
89 data = dict(d)
87 data = dict(d)
90
88
91 extra_mapping = extra_mapping or {}
89 extra_mapping = extra_mapping or {}
92 if extra_mapping:
90 if extra_mapping:
93 for k, v in list(extra_mapping.items()):
91 for k, v in list(extra_mapping.items()):
94 if k not in data:
92 if k not in data:
95 # register a new extension -> lexer mapping
93 # register a new extension -> lexer mapping
96 data[k] = [v]
94 data[k] = [v]
97
95
98 return data
96 return data
99
97
100
98
101 def convert_line_endings(line: str, mode) -> str:
99 def convert_line_endings(line: str, mode) -> str:
102 """
100 """
103 Converts the line ending of a given line according to the given mode
101 Converts the line ending of a given line according to the given mode
104
102
105 Available modes are::
103 Available modes are::
106 0 - Unix
104 0 - Unix
107 1 - Mac
105 1 - Mac
108 2 - DOS
106 2 - DOS
109
107
110 :param line: given line to convert
108 :param line: given line to convert
111 :param mode: mode to convert to
109 :param mode: mode to convert to
112 :return: converted line according to mode
110 :return: converted line according to mode
113 """
111 """
114 if mode == 0:
112 if mode == 0:
115 line = line.replace('\r\n', '\n')
113 line = line.replace('\r\n', '\n')
116 line = line.replace('\r', '\n')
114 line = line.replace('\r', '\n')
117 elif mode == 1:
115 elif mode == 1:
118 line = line.replace('\r\n', '\r')
116 line = line.replace('\r\n', '\r')
119 line = line.replace('\n', '\r')
117 line = line.replace('\n', '\r')
120 elif mode == 2:
118 elif mode == 2:
121 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
119 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
122 return line
120 return line
123
121
124
122
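A quick usage sketch of the three modes (the import path rhodecode.lib.utils2 follows the docstring of find_calling_context below):

    from rhodecode.lib.utils2 import convert_line_endings

    mixed = 'first\r\nsecond\rthird\n'
    # mode 0: everything becomes LF
    assert convert_line_endings(mixed, 0) == 'first\nsecond\nthird\n'
    # mode 1: everything becomes CR
    assert convert_line_endings(mixed, 1) == 'first\rsecond\rthird\r'
    # mode 2: everything becomes CRLF; existing CRLF pairs are not doubled
    assert convert_line_endings(mixed, 2) == 'first\r\nsecond\r\nthird\r\n'
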
125 def detect_mode(line: str, default) -> int:
123 def detect_mode(line: str, default) -> int:
126 """
124 """
127 Detects the line break of a given line; if the line break couldn't be
125 Detects the line break of a given line; if the line break couldn't be
128 found, the given default value is returned
126 found, the given default value is returned
129
127
130 :param line: str line
128 :param line: str line
131 :param default: default
129 :param default: default
132 :return: value of line end, one of: 0 - Unix, 1 - Mac, 2 - DOS
130 :return: value of line end, one of: 0 - Unix, 1 - Mac, 2 - DOS
133 """
131 """
134 if line.endswith('\r\n'):
132 if line.endswith('\r\n'):
135 return 2
133 return 2
136 elif line.endswith('\n'):
134 elif line.endswith('\n'):
137 return 0
135 return 0
138 elif line.endswith('\r'):
136 elif line.endswith('\r'):
139 return 1
137 return 1
140 else:
138 else:
141 return default
139 return default
142
140
143
141
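detect_mode pairs naturally with convert_line_endings above: detect the ending style of the first line, then normalize the rest to match. A sketch:

    from rhodecode.lib.utils2 import convert_line_endings, detect_mode

    assert detect_mode('text\r\n', 0) == 2    # DOS
    assert detect_mode('text\n', 0) == 0      # Unix
    assert detect_mode('text\r', 0) == 1      # Mac
    assert detect_mode('no newline', 0) == 0  # falls back to the default

    # normalize a whole file to the style of its first line
    lines = ['one\r\n', 'two\n', 'three\r']
    mode = detect_mode(lines[0], 0)
    assert [convert_line_endings(l, mode) for l in lines] == ['one\r\n', 'two\r\n', 'three\r\n']
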
144 def remove_suffix(s, suffix):
142 def remove_suffix(s, suffix):
145 if s.endswith(suffix):
143 if s.endswith(suffix):
146 s = s[:-1 * len(suffix)]
144 s = s[:-1 * len(suffix)]
147 return s
145 return s
148
146
149
147
150 def remove_prefix(s, prefix):
148 def remove_prefix(s, prefix):
151 if s.startswith(prefix):
149 if s.startswith(prefix):
152 s = s[len(prefix):]
150 s = s[len(prefix):]
153 return s
151 return s
154
152
155
153
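Note that remove_suffix and remove_prefix predate str.removesuffix()/str.removeprefix(), which Python 3.9 added with equivalent semantics; the helpers are kept for compatibility. For example:

    from rhodecode.lib.utils2 import remove_prefix, remove_suffix

    assert remove_suffix('repo.git', '.git') == 'repo'
    assert remove_prefix('hg+https://host', 'hg+') == 'https://host'
    # stdlib equivalent on Python 3.9+
    assert 'repo.git'.removesuffix('.git') == 'repo'
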
156 def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True):
154 def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True):
157 """
155 """
158 Look through the calling stack and return the frame which called
156 Look through the calling stack and return the frame which called
159 this function and is part of a core module (i.e. rhodecode.*)
157 this function and is part of a core module (i.e. rhodecode.*)
160
158
161 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
159 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
162 :param depth:
160 :param depth:
163 :param output_writer:
161 :param output_writer:
164 :param indent:
162 :param indent:
165
163
166 usage::
164 usage::
167
165
168 from rhodecode.lib.utils2 import find_calling_context
166 from rhodecode.lib.utils2 import find_calling_context
169
167
170 calling_context = find_calling_context(ignore_modules=[
168 calling_context = find_calling_context(ignore_modules=[
171 'rhodecode.lib.caching_query',
169 'rhodecode.lib.caching_query',
172 'rhodecode.model.settings',
170 'rhodecode.model.settings',
173 ])
171 ])
174
172
175 """
173 """
176 import inspect
174 import inspect
177 if not output_writer:
175 if not output_writer:
178 try:
176 try:
179 from rich import print as pprint
177 from rich import print as pprint
180 except ImportError:
178 except ImportError:
181 pprint = print
179 pprint = print
182 output_writer = pprint
180 output_writer = pprint
183
181
184 frame = inspect.currentframe()
182 frame = inspect.currentframe()
185 cc = []
183 cc = []
186 try:
184 try:
187 for i in range(depth): # walk up to `depth` caller frames
185 for i in range(depth): # walk up to `depth` caller frames
188 frame = frame.f_back
186 frame = frame.f_back
189 if not frame:
187 if not frame:
190 break
188 break
191
189
192 info = inspect.getframeinfo(frame)
190 info = inspect.getframeinfo(frame)
193 name = frame.f_globals.get('__name__')
191 name = frame.f_globals.get('__name__')
194 if name not in (ignore_modules or []):
192 if name not in (ignore_modules or []):
195 cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}')
193 cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}')
196 finally:
194 finally:
197 # Avoids a reference cycle
195 # Avoids a reference cycle
198 del frame
196 del frame
199
197
200 output_writer('* INFO: This code was called from: *')
198 output_writer('* INFO: This code was called from: *')
201 for cnt, frm_info in enumerate(cc):
199 for cnt, frm_info in enumerate(cc):
202 if not indent:
200 if not indent:
203 cnt = 1
201 cnt = 1
204 output_writer(' ' * cnt + frm_info)
202 output_writer(' ' * cnt + frm_info)
205
203
206
204
207 def ping_connection(connection, branch):
205 def ping_connection(connection, branch):
208 if branch:
206 if branch:
209 # "branch" refers to a sub-connection of a connection,
207 # "branch" refers to a sub-connection of a connection,
210 # we don't want to bother pinging on these.
208 # we don't want to bother pinging on these.
211 return
209 return
212
210
213 # turn off "close with result". This flag is only used with
211 # turn off "close with result". This flag is only used with
214 # "connectionless" execution, otherwise will be False in any case
212 # "connectionless" execution, otherwise will be False in any case
215 save_should_close_with_result = connection.should_close_with_result
213 save_should_close_with_result = connection.should_close_with_result
216 connection.should_close_with_result = False
214 connection.should_close_with_result = False
217
215
218 try:
216 try:
219 # run a SELECT 1. use a core select() so that
217 # run a SELECT 1. use a core select() so that
220 # the SELECT of a scalar value without a table is
218 # the SELECT of a scalar value without a table is
221 # appropriately formatted for the backend
219 # appropriately formatted for the backend
222 connection.scalar(sqlalchemy.sql.select([1]))
220 connection.scalar(sqlalchemy.sql.select([1]))
223 except sqlalchemy.exc.DBAPIError as err:
221 except sqlalchemy.exc.DBAPIError as err:
224 # catch SQLAlchemy's DBAPIError, which is a wrapper
222 # catch SQLAlchemy's DBAPIError, which is a wrapper
225 # for the DBAPI's exception. It includes a .connection_invalidated
223 # for the DBAPI's exception. It includes a .connection_invalidated
226 # attribute which specifies if this connection is a "disconnect"
224 # attribute which specifies if this connection is a "disconnect"
227 # condition, which is based on inspection of the original exception
225 # condition, which is based on inspection of the original exception
228 # by the dialect in use.
226 # by the dialect in use.
229 if err.connection_invalidated:
227 if err.connection_invalidated:
230 # run the same SELECT again - the connection will re-validate
228 # run the same SELECT again - the connection will re-validate
231 # itself and establish a new connection. The disconnect detection
229 # itself and establish a new connection. The disconnect detection
232 # here also causes the whole connection pool to be invalidated
230 # here also causes the whole connection pool to be invalidated
233 # so that all stale connections are discarded.
231 # so that all stale connections are discarded.
234 connection.scalar(sqlalchemy.sql.select([1]))
232 connection.scalar(sqlalchemy.sql.select([1]))
235 else:
233 else:
236 raise
234 raise
237 finally:
235 finally:
238 # restore "close with result"
236 # restore "close with result"
239 connection.should_close_with_result = save_should_close_with_result
237 connection.should_close_with_result = save_should_close_with_result
240
238
241
239
242 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
240 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
243 """Custom engine_from_config functions."""
241 """Custom engine_from_config functions."""
244 log = logging.getLogger('sqlalchemy.engine')
242 log = logging.getLogger('sqlalchemy.engine')
245 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
243 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
246 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
244 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
247
245
248 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
246 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
249
247
250 def color_sql(sql):
248 def color_sql(sql):
251 color_seq = '\033[1;33m' # This is yellow: code 33
249 color_seq = '\033[1;33m' # This is yellow: code 33
252 normal = '\x1b[0m'
250 normal = '\x1b[0m'
253 return ''.join([color_seq, sql, normal])
251 return ''.join([color_seq, sql, normal])
254
252
255 if use_ping_connection:
253 if use_ping_connection:
256 log.debug('Adding ping_connection on the engine config.')
254 log.debug('Adding ping_connection on the engine config.')
257 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
255 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
258
256
259 if debug:
257 if debug:
260 # attach events only for debug configuration
258 # attach events only for debug configuration
261 def before_cursor_execute(conn, cursor, statement,
259 def before_cursor_execute(conn, cursor, statement,
262 parameters, context, executemany):
260 parameters, context, executemany):
263 setattr(conn, 'query_start_time', time.time())
261 setattr(conn, 'query_start_time', time.time())
264 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
262 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
265 find_calling_context(ignore_modules=[
263 find_calling_context(ignore_modules=[
266 'rhodecode.lib.caching_query',
264 'rhodecode.lib.caching_query',
267 'rhodecode.model.settings',
265 'rhodecode.model.settings',
268 ], output_writer=log.info)
266 ], output_writer=log.info)
269
267
270 def after_cursor_execute(conn, cursor, statement,
268 def after_cursor_execute(conn, cursor, statement,
271 parameters, context, executemany):
269 parameters, context, executemany):
272 delattr(conn, 'query_start_time')
270 delattr(conn, 'query_start_time')
273
271
274 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
272 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
275 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
273 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
276
274
277 return engine
275 return engine
278
276
279
277
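The two custom keys are popped from the settings before the rest is forwarded to sqlalchemy.engine_from_config(). A minimal sketch with a hypothetical settings dict (note the custom keys are hardcoded to the sqlalchemy.db1. prefix):

    from rhodecode.lib.utils2 import engine_from_config

    config = {
        'sqlalchemy.db1.url': 'sqlite:///rhodecode.db',
        # emit SELECT 1 on engine_connect to recover dropped connections
        'sqlalchemy.db1.ping_connection': 'true',
        # attach per-statement debug events (verbose; debug setups only)
        'sqlalchemy.db1.debug_query': 'false',
    }
    engine = engine_from_config(config, prefix='sqlalchemy.db1.')
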
280 def get_encryption_key(config) -> bytes:
278 def get_encryption_key(config) -> bytes:
281 secret = config.get('rhodecode.encrypted_values.secret')
279 secret = config.get('rhodecode.encrypted_values.secret')
282 default = config['beaker.session.secret']
280 default = config['beaker.session.secret']
283 enc_key = secret or default
281 enc_key = secret or default
284
282
285 return safe_bytes(enc_key)
283 return safe_bytes(enc_key)
286
284
287
285
288 def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
286 def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
289 """
287 """
290 Turns a datetime into an age string.
288 Turns a datetime into an age string.
291 If show_short_version is True, this generates a shorter string with
289 If show_short_version is True, this generates a shorter string with
292 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
290 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
293
291
294 *IMPORTANT*
292 *IMPORTANT*
295 The code of this function is written in a special way so it's easier to
293 The code of this function is written in a special way so it's easier to
296 port it to JavaScript. If you mean to update it, please also update the
294 port it to JavaScript. If you mean to update it, please also update the
297 `jquery.timeago-extension.js` file
295 `jquery.timeago-extension.js` file
298
296
299 :param prevdate: datetime object
297 :param prevdate: datetime object
300 :param now: current time; if not defined we use
298 :param now: current time; if not defined we use
301 `datetime.datetime.now()`
299 `datetime.datetime.now()`
302 :param show_short_version: if it should approximate the date and
300 :param show_short_version: if it should approximate the date and
303 return a shorter string
301 return a shorter string
304 :param show_suffix:
302 :param show_suffix:
305 :param short_format: show short format, eg 2D instead of 2 days
303 :param short_format: show short format, eg 2D instead of 2 days
306 :rtype: str
304 :rtype: str
307 :returns: words describing age
305 :returns: words describing age
308 """
306 """
309
307
310 def _get_relative_delta(now, prevdate):
308 def _get_relative_delta(now, prevdate):
311 base = dateutil.relativedelta.relativedelta(now, prevdate)
309 base = dateutil.relativedelta.relativedelta(now, prevdate)
312 return {
310 return {
313 'year': base.years,
311 'year': base.years,
314 'month': base.months,
312 'month': base.months,
315 'day': base.days,
313 'day': base.days,
316 'hour': base.hours,
314 'hour': base.hours,
317 'minute': base.minutes,
315 'minute': base.minutes,
318 'second': base.seconds,
316 'second': base.seconds,
319 }
317 }
320
318
321 def _is_leap_year(year):
319 def _is_leap_year(year):
322 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
320 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
323
321
324 def get_month(prevdate):
322 def get_month(prevdate):
325 return prevdate.month
323 return prevdate.month
326
324
327 def get_year(prevdate):
325 def get_year(prevdate):
328 return prevdate.year
326 return prevdate.year
329
327
330 now = now or datetime.datetime.now()
328 now = now or datetime.datetime.now()
331 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
329 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
332 deltas = {}
330 deltas = {}
333 future = False
331 future = False
334
332
335 if prevdate > now:
333 if prevdate > now:
336 now_old = now
334 now_old = now
337 now = prevdate
335 now = prevdate
338 prevdate = now_old
336 prevdate = now_old
339 future = True
337 future = True
340 if future:
338 if future:
341 prevdate = prevdate.replace(microsecond=0)
339 prevdate = prevdate.replace(microsecond=0)
342 # Get date parts deltas
340 # Get date parts deltas
343 for part in order:
341 for part in order:
344 rel_delta = _get_relative_delta(now, prevdate)
342 rel_delta = _get_relative_delta(now, prevdate)
345 deltas[part] = rel_delta[part]
343 deltas[part] = rel_delta[part]
346
344
347 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
345 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
348 # not 1 hour, -59 minutes and -59 seconds)
346 # not 1 hour, -59 minutes and -59 seconds)
349 offsets = [[5, 60], [4, 60], [3, 24]]
347 offsets = [[5, 60], [4, 60], [3, 24]]
350 for element in offsets: # seconds, minutes, hours
348 for element in offsets: # seconds, minutes, hours
351 num = element[0]
349 num = element[0]
352 length = element[1]
350 length = element[1]
353
351
354 part = order[num]
352 part = order[num]
355 carry_part = order[num - 1]
353 carry_part = order[num - 1]
356
354
357 if deltas[part] < 0:
355 if deltas[part] < 0:
358 deltas[part] += length
356 deltas[part] += length
359 deltas[carry_part] -= 1
357 deltas[carry_part] -= 1
360
358
361 # Same thing for days except that the increment depends on the (variable)
359 # Same thing for days except that the increment depends on the (variable)
362 # number of days in the month
360 # number of days in the month
363 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
361 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
364 if deltas['day'] < 0:
362 if deltas['day'] < 0:
365 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
363 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
366 deltas['day'] += 29
364 deltas['day'] += 29
367 else:
365 else:
368 deltas['day'] += month_lengths[get_month(prevdate) - 1]
366 deltas['day'] += month_lengths[get_month(prevdate) - 1]
369
367
370 deltas['month'] -= 1
368 deltas['month'] -= 1
371
369
372 if deltas['month'] < 0:
370 if deltas['month'] < 0:
373 deltas['month'] += 12
371 deltas['month'] += 12
374 deltas['year'] -= 1
372 deltas['year'] -= 1
375
373
376 # Format the result
374 # Format the result
377 if short_format:
375 if short_format:
378 fmt_funcs = {
376 fmt_funcs = {
379 'year': lambda d: '%dy' % d,
377 'year': lambda d: '%dy' % d,
380 'month': lambda d: '%dm' % d,
378 'month': lambda d: '%dm' % d,
381 'day': lambda d: '%dd' % d,
379 'day': lambda d: '%dd' % d,
382 'hour': lambda d: '%dh' % d,
380 'hour': lambda d: '%dh' % d,
383 'minute': lambda d: '%dmin' % d,
381 'minute': lambda d: '%dmin' % d,
384 'second': lambda d: '%dsec' % d,
382 'second': lambda d: '%dsec' % d,
385 }
383 }
386 else:
384 else:
387 fmt_funcs = {
385 fmt_funcs = {
388 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
386 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
389 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
387 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
390 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
388 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
391 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
389 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
392 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
390 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
393 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
391 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
394 }
392 }
395
393
396 i = 0
394 i = 0
397 for part in order:
395 for part in order:
398 value = deltas[part]
396 value = deltas[part]
399 if value != 0:
397 if value != 0:
400
398
401 if i < 5:
399 if i < 5:
402 sub_part = order[i + 1]
400 sub_part = order[i + 1]
403 sub_value = deltas[sub_part]
401 sub_value = deltas[sub_part]
404 else:
402 else:
405 sub_value = 0
403 sub_value = 0
406
404
407 if sub_value == 0 or show_short_version:
405 if sub_value == 0 or show_short_version:
408 _val = fmt_funcs[part](value)
406 _val = fmt_funcs[part](value)
409 if future:
407 if future:
410 if show_suffix:
408 if show_suffix:
411 return _('in ${ago}', mapping={'ago': _val})
409 return _('in ${ago}', mapping={'ago': _val})
412 else:
410 else:
413 return _(_val)
411 return _(_val)
414
412
415 else:
413 else:
416 if show_suffix:
414 if show_suffix:
417 return _('${ago} ago', mapping={'ago': _val})
415 return _('${ago} ago', mapping={'ago': _val})
418 else:
416 else:
419 return _(_val)
417 return _(_val)
420
418
421 val = fmt_funcs[part](value)
419 val = fmt_funcs[part](value)
422 val_detail = fmt_funcs[sub_part](sub_value)
420 val_detail = fmt_funcs[sub_part](sub_value)
423 mapping = {'val': val, 'detail': val_detail}
421 mapping = {'val': val, 'detail': val_detail}
424
422
425 if short_format:
423 if short_format:
426 datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
424 datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
427 if show_suffix:
425 if show_suffix:
428 datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
426 datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
429 if future:
427 if future:
430 datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
428 datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
431 else:
429 else:
432 datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
430 datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
433 if show_suffix:
431 if show_suffix:
434 datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
432 datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
435 if future:
433 if future:
436 datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)
434 datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)
437
435
438 return datetime_tmpl
436 return datetime_tmpl
439 i += 1
437 i += 1
440 return _('just now')
438 return _('just now')
441
439
442
440
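An illustrative sketch of the formatting behaviour; the returned translation strings are shown rendered to English:

    import datetime
    from rhodecode.lib.utils2 import age

    now = datetime.datetime(2023, 6, 15, 12, 0, 0)
    delta = datetime.timedelta(days=1, hours=23)

    age(now - delta, now=now)                           # '1 day and 23 hours ago'
    age(now - delta, now=now, show_short_version=True)  # '1 day ago'
    age(now - datetime.timedelta(days=2), now=now, short_format=True)  # '2d ago'
    age(now + datetime.timedelta(hours=3), now=now)     # 'in 3 hours'
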
443 def age_from_seconds(seconds):
441 def age_from_seconds(seconds):
444 seconds = safe_int(seconds) or 0
442 seconds = safe_int(seconds) or 0
445 prevdate = time_to_datetime(time.time() + seconds)
443 prevdate = time_to_datetime(time.time() + seconds)
446 return age(prevdate, show_suffix=False, show_short_version=True)
444 return age(prevdate, show_suffix=False, show_short_version=True)
447
445
448
446
449 def cleaned_uri(uri):
447 def cleaned_uri(uri):
450 """
448 """
451 Percent-quotes characters such as '[' and ']' in the uri;
449 Percent-quotes characters such as '[' and ']' in the uri;
452 according to RFC 3986 we cannot use such chars in a uri unencoded
450 according to RFC 3986 we cannot use such chars in a uri unencoded
453 :param uri:
451 :param uri:
454 :return: uri with such chars quoted
452 :return: uri with such chars quoted
455 """
453 """
456 return urllib.parse.quote(uri, safe='@$:/')
454 return urllib.parse.quote(uri, safe='@$:/')
457
455
458
456
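For example, the brackets of an IPv6-style literal get percent-quoted, while the scheme and path separators in the safe set survive:

    from rhodecode.lib.utils2 import cleaned_uri

    assert cleaned_uri('http://[::1]:8080/repo') == 'http://%5B::1%5D:8080/repo'
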
459 def credentials_filter(uri):
457 def credentials_filter(uri):
460 """
458 """
461 Returns a url with credentials removed
459 Returns a url with credentials removed
462
460
463 :param uri:
461 :param uri:
464 """
462 """
466 if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
464 if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
467 return 'InvalidDecryptionKey'
465 return 'InvalidDecryptionKey'
468
466
469 url_obj = urlobject.URLObject(cleaned_uri(uri))
467 url_obj = urlobject.URLObject(cleaned_uri(uri))
470 url_obj = url_obj.without_password().without_username()
468 url_obj = url_obj.without_password().without_username()
471
469
472 return url_obj
470 return url_obj
473
471
474
472
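A sketch with a made-up clone URL; URLObject subclasses str, so the result can be used directly as a string:

    from rhodecode.lib.utils2 import credentials_filter

    url = 'https://john:s3cr3t@code.example.com/repo.git'
    assert credentials_filter(url) == 'https://code.example.com/repo.git'
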
475 def get_host_info(request):
473 def get_host_info(request):
476 """
474 """
477 Generate host info; to obtain the full url e.g. https://server.com,
475 Generate host info; to obtain the full url e.g. https://server.com,
478 use
476 use
479 `{scheme}://{netloc}`
477 `{scheme}://{netloc}`
480 """
478 """
481 if not request:
479 if not request:
482 return {}
480 return {}
483
481
484 qualified_home_url = request.route_url('home')
482 qualified_home_url = request.route_url('home')
485 parsed_url = urlobject.URLObject(qualified_home_url)
483 parsed_url = urlobject.URLObject(qualified_home_url)
486 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
484 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
487
485
488 return {
486 return {
489 'scheme': parsed_url.scheme,
487 'scheme': parsed_url.scheme,
490 'netloc': parsed_url.netloc+decoded_path,
488 'netloc': parsed_url.netloc+decoded_path,
491 'hostname': parsed_url.hostname,
489 'hostname': parsed_url.hostname,
492 }
490 }
493
491
494
492
495 def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
493 def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
496 qualified_home_url = request.route_url('home')
494 qualified_home_url = request.route_url('home')
497 parsed_url = urlobject.URLObject(qualified_home_url)
495 parsed_url = urlobject.URLObject(qualified_home_url)
498 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
496 decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/')))
499
497
500 args = {
498 args = {
501 'scheme': parsed_url.scheme,
499 'scheme': parsed_url.scheme,
502 'user': '',
500 'user': '',
503 'sys_user': getpass.getuser(),
501 'sys_user': getpass.getuser(),
504 # path if we use proxy-prefix
502 # path if we use proxy-prefix
505 'netloc': parsed_url.netloc+decoded_path,
503 'netloc': parsed_url.netloc+decoded_path,
506 'hostname': parsed_url.hostname,
504 'hostname': parsed_url.hostname,
507 'prefix': decoded_path,
505 'prefix': decoded_path,
508 'repo': repo_name,
506 'repo': repo_name,
509 'repoid': str(repo_id),
507 'repoid': str(repo_id),
510 'repo_type': repo_type
508 'repo_type': repo_type
511 }
509 }
512 args.update(override)
510 args.update(override)
513 args['user'] = urllib.parse.quote(safe_str(args['user']))
511 args['user'] = urllib.parse.quote(safe_str(args['user']))
514
512
515 for k, v in list(args.items()):
513 for k, v in list(args.items()):
516 tmpl_key = '{%s}' % k
514 tmpl_key = '{%s}' % k
517 uri_tmpl = uri_tmpl.replace(tmpl_key, v)
515 uri_tmpl = uri_tmpl.replace(tmpl_key, v)
518
516
519 # special case for SVN clone url
517 # special case for SVN clone url
520 if repo_type == 'svn':
518 if repo_type == 'svn':
521 uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')
519 uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')
522
520
523 # remove leading @ sign if it's present. Case of empty user
521 # remove leading @ sign if it's present. Case of empty user
524 url_obj = urlobject.URLObject(uri_tmpl)
522 url_obj = urlobject.URLObject(uri_tmpl)
525 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
523 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
526
524
527 return safe_str(url)
525 return safe_str(url)
528
526
529
527
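The substitution itself is a plain replace of {placeholder} tokens with the args mapping built above. Distilled into a standalone sketch with hypothetical values (in real use the netloc and prefix come from request.route_url('home')):

    args = {
        'scheme': 'https',
        'user': 'john',
        'netloc': 'code.example.com',
        'repo': 'project/repo',
    }
    uri_tmpl = '{scheme}://{user}@{netloc}/{repo}'
    for k, v in args.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % k, v)
    assert uri_tmpl == 'https://john@code.example.com/project/repo'
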
530 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
528 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
531 maybe_unreachable=False, reference_obj=None):
529 maybe_unreachable=False, reference_obj=None):
532 """
530 """
533 Safe version of get_commit: if the commit doesn't exist for a
531 Safe version of get_commit: if the commit doesn't exist for a
534 repository, it returns a dummy (EmptyCommit) one instead
532 repository, it returns a dummy (EmptyCommit) one instead
535
533
536 :param repo: repository instance
534 :param repo: repository instance
537 :param commit_id: commit id as str
535 :param commit_id: commit id as str
538 :param commit_idx: numeric commit index
536 :param commit_idx: numeric commit index
539 :param pre_load: optional list of commit attributes to load
537 :param pre_load: optional list of commit attributes to load
540 :param maybe_unreachable: translate unreachable commits on git repos
538 :param maybe_unreachable: translate unreachable commits on git repos
541 :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
539 :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
542 """
540 """
543 # TODO(skreft): remove these circular imports
541 # TODO(skreft): remove these circular imports
544 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
542 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
545 from rhodecode.lib.vcs.exceptions import RepositoryError
543 from rhodecode.lib.vcs.exceptions import RepositoryError
546 if not isinstance(repo, BaseRepository):
544 if not isinstance(repo, BaseRepository):
547 raise Exception('You must pass a Repository '
545 raise Exception('You must pass a Repository '
548 f'object as first argument, got {type(repo)}')
546 f'object as first argument, got {type(repo)}')
549
547
550 try:
548 try:
551 commit = repo.get_commit(
549 commit = repo.get_commit(
552 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
550 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
553 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
551 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
554 except (RepositoryError, LookupError):
552 except (RepositoryError, LookupError):
555 commit = EmptyCommit()
553 commit = EmptyCommit()
556 return commit
554 return commit
557
555
558
556
559 def datetime_to_time(dt):
557 def datetime_to_time(dt):
560 if dt:
558 if dt:
561 return time.mktime(dt.timetuple())
559 return time.mktime(dt.timetuple())
562
560
563
561
564 def time_to_datetime(tm):
562 def time_to_datetime(tm):
565 if tm:
563 if tm:
566 if isinstance(tm, str):
564 if isinstance(tm, str):
567 try:
565 try:
568 tm = float(tm)
566 tm = float(tm)
569 except ValueError:
567 except ValueError:
570 return
568 return
571 return datetime.datetime.fromtimestamp(tm)
569 return datetime.datetime.fromtimestamp(tm)
572
570
573
571
574 def time_to_utcdatetime(tm):
572 def time_to_utcdatetime(tm):
575 if tm:
573 if tm:
576 if isinstance(tm, str):
574 if isinstance(tm, str):
577 try:
575 try:
578 tm = float(tm)
576 tm = float(tm)
579 except ValueError:
577 except ValueError:
580 return
578 return
581 return datetime.datetime.utcfromtimestamp(tm)
579 return datetime.datetime.utcfromtimestamp(tm)
582
580
583
581
584 MENTIONS_REGEX = re.compile(
582 MENTIONS_REGEX = re.compile(
585 # ^@ or @ without any special chars in front
583 # ^@ or @ without any special chars in front
586 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
584 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
587 # main body starts with letter, then can be . - _
585 # main body starts with letter, then can be . - _
588 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
586 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
589 re.VERBOSE | re.MULTILINE)
587 re.VERBOSE | re.MULTILINE)
590
588
591
589
592 def extract_mentioned_users(s):
590 def extract_mentioned_users(s):
593 """
591 """
594 Returns unique usernames from given string s that have @mention
592 Returns unique usernames from given string s that have @mention
595
593
596 :param s: string to get mentions
594 :param s: string to get mentions
597 """
595 """
598 usrs = set()
596 usrs = set()
599 for username in MENTIONS_REGEX.findall(s):
597 for username in MENTIONS_REGEX.findall(s):
600 usrs.add(username)
598 usrs.add(username)
601
599
602 return sorted(list(usrs), key=lambda k: k.lower())
600 return sorted(list(usrs), key=lambda k: k.lower())
603
601
604
602
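The regex requires the '@' to start the string or follow a non-name character, so e-mail addresses are not picked up as mentions:

    from rhodecode.lib.utils2 import extract_mentioned_users

    text = '@john please review, cc @jane.doe (mail@example.com is ignored)'
    assert extract_mentioned_users(text) == ['jane.doe', 'john']
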
605 def fix_PATH(os_=None):
603 def fix_PATH(os_=None):
606 """
604 """
607 Get current active python path, and append it to PATH variable to fix
605 Get current active python path, and append it to PATH variable to fix
608 issues of subprocess calls and different python versions
606 issues of subprocess calls and different python versions
609 """
607 """
610 if os_ is None:
608 if os_ is None:
611 import os
609 import os
612 else:
610 else:
613 os = os_
611 os = os_
614
612
615 cur_path = os.path.split(sys.executable)[0]
613 cur_path = os.path.split(sys.executable)[0]
616 os_path = os.environ['PATH']
614 os_path = os.environ['PATH']
617 if not os.environ['PATH'].startswith(cur_path):
615 if not os.environ['PATH'].startswith(cur_path):
618 os.environ['PATH'] = f'{cur_path}:{os_path}'
616 os.environ['PATH'] = f'{cur_path}:{os_path}'
619
617
620
618
621 def obfuscate_url_pw(engine):
619 def obfuscate_url_pw(engine):
622 _url = engine or ''
620 _url = engine or ''
623 try:
621 try:
624 _url = sqlalchemy.engine.url.make_url(engine)
622 _url = sqlalchemy.engine.url.make_url(engine)
625 except Exception:
623 except Exception:
626 pass
624 pass
627 return repr(_url)
625 return repr(_url)
628
626
629
627
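SQLAlchemy's URL repr masks the password, so the helper is safe for logging; a sketch (the exact mask, e.g. '***', depends on the SQLAlchemy version):

    from rhodecode.lib.utils2 import obfuscate_url_pw

    print(obfuscate_url_pw('postgresql://scott:tiger@localhost/rhodecode'))
    # e.g. postgresql://scott:***@localhost/rhodecode
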
630 def get_server_url(environ):
628 def get_server_url(environ):
631 req = webob.Request(environ)
629 req = webob.Request(environ)
632 return req.host_url + req.script_name
630 return req.host_url + req.script_name
633
631
634
632
635 def unique_id(hexlen=32):
633 def unique_id(hexlen=32):
636 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
634 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
637 return suuid(truncate_to=hexlen, alphabet=alphabet)
635 return suuid(truncate_to=hexlen, alphabet=alphabet)
638
636
639
637
640 def suuid(url=None, truncate_to=22, alphabet=None):
638 def suuid(url=None, truncate_to=22, alphabet=None):
641 """
639 """
642 Generate and return a short URL safe UUID.
640 Generate and return a short URL safe UUID.
643
641
644 If the url parameter is provided, set the namespace to the provided
642 If the url parameter is provided, set the namespace to the provided
645 URL and generate a UUID.
643 URL and generate a UUID.
646
644
647 :param url: url to get the uuid for
645 :param url: url to get the uuid for
648 :param truncate_to: truncate the basic 22-char UUID to a shorter version
646 :param truncate_to: truncate the basic 22-char UUID to a shorter version
649
647
650 The IDs won't be universally unique any longer, but the probability of
648 The IDs won't be universally unique any longer, but the probability of
651 a collision will still be very low.
649 a collision will still be very low.
652 """
650 """
653 # Define our alphabet.
651 # Define our alphabet.
654 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
652 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
655
653
656 # If no URL is given, generate a random UUID.
654 # If no URL is given, generate a random UUID.
657 if url is None:
655 if url is None:
658 unique_id = uuid.uuid4().int
656 unique_id = uuid.uuid4().int
659 else:
657 else:
660 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
658 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
661
659
662 alphabet_length = len(_ALPHABET)
660 alphabet_length = len(_ALPHABET)
663 output = []
661 output = []
664 while unique_id > 0:
662 while unique_id > 0:
665 digit = unique_id % alphabet_length
663 digit = unique_id % alphabet_length
666 output.append(_ALPHABET[digit])
664 output.append(_ALPHABET[digit])
667 unique_id = int(unique_id / alphabet_length)
665 unique_id = int(unique_id / alphabet_length)
668 return "".join(output)[:truncate_to]
666 return "".join(output)[:truncate_to]
669
667
670
668
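The encoding is a repeated divmod of the UUID's 128-bit integer by the alphabet length, emitting the least-significant digit first; uuid3 makes URL-derived IDs deterministic, while uuid4-based ones are random. A sketch:

    from rhodecode.lib.utils2 import suuid, unique_id

    # deterministic for the same namespace URL
    assert suuid(url='https://rhodecode.com') == suuid(url='https://rhodecode.com')
    assert len(suuid(truncate_to=8)) <= 8
    assert len(unique_id(hexlen=12)) <= 12
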
671 def get_current_rhodecode_user(request=None):
669 def get_current_rhodecode_user(request=None):
672 """
670 """
673 Gets rhodecode user from request
671 Gets rhodecode user from request
674 """
672 """
675 import pyramid.threadlocal
673 import pyramid.threadlocal
676 pyramid_request = request or pyramid.threadlocal.get_current_request()
674 pyramid_request = request or pyramid.threadlocal.get_current_request()
677
675
678 # web case
676 # web case
679 if pyramid_request and hasattr(pyramid_request, 'user'):
677 if pyramid_request and hasattr(pyramid_request, 'user'):
680 return pyramid_request.user
678 return pyramid_request.user
681
679
682 # api case
680 # api case
683 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
681 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
684 return pyramid_request.rpc_user
682 return pyramid_request.rpc_user
685
683
686 return None
684 return None
687
685
688
686
689 def action_logger_generic(action, namespace=''):
687 def action_logger_generic(action, namespace=''):
690 """
688 """
691 A generic logger for actions useful to the system overview, tries to find
689 A generic logger for actions useful to the system overview, tries to find
692 an acting user for the context of the call otherwise reports unknown user
690 an acting user for the context of the call otherwise reports unknown user
693
691
694 :param action: logging message eg 'comment 5 deleted'
692 :param action: logging message eg 'comment 5 deleted'
695 :type action: string
693 :type action: string
696
694
697 :param namespace: namespace of the logging message eg. 'repo.comments'
695 :param namespace: namespace of the logging message eg. 'repo.comments'
698 :type namespace: string
696 :type namespace: string
699
697
700 """
698 """
701
699
702 logger_name = 'rhodecode.actions'
700 logger_name = 'rhodecode.actions'
703
701
704 if namespace:
702 if namespace:
705 logger_name += '.' + namespace
703 logger_name += '.' + namespace
706
704
707 log = logging.getLogger(logger_name)
705 log = logging.getLogger(logger_name)
708
706
709 # get a user if we can
707 # get a user if we can
710 user = get_current_rhodecode_user()
708 user = get_current_rhodecode_user()
711
709
712 logfunc = log.info
710 logfunc = log.info
713
711
714 if not user:
712 if not user:
715 user = '<unknown user>'
713 user = '<unknown user>'
716 logfunc = log.warning
714 logfunc = log.warning
717
715
718 logfunc('Logging action by {}: {}'.format(user, action))
716 logfunc(f'Logging action by {user}: {action}')
719
717
720
718
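Usage sketch; the namespace is appended to the 'rhodecode.actions' logger name:

    from rhodecode.lib.utils2 import action_logger_generic

    # logs via the 'rhodecode.actions.repo.comments' logger; downgraded to a
    # warning with '<unknown user>' when no request-bound user can be found
    action_logger_generic('comment 5 deleted', namespace='repo.comments')
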
721 def escape_split(text, sep=',', maxsplit=-1):
719 def escape_split(text, sep=',', maxsplit=-1):
722 r"""
720 r"""
723 Allows for escaping of the separator: e.g. arg='foo\, bar'
721 Allows for escaping of the separator: e.g. arg='foo\, bar'
724
722
725 It should be noted that the way bash et al. do command-line parsing, those
723 It should be noted that the way bash et al. do command-line parsing, those
726 single quotes are required.
724 single quotes are required.
727 """
725 """
728 escaped_sep = r'\%s' % sep
726 escaped_sep = r'\%s' % sep
729
727
730 if escaped_sep not in text:
728 if escaped_sep not in text:
731 return text.split(sep, maxsplit)
729 return text.split(sep, maxsplit)
732
730
733 before, _mid, after = text.partition(escaped_sep)
731 before, _mid, after = text.partition(escaped_sep)
734 startlist = before.split(sep, maxsplit) # a regular split is fine here
732 startlist = before.split(sep, maxsplit) # a regular split is fine here
735 unfinished = startlist[-1]
733 unfinished = startlist[-1]
736 startlist = startlist[:-1]
734 startlist = startlist[:-1]
737
735
738 # recurse because there may be more escaped separators
736 # recurse because there may be more escaped separators
739 endlist = escape_split(after, sep, maxsplit)
737 endlist = escape_split(after, sep, maxsplit)
740
738
741 # finish building the escaped value. we use endlist[0] because the first
739 # finish building the escaped value. we use endlist[0] because the first
742 # part of the string sent in recursion is the rest of the escaped value.
740 # part of the string sent in recursion is the rest of the escaped value.
743 unfinished += sep + endlist[0]
741 unfinished += sep + endlist[0]
744
742
745 return startlist + [unfinished] + endlist[1:] # put together all the parts
743 return startlist + [unfinished] + endlist[1:] # put together all the parts
746
744
747
745
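A sketch of the escaping behaviour; the raw-string literal stands in for the shell-quoted input mentioned in the docstring:

    from rhodecode.lib.utils2 import escape_split

    assert escape_split(r'foo\, bar,baz') == ['foo, bar', 'baz']
    assert escape_split('a,b,c') == ['a', 'b', 'c']
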
748 class OptionalAttr(object):
746 class OptionalAttr(object):
749 """
747 """
750 Special Optional Option that defines other attribute. Example::
748 Special Optional Option that defines other attribute. Example::
751
749
752 def test(apiuser, userid=Optional(OAttr('apiuser'))):
750 def test(apiuser, userid=Optional(OAttr('apiuser'))):
753 user = Optional.extract(userid)
751 user = Optional.extract(userid)
754 # calls
752 # calls
755
753
756 """
754 """
757
755
758 def __init__(self, attr_name):
756 def __init__(self, attr_name):
759 self.attr_name = attr_name
757 self.attr_name = attr_name
760
758
761 def __repr__(self):
759 def __repr__(self):
762 return '<OptionalAttr:%s>' % self.attr_name
760 return '<OptionalAttr:%s>' % self.attr_name
763
761
764 def __call__(self):
762 def __call__(self):
765 return self
763 return self
766
764
767
765
768 # alias
766 # alias
769 OAttr = OptionalAttr
767 OAttr = OptionalAttr
770
768
771
769
772 class Optional(object):
770 class Optional(object):
773 """
771 """
774 Defines an optional parameter::
772 Defines an optional parameter::
775
773
776 param = param.getval() if isinstance(param, Optional) else param
774 param = param.getval() if isinstance(param, Optional) else param
777 param = param() if isinstance(param, Optional) else param
775 param = param() if isinstance(param, Optional) else param
778
776
779 is equivalent to::
777 is equivalent to::
780
778
781 param = Optional.extract(param)
779 param = Optional.extract(param)
782
780
783 """
781 """
784
782
785 def __init__(self, type_):
783 def __init__(self, type_):
786 self.type_ = type_
784 self.type_ = type_
787
785
788 def __repr__(self):
786 def __repr__(self):
789 return '<Optional:%s>' % self.type_.__repr__()
787 return '<Optional:%s>' % self.type_.__repr__()
790
788
791 def __call__(self):
789 def __call__(self):
792 return self.getval()
790 return self.getval()
793
791
794 def getval(self):
792 def getval(self):
795 """
793 """
796 returns value from this Optional instance
794 returns value from this Optional instance
797 """
795 """
798 if isinstance(self.type_, OAttr):
796 if isinstance(self.type_, OAttr):
799 # use params name
797 # use params name
800 return self.type_.attr_name
798 return self.type_.attr_name
801 return self.type_
799 return self.type_
802
800
803 @classmethod
801 @classmethod
804 def extract(cls, val):
802 def extract(cls, val):
805 """
803 """
806 Extracts value from Optional() instance
804 Extracts value from Optional() instance
807
805
808 :param val:
806 :param val:
809 :return: original value if it's not an Optional instance, else
807 :return: original value if it's not an Optional instance, else
810 the value of the instance
808 the value of the instance
811 """
809 """
812 if isinstance(val, cls):
810 if isinstance(val, cls):
813 return val.getval()
811 return val.getval()
814 return val
812 return val
815
813
816
814
817 def glob2re(pat):
815 def glob2re(pat):
818 import fnmatch
816 import fnmatch
819 return fnmatch.translate(pat)
817 return fnmatch.translate(pat)
820
818
821
819
822 def parse_byte_string(size_str):
820 def parse_byte_string(size_str):
823 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
821 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
824 if not match:
822 if not match:
825 raise ValueError(f'Given size:{size_str} is invalid, please make sure '
823 raise ValueError(f'Given size:{size_str} is invalid, please make sure '
826 'to use the format <num>(MB|KB)')
824 'to use the format <num>(MB|KB)')
827
825
828 _parts = match.groups()
826 _parts = match.groups()
829 num, type_ = _parts
827 num, type_ = _parts
830 return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
828 return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
831
829
832
830
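For example (matching is case-insensitive):

    from rhodecode.lib.utils2 import parse_byte_string

    assert parse_byte_string('10MB') == 10 * 1024 * 1024
    assert parse_byte_string('512kb') == 512 * 1024
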
833 class CachedProperty(object):
831 class CachedProperty(object):
834 """
832 """
835 Lazy Attributes. With option to invalidate the cache by running a method
833 Lazy Attributes. With option to invalidate the cache by running a method
836
834
837 >>> class Foo(object):
835 >>> class Foo(object):
838 ...
836 ...
839 ... @CachedProperty
837 ... @CachedProperty
840 ... def heavy_func(self):
838 ... def heavy_func(self):
841 ... return 'super-calculation'
839 ... return 'super-calculation'
842 ...
840 ...
843 ... foo = Foo()
841 ... foo = Foo()
844 ... foo.heavy_func # first computation
842 ... foo.heavy_func # first computation
845 ... foo.heavy_func # fetch from cache
843 ... foo.heavy_func # fetch from cache
846 ... foo._invalidate_prop_cache('heavy_func')
844 ... foo._invalidate_prop_cache('heavy_func')
847
845
848 # at this point accessing foo.heavy_func will be re-computed
846 # at this point accessing foo.heavy_func will be re-computed
849 """
847 """
850
848
851 def __init__(self, func, func_name=None):
849 def __init__(self, func, func_name=None):
852
850
853 if func_name is None:
851 if func_name is None:
854 func_name = func.__name__
852 func_name = func.__name__
855 self.data = (func, func_name)
853 self.data = (func, func_name)
856 functools.update_wrapper(self, func)
854 functools.update_wrapper(self, func)
857
855
858 def __get__(self, inst, class_):
856 def __get__(self, inst, class_):
859 if inst is None:
857 if inst is None:
860 return self
858 return self
861
859
862 func, func_name = self.data
860 func, func_name = self.data
863 value = func(inst)
861 value = func(inst)
864 inst.__dict__[func_name] = value
862 inst.__dict__[func_name] = value
865 if '_invalidate_prop_cache' not in inst.__dict__:
863 if '_invalidate_prop_cache' not in inst.__dict__:
866 inst.__dict__['_invalidate_prop_cache'] = functools.partial(
864 inst.__dict__['_invalidate_prop_cache'] = functools.partial(
867 self._invalidate_prop_cache, inst)
865 self._invalidate_prop_cache, inst)
868 return value
866 return value
869
867
870 def _invalidate_prop_cache(self, inst, name):
868 def _invalidate_prop_cache(self, inst, name):
871 inst.__dict__.pop(name, None)
869 inst.__dict__.pop(name, None)
872
870
873
871
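CachedProperty is a non-data descriptor: the first attribute access runs the wrapped function and stores the result in the instance __dict__, which then shadows the descriptor on later lookups; _invalidate_prop_cache just pops that entry so the next access recomputes. A sketch:

    from rhodecode.lib.utils2 import CachedProperty

    class Repo(object):
        computations = 0

        @CachedProperty
        def commit_count(self):
            Repo.computations += 1
            return 42

    repo = Repo()
    assert repo.commit_count == 42  # computed, then stored in __dict__
    assert repo.commit_count == 42  # served from the instance __dict__
    assert Repo.computations == 1
    repo._invalidate_prop_cache('commit_count')
    assert repo.commit_count == 42  # re-computed
    assert Repo.computations == 2
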
874 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
872 def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
875 """
873 """
876 Retry decorator with exponential backoff.
874 Retry decorator with exponential backoff.
877
875
878 Parameters
876 Parameters
879 ----------
877 ----------
880 func : typing.Callable, optional
878 func : typing.Callable, optional
881 Callable on which the decorator is applied, by default None
879 Callable on which the decorator is applied, by default None
882 exception : Exception or tuple of Exceptions, optional
880 exception : Exception or tuple of Exceptions, optional
883 Exception(s) that invoke retry, by default Exception
881 Exception(s) that invoke retry, by default Exception
884 n_tries : int, optional
882 n_tries : int, optional
885 Number of tries before giving up, by default 5
883 Number of tries before giving up, by default 5
886 delay : int, optional
884 delay : int, optional
887 Initial delay between retries in seconds, by default 5
885 Initial delay between retries in seconds, by default 5
888 backoff : int, optional
886 backoff : int, optional
889 Backoff multiplier e.g. value of 2 will double the delay, by default 1
887 Backoff multiplier e.g. value of 2 will double the delay, by default 1
890 logger : bool, optional
888 logger : bool, optional
891 Option to log or print, by default True
889 Option to log or print, by default True
892
890
893 Returns
891 Returns
894 -------
892 -------
895 typing.Callable
893 typing.Callable
896 Decorated callable that calls itself when exception(s) occur.
894 Decorated callable that calls itself when exception(s) occur.
897
895
898 Examples
896 Examples
899 --------
897 --------
900 >>> import random
898 >>> import random
901 >>> @retry(exception=Exception, n_tries=3)
899 >>> @retry(exception=Exception, n_tries=3)
902 ... def test_random(text):
900 ... def test_random(text):
903 ... x = random.random()
901 ... x = random.random()
904 ... if x < 0.5:
902 ... if x < 0.5:
905 ... raise Exception("Fail")
903 ... raise Exception("Fail")
906 ... else:
904 ... else:
907 ... print("Success: ", text)
905 ... print("Success: ", text)
908 >>> test_random("It works!")
906 >>> test_random("It works!")
909 """
907 """
910
908
911 if func is None:
909 if func is None:
912 return functools.partial(
910 return functools.partial(
913 retry,
911 retry,
914 exception=exception,
912 exception=exception,
915 n_tries=n_tries,
913 n_tries=n_tries,
916 delay=delay,
914 delay=delay,
917 backoff=backoff,
915 backoff=backoff,
918 logger=logger,
916 logger=logger,
919 )
917 )
920
918
921 @functools.wraps(func)
919 @functools.wraps(func)
922 def wrapper(*args, **kwargs):
920 def wrapper(*args, **kwargs):
923 _n_tries, n_delay = n_tries, delay
921 _n_tries, n_delay = n_tries, delay
924 log = logging.getLogger('rhodecode.retry')
922 log = logging.getLogger('rhodecode.retry')
925
923
926 while _n_tries > 1:
924 while _n_tries > 1:
927 try:
925 try:
928 return func(*args, **kwargs)
926 return func(*args, **kwargs)
929 except exception as e:
927 except exception as e:
930 e_details = repr(e)
928 e_details = repr(e)
931 msg = "Exception on calling func {func}: {e}, " \
929 msg = "Exception on calling func {func}: {e}, " \
932 "Retrying in {n_delay} seconds..."\
930 "Retrying in {n_delay} seconds..."\
933 .format(func=func, e=e_details, n_delay=n_delay)
931 .format(func=func, e=e_details, n_delay=n_delay)
934 if logger:
932 if logger:
935 log.warning(msg)
933 log.warning(msg)
936 else:
934 else:
937 print(msg)
935 print(msg)
938 time.sleep(n_delay)
936 time.sleep(n_delay)
939 _n_tries -= 1
937 _n_tries -= 1
940 n_delay *= backoff
938 n_delay *= backoff
941
939
942 return func(*args, **kwargs)
940 return func(*args, **kwargs)
943
941
944 return wrapper
942 return wrapper
945
943
946
944
947 def user_agent_normalizer(user_agent_raw, safe=True):
945 def user_agent_normalizer(user_agent_raw, safe=True):
948 log = logging.getLogger('rhodecode.user_agent_normalizer')
946 log = logging.getLogger('rhodecode.user_agent_normalizer')
949 ua = (user_agent_raw or '').strip().lower()
947 ua = (user_agent_raw or '').strip().lower()
950 ua = ua.replace('"', '')
948 ua = ua.replace('"', '')
951
949
952 try:
950 try:
953 if 'mercurial/proto-1.0' in ua:
951 if 'mercurial/proto-1.0' in ua:
954 ua = ua.replace('mercurial/proto-1.0', '')
952 ua = ua.replace('mercurial/proto-1.0', '')
955 ua = ua.replace('(', '').replace(')', '').strip()
953 ua = ua.replace('(', '').replace(')', '').strip()
956 ua = ua.replace('mercurial ', 'mercurial/')
954 ua = ua.replace('mercurial ', 'mercurial/')
957 elif ua.startswith('git'):
955 elif ua.startswith('git'):
958 parts = ua.split(' ')
956 parts = ua.split(' ')
959 if parts:
957 if parts:
960 ua = parts[0]
958 ua = parts[0]
961 ua = re.sub(r'\.windows\.\d', '', ua).strip()
959 ua = re.sub(r'\.windows\.\d', '', ua).strip()
962
960
963 return ua
961 return ua
964 except Exception:
962 except Exception:
965 log.exception('Failed to parse scm user-agent')
963 log.exception('Failed to parse scm user-agent')
966 if not safe:
964 if not safe:
967 raise
965 raise
968
966
969 return ua
967 return ua
970
968
971
969
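Two concrete normalizations as handled by the branches above:

    from rhodecode.lib.utils2 import user_agent_normalizer

    assert user_agent_normalizer('git/2.41.0.windows.1') == 'git/2.41.0'
    assert user_agent_normalizer('mercurial/proto-1.0 (Mercurial 5.6)') == 'mercurial/5.6'
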
972 def get_available_port(min_port=40000, max_port=55555, use_range=False):
970 def get_available_port(min_port=40000, max_port=55555, use_range=False):
973 hostname = ''
971 hostname = ''
974 for _ in range(min_port, max_port):
972 for _ in range(min_port, max_port):
975 pick_port = 0
973 pick_port = 0
976 if use_range:
974 if use_range:
977 pick_port = random.randint(min_port, max_port)
975 pick_port = random.randint(min_port, max_port)
978
976
979 with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
977 with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
980 try:
978 try:
981 s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
979 s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
982 s.bind((hostname, pick_port))
980 s.bind((hostname, pick_port))
983 return s.getsockname()[1]
981 return s.getsockname()[1]
984 # socket.error is an alias of OSError on python3; a separate
982 # socket.error is an alias of OSError on python3; a separate
985 # `except socket.error` clause here would be unreachable dead code
983 # `except socket.error` clause here would be unreachable dead code
986 except OSError as e:
984 except OSError as e:
987 if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]:
985 if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]:
988 continue
986 continue
989 raise
987 raise
@@ -1,1987 +1,1985 b''
1
2
3 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 """
19 """
22 Base module for all VCS systems
20 Base module for all VCS systems
23 """
21 """
24 import os
22 import os
25 import re
23 import re
26 import time
24 import time
27 import shutil
25 import shutil
28 import datetime
26 import datetime
29 import fnmatch
27 import fnmatch
30 import itertools
28 import itertools
31 import logging
29 import logging
32 import dataclasses
30 import dataclasses
33 import warnings
31 import warnings
34
32
35 from zope.cachedescriptors.property import Lazy as LazyProperty
33 from zope.cachedescriptors.property import Lazy as LazyProperty
36
34
37
35
38 import rhodecode
36 import rhodecode
39 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
40 from rhodecode.lib.utils2 import safe_str, CachedProperty
38 from rhodecode.lib.utils2 import safe_str, CachedProperty
41 from rhodecode.lib.vcs.utils import author_name, author_email
39 from rhodecode.lib.vcs.utils import author_name, author_email
42 from rhodecode.lib.vcs.conf import settings
40 from rhodecode.lib.vcs.conf import settings
43 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
48 RepositoryError)
46 RepositoryError)
49
47
50
48
51 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
52
50
53
51
54 FILEMODE_DEFAULT = 0o100644
52 FILEMODE_DEFAULT = 0o100644
55 FILEMODE_EXECUTABLE = 0o100755
53 FILEMODE_EXECUTABLE = 0o100755
56 EMPTY_COMMIT_ID = '0' * 40
54 EMPTY_COMMIT_ID = '0' * 40
57
55
58
56
59 @dataclasses.dataclass
57 @dataclasses.dataclass
60 class Reference:
58 class Reference:
61 type: str
59 type: str
62 name: str
60 name: str
63 commit_id: str
61 commit_id: str
64
62
65 def __iter__(self):
63 def __iter__(self):
66 yield self.type
64 yield self.type
67 yield self.name
65 yield self.name
68 yield self.commit_id
66 yield self.commit_id
69
67
70 @property
68 @property
71 def branch(self):
69 def branch(self):
72 if self.type == 'branch':
70 if self.type == 'branch':
73 return self.name
71 return self.name
74
72
75 @property
73 @property
76 def bookmark(self):
74 def bookmark(self):
77 if self.type == 'book':
75 if self.type == 'book':
78 return self.name
76 return self.name
79
77
80 @property
78 @property
81 def to_str(self):
79 def to_str(self):
82 return reference_to_unicode(self)
80 return reference_to_unicode(self)
83
81
84 def asdict(self):
82 def asdict(self):
85 return dict(
83 return dict(
86 type=self.type,
84 type=self.type,
87 name=self.name,
85 name=self.name,
88 commit_id=self.commit_id
86 commit_id=self.commit_id
89 )
87 )
90
88
91
89
92 def unicode_to_reference(raw: str):
90 def unicode_to_reference(raw: str):
93 """
91 """
94 Convert a `type:name:commit_id` string to a Reference object.
92 Convert a `type:name:commit_id` string to a Reference object.
95 If the string evaluates to False it returns None.
93 If the string evaluates to False it returns None.
96 """
94 """
97 if raw:
95 if raw:
98 refs = raw.split(':')
96 refs = raw.split(':')
99 return Reference(*refs)
97 return Reference(*refs)
100 else:
98 else:
101 return None
99 return None
102
100
103
101
104 def reference_to_unicode(ref: Reference):
102 def reference_to_unicode(ref: Reference):
105 """
103 """
106 Convert a Reference object to a `type:name:commit_id` string.
104 Convert a Reference object to a `type:name:commit_id` string.
107 If the reference is None it returns None.
105 If the reference is None it returns None.
108 """
106 """
109 if ref:
107 if ref:
110 return ':'.join(ref)
108 return ':'.join(ref)
111 else:
109 else:
112 return None
110 return None
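
The two helpers above are inverses for well-formed ``type:name:commit_id``
strings; note the raw value must contain exactly two ``:`` separators, since
the split result is star-expanded into ``Reference``. A quick round-trip
sketch:

    raw = 'branch:default:' + 'a' * 40
    ref = unicode_to_reference(raw)
    assert reference_to_unicode(ref) == raw   # round-trips cleanly
    assert unicode_to_reference('') is None   # falsy input maps to None
    assert reference_to_unicode(None) is None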
113
111
114
112
115 class MergeFailureReason(object):
113 class MergeFailureReason(object):
116 """
114 """
117 Enumeration with all the reasons why the server side merge could fail.
115 Enumeration with all the reasons why the server side merge could fail.
118
116
119 DO NOT change the numeric values of the reasons, as they may be stored
117 DO NOT change the numeric values of the reasons, as they may be stored
120 in the database.
118 in the database.
121
119
122 Changing the name of a reason is acceptable and encouraged to deprecate old
120 Changing the name of a reason is acceptable and encouraged to deprecate old
123 reasons.
121 reasons.
124 """
122 """
125
123
126 # Everything went well.
124 # Everything went well.
127 NONE = 0
125 NONE = 0
128
126
129 # An unexpected exception was raised. Check the logs for more details.
127 # An unexpected exception was raised. Check the logs for more details.
130 UNKNOWN = 1
128 UNKNOWN = 1
131
129
132 # The merge was not successful, there are conflicts.
130 # The merge was not successful, there are conflicts.
133 MERGE_FAILED = 2
131 MERGE_FAILED = 2
134
132
135 # The merge succeeded but we could not push it to the target repository.
133 # The merge succeeded but we could not push it to the target repository.
136 PUSH_FAILED = 3
134 PUSH_FAILED = 3
137
135
138 # The specified target is not a head in the target repository.
136 # The specified target is not a head in the target repository.
139 TARGET_IS_NOT_HEAD = 4
137 TARGET_IS_NOT_HEAD = 4
140
138
141 # The source repository contains more branches than the target. Pushing
139 # The source repository contains more branches than the target. Pushing
142 # the merge will create additional branches in the target.
140 # the merge will create additional branches in the target.
143 HG_SOURCE_HAS_MORE_BRANCHES = 5
141 HG_SOURCE_HAS_MORE_BRANCHES = 5
144
142
145 # The target reference has multiple heads. That makes it impossible to
143 # The target reference has multiple heads. That makes it impossible to
146 # correctly identify the target location. This can only happen for mercurial
144 # correctly identify the target location. This can only happen for mercurial
147 # branches.
145 # branches.
148 HG_TARGET_HAS_MULTIPLE_HEADS = 6
146 HG_TARGET_HAS_MULTIPLE_HEADS = 6
149
147
150 # The target repository is locked
148 # The target repository is locked
151 TARGET_IS_LOCKED = 7
149 TARGET_IS_LOCKED = 7
152
150
153 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
151 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
154 # An involved commit could not be found.
152 # An involved commit could not be found.
155 _DEPRECATED_MISSING_COMMIT = 8
153 _DEPRECATED_MISSING_COMMIT = 8
156
154
157 # The target repo reference is missing.
155 # The target repo reference is missing.
158 MISSING_TARGET_REF = 9
156 MISSING_TARGET_REF = 9
159
157
160 # The source repo reference is missing.
158 # The source repo reference is missing.
161 MISSING_SOURCE_REF = 10
159 MISSING_SOURCE_REF = 10
162
160
163 # The merge was not successful, there are conflicts related to sub
161 # The merge was not successful, there are conflicts related to sub
164 # repositories.
162 # repositories.
165 SUBREPO_MERGE_FAILED = 11
163 SUBREPO_MERGE_FAILED = 11
166
164
167
165
168 class UpdateFailureReason(object):
166 class UpdateFailureReason(object):
169 """
167 """
170 Enumeration with all the reasons why the pull request update could fail.
168 Enumeration with all the reasons why the pull request update could fail.
171
169
172 DO NOT change the numeric values of the reasons, as they may be stored
170 DO NOT change the numeric values of the reasons, as they may be stored
173 in the database.
171 in the database.
174
172
175 Changing the name of a reason is acceptable and encouraged to deprecate old
173 Changing the name of a reason is acceptable and encouraged to deprecate old
176 reasons.
174 reasons.
177 """
175 """
178
176
179 # Everything went well.
177 # Everything went well.
180 NONE = 0
178 NONE = 0
181
179
182 # An unexpected exception was raised. Check the logs for more details.
180 # An unexpected exception was raised. Check the logs for more details.
183 UNKNOWN = 1
181 UNKNOWN = 1
184
182
185 # The pull request is up to date.
183 # The pull request is up to date.
186 NO_CHANGE = 2
184 NO_CHANGE = 2
187
185
188 # The pull request has a reference type that is not supported for update.
186 # The pull request has a reference type that is not supported for update.
189 WRONG_REF_TYPE = 3
187 WRONG_REF_TYPE = 3
190
188
191 # Update failed because the target reference is missing.
189 # Update failed because the target reference is missing.
192 MISSING_TARGET_REF = 4
190 MISSING_TARGET_REF = 4
193
191
194 # Update failed because the source reference is missing.
192 # Update failed because the source reference is missing.
195 MISSING_SOURCE_REF = 5
193 MISSING_SOURCE_REF = 5
196
194
197
195
198 class MergeResponse(object):
196 class MergeResponse(object):
199
197
200 # uses .format(**metadata) for variables
198 # uses .format(**metadata) for variables
201 MERGE_STATUS_MESSAGES = {
199 MERGE_STATUS_MESSAGES = {
202 MergeFailureReason.NONE: lazy_ugettext(
200 MergeFailureReason.NONE: lazy_ugettext(
203 'This pull request can be automatically merged.'),
201 'This pull request can be automatically merged.'),
204 MergeFailureReason.UNKNOWN: lazy_ugettext(
202 MergeFailureReason.UNKNOWN: lazy_ugettext(
205 'This pull request cannot be merged because of an unhandled exception. '
203 'This pull request cannot be merged because of an unhandled exception. '
206 '{exception}'),
204 '{exception}'),
207 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
205 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
208 'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
206 'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
209 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
207 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
210 'This pull request could not be merged because push to '
208 'This pull request could not be merged because push to '
211 'target:`{target}@{merge_commit}` failed.'),
209 'target:`{target}@{merge_commit}` failed.'),
212 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
210 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
213 'This pull request cannot be merged because the target '
211 'This pull request cannot be merged because the target '
214 '`{target_ref.name}` is not a head.'),
212 '`{target_ref.name}` is not a head.'),
215 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
213 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
216 'This pull request cannot be merged because the source contains '
214 'This pull request cannot be merged because the source contains '
217 'more branches than the target.'),
215 'more branches than the target.'),
218 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
216 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
219 'This pull request cannot be merged because the target `{target_ref.name}` '
217 'This pull request cannot be merged because the target `{target_ref.name}` '
220 'has multiple heads: `{heads}`.'),
218 'has multiple heads: `{heads}`.'),
221 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
219 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
222 'This pull request cannot be merged because the target repository is '
220 'This pull request cannot be merged because the target repository is '
223 'locked by {locked_by}.'),
221 'locked by {locked_by}.'),
224
222
225 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
223 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
226 'This pull request cannot be merged because the target '
224 'This pull request cannot be merged because the target '
227 'reference `{target_ref.name}` is missing.'),
225 'reference `{target_ref.name}` is missing.'),
228 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
226 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
229 'This pull request cannot be merged because the source '
227 'This pull request cannot be merged because the source '
230 'reference `{source_ref.name}` is missing.'),
228 'reference `{source_ref.name}` is missing.'),
231 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
229 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
232 'This pull request cannot be merged because of conflicts related '
230 'This pull request cannot be merged because of conflicts related '
233 'to sub repositories.'),
231 'to sub repositories.'),
234
232
235 # Deprecations
233 # Deprecations
236 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
234 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
237 'This pull request cannot be merged because the target or the '
235 'This pull request cannot be merged because the target or the '
238 'source reference is missing.'),
236 'source reference is missing.'),
239
237
240 }
238 }
241
239
242 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
240 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
243 self.possible = possible
241 self.possible = possible
244 self.executed = executed
242 self.executed = executed
245 self.merge_ref = merge_ref
243 self.merge_ref = merge_ref
246 self.failure_reason = failure_reason
244 self.failure_reason = failure_reason
247 self.metadata = metadata or {}
245 self.metadata = metadata or {}
248
246
249 def __repr__(self):
247 def __repr__(self):
250 return f'<MergeResponse:{self.label} {self.failure_reason}>'
248 return f'<MergeResponse:{self.label} {self.failure_reason}>'
251
249
252 def __eq__(self, other):
250 def __eq__(self, other):
253 same_instance = isinstance(other, self.__class__)
251 same_instance = isinstance(other, self.__class__)
254 return same_instance \
252 return same_instance \
255 and self.possible == other.possible \
253 and self.possible == other.possible \
256 and self.executed == other.executed \
254 and self.executed == other.executed \
257 and self.failure_reason == other.failure_reason
255 and self.failure_reason == other.failure_reason
258
256
259 @property
257 @property
260 def label(self):
258 def label(self):
261 label_dict = {v: k for k, v in MergeFailureReason.__dict__.items()
259 label_dict = {v: k for k, v in MergeFailureReason.__dict__.items()
262 if not k.startswith('_')}
260 if not k.startswith('_')}
263 return label_dict.get(self.failure_reason)
261 return label_dict.get(self.failure_reason)
264
262
265 @property
263 @property
266 def merge_status_message(self):
264 def merge_status_message(self):
267 """
265 """
268 Return a human friendly error message for the given merge status code.
266 Return a human friendly error message for the given merge status code.
269 """
267 """
270 msg = safe_str(self.MERGE_STATUS_MESSAGES[self.failure_reason])
268 msg = safe_str(self.MERGE_STATUS_MESSAGES[self.failure_reason])
271
269
272 try:
270 try:
273 return msg.format(**self.metadata)
271 return msg.format(**self.metadata)
274 except Exception:
272 except Exception:
275 log.exception('Failed to format %s message', self)
273 log.exception('Failed to format %s message', self)
276 return msg
274 return msg
277
275
278 def asdict(self):
276 def asdict(self):
279 data = {}
277 data = {}
280 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
278 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
281 'merge_status_message']:
279 'merge_status_message']:
282 data[k] = getattr(self, k)
280 data[k] = getattr(self, k)
283 return data
281 return data
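
How a failed merge surfaces to callers, as a hedged sketch; the metadata
keys must match the placeholders of the matching ``MERGE_STATUS_MESSAGES``
template (values below are illustrative):

    resp = MergeResponse(
        possible=False, executed=False, merge_ref=None,
        failure_reason=MergeFailureReason.PUSH_FAILED,
        metadata={'target': 'some-repo', 'merge_commit': 'f' * 40})
    assert resp.label == 'PUSH_FAILED'   # reverse lookup of the code
    msg = resp.merge_status_message      # template filled from metadata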
284
282
285
283
286 class TargetRefMissing(ValueError):
284 class TargetRefMissing(ValueError):
287 pass
285 pass
288
286
289
287
290 class SourceRefMissing(ValueError):
288 class SourceRefMissing(ValueError):
291 pass
289 pass
292
290
293
291
294 class BaseRepository(object):
292 class BaseRepository(object):
295 """
293 """
296 Base Repository for final backends
294 Base Repository for final backends
297
295
298 .. attribute:: DEFAULT_BRANCH_NAME
296 .. attribute:: DEFAULT_BRANCH_NAME
299
297
300 name of the default branch (e.g. "trunk" for svn, "master" for git, etc.)
298 name of the default branch (e.g. "trunk" for svn, "master" for git, etc.)
301
299
302 .. attribute:: commit_ids
300 .. attribute:: commit_ids
303
301
304 list of all available commit ids, in ascending order
302 list of all available commit ids, in ascending order
305
303
306 .. attribute:: path
304 .. attribute:: path
307
305
308 absolute path to the repository
306 absolute path to the repository
309
307
310 .. attribute:: bookmarks
308 .. attribute:: bookmarks
311
309
312 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
310 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
313 there are no bookmarks or the backend implementation does not support
311 there are no bookmarks or the backend implementation does not support
314 bookmarks.
312 bookmarks.
315
313
316 .. attribute:: tags
314 .. attribute:: tags
317
315
318 Mapping from name to :term:`Commit ID` of the tag.
316 Mapping from name to :term:`Commit ID` of the tag.
319
317
320 """
318 """
321
319
322 DEFAULT_BRANCH_NAME = None
320 DEFAULT_BRANCH_NAME = None
323 DEFAULT_CONTACT = "Unknown"
321 DEFAULT_CONTACT = "Unknown"
324 DEFAULT_DESCRIPTION = "unknown"
322 DEFAULT_DESCRIPTION = "unknown"
325 EMPTY_COMMIT_ID = '0' * 40
323 EMPTY_COMMIT_ID = '0' * 40
326 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
324 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
327
325
328 path = None
326 path = None
329
327
330 _is_empty = None
328 _is_empty = None
331 _commit_ids = {}
329 _commit_ids = {}
332
330
333 def __init__(self, repo_path, config=None, create=False, **kwargs):
331 def __init__(self, repo_path, config=None, create=False, **kwargs):
334 """
332 """
335 Initializes repository. Raises RepositoryError if repository could
333 Initializes repository. Raises RepositoryError if repository could
336 not be found at the given ``repo_path`` or directory at ``repo_path``
334 not be found at the given ``repo_path`` or directory at ``repo_path``
337 exists and ``create`` is set to True.
335 exists and ``create`` is set to True.
338
336
339 :param repo_path: local path of the repository
337 :param repo_path: local path of the repository
340 :param config: repository configuration
338 :param config: repository configuration
341 :param create=False: if set to True, will try to create the repository.
339 :param create=False: if set to True, will try to create the repository.
342 :param src_url=None: if set, should be a proper url from which the
340 :param src_url=None: if set, should be a proper url from which the
343 repository would be cloned; requires ``create`` to be set to True -
341 repository would be cloned; requires ``create`` to be set to True -
344 raises RepositoryError if src_url is set and create evaluates to
342 raises RepositoryError if src_url is set and create evaluates to
345 False
343 False
346 """
344 """
347 raise NotImplementedError
345 raise NotImplementedError
348
346
349 def __repr__(self):
347 def __repr__(self):
350 return '<{} at {}>'.format(self.__class__.__name__, self.path)
348 return f'<{self.__class__.__name__} at {self.path}>'
351
349
352 def __len__(self):
350 def __len__(self):
353 return self.count()
351 return self.count()
354
352
355 def __eq__(self, other):
353 def __eq__(self, other):
356 same_instance = isinstance(other, self.__class__)
354 same_instance = isinstance(other, self.__class__)
357 return same_instance and other.path == self.path
355 return same_instance and other.path == self.path
358
356
359 def __ne__(self, other):
357 def __ne__(self, other):
360 return not self.__eq__(other)
358 return not self.__eq__(other)
361
359
362 def get_create_shadow_cache_pr_path(self, db_repo):
360 def get_create_shadow_cache_pr_path(self, db_repo):
363 path = db_repo.cached_diffs_dir
361 path = db_repo.cached_diffs_dir
364 if not os.path.exists(path):
362 if not os.path.exists(path):
365 os.makedirs(path, 0o755)
363 os.makedirs(path, 0o755)
366 return path
364 return path
367
365
368 @classmethod
366 @classmethod
369 def get_default_config(cls, default=None):
367 def get_default_config(cls, default=None):
370 config = Config()
368 config = Config()
371 if default and isinstance(default, list):
369 if default and isinstance(default, list):
372 for section, key, val in default:
370 for section, key, val in default:
373 config.set(section, key, val)
371 config.set(section, key, val)
374 return config
372 return config
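
``get_default_config`` seeds a fresh ``Config`` from ``(section, key, value)``
triples; a sketch assuming a concrete backend class (the class name and the
entries below are hypothetical):

    config = SomeBackendRepository.get_default_config(default=[
        ('section_a', 'key_1', 'value_1'),   # hypothetical entries
        ('section_b', 'key_2', 'value_2'),
    ])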
375
373
376 @LazyProperty
374 @LazyProperty
377 def _remote(self):
375 def _remote(self):
378 raise NotImplementedError
376 raise NotImplementedError
379
377
380 def _heads(self, branch=None):
378 def _heads(self, branch=None):
381 return []
379 return []
382
380
383 @LazyProperty
381 @LazyProperty
384 def EMPTY_COMMIT(self):
382 def EMPTY_COMMIT(self):
385 return EmptyCommit(self.EMPTY_COMMIT_ID)
383 return EmptyCommit(self.EMPTY_COMMIT_ID)
386
384
387 @LazyProperty
385 @LazyProperty
388 def alias(self):
386 def alias(self):
389 for k, v in settings.BACKENDS.items():
387 for k, v in settings.BACKENDS.items():
390 if v.split('.')[-1] == str(self.__class__.__name__):
388 if v.split('.')[-1] == str(self.__class__.__name__):
391 return k
389 return k
392
390
393 @LazyProperty
391 @LazyProperty
394 def name(self):
392 def name(self):
395 return safe_str(os.path.basename(self.path))
393 return safe_str(os.path.basename(self.path))
396
394
397 @LazyProperty
395 @LazyProperty
398 def description(self):
396 def description(self):
399 raise NotImplementedError
397 raise NotImplementedError
400
398
401 def refs(self):
399 def refs(self):
402 """
400 """
403 returns a `dict` with branches, bookmarks, tags, and closed_branches
401 returns a `dict` with branches, bookmarks, tags, and closed_branches
404 for this repository
402 for this repository
405 """
403 """
406 return dict(
404 return dict(
407 branches=self.branches,
405 branches=self.branches,
408 branches_closed=self.branches_closed,
406 branches_closed=self.branches_closed,
409 tags=self.tags,
407 tags=self.tags,
410 bookmarks=self.bookmarks
408 bookmarks=self.bookmarks
411 )
409 )
412
410
413 @LazyProperty
411 @LazyProperty
414 def branches(self):
412 def branches(self):
415 """
413 """
416 A `dict` which maps branch names to commit ids.
414 A `dict` which maps branch names to commit ids.
417 """
415 """
418 raise NotImplementedError
416 raise NotImplementedError
419
417
420 @LazyProperty
418 @LazyProperty
421 def branches_closed(self):
419 def branches_closed(self):
422 """
420 """
423 A `dict` which maps closed branch names to commit ids.
421 A `dict` which maps closed branch names to commit ids.
424 """
422 """
425 raise NotImplementedError
423 raise NotImplementedError
426
424
427 @LazyProperty
425 @LazyProperty
428 def bookmarks(self):
426 def bookmarks(self):
429 """
427 """
430 A `dict` which maps tags names to commit ids.
428 A `dict` which maps tags names to commit ids.
431 """
429 """
432 raise NotImplementedError
430 raise NotImplementedError
433
431
434 @LazyProperty
432 @LazyProperty
435 def tags(self):
433 def tags(self):
436 """
434 """
437 A `dict` which maps tag names to commit ids.
435 A `dict` which maps tag names to commit ids.
438 """
436 """
439 raise NotImplementedError
437 raise NotImplementedError
440
438
441 @LazyProperty
439 @LazyProperty
442 def size(self):
440 def size(self):
443 """
441 """
444 Returns combined size in bytes for all repository files
442 Returns combined size in bytes for all repository files
445 """
443 """
446 tip = self.get_commit()
444 tip = self.get_commit()
447 return tip.size
445 return tip.size
448
446
449 def size_at_commit(self, commit_id):
447 def size_at_commit(self, commit_id):
450 commit = self.get_commit(commit_id)
448 commit = self.get_commit(commit_id)
451 return commit.size
449 return commit.size
452
450
453 def _check_for_empty(self):
451 def _check_for_empty(self):
454 no_commits = len(self._commit_ids) == 0
452 no_commits = len(self._commit_ids) == 0
455 if no_commits:
453 if no_commits:
456 # check on remote to be sure
454 # check on remote to be sure
457 return self._remote.is_empty()
455 return self._remote.is_empty()
458 else:
456 else:
459 return False
457 return False
460
458
461 def is_empty(self):
459 def is_empty(self):
462 if rhodecode.is_test:
460 if rhodecode.is_test:
463 return self._check_for_empty()
461 return self._check_for_empty()
464
462
465 if self._is_empty is None:
463 if self._is_empty is None:
466 # cache empty for production, but not tests
464 # cache empty for production, but not tests
467 self._is_empty = self._check_for_empty()
465 self._is_empty = self._check_for_empty()
468
466
469 return self._is_empty
467 return self._is_empty
470
468
471 @staticmethod
469 @staticmethod
472 def check_url(url, config):
470 def check_url(url, config):
473 """
471 """
474 Check the given url and try to verify that it is a valid
472 Check the given url and try to verify that it is a valid
475 link.
473 link.
476 """
474 """
477 raise NotImplementedError
475 raise NotImplementedError
478
476
479 @staticmethod
477 @staticmethod
480 def is_valid_repository(path):
478 def is_valid_repository(path):
481 """
479 """
482 Check if given `path` contains a valid repository of this backend
480 Check if given `path` contains a valid repository of this backend
483 """
481 """
484 raise NotImplementedError
482 raise NotImplementedError
485
483
486 # ==========================================================================
484 # ==========================================================================
487 # COMMITS
485 # COMMITS
488 # ==========================================================================
486 # ==========================================================================
489
487
490 @CachedProperty
488 @CachedProperty
491 def commit_ids(self):
489 def commit_ids(self):
492 raise NotImplementedError
490 raise NotImplementedError
493
491
494 def append_commit_id(self, commit_id):
492 def append_commit_id(self, commit_id):
495 if commit_id not in self.commit_ids:
493 if commit_id not in self.commit_ids:
496 self._rebuild_cache(self.commit_ids + [commit_id])
494 self._rebuild_cache(self.commit_ids + [commit_id])
497
495
498 # clear cache
496 # clear cache
499 self._invalidate_prop_cache('commit_ids')
497 self._invalidate_prop_cache('commit_ids')
500 self._is_empty = False
498 self._is_empty = False
501
499
502 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
500 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
503 translate_tag=None, maybe_unreachable=False, reference_obj=None):
501 translate_tag=None, maybe_unreachable=False, reference_obj=None):
504 """
502 """
505 Returns an instance of the `BaseCommit` class. If `commit_id` and `commit_idx`
503 Returns an instance of the `BaseCommit` class. If `commit_id` and `commit_idx`
506 are both None, the most recent commit is returned.
504 are both None, the most recent commit is returned.
507
505
508 :param pre_load: Optional. List of commit attributes to load.
506 :param pre_load: Optional. List of commit attributes to load.
509
507
510 :raises ``EmptyRepositoryError``: if there are no commits
508 :raises ``EmptyRepositoryError``: if there are no commits
511 """
509 """
512 raise NotImplementedError
510 raise NotImplementedError
513
511
514 def __iter__(self):
512 def __iter__(self):
515 for commit_id in self.commit_ids:
513 for commit_id in self.commit_ids:
516 yield self.get_commit(commit_id=commit_id)
514 yield self.get_commit(commit_id=commit_id)
517
515
518 def get_commits(
516 def get_commits(
519 self, start_id=None, end_id=None, start_date=None, end_date=None,
517 self, start_id=None, end_id=None, start_date=None, end_date=None,
520 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
518 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
521 """
519 """
522 Returns an iterator of `BaseCommit` objects from start to end.
520 Returns an iterator of `BaseCommit` objects from start to end.
523 This should behave just like a list slice, i.e. end is not
521 This should behave just like a list slice, i.e. end is not
524 inclusive.
522 inclusive.
525
523
526 :param start_id: None or str, must be a valid commit id
524 :param start_id: None or str, must be a valid commit id
527 :param end_id: None or str, must be a valid commit id
525 :param end_id: None or str, must be a valid commit id
528 :param start_date:
526 :param start_date:
529 :param end_date:
527 :param end_date:
530 :param branch_name:
528 :param branch_name:
531 :param show_hidden:
529 :param show_hidden:
532 :param pre_load:
530 :param pre_load:
533 :param translate_tags:
531 :param translate_tags:
534 """
532 """
535 raise NotImplementedError
533 raise NotImplementedError
536
534
537 def __getitem__(self, key):
535 def __getitem__(self, key):
538 """
536 """
539 Allows index based access to the commit objects of this repository.
537 Allows index based access to the commit objects of this repository.
540 """
538 """
541 pre_load = ["author", "branch", "date", "message", "parents"]
539 pre_load = ["author", "branch", "date", "message", "parents"]
542 if isinstance(key, slice):
540 if isinstance(key, slice):
543 return self._get_range(key, pre_load)
541 return self._get_range(key, pre_load)
544 return self.get_commit(commit_idx=key, pre_load=pre_load)
542 return self.get_commit(commit_idx=key, pre_load=pre_load)
545
543
546 def _get_range(self, slice_obj, pre_load):
544 def _get_range(self, slice_obj, pre_load):
547 for commit_id in self.commit_ids.__getitem__(slice_obj):
545 for commit_id in self.commit_ids.__getitem__(slice_obj):
548 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
546 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
549
547
550 def count(self):
548 def count(self):
551 return len(self.commit_ids)
549 return len(self.commit_ids)
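
Together, ``__len__``, ``__getitem__`` and ``_get_range`` give repositories
list-like access to their commits; slicing returns a generator, not a list.
A sketch assuming ``repo`` is an instance of a concrete backend:

    first = repo[0]                # oldest commit, attributes pre-loaded
    for commit in repo[-5:]:       # lazily yields the 5 newest commits
        print(commit.short_id, commit.message)
    assert len(repo) == repo.count()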
552
550
553 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
551 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
554 """
552 """
555 Creates and returns a tag for the given ``commit_id``.
553 Creates and returns a tag for the given ``commit_id``.
556
554
557 :param name: name for new tag
555 :param name: name for new tag
558 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
556 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
559 :param commit_id: commit id for which new tag would be created
557 :param commit_id: commit id for which new tag would be created
560 :param message: message of the tag's commit
558 :param message: message of the tag's commit
561 :param date: date of tag's commit
559 :param date: date of tag's commit
562
560
563 :raises TagAlreadyExistError: if tag with same name already exists
561 :raises TagAlreadyExistError: if tag with same name already exists
564 """
562 """
565 raise NotImplementedError
563 raise NotImplementedError
566
564
567 def remove_tag(self, name, user, message=None, date=None):
565 def remove_tag(self, name, user, message=None, date=None):
568 """
566 """
569 Removes tag with the given ``name``.
567 Removes tag with the given ``name``.
570
568
571 :param name: name of the tag to be removed
569 :param name: name of the tag to be removed
572 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
570 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
573 :param message: message of the tag's removal commit
571 :param message: message of the tag's removal commit
574 :param date: date of tag's removal commit
572 :param date: date of tag's removal commit
575
573
576 :raises TagDoesNotExistError: if tag with given name does not exist
574 :raises TagDoesNotExistError: if tag with given name does not exist
577 """
575 """
578 raise NotImplementedError
576 raise NotImplementedError
579
577
580 def get_diff(
578 def get_diff(
581 self, commit1, commit2, path=None, ignore_whitespace=False,
579 self, commit1, commit2, path=None, ignore_whitespace=False,
582 context=3, path1=None):
580 context=3, path1=None):
583 """
581 """
584 Returns (git like) *diff*, as plain text. Shows changes introduced by
582 Returns (git like) *diff*, as plain text. Shows changes introduced by
585 `commit2` since `commit1`.
583 `commit2` since `commit1`.
586
584
587 :param commit1: Entry point from which diff is shown. Can be
585 :param commit1: Entry point from which diff is shown. Can be
588 ``self.EMPTY_COMMIT`` - in this case, the patch shows all
586 ``self.EMPTY_COMMIT`` - in this case, the patch shows all
589 the changes from the empty state of the repository up to `commit2`
587 the changes from the empty state of the repository up to `commit2`
590 :param commit2: Until which commit changes should be shown.
588 :param commit2: Until which commit changes should be shown.
591 :param path: Can be set to a path of a file to create a diff of that
589 :param path: Can be set to a path of a file to create a diff of that
592 file. If `path1` is also set, this value is only associated to
590 file. If `path1` is also set, this value is only associated to
593 `commit2`.
591 `commit2`.
594 :param ignore_whitespace: If set to ``True``, would not show whitespace
592 :param ignore_whitespace: If set to ``True``, would not show whitespace
595 changes. Defaults to ``False``.
593 changes. Defaults to ``False``.
596 :param context: How many lines before/after changed lines should be
594 :param context: How many lines before/after changed lines should be
597 shown. Defaults to ``3``.
595 shown. Defaults to ``3``.
598 :param path1: Can be set to a path to associate with `commit1`. This
596 :param path1: Can be set to a path to associate with `commit1`. This
599 parameter works only for backends which support diff generation for
597 parameter works only for backends which support diff generation for
600 different paths. Other backends will raise a `ValueError` if `path1`
598 different paths. Other backends will raise a `ValueError` if `path1`
601 is set and has a different value than `path`.
599 is set and has a different value than `path`.
603 """
601 """
604 raise NotImplementedError
602 raise NotImplementedError
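
A typical call against a concrete backend; diffing from the empty state uses
``EMPTY_COMMIT`` as the first argument (the path below is hypothetical):

    old = repo.EMPTY_COMMIT          # everything since the empty repository
    new = repo.get_commit()          # tip
    diff = repo.get_diff(old, new, path='README.rst',
                         ignore_whitespace=True, context=5)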
605
603
606 def strip(self, commit_id, branch=None):
604 def strip(self, commit_id, branch=None):
607 """
605 """
608 Strip given commit_id from the repository
606 Strip given commit_id from the repository
609 """
607 """
610 raise NotImplementedError
608 raise NotImplementedError
611
609
612 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
610 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
613 """
611 """
614 Return the latest common ancestor commit if one exists for this repo
612 Return the latest common ancestor commit if one exists for this repo
615 `commit_id1` vs `commit_id2` from `repo2`.
613 `commit_id1` vs `commit_id2` from `repo2`.
616
614
617 :param commit_id1: Commit id from this repository to use as a
615 :param commit_id1: Commit id from this repository to use as a
618 target for the comparison.
616 target for the comparison.
619 :param commit_id2: Source commit id to use for comparison.
617 :param commit_id2: Source commit id to use for comparison.
620 :param repo2: Source repository to use for comparison.
618 :param repo2: Source repository to use for comparison.
621 """
619 """
622 raise NotImplementedError
620 raise NotImplementedError
623
621
624 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
622 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
625 """
623 """
626 Compare this repository's revision `commit_id1` with `commit_id2`.
624 Compare this repository's revision `commit_id1` with `commit_id2`.
627
625
628 Returns a tuple(commits, ancestor) that would be merged from
626 Returns a tuple(commits, ancestor) that would be merged from
629 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
627 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
630 will be returned as ancestor.
628 will be returned as ancestor.
631
629
632 :param commit_id1: Commit id from this repository to use as a
630 :param commit_id1: Commit id from this repository to use as a
633 target for the comparison.
631 target for the comparison.
634 :param commit_id2: Source commit id to use for comparison.
632 :param commit_id2: Source commit id to use for comparison.
635 :param repo2: Source repository to use for comparison.
633 :param repo2: Source repository to use for comparison.
636 :param merge: If set to ``True`` will do a merge compare which also
634 :param merge: If set to ``True`` will do a merge compare which also
637 returns the common ancestor.
635 returns the common ancestor.
638 :param pre_load: Optional. List of commit attributes to load.
636 :param pre_load: Optional. List of commit attributes to load.
639 """
637 """
640 raise NotImplementedError
638 raise NotImplementedError
641
639
642 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
640 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
643 user_name='', user_email='', message='', dry_run=False,
641 user_name='', user_email='', message='', dry_run=False,
644 use_rebase=False, close_branch=False):
642 use_rebase=False, close_branch=False):
645 """
643 """
646 Merge the revisions specified in `source_ref` from `source_repo`
644 Merge the revisions specified in `source_ref` from `source_repo`
647 onto the `target_ref` of this repository.
645 onto the `target_ref` of this repository.
648
646
649 `source_ref` and `target_ref` are named tuples with the following
647 `source_ref` and `target_ref` are named tuples with the following
650 fields `type`, `name` and `commit_id`.
648 fields `type`, `name` and `commit_id`.
651
649
652 Returns a MergeResponse object with the following fields:
650 Returns a MergeResponse object with the following fields:
653 'possible', 'executed', 'merge_ref' and
651 'possible', 'executed', 'merge_ref' and
654 'failure_reason'.
652 'failure_reason'.
655
653
656 :param repo_id: `repo_id` target repo id.
654 :param repo_id: `repo_id` target repo id.
657 :param workspace_id: `workspace_id` unique identifier.
655 :param workspace_id: `workspace_id` unique identifier.
658 :param target_ref: `target_ref` points to the commit on top of which
656 :param target_ref: `target_ref` points to the commit on top of which
659 the `source_ref` should be merged.
657 the `source_ref` should be merged.
660 :param source_repo: The repository that contains the commits to be
658 :param source_repo: The repository that contains the commits to be
661 merged.
659 merged.
662 :param source_ref: `source_ref` points to the topmost commit from
660 :param source_ref: `source_ref` points to the topmost commit from
663 the `source_repo` which should be merged.
661 the `source_repo` which should be merged.
664 :param user_name: Merge commit `user_name`.
662 :param user_name: Merge commit `user_name`.
665 :param user_email: Merge commit `user_email`.
663 :param user_email: Merge commit `user_email`.
666 :param message: Merge commit `message`.
664 :param message: Merge commit `message`.
667 :param dry_run: If `True` the merge will not take place.
665 :param dry_run: If `True` the merge will not take place.
668 :param use_rebase: If `True` commits from the source will be rebased
666 :param use_rebase: If `True` commits from the source will be rebased
669 on top of the target instead of being merged.
667 on top of the target instead of being merged.
670 :param close_branch: If `True` the branch will be closed before merging it
668 :param close_branch: If `True` the branch will be closed before merging it
671 """
669 """
672 if dry_run:
670 if dry_run:
673 message = message or settings.MERGE_DRY_RUN_MESSAGE
671 message = message or settings.MERGE_DRY_RUN_MESSAGE
674 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
672 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
675 user_name = user_name or settings.MERGE_DRY_RUN_USER
673 user_name = user_name or settings.MERGE_DRY_RUN_USER
676 else:
674 else:
677 if not user_name:
675 if not user_name:
678 raise ValueError('user_name cannot be empty')
676 raise ValueError('user_name cannot be empty')
679 if not user_email:
677 if not user_email:
680 raise ValueError('user_email cannot be empty')
678 raise ValueError('user_email cannot be empty')
681 if not message:
679 if not message:
682 raise ValueError('message cannot be empty')
680 raise ValueError('message cannot be empty')
683
681
684 try:
682 try:
685 return self._merge_repo(
683 return self._merge_repo(
686 repo_id, workspace_id, target_ref, source_repo,
684 repo_id, workspace_id, target_ref, source_repo,
687 source_ref, message, user_name, user_email, dry_run=dry_run,
685 source_ref, message, user_name, user_email, dry_run=dry_run,
688 use_rebase=use_rebase, close_branch=close_branch)
686 use_rebase=use_rebase, close_branch=close_branch)
689 except RepositoryError as exc:
687 except RepositoryError as exc:
690 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
688 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
691 return MergeResponse(
689 return MergeResponse(
692 False, False, None, MergeFailureReason.UNKNOWN,
690 False, False, None, MergeFailureReason.UNKNOWN,
693 metadata={'exception': str(exc)})
691 metadata={'exception': str(exc)})
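
Dry runs are how "can this pull request be merged?" checks can be expressed:
placeholder identity values are substituted and the backend is asked not to
persist anything. A hedged sketch, assuming the refs and ids come from the
caller:

    resp = target_repo.merge(
        repo_id=repo_id, workspace_id=workspace_id,
        target_ref=target_ref, source_repo=source_repo, source_ref=source_ref,
        dry_run=True)                # user_name/user_email/message defaulted
    if resp.possible:
        pass  # safe to attempt the real merge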
694
692
695 def _merge_repo(self, repo_id, workspace_id, target_ref,
693 def _merge_repo(self, repo_id, workspace_id, target_ref,
696 source_repo, source_ref, merge_message,
694 source_repo, source_ref, merge_message,
697 merger_name, merger_email, dry_run=False,
695 merger_name, merger_email, dry_run=False,
698 use_rebase=False, close_branch=False):
696 use_rebase=False, close_branch=False):
699 """Internal implementation of merge."""
697 """Internal implementation of merge."""
700 raise NotImplementedError
698 raise NotImplementedError
701
699
702 def _maybe_prepare_merge_workspace(
700 def _maybe_prepare_merge_workspace(
703 self, repo_id, workspace_id, target_ref, source_ref):
701 self, repo_id, workspace_id, target_ref, source_ref):
704 """
702 """
705 Create the merge workspace.
703 Create the merge workspace.
706
704
707 :param workspace_id: `workspace_id` unique identifier.
705 :param workspace_id: `workspace_id` unique identifier.
708 """
706 """
709 raise NotImplementedError
707 raise NotImplementedError
710
708
711 @classmethod
709 @classmethod
712 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
710 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
713 """
711 """
714 Legacy path scheme used by older versions. We still need it for
712 Legacy path scheme used by older versions. We still need it for
715 backward compatibility.
713 backward compatibility.
716 """
714 """
717 return os.path.join(
715 return os.path.join(
718 os.path.dirname(repo_path),
716 os.path.dirname(repo_path),
719 '.__shadow_{}_{}'.format(os.path.basename(repo_path), workspace_id))
717 f'.__shadow_{os.path.basename(repo_path)}_{workspace_id}')
720
718
721 @classmethod
719 @classmethod
722 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
720 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
723 # The name of the shadow repository must start with '.', so it is
721 # The name of the shadow repository must start with '.', so it is
724 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
722 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
725 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
723 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
726 if os.path.exists(legacy_repository_path):
724 if os.path.exists(legacy_repository_path):
727 return legacy_repository_path
725 return legacy_repository_path
728 else:
726 else:
729 return os.path.join(
727 return os.path.join(
730 os.path.dirname(repo_path),
728 os.path.dirname(repo_path),
731 '.__shadow_repo_{}_{}'.format(repo_id, workspace_id))
729 f'.__shadow_repo_{repo_id}_{workspace_id}')
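
The two path helpers differ only in their naming scheme, and the legacy
layout wins whenever it already exists so older shadow repositories keep
resolving. For a repository at ``/repos/project`` (values illustrative):

    path = BaseRepository._get_shadow_repository_path(
        '/repos/project', repo_id=42, workspace_id='pr-7')
    # -> '/repos/.__shadow_repo_42_pr-7', unless the legacy
    #    '/repos/.__shadow_project_pr-7' directory already exists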
732
730
733 def cleanup_merge_workspace(self, repo_id, workspace_id):
731 def cleanup_merge_workspace(self, repo_id, workspace_id):
734 """
732 """
735 Remove merge workspace.
733 Remove merge workspace.
736
734
737 This function MUST not fail in case there is no workspace associated with
735 This function MUST not fail in case there is no workspace associated with
738 the given `workspace_id`.
736 the given `workspace_id`.
739
737
740 :param workspace_id: `workspace_id` unique identifier.
738 :param workspace_id: `workspace_id` unique identifier.
741 """
739 """
742 shadow_repository_path = self._get_shadow_repository_path(
740 shadow_repository_path = self._get_shadow_repository_path(
743 self.path, repo_id, workspace_id)
741 self.path, repo_id, workspace_id)
744 shadow_repository_path_del = f'{shadow_repository_path}.{time.time()}.delete'
742 shadow_repository_path_del = f'{shadow_repository_path}.{time.time()}.delete'
746
744
747 # move the shadow repo, so it never conflicts with the one used.
745 # move the shadow repo, so it never conflicts with the one used.
748 # we use this method because shutil.rmtree had some edge case problems
746 # we use this method because shutil.rmtree had some edge case problems
749 # removing symlinked repositories
747 # removing symlinked repositories
750 if not os.path.isdir(shadow_repository_path):
748 if not os.path.isdir(shadow_repository_path):
751 return
749 return
752
750
753 shutil.move(shadow_repository_path, shadow_repository_path_del)
751 shutil.move(shadow_repository_path, shadow_repository_path_del)
754 try:
752 try:
755 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
753 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
756 except Exception:
754 except Exception:
757 log.exception('Failed to gracefully remove shadow repo under %s',
755 log.exception('Failed to gracefully remove shadow repo under %s',
758 shadow_repository_path_del)
756 shadow_repository_path_del)
759 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
757 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
760
758
761 # ========== #
759 # ========== #
762 # COMMIT API #
760 # COMMIT API #
763 # ========== #
761 # ========== #
764
762
765 @LazyProperty
763 @LazyProperty
766 def in_memory_commit(self):
764 def in_memory_commit(self):
767 """
765 """
768 Returns :class:`InMemoryCommit` object for this repository.
766 Returns :class:`InMemoryCommit` object for this repository.
769 """
767 """
770 raise NotImplementedError
768 raise NotImplementedError
771
769
772 # ======================== #
770 # ======================== #
773 # UTILITIES FOR SUBCLASSES #
771 # UTILITIES FOR SUBCLASSES #
774 # ======================== #
772 # ======================== #
775
773
776 def _validate_diff_commits(self, commit1, commit2):
774 def _validate_diff_commits(self, commit1, commit2):
777 """
775 """
778 Validates that the given commits are related to this repository.
776 Validates that the given commits are related to this repository.
779
777
780 Intended as a utility for subclasses to have a consistent validation
778 Intended as a utility for subclasses to have a consistent validation
781 of input parameters in methods like :meth:`get_diff`.
779 of input parameters in methods like :meth:`get_diff`.
782 """
780 """
783 self._validate_commit(commit1)
781 self._validate_commit(commit1)
784 self._validate_commit(commit2)
782 self._validate_commit(commit2)
785 if (isinstance(commit1, EmptyCommit) and
783 if (isinstance(commit1, EmptyCommit) and
786 isinstance(commit2, EmptyCommit)):
784 isinstance(commit2, EmptyCommit)):
787 raise ValueError("Cannot compare two empty commits")
785 raise ValueError("Cannot compare two empty commits")
788
786
789 def _validate_commit(self, commit):
787 def _validate_commit(self, commit):
790 if not isinstance(commit, BaseCommit):
788 if not isinstance(commit, BaseCommit):
791 raise TypeError(
789 raise TypeError(
792 "%s is not of type BaseCommit" % repr(commit))
790 "%s is not of type BaseCommit" % repr(commit))
793 if commit.repository != self and not isinstance(commit, EmptyCommit):
791 if commit.repository != self and not isinstance(commit, EmptyCommit):
794 raise ValueError(
792 raise ValueError(
795 "Commit %s must be a valid commit from this repository %s, "
793 "Commit %s must be a valid commit from this repository %s, "
796 "related to this repository instead %s." %
794 "related to this repository instead %s." %
797 (commit, self, commit.repository))
795 (commit, self, commit.repository))
798
796
799 def _validate_commit_id(self, commit_id):
797 def _validate_commit_id(self, commit_id):
800 if not isinstance(commit_id, str):
798 if not isinstance(commit_id, str):
801 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
799 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
802
800
803 def _validate_commit_idx(self, commit_idx):
801 def _validate_commit_idx(self, commit_idx):
804 if not isinstance(commit_idx, int):
802 if not isinstance(commit_idx, int):
805 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
803 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
806
804
807 def _validate_branch_name(self, branch_name):
805 def _validate_branch_name(self, branch_name):
808 if branch_name and branch_name not in self.branches_all:
806 if branch_name and branch_name not in self.branches_all:
809 msg = ("Branch {} not found in {}".format(branch_name, self))
807 msg = (f"Branch {branch_name} not found in {self}")
810 raise BranchDoesNotExistError(msg)
808 raise BranchDoesNotExistError(msg)
811
809
812 #
810 #
813 # Supporting deprecated API parts
811 # Supporting deprecated API parts
814 # TODO: johbo: consider to move this into a mixin
812 # TODO: johbo: consider to move this into a mixin
815 #
813 #
816
814
817 @property
815 @property
818 def EMPTY_CHANGESET(self):
816 def EMPTY_CHANGESET(self):
819 warnings.warn(
817 warnings.warn(
820 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
818 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
821 return self.EMPTY_COMMIT_ID
819 return self.EMPTY_COMMIT_ID
822
820
823 @property
821 @property
824 def revisions(self):
822 def revisions(self):
825 warnings.warn("Use commits attribute instead", DeprecationWarning)
823 warnings.warn("Use commits attribute instead", DeprecationWarning)
826 return self.commit_ids
824 return self.commit_ids
827
825
828 @revisions.setter
826 @revisions.setter
829 def revisions(self, value):
827 def revisions(self, value):
830 warnings.warn("Use commits attribute instead", DeprecationWarning)
828 warnings.warn("Use commits attribute instead", DeprecationWarning)
831 self.commit_ids = value
829 self.commit_ids = value
832
830
833 def get_changeset(self, revision=None, pre_load=None):
831 def get_changeset(self, revision=None, pre_load=None):
834 warnings.warn("Use get_commit instead", DeprecationWarning)
832 warnings.warn("Use get_commit instead", DeprecationWarning)
835 commit_id = None
833 commit_id = None
836 commit_idx = None
834 commit_idx = None
837 if isinstance(revision, str):
835 if isinstance(revision, str):
838 commit_id = revision
836 commit_id = revision
839 else:
837 else:
840 commit_idx = revision
838 commit_idx = revision
841 return self.get_commit(
839 return self.get_commit(
842 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
840 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
843
841
844 def get_changesets(
842 def get_changesets(
845 self, start=None, end=None, start_date=None, end_date=None,
843 self, start=None, end=None, start_date=None, end_date=None,
846 branch_name=None, pre_load=None):
844 branch_name=None, pre_load=None):
847 warnings.warn("Use get_commits instead", DeprecationWarning)
845 warnings.warn("Use get_commits instead", DeprecationWarning)
848 start_id = self._revision_to_commit(start)
846 start_id = self._revision_to_commit(start)
849 end_id = self._revision_to_commit(end)
847 end_id = self._revision_to_commit(end)
850 return self.get_commits(
848 return self.get_commits(
851 start_id=start_id, end_id=end_id, start_date=start_date,
849 start_id=start_id, end_id=end_id, start_date=start_date,
852 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
850 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
853
851
854 def _revision_to_commit(self, revision):
852 def _revision_to_commit(self, revision):
855 """
853 """
856 Translates a revision to a commit_id
854 Translates a revision to a commit_id
857
855
858 Helps to support the old changeset-based API, which allows using
856 Helps to support the old changeset-based API, which allows using
859 commit ids and commit indices interchangeably.
857 commit ids and commit indices interchangeably.
860 """
858 """
861 if revision is None:
859 if revision is None:
862 return revision
860 return revision
863
861
864 if isinstance(revision, str):
862 if isinstance(revision, str):
865 commit_id = revision
863 commit_id = revision
866 else:
864 else:
867 commit_id = self.commit_ids[revision]
865 commit_id = self.commit_ids[revision]
868 return commit_id
866 return commit_id
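
This translation keeps the deprecated changeset API working with either
identifier style; both calls below resolve to the same commit (sketch,
assuming ``repo`` is a concrete backend with at least one commit; each call
emits a DeprecationWarning):

    first_id = repo.commit_ids[0]
    assert repo.get_changeset(first_id).raw_id == repo.get_changeset(0).raw_id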
869
867
870 @property
868 @property
871 def in_memory_changeset(self):
869 def in_memory_changeset(self):
872 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
870 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
873 return self.in_memory_commit
871 return self.in_memory_commit
874
872
875 def get_path_permissions(self, username):
873 def get_path_permissions(self, username):
876 """
874 """
877 Returns a path permission checker or None if not supported
875 Returns a path permission checker or None if not supported
878
876
879 :param username: session user name
877 :param username: session user name
880 :return: an instance of BasePathPermissionChecker or None
878 :return: an instance of BasePathPermissionChecker or None
881 """
879 """
882 return None
880 return None
883
881
884 def install_hooks(self, force=False):
882 def install_hooks(self, force=False):
885 return self._remote.install_hooks(force)
883 return self._remote.install_hooks(force)
886
884
887 def get_hooks_info(self):
885 def get_hooks_info(self):
888 return self._remote.get_hooks_info()
886 return self._remote.get_hooks_info()
889
887
890 def vcsserver_invalidate_cache(self, delete=False):
888 def vcsserver_invalidate_cache(self, delete=False):
891 return self._remote.vcsserver_invalidate_cache(delete)
889 return self._remote.vcsserver_invalidate_cache(delete)
892
890
893
891
894 class BaseCommit(object):
892 class BaseCommit(object):
895 """
893 """
896 Each backend should implement its commit representation.
894 Each backend should implement its commit representation.
897
895
898 **Attributes**
896 **Attributes**
899
897
900 ``repository``
898 ``repository``
901 repository object within which commit exists
899 repository object within which commit exists
902
900
903 ``id``
901 ``id``
904 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
902 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
905 just ``tip``.
903 just ``tip``.
906
904
907 ``raw_id``
905 ``raw_id``
908 raw commit representation (i.e. the full 40-character sha for the git
906 raw commit representation (i.e. the full 40-character sha for the git
909 backend)
907 backend)
910
908
911 ``short_id``
909 ``short_id``
912 shortened (where applicable) version of ``raw_id``; a simple
910 shortened (where applicable) version of ``raw_id``; a simple
913 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
911 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
914 as ``raw_id`` for subversion
912 as ``raw_id`` for subversion
915
913
916 ``idx``
914 ``idx``
917 commit index
915 commit index
918
916
919 ``files``
917 ``files``
920 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
918 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
921
919
922 ``dirs``
920 ``dirs``
923 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
921 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
924
922
925 ``nodes``
923 ``nodes``
926 combined list of ``Node`` objects
924 combined list of ``Node`` objects
927
925
928 ``author``
926 ``author``
929 author of the commit, as unicode
927 author of the commit, as unicode
930
928
931 ``message``
929 ``message``
932 message of the commit, as unicode
930 message of the commit, as unicode
933
931
934 ``parents``
932 ``parents``
935 list of parent commits
933 list of parent commits
936
934
937 """
935 """
938 repository = None
936 repository = None
939 branch = None
937 branch = None
940
938
941 """
939 """
942 Depending on the backend this should be set to the branch name of the
940 Depending on the backend this should be set to the branch name of the
943 commit. Backends not supporting branches on commits should leave this
941 commit. Backends not supporting branches on commits should leave this
944 value as ``None``.
942 value as ``None``.
945 """
943 """
946
944
947 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
945 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
948 """
946 """
949 This template is used to generate a default prefix for repository archives
947 This template is used to generate a default prefix for repository archives
950 if no prefix has been specified.
948 if no prefix has been specified.
951 """
949 """
952
950
953 def __repr__(self):
951 def __repr__(self):
954 return self.__str__()
952 return self.__str__()
955
953
956 def __str__(self):
954 def __str__(self):
957 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
955 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
958
956
959 def __eq__(self, other):
957 def __eq__(self, other):
960 same_instance = isinstance(other, self.__class__)
958 same_instance = isinstance(other, self.__class__)
961 return same_instance and self.raw_id == other.raw_id
959 return same_instance and self.raw_id == other.raw_id
962
960
963 def __json__(self):
961 def __json__(self):
964 parents = []
962 parents = []
965 try:
963 try:
966 for parent in self.parents:
964 for parent in self.parents:
967 parents.append({'raw_id': parent.raw_id})
965 parents.append({'raw_id': parent.raw_id})
968 except NotImplementedError:
966 except NotImplementedError:
969 # empty commit doesn't have parents implemented
967 # empty commit doesn't have parents implemented
970 pass
968 pass
971
969
972 return {
970 return {
973 'short_id': self.short_id,
971 'short_id': self.short_id,
974 'raw_id': self.raw_id,
972 'raw_id': self.raw_id,
975 'revision': self.idx,
973 'revision': self.idx,
976 'message': self.message,
974 'message': self.message,
977 'date': self.date,
975 'date': self.date,
978 'author': self.author,
976 'author': self.author,
979 'parents': parents,
977 'parents': parents,
980 'branch': self.branch
978 'branch': self.branch
981 }
979 }
982
980
983 def __getstate__(self):
981 def __getstate__(self):
984 d = self.__dict__.copy()
982 d = self.__dict__.copy()
985 d.pop('_remote', None)
983 d.pop('_remote', None)
986 d.pop('repository', None)
984 d.pop('repository', None)
987 return d
985 return d
988
986
989 def get_remote(self):
987 def get_remote(self):
990 return self._remote
988 return self._remote
991
989
992 def serialize(self):
990 def serialize(self):
993 return self.__json__()
991 return self.__json__()
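
``serialize``/``__json__`` is the dict shape handed to the JSON layer; the
field values below are purely illustrative:

    {
        'short_id': '1e6dcea42b10',
        'raw_id': '1e6dcea42b10' + '0' * 28,
        'revision': 0,                 # the commit index (idx)
        'message': 'initial commit',
        'date': ...,                   # backend-provided datetime
        'author': 'Joe Doe <joe.doe@example.com>',
        'parents': [],                 # [{'raw_id': ...}, ...] when present
        'branch': 'default',
    }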
994
992
995 def _get_refs(self):
993 def _get_refs(self):
996 return {
994 return {
997 'branches': [self.branch] if self.branch else [],
995 'branches': [self.branch] if self.branch else [],
998 'bookmarks': getattr(self, 'bookmarks', []),
996 'bookmarks': getattr(self, 'bookmarks', []),
999 'tags': self.tags
997 'tags': self.tags
1000 }
998 }
1001
999
1002 @LazyProperty
1000 @LazyProperty
1003 def last(self):
1001 def last(self):
1004 """
1002 """
1005 ``True`` if this is the last commit in the repository, ``False``
1003 ``True`` if this is the last commit in the repository, ``False``
1006 otherwise; trying to access this attribute while there are no
1004 otherwise; trying to access this attribute while there are no
1007 commits raises `EmptyRepositoryError`
1005 commits raises `EmptyRepositoryError`
1008 """
1006 """
1009 if self.repository is None:
1007 if self.repository is None:
1010 raise CommitError("Cannot check if it's the most recent commit")
1008 raise CommitError("Cannot check if it's the most recent commit")
1011 return self.raw_id == self.repository.commit_ids[-1]
1009 return self.raw_id == self.repository.commit_ids[-1]
1012
1010
1013 @LazyProperty
1011 @LazyProperty
1014 def parents(self):
1012 def parents(self):
1015 """
1013 """
1016 Returns list of parent commits.
1014 Returns list of parent commits.
1017 """
1015 """
1018 raise NotImplementedError
1016 raise NotImplementedError
1019
1017
1020 @LazyProperty
1018 @LazyProperty
1021 def first_parent(self):
1019 def first_parent(self):
1022 """
1020 """
1023 Returns list of parent commits.
1021 Returns list of parent commits.
1024 """
1022 """
1025 return self.parents[0] if self.parents else EmptyCommit()
1023 return self.parents[0] if self.parents else EmptyCommit()
1026
1024
1027 @property
1025 @property
1028 def merge(self):
1026 def merge(self):
1029 """
1027 """
1030 Returns ``True`` if the commit is a merge, i.e. has more than one parent.
1028 Returns ``True`` if the commit is a merge, i.e. has more than one parent.
1031 """
1029 """
1032 return len(self.parents) > 1
1030 return len(self.parents) > 1
1033
1031
1034 @LazyProperty
1032 @LazyProperty
1035 def children(self):
1033 def children(self):
1036 """
1034 """
1037 Returns list of child commits.
1035 Returns list of child commits.
1038 """
1036 """
1039 raise NotImplementedError
1037 raise NotImplementedError
1040
1038
1041 @LazyProperty
1039 @LazyProperty
1042 def id(self):
1040 def id(self):
1043 """
1041 """
1044 Returns string identifying this commit.
1042 Returns string identifying this commit.
1045 """
1043 """
1046 raise NotImplementedError
1044 raise NotImplementedError
1047
1045
1048 @LazyProperty
1046 @LazyProperty
1049 def raw_id(self):
1047 def raw_id(self):
1050 """
1048 """
1051 Returns raw string identifying this commit.
1049 Returns raw string identifying this commit.
1052 """
1050 """
1053 raise NotImplementedError
1051 raise NotImplementedError
1054
1052
1055 @LazyProperty
1053 @LazyProperty
1056 def short_id(self):
1054 def short_id(self):
1057 """
1055 """
1058 Returns shortened version of ``raw_id`` attribute, as string,
1056 Returns shortened version of ``raw_id`` attribute, as string,
1059 identifying this commit, useful for presentation to users.
1057 identifying this commit, useful for presentation to users.
1060 """
1058 """
1061 raise NotImplementedError
1059 raise NotImplementedError
1062
1060
1063 @LazyProperty
1061 @LazyProperty
1064 def idx(self):
1062 def idx(self):
1065 """
1063 """
1066 Returns integer identifying this commit.
1064 Returns integer identifying this commit.
1067 """
1065 """
1068 raise NotImplementedError
1066 raise NotImplementedError
1069
1067
1070 @LazyProperty
1068 @LazyProperty
1071 def committer(self):
1069 def committer(self):
1072 """
1070 """
1073 Returns committer for this commit
1071 Returns committer for this commit
1074 """
1072 """
1075 raise NotImplementedError
1073 raise NotImplementedError
1076
1074
1077 @LazyProperty
1075 @LazyProperty
1078 def committer_name(self):
1076 def committer_name(self):
1079 """
1077 """
1080 Returns committer name for this commit
1078 Returns committer name for this commit
1081 """
1079 """
1082
1080
1083 return author_name(self.committer)
1081 return author_name(self.committer)
1084
1082
1085 @LazyProperty
1083 @LazyProperty
1086 def committer_email(self):
1084 def committer_email(self):
1087 """
1085 """
1088 Returns committer email address for this commit
1086 Returns committer email address for this commit
1089 """
1087 """
1090
1088
1091 return author_email(self.committer)
1089 return author_email(self.committer)
1092
1090
1093 @LazyProperty
1091 @LazyProperty
1094 def author(self):
1092 def author(self):
1095 """
1093 """
1096 Returns author for this commit
1094 Returns author for this commit
1097 """
1095 """
1098
1096
1099 raise NotImplementedError
1097 raise NotImplementedError
1100
1098
1101 @LazyProperty
1099 @LazyProperty
1102 def author_name(self):
1100 def author_name(self):
1103 """
1101 """
1104 Returns author name for this commit
1102 Returns author name for this commit
1105 """
1103 """
1106
1104
1107 return author_name(self.author)
1105 return author_name(self.author)
1108
1106
1109 @LazyProperty
1107 @LazyProperty
1110 def author_email(self):
1108 def author_email(self):
1111 """
1109 """
1112 Returns author email address for this commit
1110 Returns author email address for this commit
1113 """
1111 """
1114
1112
1115 return author_email(self.author)
1113 return author_email(self.author)
1116
1114
1117 def get_file_mode(self, path: bytes):
1115 def get_file_mode(self, path: bytes):
1118 """
1116 """
1119 Returns stat mode of the file at `path`.
1117 Returns stat mode of the file at `path`.
1120 """
1118 """
1121 raise NotImplementedError
1119 raise NotImplementedError
1122
1120
1123 def is_link(self, path):
1121 def is_link(self, path):
1124 """
1122 """
1125 Returns ``True`` if given `path` is a symlink
1123 Returns ``True`` if given `path` is a symlink
1126 """
1124 """
1127 raise NotImplementedError
1125 raise NotImplementedError
1128
1126
1129 def is_node_binary(self, path):
1127 def is_node_binary(self, path):
1130 """
1128 """
1131 Returns ``True`` if the file at the given `path` is binary
1129 Returns ``True`` if the file at the given `path` is binary
1132 """
1130 """
1133 raise NotImplementedError
1131 raise NotImplementedError
1134
1132
1135 def node_md5_hash(self, path):
1133 def node_md5_hash(self, path):
1136 """
1134 """
1137 Returns the MD5 hash of the node's data
1135 Returns the MD5 hash of the node's data
1138 """
1136 """
1139 raise NotImplementedError
1137 raise NotImplementedError
1140
1138
1141 def get_file_content(self, path) -> bytes:
1139 def get_file_content(self, path) -> bytes:
1142 """
1140 """
1143 Returns content of the file at the given `path`.
1141 Returns content of the file at the given `path`.
1144 """
1142 """
1145 raise NotImplementedError
1143 raise NotImplementedError
1146
1144
1147 def get_file_content_streamed(self, path):
1145 def get_file_content_streamed(self, path):
1148 """
1146 """
1149 returns a streaming response from vcsserver with file content
1147 returns a streaming response from vcsserver with file content
1150 """
1148 """
1151 raise NotImplementedError
1149 raise NotImplementedError
1152
1150
1153 def get_file_size(self, path):
1151 def get_file_size(self, path):
1154 """
1152 """
1155 Returns size of the file at the given `path`.
1153 Returns size of the file at the given `path`.
1156 """
1154 """
1157 raise NotImplementedError
1155 raise NotImplementedError
1158
1156
1159 def get_path_commit(self, path, pre_load=None):
1157 def get_path_commit(self, path, pre_load=None):
1160 """
1158 """
1161 Returns last commit of the file at the given `path`.
1159 Returns last commit of the file at the given `path`.
1162
1160
1163 :param pre_load: Optional. List of commit attributes to load.
1161 :param pre_load: Optional. List of commit attributes to load.
1164 """
1162 """
1165 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1163 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1166 if not commits:
1164 if not commits:
1167 raise RepositoryError(
1165 raise RepositoryError(
1168 'Failed to fetch history for path {}. '
1166 'Failed to fetch history for path {}. '
1169 'Please check that such a path exists in your repository'.format(
1167 'Please check that such a path exists in your repository'.format(
1170 path))
1168 path))
1171 return commits[0]
1169 return commits[0]
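# Illustrative sketch (assumes `commit` is a concrete BaseCommit): fetch the
# last commit that touched a file, pre-loading cheap attributes in one
# round-trip to the vcsserver.
last_change = commit.get_path_commit('setup.py', pre_load=['author', 'date'])
print(last_change.short_id, last_change.author)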
1172
1170
1173 def get_path_history(self, path, limit=None, pre_load=None):
1171 def get_path_history(self, path, limit=None, pre_load=None):
1174 """
1172 """
1175 Returns history of file as reversed list of :class:`BaseCommit`
1173 Returns history of file as reversed list of :class:`BaseCommit`
1176 objects for which file at given `path` has been modified.
1174 objects for which file at given `path` has been modified.
1177
1175
1178 :param limit: Optional. Allows to limit the size of the returned
1176 :param limit: Optional. Allows to limit the size of the returned
1179 history. This is intended as a hint to the underlying backend, so
1177 history. This is intended as a hint to the underlying backend, so
1180 that it can apply optimizations depending on the limit.
1178 that it can apply optimizations depending on the limit.
1181 :param pre_load: Optional. List of commit attributes to load.
1179 :param pre_load: Optional. List of commit attributes to load.
1182 """
1180 """
1183 raise NotImplementedError
1181 raise NotImplementedError
1184
1182
1185 def get_file_annotate(self, path, pre_load=None):
1183 def get_file_annotate(self, path, pre_load=None):
1186 """
1184 """
1187 Returns a generator of four-element tuples with
1185 Returns a generator of four-element tuples with
1188 lineno, sha, a commit lazy loader and the line content
1186 lineno, sha, a commit lazy loader and the line content
1189
1187
1190 :param pre_load: Optional. List of commit attributes to load.
1188 :param pre_load: Optional. List of commit attributes to load.
1191 """
1189 """
1192 raise NotImplementedError
1190 raise NotImplementedError
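# Illustrative sketch of consuming the annotate generator (assumes `commit`
# is a concrete BaseCommit; line content is bytes in the backends):
for line_no, sha, commit_loader, line in commit.get_file_annotate('README.rst'):
    if b'TODO' in line:
        blamed = commit_loader()  # the commit is only fetched when called
        print(line_no, sha, blamed.author)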
1193
1191
1194 def get_nodes(self, path, pre_load=None):
1192 def get_nodes(self, path, pre_load=None):
1195 """
1193 """
1196 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
1194 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
1197 the state of the commit at the given ``path``.
1195 the state of the commit at the given ``path``.
1198
1196
1199 :raises ``CommitError``: if node at the given ``path`` is not
1197 :raises ``CommitError``: if node at the given ``path`` is not
1200 instance of ``DirNode``
1198 instance of ``DirNode``
1201 """
1199 """
1202 raise NotImplementedError
1200 raise NotImplementedError
1203
1201
1204 def get_node(self, path):
1202 def get_node(self, path):
1205 """
1203 """
1206 Returns ``Node`` object from the given ``path``.
1204 Returns ``Node`` object from the given ``path``.
1207
1205
1208 :raises ``NodeDoesNotExistError``: if there is no node at the given
1206 :raises ``NodeDoesNotExistError``: if there is no node at the given
1209 ``path``
1207 ``path``
1210 """
1208 """
1211 raise NotImplementedError
1209 raise NotImplementedError
1212
1210
1213 def get_largefile_node(self, path):
1211 def get_largefile_node(self, path):
1214 """
1212 """
1215 Returns the path to the largefile from Mercurial largefiles/Git LFS storage,
1213 Returns the path to the largefile from Mercurial largefiles/Git LFS storage,
1216 or ``None`` if the node is not a largefile
1214 or ``None`` if the node is not a largefile
1217 """
1215 """
1218 return None
1216 return None
1219
1217
1220 def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
1218 def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
1221 archive_dir_name=None, write_metadata=False, mtime=None,
1219 archive_dir_name=None, write_metadata=False, mtime=None,
1222 archive_at_path='/', cache_config=None):
1220 archive_at_path='/', cache_config=None):
1223 """
1221 """
1224 Creates an archive containing the contents of the repository.
1222 Creates an archive containing the contents of the repository.
1225
1223
1226 :param archive_name_key: unique key under which this archive should be generated
1224 :param archive_name_key: unique key under which this archive should be generated
1227 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1225 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1228 :param archive_dir_name: name of root directory in archive.
1226 :param archive_dir_name: name of root directory in archive.
1229 Default is repository name and commit's short_id joined with dash:
1227 Default is repository name and commit's short_id joined with dash:
1230 ``"{repo_name}-{short_id}"``.
1228 ``"{repo_name}-{short_id}"``.
1231 :param write_metadata: write a metadata file into archive.
1229 :param write_metadata: write a metadata file into archive.
1232 :param mtime: custom modification time for archive creation, defaults
1230 :param mtime: custom modification time for archive creation, defaults
1233 to time.time() if not given.
1231 to time.time() if not given.
1234 :param archive_at_path: pack files at this path (default '/')
1232 :param archive_at_path: pack files at this path (default '/')
1235 :param cache_config: config spec to send to vcsserver to configure the backend to store files
1233 :param cache_config: config spec to send to vcsserver to configure the backend to store files
1236
1234
1237 :raise VCSError: If prefix has a problem.
1235 :raise VCSError: If prefix has a problem.
1238 """
1236 """
1239 cache_config = cache_config or {}
1237 cache_config = cache_config or {}
1240 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1238 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1241 if kind not in allowed_kinds:
1239 if kind not in allowed_kinds:
1242 raise ImproperArchiveTypeError(
1240 raise ImproperArchiveTypeError(
1243 'Archive kind (%s) not supported, use one of %s' %
1241 'Archive kind (%s) not supported, use one of %s' %
1244 (kind, allowed_kinds))
1242 (kind, allowed_kinds))
1245
1243
1246 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1244 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1247 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1245 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1248 commit_id = self.raw_id
1246 commit_id = self.raw_id
1249
1247
1250 return self.repository._remote.archive_repo(
1248 return self.repository._remote.archive_repo(
1251 archive_name_key, kind, mtime, archive_at_path,
1249 archive_name_key, kind, mtime, archive_at_path,
1252 archive_dir_name, commit_id, cache_config)
1250 archive_dir_name, commit_id, cache_config)
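# Illustrative sketch (assumes `commit` is a concrete BaseCommit and a
# vcsserver connection is configured): archive only the docs/ subtree as tgz.
commit.archive_repo(
    archive_name_key='repo-docs-cache-key',  # hypothetical cache key
    kind='tgz',                # must appear in settings.ARCHIVE_SPECS
    archive_at_path='docs/',   # pack files at this path only
    write_metadata=True,
)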
1253
1251
1254 def _validate_archive_prefix(self, archive_dir_name):
1252 def _validate_archive_prefix(self, archive_dir_name):
1255 if archive_dir_name is None:
1253 if archive_dir_name is None:
1256 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1254 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1257 repo_name=safe_str(self.repository.name),
1255 repo_name=safe_str(self.repository.name),
1258 short_id=self.short_id)
1256 short_id=self.short_id)
1259 elif not isinstance(archive_dir_name, str):
1257 elif not isinstance(archive_dir_name, str):
1260 raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
1258 raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
1261 elif archive_dir_name.startswith('/'):
1259 elif archive_dir_name.startswith('/'):
1262 raise VCSError("Prefix cannot start with leading slash")
1260 raise VCSError("Prefix cannot start with leading slash")
1263 elif archive_dir_name.strip() == '':
1261 elif archive_dir_name.strip() == '':
1264 raise VCSError("Prefix cannot be empty")
1262 raise VCSError("Prefix cannot be empty")
1265 elif not archive_dir_name.isascii():
1263 elif not archive_dir_name.isascii():
1266 raise VCSError("Prefix cannot contain non ascii characters")
1264 raise VCSError("Prefix cannot contain non ascii characters")
1267 return archive_dir_name
1265 return archive_dir_name
1268
1266
1269 @LazyProperty
1267 @LazyProperty
1270 def root(self):
1268 def root(self):
1271 """
1269 """
1272 Returns ``RootNode`` object for this commit.
1270 Returns ``RootNode`` object for this commit.
1273 """
1271 """
1274 return self.get_node('')
1272 return self.get_node('')
1275
1273
1276 def next(self, branch=None):
1274 def next(self, branch=None):
1277 """
1275 """
1278 Returns the next commit after the current one; if `branch` is given, it
1276 Returns the next commit after the current one; if `branch` is given, it
1279 will return the next commit belonging to that branch
1277 will return the next commit belonging to that branch
1280
1278
1281 :param branch: show commits within the given named branch
1279 :param branch: show commits within the given named branch
1282 """
1280 """
1283 indexes = range(self.idx + 1, self.repository.count())
1281 indexes = range(self.idx + 1, self.repository.count())
1284 return self._find_next(indexes, branch)
1282 return self._find_next(indexes, branch)
1285
1283
1286 def prev(self, branch=None):
1284 def prev(self, branch=None):
1287 """
1285 """
1288 Returns the previous commit before the current one; if `branch` is given,
1286 Returns the previous commit before the current one; if `branch` is given,
1289 it will return the previous commit belonging to that branch
1287 it will return the previous commit belonging to that branch
1290
1288
1291 :param branch: show commit within the given named branch
1289 :param branch: show commit within the given named branch
1292 """
1290 """
1293 indexes = range(self.idx - 1, -1, -1)
1291 indexes = range(self.idx - 1, -1, -1)
1294 return self._find_next(indexes, branch)
1292 return self._find_next(indexes, branch)
1295
1293
1296 def _find_next(self, indexes, branch=None):
1294 def _find_next(self, indexes, branch=None):
1297 if branch and self.branch != branch:
1295 if branch and self.branch != branch:
1298 raise VCSError('Branch option used on commit not belonging '
1296 raise VCSError('Branch option used on commit not belonging '
1299 'to that branch')
1297 'to that branch')
1300
1298
1301 for next_idx in indexes:
1299 for next_idx in indexes:
1302 commit = self.repository.get_commit(commit_idx=next_idx)
1300 commit = self.repository.get_commit(commit_idx=next_idx)
1303 if branch and branch != commit.branch:
1301 if branch and branch != commit.branch:
1304 continue
1302 continue
1305 return commit
1303 return commit
1306 raise CommitDoesNotExistError
1304 raise CommitDoesNotExistError
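# Illustrative sketch of linear history navigation (assumes `commit` belongs
# to a branch named "stable"); both calls raise CommitDoesNotExistError at
# the ends of the history.
try:
    newer = commit.next(branch='stable')
    older = commit.prev(branch='stable')
except CommitDoesNotExistError:
    newer = older = None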
1307
1305
1308 def diff(self, ignore_whitespace=True, context=3):
1306 def diff(self, ignore_whitespace=True, context=3):
1309 """
1307 """
1310 Returns a `Diff` object representing the change made by this commit.
1308 Returns a `Diff` object representing the change made by this commit.
1311 """
1309 """
1312 parent = self.first_parent
1310 parent = self.first_parent
1313 diff = self.repository.get_diff(
1311 diff = self.repository.get_diff(
1314 parent, self,
1312 parent, self,
1315 ignore_whitespace=ignore_whitespace,
1313 ignore_whitespace=ignore_whitespace,
1316 context=context)
1314 context=context)
1317 return diff
1315 return diff
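# Illustrative sketch: a commit's diff is always taken against its first
# parent (see first_parent above), so merge commits show only one side.
change = commit.diff(ignore_whitespace=False, context=5)
raw_bytes = bytes(change.raw)  # Diff.raw is a memoryview, see Diff below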
1318
1316
1319 @LazyProperty
1317 @LazyProperty
1320 def added(self):
1318 def added(self):
1321 """
1319 """
1322 Returns list of added ``FileNode`` objects.
1320 Returns list of added ``FileNode`` objects.
1323 """
1321 """
1324 raise NotImplementedError
1322 raise NotImplementedError
1325
1323
1326 @LazyProperty
1324 @LazyProperty
1327 def changed(self):
1325 def changed(self):
1328 """
1326 """
1329 Returns list of modified ``FileNode`` objects.
1327 Returns list of modified ``FileNode`` objects.
1330 """
1328 """
1331 raise NotImplementedError
1329 raise NotImplementedError
1332
1330
1333 @LazyProperty
1331 @LazyProperty
1334 def removed(self):
1332 def removed(self):
1335 """
1333 """
1336 Returns list of removed ``FileNode`` objects.
1334 Returns list of removed ``FileNode`` objects.
1337 """
1335 """
1338 raise NotImplementedError
1336 raise NotImplementedError
1339
1337
1340 @LazyProperty
1338 @LazyProperty
1341 def size(self):
1339 def size(self):
1342 """
1340 """
1343 Returns total number of bytes from contents of all filenodes.
1341 Returns total number of bytes from contents of all filenodes.
1344 """
1342 """
1345 return sum(node.size for node in self.get_filenodes_generator())
1343 return sum(node.size for node in self.get_filenodes_generator())
1346
1344
1347 def walk(self, topurl=''):
1345 def walk(self, topurl=''):
1348 """
1346 """
1349 Similar to the os.walk method. Instead of the filesystem, it walks through
1347 Similar to the os.walk method. Instead of the filesystem, it walks through
1350 the commit, starting at the given ``topurl``. Returns a generator of tuples
1348 the commit, starting at the given ``topurl``. Returns a generator of tuples
1351 (top_node, dirnodes, filenodes).
1349 (top_node, dirnodes, filenodes).
1352 """
1350 """
1353 from rhodecode.lib.vcs.nodes import DirNode
1351 from rhodecode.lib.vcs.nodes import DirNode
1354
1352
1355 if isinstance(topurl, DirNode):
1353 if isinstance(topurl, DirNode):
1356 top_node = topurl
1354 top_node = topurl
1357 else:
1355 else:
1358 top_node = self.get_node(topurl)
1356 top_node = self.get_node(topurl)
1359
1357
1360 has_default_pre_load = False
1358 has_default_pre_load = False
1361 if isinstance(top_node, DirNode):
1359 if isinstance(top_node, DirNode):
1362 # used to inject as we walk same defaults as given top_node
1360 # used to inject as we walk same defaults as given top_node
1363 default_pre_load = top_node.default_pre_load
1361 default_pre_load = top_node.default_pre_load
1364 has_default_pre_load = True
1362 has_default_pre_load = True
1365
1363
1366 if not top_node.is_dir():
1364 if not top_node.is_dir():
1367 return
1365 return
1368 yield top_node, top_node.dirs, top_node.files
1366 yield top_node, top_node.dirs, top_node.files
1369 for dir_node in top_node.dirs:
1367 for dir_node in top_node.dirs:
1370 if has_default_pre_load:
1368 if has_default_pre_load:
1371 dir_node.default_pre_load = default_pre_load
1369 dir_node.default_pre_load = default_pre_load
1372 yield from self.walk(dir_node)
1370 yield from self.walk(dir_node)
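# Illustrative sketch mirroring os.walk usage (assumes `commit` is a concrete
# BaseCommit): count files per directory starting at the repository root.
file_counts = {}
for top_node, dir_nodes, file_nodes in commit.walk(''):
    file_counts[top_node.path] = len(file_nodes)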
1373
1371
1374 def get_filenodes_generator(self):
1372 def get_filenodes_generator(self):
1375 """
1373 """
1376 Returns generator that yields *all* file nodes.
1374 Returns generator that yields *all* file nodes.
1377 """
1375 """
1378 for topnode, dirs, files in self.walk():
1376 for topnode, dirs, files in self.walk():
1379 yield from files
1377 yield from files
1380
1378
1381 #
1379 #
1382 # Utilities for sub classes to support consistent behavior
1380 # Utilities for sub classes to support consistent behavior
1383 #
1381 #
1384
1382
1385 def no_node_at_path(self, path):
1383 def no_node_at_path(self, path):
1386 return NodeDoesNotExistError(
1384 return NodeDoesNotExistError(
1387 f"There is no file nor directory at the given path: "
1385 f"There is no file nor directory at the given path: "
1388 f"`{safe_str(path)}` at commit {self.short_id}")
1386 f"`{safe_str(path)}` at commit {self.short_id}")
1389
1387
1390 def _fix_path(self, path: str) -> str:
1388 def _fix_path(self, path: str) -> str:
1391 """
1389 """
1392 Paths are stored without a trailing slash, so we need to get rid of it if
1390 Paths are stored without a trailing slash, so we need to get rid of it if
1393 needed.
1391 needed.
1394 """
1392 """
1395 return safe_str(path).rstrip('/')
1393 return safe_str(path).rstrip('/')
1396
1394
1397 #
1395 #
1398 # Deprecated API based on changesets
1396 # Deprecated API based on changesets
1399 #
1397 #
1400
1398
1401 @property
1399 @property
1402 def revision(self):
1400 def revision(self):
1403 warnings.warn("Use idx instead", DeprecationWarning)
1401 warnings.warn("Use idx instead", DeprecationWarning)
1404 return self.idx
1402 return self.idx
1405
1403
1406 @revision.setter
1404 @revision.setter
1407 def revision(self, value):
1405 def revision(self, value):
1408 warnings.warn("Use idx instead", DeprecationWarning)
1406 warnings.warn("Use idx instead", DeprecationWarning)
1409 self.idx = value
1407 self.idx = value
1410
1408
1411 def get_file_changeset(self, path):
1409 def get_file_changeset(self, path):
1412 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1410 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1413 return self.get_path_commit(path)
1411 return self.get_path_commit(path)
1414
1412
1415
1413
1416 class BaseChangesetClass(type):
1414 class BaseChangesetClass(type):
1417
1415
1418 def __instancecheck__(self, instance):
1416 def __instancecheck__(self, instance):
1419 return isinstance(instance, BaseCommit)
1417 return isinstance(instance, BaseCommit)
1420
1418
1421
1419
1422 class BaseChangeset(BaseCommit, metaclass=BaseChangesetClass):
1420 class BaseChangeset(BaseCommit, metaclass=BaseChangesetClass):
1423
1421
1424 def __new__(cls, *args, **kwargs):
1422 def __new__(cls, *args, **kwargs):
1425 warnings.warn(
1423 warnings.warn(
1426 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1424 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1427 return super().__new__(cls, *args, **kwargs)
1425 return super().__new__(cls, *args, **kwargs)
1428
1426
1429
1427
1430 class BaseInMemoryCommit(object):
1428 class BaseInMemoryCommit(object):
1431 """
1429 """
1432 Represents differences between repository's state (most recent head) and
1430 Represents differences between repository's state (most recent head) and
1433 changes made *in place*.
1431 changes made *in place*.
1434
1432
1435 **Attributes**
1433 **Attributes**
1436
1434
1437 ``repository``
1435 ``repository``
1438 repository object for this in-memory-commit
1436 repository object for this in-memory-commit
1439
1437
1440 ``added``
1438 ``added``
1441 list of ``FileNode`` objects marked as *added*
1439 list of ``FileNode`` objects marked as *added*
1442
1440
1443 ``changed``
1441 ``changed``
1444 list of ``FileNode`` objects marked as *changed*
1442 list of ``FileNode`` objects marked as *changed*
1445
1443
1446 ``removed``
1444 ``removed``
1447 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1445 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1448 *removed*
1446 *removed*
1449
1447
1450 ``parents``
1448 ``parents``
1451 list of :class:`BaseCommit` instances representing parents of
1449 list of :class:`BaseCommit` instances representing parents of
1452 in-memory commit. Should always be a 2-element sequence.
1450 in-memory commit. Should always be a 2-element sequence.
1453
1451
1454 """
1452 """
1455
1453
1456 def __init__(self, repository):
1454 def __init__(self, repository):
1457 self.repository = repository
1455 self.repository = repository
1458 self.added = []
1456 self.added = []
1459 self.changed = []
1457 self.changed = []
1460 self.removed = []
1458 self.removed = []
1461 self.parents = []
1459 self.parents = []
1462
1460
1463 def add(self, *filenodes):
1461 def add(self, *filenodes):
1464 """
1462 """
1465 Marks given ``FileNode`` objects as *to be committed*.
1463 Marks given ``FileNode`` objects as *to be committed*.
1466
1464
1467 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1465 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1468 latest commit
1466 latest commit
1469 :raises ``NodeAlreadyAddedError``: if node with same path is already
1467 :raises ``NodeAlreadyAddedError``: if node with same path is already
1470 marked as *added*
1468 marked as *added*
1471 """
1469 """
1472 # Check if not already marked as *added* first
1470 # Check if not already marked as *added* first
1473 for node in filenodes:
1471 for node in filenodes:
1474 if node.path in (n.path for n in self.added):
1472 if node.path in (n.path for n in self.added):
1475 raise NodeAlreadyAddedError(
1473 raise NodeAlreadyAddedError(
1476 "Such FileNode %s is already marked for addition"
1474 "Such FileNode %s is already marked for addition"
1477 % node.path)
1475 % node.path)
1478 for node in filenodes:
1476 for node in filenodes:
1479 self.added.append(node)
1477 self.added.append(node)
1480
1478
1481 def change(self, *filenodes):
1479 def change(self, *filenodes):
1482 """
1480 """
1483 Marks given ``FileNode`` objects to be *changed* in next commit.
1481 Marks given ``FileNode`` objects to be *changed* in next commit.
1484
1482
1485 :raises ``EmptyRepositoryError``: if there are no commits yet
1483 :raises ``EmptyRepositoryError``: if there are no commits yet
1486 :raises ``NodeAlreadyExistsError``: if node with same path is already
1484 :raises ``NodeAlreadyExistsError``: if node with same path is already
1487 marked to be *changed*
1485 marked to be *changed*
1488 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1486 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1489 marked to be *removed*
1487 marked to be *removed*
1490 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1488 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1491 commit
1489 commit
1492 :raises ``NodeNotChangedError``: if node hasn't really been changed
1490 :raises ``NodeNotChangedError``: if node hasn't really been changed
1493 """
1491 """
1494 for node in filenodes:
1492 for node in filenodes:
1495 if node.path in (n.path for n in self.removed):
1493 if node.path in (n.path for n in self.removed):
1496 raise NodeAlreadyRemovedError(
1494 raise NodeAlreadyRemovedError(
1497 "Node at %s is already marked as removed" % node.path)
1495 "Node at %s is already marked as removed" % node.path)
1498 try:
1496 try:
1499 self.repository.get_commit()
1497 self.repository.get_commit()
1500 except EmptyRepositoryError:
1498 except EmptyRepositoryError:
1501 raise EmptyRepositoryError(
1499 raise EmptyRepositoryError(
1502 "Nothing to change - try to *add* new nodes rather than "
1500 "Nothing to change - try to *add* new nodes rather than "
1503 "changing them")
1501 "changing them")
1504 for node in filenodes:
1502 for node in filenodes:
1505 if node.path in (n.path for n in self.changed):
1503 if node.path in (n.path for n in self.changed):
1506 raise NodeAlreadyChangedError(
1504 raise NodeAlreadyChangedError(
1507 "Node at '%s' is already marked as changed" % node.path)
1505 "Node at '%s' is already marked as changed" % node.path)
1508 self.changed.append(node)
1506 self.changed.append(node)
1509
1507
1510 def remove(self, *filenodes):
1508 def remove(self, *filenodes):
1511 """
1509 """
1512 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1510 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1513 *removed* in next commit.
1511 *removed* in next commit.
1514
1512
1515 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1513 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1516 be *removed*
1514 be *removed*
1517 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1515 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1518 be *changed*
1516 be *changed*
1519 """
1517 """
1520 for node in filenodes:
1518 for node in filenodes:
1521 if node.path in (n.path for n in self.removed):
1519 if node.path in (n.path for n in self.removed):
1522 raise NodeAlreadyRemovedError(
1520 raise NodeAlreadyRemovedError(
1523 "Node is already marked to for removal at %s" % node.path)
1521 "Node is already marked to for removal at %s" % node.path)
1524 if node.path in (n.path for n in self.changed):
1522 if node.path in (n.path for n in self.changed):
1525 raise NodeAlreadyChangedError(
1523 raise NodeAlreadyChangedError(
1526 "Node is already marked to be changed at %s" % node.path)
1524 "Node is already marked to be changed at %s" % node.path)
1527 # We only mark node as *removed* - real removal is done by
1525 # We only mark node as *removed* - real removal is done by
1528 # commit method
1526 # commit method
1529 self.removed.append(node)
1527 self.removed.append(node)
1530
1528
1531 def reset(self):
1529 def reset(self):
1532 """
1530 """
1533 Resets this instance to initial state (cleans ``added``, ``changed``
1531 Resets this instance to initial state (cleans ``added``, ``changed``
1534 and ``removed`` lists).
1532 and ``removed`` lists).
1535 """
1533 """
1536 self.added = []
1534 self.added = []
1537 self.changed = []
1535 self.changed = []
1538 self.removed = []
1536 self.removed = []
1539 self.parents = []
1537 self.parents = []
1540
1538
1541 def get_ipaths(self):
1539 def get_ipaths(self):
1542 """
1540 """
1543 Returns generator of paths from nodes marked as added, changed or
1541 Returns generator of paths from nodes marked as added, changed or
1544 removed.
1542 removed.
1545 """
1543 """
1546 for node in itertools.chain(self.added, self.changed, self.removed):
1544 for node in itertools.chain(self.added, self.changed, self.removed):
1547 yield node.path
1545 yield node.path
1548
1546
1549 def get_paths(self):
1547 def get_paths(self):
1550 """
1548 """
1551 Returns list of paths from nodes marked as added, changed or removed.
1549 Returns list of paths from nodes marked as added, changed or removed.
1552 """
1550 """
1553 return list(self.get_ipaths())
1551 return list(self.get_ipaths())
1554
1552
1555 def check_integrity(self, parents=None):
1553 def check_integrity(self, parents=None):
1556 """
1554 """
1557 Checks in-memory commit's integrity. Also, sets parents if not
1555 Checks in-memory commit's integrity. Also, sets parents if not
1558 already set.
1556 already set.
1559
1557
1560 :raises CommitError: if any error occurs (i.e.
1558 :raises CommitError: if any error occurs (i.e.
1561 ``NodeDoesNotExistError``).
1559 ``NodeDoesNotExistError``).
1562 """
1560 """
1563 if not self.parents:
1561 if not self.parents:
1564 parents = parents or []
1562 parents = parents or []
1565 if len(parents) == 0:
1563 if len(parents) == 0:
1566 try:
1564 try:
1567 parents = [self.repository.get_commit(), None]
1565 parents = [self.repository.get_commit(), None]
1568 except EmptyRepositoryError:
1566 except EmptyRepositoryError:
1569 parents = [None, None]
1567 parents = [None, None]
1570 elif len(parents) == 1:
1568 elif len(parents) == 1:
1571 parents += [None]
1569 parents += [None]
1572 self.parents = parents
1570 self.parents = parents
1573
1571
1574 # Local parents, only if not None
1572 # Local parents, only if not None
1575 parents = [p for p in self.parents if p]
1573 parents = [p for p in self.parents if p]
1576
1574
1577 # Check nodes marked as added
1575 # Check nodes marked as added
1578 for p in parents:
1576 for p in parents:
1579 for node in self.added:
1577 for node in self.added:
1580 try:
1578 try:
1581 p.get_node(node.path)
1579 p.get_node(node.path)
1582 except NodeDoesNotExistError:
1580 except NodeDoesNotExistError:
1583 pass
1581 pass
1584 else:
1582 else:
1585 raise NodeAlreadyExistsError(
1583 raise NodeAlreadyExistsError(
1586 "Node `{}` already exists at {}".format(node.path, p))
1584 f"Node `{node.path}` already exists at {p}")
1587
1585
1588 # Check nodes marked as changed
1586 # Check nodes marked as changed
1589 missing = set(self.changed)
1587 missing = set(self.changed)
1590 not_changed = set(self.changed)
1588 not_changed = set(self.changed)
1591 if self.changed and not parents:
1589 if self.changed and not parents:
1592 raise NodeDoesNotExistError(str(self.changed[0].path))
1590 raise NodeDoesNotExistError(str(self.changed[0].path))
1593 for p in parents:
1591 for p in parents:
1594 for node in self.changed:
1592 for node in self.changed:
1595 try:
1593 try:
1596 old = p.get_node(node.path)
1594 old = p.get_node(node.path)
1597 missing.remove(node)
1595 missing.remove(node)
1598 # if content actually changed, remove node from not_changed
1596 # if content actually changed, remove node from not_changed
1599 if old.content != node.content:
1597 if old.content != node.content:
1600 not_changed.remove(node)
1598 not_changed.remove(node)
1601 except NodeDoesNotExistError:
1599 except NodeDoesNotExistError:
1602 pass
1600 pass
1603 if self.changed and missing:
1601 if self.changed and missing:
1604 raise NodeDoesNotExistError(
1602 raise NodeDoesNotExistError(
1605 "Node `%s` marked as modified but missing in parents: %s"
1603 "Node `%s` marked as modified but missing in parents: %s"
1606 % (node.path, parents))
1604 % (node.path, parents))
1607
1605
1608 if self.changed and not_changed:
1606 if self.changed and not_changed:
1609 raise NodeNotChangedError(
1607 raise NodeNotChangedError(
1610 "Node `%s` wasn't actually changed (parents: %s)"
1608 "Node `%s` wasn't actually changed (parents: %s)"
1611 % (not_changed.pop().path, parents))
1609 % (not_changed.pop().path, parents))
1612
1610
1613 # Check nodes marked as removed
1611 # Check nodes marked as removed
1614 if self.removed and not parents:
1612 if self.removed and not parents:
1615 raise NodeDoesNotExistError(
1613 raise NodeDoesNotExistError(
1616 "Cannot remove node at %s as there "
1614 "Cannot remove node at %s as there "
1617 "were no parents specified" % self.removed[0].path)
1615 "were no parents specified" % self.removed[0].path)
1618 really_removed = set()
1616 really_removed = set()
1619 for p in parents:
1617 for p in parents:
1620 for node in self.removed:
1618 for node in self.removed:
1621 try:
1619 try:
1622 p.get_node(node.path)
1620 p.get_node(node.path)
1623 really_removed.add(node)
1621 really_removed.add(node)
1624 except CommitError:
1622 except CommitError:
1625 pass
1623 pass
1626 not_removed = set(self.removed) - really_removed
1624 not_removed = set(self.removed) - really_removed
1627 if not_removed:
1625 if not_removed:
1628 # TODO: johbo: This code branch does not seem to be covered
1626 # TODO: johbo: This code branch does not seem to be covered
1629 raise NodeDoesNotExistError(
1627 raise NodeDoesNotExistError(
1630 "Cannot remove node at %s from "
1628 "Cannot remove node at %s from "
1631 "following parents: %s" % (not_removed, parents))
1629 "following parents: %s" % (not_removed, parents))
1632
1630
1633 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1631 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1634 """
1632 """
1635 Performs in-memory commit (doesn't check workdir in any way) and
1633 Performs in-memory commit (doesn't check workdir in any way) and
1636 returns the newly created :class:`BaseCommit`. Updates the repository's
1634 returns the newly created :class:`BaseCommit`. Updates the repository's
1637 attribute `commits`.
1635 attribute `commits`.
1638
1636
1639 .. note::
1637 .. note::
1640
1638
1641 While overriding this method, each backend should call
1639 While overriding this method, each backend should call
1642 ``self.check_integrity(parents)`` first.
1640 ``self.check_integrity(parents)`` first.
1643
1641
1644 :param message: message of the commit
1642 :param message: message of the commit
1645 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1643 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1646 :param parents: single parent or sequence of parents from which commit
1644 :param parents: single parent or sequence of parents from which commit
1647 would be derived
1645 would be derived
1648 :param date: ``datetime.datetime`` instance. Defaults to
1646 :param date: ``datetime.datetime`` instance. Defaults to
1649 ``datetime.datetime.now()``.
1647 ``datetime.datetime.now()``.
1650 :param branch: branch name, as string. If none given, default backend's
1648 :param branch: branch name, as string. If none given, default backend's
1651 branch would be used.
1649 branch would be used.
1652
1650
1653 :raises ``CommitError``: if any error occurs while committing
1651 :raises ``CommitError``: if any error occurs while committing
1654 """
1652 """
1655 raise NotImplementedError
1653 raise NotImplementedError
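# Illustrative end-to-end sketch of the in-memory commit workflow (assumes a
# concrete backend repository `repo` exposing `in_memory_commit`, and that
# FileNode from rhodecode.lib.vcs.nodes accepts a bytes path and content;
# the exact FileNode signature may differ between versions):
from rhodecode.lib.vcs.nodes import FileNode

imc = repo.in_memory_commit
imc.add(FileNode(b'docs/intro.rst', content=b'Hello'))
new_commit = imc.commit(
    message='Add intro docs',
    author='Joe Doe <joe.doe@example.com>',
)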
1656
1654
1657
1655
1658 class BaseInMemoryChangesetClass(type):
1656 class BaseInMemoryChangesetClass(type):
1659
1657
1660 def __instancecheck__(self, instance):
1658 def __instancecheck__(self, instance):
1661 return isinstance(instance, BaseInMemoryCommit)
1659 return isinstance(instance, BaseInMemoryCommit)
1662
1660
1663
1661
1664 class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass):
1662 class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass):
1665
1663
1666 def __new__(cls, *args, **kwargs):
1664 def __new__(cls, *args, **kwargs):
1667 warnings.warn(
1665 warnings.warn(
1668 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1666 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1669 return super().__new__(cls, *args, **kwargs)
1667 return super().__new__(cls, *args, **kwargs)
1670
1668
1671
1669
1672 class EmptyCommit(BaseCommit):
1670 class EmptyCommit(BaseCommit):
1673 """
1671 """
1674 A dummy empty commit. It's possible to pass a hash when creating
1672 A dummy empty commit. It's possible to pass a hash when creating
1675 an EmptyCommit
1673 an EmptyCommit
1676 """
1674 """
1677
1675
1678 def __init__(
1676 def __init__(
1679 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1677 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1680 message='', author='', date=None):
1678 message='', author='', date=None):
1681 self._empty_commit_id = commit_id
1679 self._empty_commit_id = commit_id
1682 # TODO: johbo: Solve idx parameter, default value does not make
1680 # TODO: johbo: Solve idx parameter, default value does not make
1683 # too much sense
1681 # too much sense
1684 self.idx = idx
1682 self.idx = idx
1685 self.message = message
1683 self.message = message
1686 self.author = author
1684 self.author = author
1687 self.date = date or datetime.datetime.fromtimestamp(0)
1685 self.date = date or datetime.datetime.fromtimestamp(0)
1688 self.repository = repo
1686 self.repository = repo
1689 self.alias = alias
1687 self.alias = alias
1690
1688
1691 @LazyProperty
1689 @LazyProperty
1692 def raw_id(self):
1690 def raw_id(self):
1693 """
1691 """
1694 Returns raw string identifying this commit, useful for web
1692 Returns raw string identifying this commit, useful for web
1695 representation.
1693 representation.
1696 """
1694 """
1697
1695
1698 return self._empty_commit_id
1696 return self._empty_commit_id
1699
1697
1700 @LazyProperty
1698 @LazyProperty
1701 def branch(self):
1699 def branch(self):
1702 if self.alias:
1700 if self.alias:
1703 from rhodecode.lib.vcs.backends import get_backend
1701 from rhodecode.lib.vcs.backends import get_backend
1704 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1702 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1705
1703
1706 @LazyProperty
1704 @LazyProperty
1707 def short_id(self):
1705 def short_id(self):
1708 return self.raw_id[:12]
1706 return self.raw_id[:12]
1709
1707
1710 @LazyProperty
1708 @LazyProperty
1711 def id(self):
1709 def id(self):
1712 return self.raw_id
1710 return self.raw_id
1713
1711
1714 def get_path_commit(self, path, pre_load=None):
1712 def get_path_commit(self, path, pre_load=None):
1715 return self
1713 return self
1716
1714
1717 def get_file_content(self, path) -> bytes:
1715 def get_file_content(self, path) -> bytes:
1718 return b''
1716 return b''
1719
1717
1720 def get_file_content_streamed(self, path):
1718 def get_file_content_streamed(self, path):
1721 yield self.get_file_content(path)
1719 yield self.get_file_content(path)
1722
1720
1723 def get_file_size(self, path):
1721 def get_file_size(self, path):
1724 return 0
1722 return 0
1725
1723
1726
1724
1727 class EmptyChangesetClass(type):
1725 class EmptyChangesetClass(type):
1728
1726
1729 def __instancecheck__(self, instance):
1727 def __instancecheck__(self, instance):
1730 return isinstance(instance, EmptyCommit)
1728 return isinstance(instance, EmptyCommit)
1731
1729
1732
1730
1733 class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass):
1731 class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass):
1734
1732
1735 def __new__(cls, *args, **kwargs):
1733 def __new__(cls, *args, **kwargs):
1736 warnings.warn(
1734 warnings.warn(
1737 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1735 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1738 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1736 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1739
1737
1740 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1738 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1741 alias=None, revision=-1, message='', author='', date=None):
1739 alias=None, revision=-1, message='', author='', date=None):
1742 if requested_revision is not None:
1740 if requested_revision is not None:
1743 warnings.warn(
1741 warnings.warn(
1744 "Parameter requested_revision not supported anymore",
1742 "Parameter requested_revision not supported anymore",
1745 DeprecationWarning)
1743 DeprecationWarning)
1746 super().__init__(
1744 super().__init__(
1747 commit_id=cs, repo=repo, alias=alias, idx=revision,
1745 commit_id=cs, repo=repo, alias=alias, idx=revision,
1748 message=message, author=author, date=date)
1746 message=message, author=author, date=date)
1749
1747
1750 @property
1748 @property
1751 def revision(self):
1749 def revision(self):
1752 warnings.warn("Use idx instead", DeprecationWarning)
1750 warnings.warn("Use idx instead", DeprecationWarning)
1753 return self.idx
1751 return self.idx
1754
1752
1755 @revision.setter
1753 @revision.setter
1756 def revision(self, value):
1754 def revision(self, value):
1757 warnings.warn("Use idx instead", DeprecationWarning)
1755 warnings.warn("Use idx instead", DeprecationWarning)
1758 self.idx = value
1756 self.idx = value
1759
1757
1760
1758
1761 class EmptyRepository(BaseRepository):
1759 class EmptyRepository(BaseRepository):
1762 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1760 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1763 pass
1761 pass
1764
1762
1765 def get_diff(self, *args, **kwargs):
1763 def get_diff(self, *args, **kwargs):
1766 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1764 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1767 return GitDiff(b'')
1765 return GitDiff(b'')
1768
1766
1769
1767
1770 class CollectionGenerator(object):
1768 class CollectionGenerator(object):
1771
1769
1772 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1770 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1773 self.repo = repo
1771 self.repo = repo
1774 self.commit_ids = commit_ids
1772 self.commit_ids = commit_ids
1775 self.collection_size = collection_size
1773 self.collection_size = collection_size
1776 self.pre_load = pre_load
1774 self.pre_load = pre_load
1777 self.translate_tag = translate_tag
1775 self.translate_tag = translate_tag
1778
1776
1779 def __len__(self):
1777 def __len__(self):
1780 if self.collection_size is not None:
1778 if self.collection_size is not None:
1781 return self.collection_size
1779 return self.collection_size
1782 return self.commit_ids.__len__()
1780 return self.commit_ids.__len__()
1783
1781
1784 def __iter__(self):
1782 def __iter__(self):
1785 for commit_id in self.commit_ids:
1783 for commit_id in self.commit_ids:
1786 # TODO: johbo: Mercurial passes in commit indices or commit ids
1784 # TODO: johbo: Mercurial passes in commit indices or commit ids
1787 yield self._commit_factory(commit_id)
1785 yield self._commit_factory(commit_id)
1788
1786
1789 def _commit_factory(self, commit_id):
1787 def _commit_factory(self, commit_id):
1790 """
1788 """
1791 Allows backends to override the way commits are generated.
1789 Allows backends to override the way commits are generated.
1792 """
1790 """
1793 return self.repo.get_commit(
1791 return self.repo.get_commit(
1794 commit_id=commit_id, pre_load=self.pre_load,
1792 commit_id=commit_id, pre_load=self.pre_load,
1795 translate_tag=self.translate_tag)
1793 translate_tag=self.translate_tag)
1796
1794
1797 def __getitem__(self, key):
1795 def __getitem__(self, key):
1798 """Return either a single element by index, or a sliced collection."""
1796 """Return either a single element by index, or a sliced collection."""
1799
1797
1800 if isinstance(key, slice):
1798 if isinstance(key, slice):
1801 commit_ids = self.commit_ids[key.start:key.stop]
1799 commit_ids = self.commit_ids[key.start:key.stop]
1802
1800
1803 else:
1801 else:
1804 # single item
1802 # single item
1805 commit_ids = self.commit_ids[key]
1803 commit_ids = self.commit_ids[key]
1806
1804
1807 return self.__class__(
1805 return self.__class__(
1808 self.repo, commit_ids, pre_load=self.pre_load,
1806 self.repo, commit_ids, pre_load=self.pre_load,
1809 translate_tag=self.translate_tag)
1807 translate_tag=self.translate_tag)
1810
1808
1811 def __repr__(self):
1809 def __repr__(self):
1812 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1810 return '<CollectionGenerator[len:%s]>' % (self.__len__())
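# Illustrative sketch (assumes a concrete backend repository `repo`): the
# collection is lazy, sized and sliceable without materializing commits.
commits = CollectionGenerator(repo, repo.commit_ids, pre_load=['message'])
print(len(commits))           # collection_size, or len(commit_ids)
for commit in commits[:10]:   # slicing returns another lazy collection
    print(commit.short_id, commit.message)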
1813
1811
1814
1812
1815 class Config(object):
1813 class Config(object):
1816 """
1814 """
1817 Represents the configuration for a repository.
1815 Represents the configuration for a repository.
1818
1816
1819 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1817 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1820 standard library. It implements only the needed subset.
1818 standard library. It implements only the needed subset.
1821 """
1819 """
1822
1820
1823 def __init__(self):
1821 def __init__(self):
1824 self._values = {}
1822 self._values = {}
1825
1823
1826 def copy(self):
1824 def copy(self):
1827 clone = Config()
1825 clone = Config()
1828 for section, values in self._values.items():
1826 for section, values in self._values.items():
1829 clone._values[section] = values.copy()
1827 clone._values[section] = values.copy()
1830 return clone
1828 return clone
1831
1829
1832 def __repr__(self):
1830 def __repr__(self):
1833 return '<Config({} sections) at {}>'.format(
1831 return '<Config({} sections) at {}>'.format(
1834 len(self._values), hex(id(self)))
1832 len(self._values), hex(id(self)))
1835
1833
1836 def items(self, section):
1834 def items(self, section):
1837 return self._values.get(section, {}).items()
1835 return self._values.get(section, {}).items()
1838
1836
1839 def get(self, section, option):
1837 def get(self, section, option):
1840 return self._values.get(section, {}).get(option)
1838 return self._values.get(section, {}).get(option)
1841
1839
1842 def set(self, section, option, value):
1840 def set(self, section, option, value):
1843 section_values = self._values.setdefault(section, {})
1841 section_values = self._values.setdefault(section, {})
1844 section_values[option] = value
1842 section_values[option] = value
1845
1843
1846 def clear_section(self, section):
1844 def clear_section(self, section):
1847 self._values[section] = {}
1845 self._values[section] = {}
1848
1846
1849 def serialize(self):
1847 def serialize(self):
1850 """
1848 """
1851 Creates a list of three-element tuples (section, option, value) representing
1849 Creates a list of three-element tuples (section, option, value) representing
1852 this config object.
1850 this config object.
1853 """
1851 """
1854 items = []
1852 items = []
1855 for section in self._values:
1853 for section in self._values:
1856 for option, value in self._values[section].items():
1854 for option, value in self._values[section].items():
1857 items.append(
1855 items.append(
1858 (safe_str(section), safe_str(option), safe_str(value)))
1856 (safe_str(section), safe_str(option), safe_str(value)))
1859 return items
1857 return items
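# Runnable sketch of the Config API defined above:
config = Config()
config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
assert config.get('ui', 'username') == 'Joe Doe <joe.doe@example.com>'
clone = config.copy()  # section dicts are copied, so clones are independent
clone.set('ui', 'username', 'Jane Doe <jane.doe@example.com>')
assert config.get('ui', 'username') != clone.get('ui', 'username')
print(config.serialize())  # [('ui', 'username', 'Joe Doe <joe.doe@example.com>')]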
1860
1858
1861
1859
1862 class Diff(object):
1860 class Diff(object):
1863 """
1861 """
1864 Represents a diff result from a repository backend.
1862 Represents a diff result from a repository backend.
1865
1863
1866 Subclasses have to provide a backend specific value for
1864 Subclasses have to provide a backend specific value for
1867 :attr:`_header_re` and :attr:`_meta_re`.
1865 :attr:`_header_re` and :attr:`_meta_re`.
1868 """
1866 """
1869 _meta_re = None
1867 _meta_re = None
1870 _header_re: re.Pattern[bytes] = re.compile(br"")
1868 _header_re: re.Pattern[bytes] = re.compile(br"")
1871
1869
1872 def __init__(self, raw_diff: bytes):
1870 def __init__(self, raw_diff: bytes):
1873 if not isinstance(raw_diff, bytes):
1871 if not isinstance(raw_diff, bytes):
1874 raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}')
1872 raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}')
1875
1873
1876 self.raw = memoryview(raw_diff)
1874 self.raw = memoryview(raw_diff)
1877
1875
1878 def get_header_re(self):
1876 def get_header_re(self):
1879 return self._header_re
1877 return self._header_re
1880
1878
1881 def chunks(self):
1879 def chunks(self):
1882 """
1880 """
1883 split the diff in chunks of separate --git a/file b/file chunks
1881 split the diff in chunks of separate --git a/file b/file chunks
1884 to make diffs consistent we must prepend with \n, and make sure
1882 to make diffs consistent we must prepend with \n, and make sure
1885 we can detect last chunk as this was also has special rule
1883 we can detect last chunk as this was also has special rule
1886 """
1884 """
1887
1885
1888 diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git')
1886 diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git')
1889
1887
1890 chunks = diff_parts[1:]
1888 chunks = diff_parts[1:]
1891 total_chunks = len(chunks)
1889 total_chunks = len(chunks)
1892
1890
1893 def diff_iter(_chunks):
1891 def diff_iter(_chunks):
1894 for cur_chunk, chunk in enumerate(_chunks, start=1):
1892 for cur_chunk, chunk in enumerate(_chunks, start=1):
1895 yield DiffChunk(chunk, self, cur_chunk == total_chunks)
1893 yield DiffChunk(chunk, self, cur_chunk == total_chunks)
1896 return diff_iter(chunks)
1894 return diff_iter(chunks)
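# Illustrative sketch (assumes `change` is a Diff from a concrete backend,
# e.g. repo.get_diff(...), whose _header_re actually matches the chunk
# headers; the base class pattern above is only a placeholder):
for chunk in change.chunks():
    print(chunk.header_as_str)   # parsed header groups, e.g. file paths
    print(len(chunk.diff))       # body of this file's diff, without header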
1897
1895
1898
1896
1899 class DiffChunk(object):
1897 class DiffChunk(object):
1900
1898
1901 def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool):
1899 def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool):
1902 self.diff_obj = diff_obj
1900 self.diff_obj = diff_obj
1903
1901
1904 # since we split by \ndiff --git that part is lost from original diff
1902 # since we split by \ndiff --git that part is lost from original diff
1905 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1903 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1906 if not is_last_chunk:
1904 if not is_last_chunk:
1907 chunk += b'\n'
1905 chunk += b'\n'
1908 header_re = self.diff_obj.get_header_re()
1906 header_re = self.diff_obj.get_header_re()
1909 match = header_re.match(chunk)
1907 match = header_re.match(chunk)
1910 self.header = match.groupdict()
1908 self.header = match.groupdict()
1911 self.diff = chunk[match.end():]
1909 self.diff = chunk[match.end():]
1912 self.raw = chunk
1910 self.raw = chunk
1913
1911
1914 @property
1912 @property
1915 def header_as_str(self):
1913 def header_as_str(self):
1916 if self.header:
1914 if self.header:
1917 def safe_str_on_bytes(val):
1915 def safe_str_on_bytes(val):
1918 if isinstance(val, bytes):
1916 if isinstance(val, bytes):
1919 return safe_str(val)
1917 return safe_str(val)
1920 return val
1918 return val
1921 return {safe_str(k): safe_str_on_bytes(v) for k, v in self.header.items()}
1919 return {safe_str(k): safe_str_on_bytes(v) for k, v in self.header.items()}
1922
1920
1923 def __repr__(self):
1921 def __repr__(self):
1924 return f'DiffChunk({self.header_as_str})'
1922 return f'DiffChunk({self.header_as_str})'
1925
1923
1926
1924
1927 class BasePathPermissionChecker(object):
1925 class BasePathPermissionChecker(object):
1928
1926
1929 @staticmethod
1927 @staticmethod
1930 def create_from_patterns(includes, excludes):
1928 def create_from_patterns(includes, excludes):
1931 if includes and '*' in includes and not excludes:
1929 if includes and '*' in includes and not excludes:
1932 return AllPathPermissionChecker()
1930 return AllPathPermissionChecker()
1933 elif excludes and '*' in excludes:
1931 elif excludes and '*' in excludes:
1934 return NonePathPermissionChecker()
1932 return NonePathPermissionChecker()
1935 else:
1933 else:
1936 return PatternPathPermissionChecker(includes, excludes)
1934 return PatternPathPermissionChecker(includes, excludes)
1937
1935
1938 @property
1936 @property
1939 def has_full_access(self):
1937 def has_full_access(self):
1940 raise NotImplementedError()
1938 raise NotImplementedError()
1941
1939
1942 def has_access(self, path):
1940 def has_access(self, path):
1943 raise NotImplementedError()
1941 raise NotImplementedError()
1944
1942
1945
1943
1946 class AllPathPermissionChecker(BasePathPermissionChecker):
1944 class AllPathPermissionChecker(BasePathPermissionChecker):
1947
1945
1948 @property
1946 @property
1949 def has_full_access(self):
1947 def has_full_access(self):
1950 return True
1948 return True
1951
1949
1952 def has_access(self, path):
1950 def has_access(self, path):
1953 return True
1951 return True
1954
1952
1955
1953
1956 class NonePathPermissionChecker(BasePathPermissionChecker):
1954 class NonePathPermissionChecker(BasePathPermissionChecker):
1957
1955
1958 @property
1956 @property
1959 def has_full_access(self):
1957 def has_full_access(self):
1960 return False
1958 return False
1961
1959
1962 def has_access(self, path):
1960 def has_access(self, path):
1963 return False
1961 return False
1964
1962
1965
1963
1966 class PatternPathPermissionChecker(BasePathPermissionChecker):
1964 class PatternPathPermissionChecker(BasePathPermissionChecker):
1967
1965
1968 def __init__(self, includes, excludes):
1966 def __init__(self, includes, excludes):
1969 self.includes = includes
1967 self.includes = includes
1970 self.excludes = excludes
1968 self.excludes = excludes
1971 self.includes_re = [] if not includes else [
1969 self.includes_re = [] if not includes else [
1972 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1970 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1973 self.excludes_re = [] if not excludes else [
1971 self.excludes_re = [] if not excludes else [
1974 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1972 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1975
1973
1976 @property
1974 @property
1977 def has_full_access(self):
1975 def has_full_access(self):
1978 return '*' in self.includes and not self.excludes
1976 return '*' in self.includes and not self.excludes
1979
1977
1980 def has_access(self, path):
1978 def has_access(self, path):
1981 for regex in self.excludes_re:
1979 for regex in self.excludes_re:
1982 if regex.match(path):
1980 if regex.match(path):
1983 return False
1981 return False
1984 for regex in self.includes_re:
1982 for regex in self.includes_re:
1985 if regex.match(path):
1983 if regex.match(path):
1986 return True
1984 return True
1987 return False
1985 return False
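A hedged usage sketch of the factory and checkers above, assuming the classes are importable; the patterns are fnmatch-style globs compiled via fnmatch.translate, and excludes are evaluated before includes:

checker = BasePathPermissionChecker.create_from_patterns(
    includes=['docs/*', 'src/*.py'], excludes=['src/secret.py'])

assert not checker.has_full_access               # '*' is not in includes
assert checker.has_access('docs/index.rst')
assert checker.has_access('src/app.py')
assert not checker.has_access('src/secret.py')   # exclude wins over include
assert not checker.has_access('README.md')       # matches no include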
@@ -1,1053 +1,1053 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 GIT repository module
20 GIT repository module
21 """
21 """
22
22
23 import logging
23 import logging
24 import os
24 import os
25 import re
25 import re
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from collections import OrderedDict
29 from collections import OrderedDict
30 from rhodecode.lib.datelib import (
30 from rhodecode.lib.datelib import (
31 utcdate_fromtimestamp, makedate, date_astimestamp)
31 utcdate_fromtimestamp, makedate, date_astimestamp)
32 from rhodecode.lib.hash_utils import safe_str
32 from rhodecode.lib.hash_utils import safe_str
33 from rhodecode.lib.utils2 import CachedProperty
33 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.vcs import connection, path as vcspath
34 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs.backends.base import (
35 from rhodecode.lib.vcs.backends.base import (
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 MergeFailureReason, Reference)
37 MergeFailureReason, Reference)
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitDoesNotExistError, EmptyRepositoryError,
42 CommitDoesNotExistError, EmptyRepositoryError,
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
44
44
45
45
46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
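SHA_PATTERN accepts exactly the 12-character abbreviated and 40-character full hex forms; a quick illustration using the pattern defined above:

assert SHA_PATTERN.match('a' * 12)                       # abbreviated sha
assert SHA_PATTERN.match('0123456789abcdef' + 'f' * 24)  # full 40-char sha
assert not SHA_PATTERN.match('abc123')                   # 6 chars: rejected
assert not SHA_PATTERN.match('g' * 12)                   # not hexadecimal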
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 class GitRepository(BaseRepository):
51 class GitRepository(BaseRepository):
52 """
52 """
53 Git repository backend.
53 Git repository backend.
54 """
54 """
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62
62
63 self.path = safe_str(os.path.abspath(repo_path))
63 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
64 self.config = config if config else self.get_default_config()
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
66
66
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68
68
69 # caches
69 # caches
70 self._commit_ids = {}
70 self._commit_ids = {}
71
71
72 @LazyProperty
72 @LazyProperty
73 def _remote(self):
73 def _remote(self):
74 repo_id = self.path
74 repo_id = self.path
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76
76
77 @LazyProperty
77 @LazyProperty
78 def bare(self):
78 def bare(self):
79 return self._remote.bare()
79 return self._remote.bare()
80
80
81 @LazyProperty
81 @LazyProperty
82 def head(self):
82 def head(self):
83 return self._remote.head()
83 return self._remote.head()
84
84
85 @CachedProperty
85 @CachedProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns list of commit ids, in ascending order. Being a lazy
88 Returns list of commit ids, in ascending order. Being a lazy
89 attribute allows external tools to inject commit ids from cache.
89 attribute allows external tools to inject commit ids from cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = {commit_id: index
96 self._commit_ids = {commit_id: index
97 for index, commit_id in enumerate(commit_ids)}
97 for index, commit_id in enumerate(commit_ids)}
98
98
99 def run_git_command(self, cmd, **opts):
99 def run_git_command(self, cmd, **opts):
100 """
100 """
101 Runs the given ``cmd`` as a git command and returns the tuple
101 Runs the given ``cmd`` as a git command and returns the tuple
102 (stdout, stderr).
102 (stdout, stderr).
103
103
104 :param cmd: git command to be executed
104 :param cmd: git command to be executed
105 :param opts: env options to pass into Subprocess command
105 :param opts: env options to pass into Subprocess command
106 """
106 """
107 if not isinstance(cmd, list):
107 if not isinstance(cmd, list):
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
109
109
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 out, err = self._remote.run_git_command(cmd, **opts)
111 out, err = self._remote.run_git_command(cmd, **opts)
112 if err and not skip_stderr_log:
112 if err and not skip_stderr_log:
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 return out, err
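A hedged usage sketch of run_git_command; the repo path is hypothetical, and cmd must be a list of argv parts rather than a shell string:

repo = GitRepository('/path/to/repo')  # hypothetical path
stdout, stderr = repo.run_git_command(
    ['log', '--oneline', '-n', '5'], skip_stderr_log=True)
for line in stdout.splitlines():
    print(line)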
114 return out, err
115
115
116 @staticmethod
116 @staticmethod
117 def check_url(url, config):
117 def check_url(url, config):
118 """
118 """
119 Checks the given url and tries to verify that it's a valid
119 Checks the given url and tries to verify that it's a valid
120 link. Sometimes it may happen that git issues a basic
120 link. Sometimes it may happen that git issues a basic
121 auth request, which can cause the whole API to hang when used from python
121 auth request, which can cause the whole API to hang when used from python
122 or other external calls.
122 or other external calls.
123
123
124 On failure it raises urllib2.HTTPError; the exception is also thrown
124 On failure it raises urllib2.HTTPError; the exception is also thrown
125 when the return code is non-200
125 when the return code is non-200
126 """
126 """
127 # check first if it's not an url
127 # check first if it's not an url
128 if os.path.isdir(url) or url.startswith('file:'):
128 if os.path.isdir(url) or url.startswith('file:'):
129 return True
129 return True
130
130
131 if '+' in url.split('://', 1)[0]:
131 if '+' in url.split('://', 1)[0]:
132 url = url.split('+', 1)[1]
132 url = url.split('+', 1)[1]
133
133
134 # Request the _remote to verify the url
134 # Request the _remote to verify the url
135 return connection.Git.check_url(url, config.serialize())
135 return connection.Git.check_url(url, config.serialize())
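The scheme handling above strips a `vcs+transport` prefix before the URL is verified; a self-contained sketch of just that step:

def normalize_scheme(url):
    # drop a 'git+' style prefix, e.g. 'git+ssh://host/repo' -> 'ssh://host/repo'
    if '+' in url.split('://', 1)[0]:
        url = url.split('+', 1)[1]
    return url

assert normalize_scheme('git+ssh://host/repo') == 'ssh://host/repo'
assert normalize_scheme('https://host/repo') == 'https://host/repo'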
136
136
137 @staticmethod
137 @staticmethod
138 def is_valid_repository(path):
138 def is_valid_repository(path):
139 if os.path.isdir(os.path.join(path, '.git')):
139 if os.path.isdir(os.path.join(path, '.git')):
140 return True
140 return True
141 # check case of bare repository
141 # check case of bare repository
142 try:
142 try:
143 GitRepository(path)
143 GitRepository(path)
144 return True
144 return True
145 except VCSError:
145 except VCSError:
146 pass
146 pass
147 return False
147 return False
148
148
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 bare=False):
150 bare=False):
151 if create and os.path.exists(self.path):
151 if create and os.path.exists(self.path):
152 raise RepositoryError(
152 raise RepositoryError(
153 "Cannot create repository at %s, location already exists"
153 "Cannot create repository at %s, location already exists"
154 % self.path)
154 % self.path)
155
155
156 if bare and do_workspace_checkout:
156 if bare and do_workspace_checkout:
157 raise RepositoryError("Cannot update a bare repository")
157 raise RepositoryError("Cannot update a bare repository")
158 try:
158 try:
159
159
160 if src_url:
160 if src_url:
161 # check URL before any actions
161 # check URL before any actions
162 GitRepository.check_url(src_url, self.config)
162 GitRepository.check_url(src_url, self.config)
163
163
164 if create:
164 if create:
165 os.makedirs(self.path, mode=0o755)
165 os.makedirs(self.path, mode=0o755)
166
166
167 if bare:
167 if bare:
168 self._remote.init_bare()
168 self._remote.init_bare()
169 else:
169 else:
170 self._remote.init()
170 self._remote.init()
171
171
172 if src_url and bare:
172 if src_url and bare:
173 # bare repository only allows a fetch and checkout is not allowed
173 # bare repository only allows a fetch and checkout is not allowed
174 self.fetch(src_url, commit_ids=None)
174 self.fetch(src_url, commit_ids=None)
175 elif src_url:
175 elif src_url:
176 self.pull(src_url, commit_ids=None,
176 self.pull(src_url, commit_ids=None,
177 update_after=do_workspace_checkout)
177 update_after=do_workspace_checkout)
178
178
179 else:
179 else:
180 if not self._remote.assert_correct_path():
180 if not self._remote.assert_correct_path():
181 raise RepositoryError(
181 raise RepositoryError(
182 'Path "%s" does not contain a Git repository' %
182 'Path "%s" does not contain a Git repository' %
183 (self.path,))
183 (self.path,))
184
184
185 # TODO: johbo: check if we have to translate the OSError here
185 # TODO: johbo: check if we have to translate the OSError here
186 except OSError as err:
186 except OSError as err:
187 raise RepositoryError(err)
187 raise RepositoryError(err)
188
188
189 def _get_all_commit_ids(self):
189 def _get_all_commit_ids(self):
190 return self._remote.get_all_commit_ids()
190 return self._remote.get_all_commit_ids()
191
191
192 def _get_commit_ids(self, filters=None):
192 def _get_commit_ids(self, filters=None):
193 # we must check if this repo is not empty, since the later command
193 # we must check if this repo is not empty, since the later command
194 # fails if it is. And it's cheaper to ask than to catch the subprocess
194 # fails if it is. And it's cheaper to ask than to catch the subprocess
195 # errors
195 # errors
196
196
197 head = self._remote.head(show_exc=False)
197 head = self._remote.head(show_exc=False)
198
198
199 if not head:
199 if not head:
200 return []
200 return []
201
201
202 rev_filter = ['--branches', '--tags']
202 rev_filter = ['--branches', '--tags']
203 extra_filter = []
203 extra_filter = []
204
204
205 if filters:
205 if filters:
206 if filters.get('since'):
206 if filters.get('since'):
207 extra_filter.append('--since=%s' % (filters['since']))
207 extra_filter.append('--since=%s' % (filters['since']))
208 if filters.get('until'):
208 if filters.get('until'):
209 extra_filter.append('--until=%s' % (filters['until']))
209 extra_filter.append('--until=%s' % (filters['until']))
210 if filters.get('branch_name'):
210 if filters.get('branch_name'):
211 rev_filter = []
211 rev_filter = []
212 extra_filter.append(filters['branch_name'])
212 extra_filter.append(filters['branch_name'])
213 rev_filter.extend(extra_filter)
213 rev_filter.extend(extra_filter)
214
214
215 # if filters.get('start') or filters.get('end'):
215 # if filters.get('start') or filters.get('end'):
216 # # skip is offset, max-count is limit
216 # # skip is offset, max-count is limit
217 # if filters.get('start'):
217 # if filters.get('start'):
218 # extra_filter += ' --skip=%s' % filters['start']
218 # extra_filter += ' --skip=%s' % filters['start']
219 # if filters.get('end'):
219 # if filters.get('end'):
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221
221
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 try:
223 try:
224 output, __ = self.run_git_command(cmd)
224 output, __ = self.run_git_command(cmd)
225 except RepositoryError:
225 except RepositoryError:
226 # Can be raised for empty repositories
226 # Can be raised for empty repositories
227 return []
227 return []
228 return output.splitlines()
228 return output.splitlines()
229
229
230 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
230 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
231
231
232 def is_null(value):
232 def is_null(value):
233 return len(value) == commit_id_or_idx.count('0')
233 return len(value) == commit_id_or_idx.count('0')
234
234
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 return self.commit_ids[-1]
236 return self.commit_ids[-1]
237
237
238 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 commit_missing_err = "Commit {} does not exist for `{}`".format(
239 *map(safe_str, [commit_id_or_idx, self.name]))
239 *map(safe_str, [commit_id_or_idx, self.name]))
240
240
241 is_bstr = isinstance(commit_id_or_idx, str)
241 is_bstr = isinstance(commit_id_or_idx, str)
242 is_branch = reference_obj and reference_obj.branch
242 is_branch = reference_obj and reference_obj.branch
243
243
244 lookup_ok = False
244 lookup_ok = False
245 if is_bstr:
245 if is_bstr:
246 # Need to call remote to translate id for tagging scenarios,
246 # Need to call remote to translate id for tagging scenarios,
247 # or branches that are numeric
247 # or branches that are numeric
248 try:
248 try:
249 remote_data = self._remote.get_object(commit_id_or_idx,
249 remote_data = self._remote.get_object(commit_id_or_idx,
250 maybe_unreachable=maybe_unreachable)
250 maybe_unreachable=maybe_unreachable)
251 commit_id_or_idx = remote_data["commit_id"]
251 commit_id_or_idx = remote_data["commit_id"]
252 lookup_ok = True
252 lookup_ok = True
253 except (CommitDoesNotExistError,):
253 except (CommitDoesNotExistError,):
254 lookup_ok = False
254 lookup_ok = False
255
255
256 if lookup_ok is False:
256 if lookup_ok is False:
257 is_numeric_idx = \
257 is_numeric_idx = \
258 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
258 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
259 or isinstance(commit_id_or_idx, int)
259 or isinstance(commit_id_or_idx, int)
260 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
260 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
261 try:
261 try:
262 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
262 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
263 lookup_ok = True
263 lookup_ok = True
264 except Exception:
264 except Exception:
265 raise CommitDoesNotExistError(commit_missing_err)
265 raise CommitDoesNotExistError(commit_missing_err)
266
266
267 # we failed regular lookup, and by integer number lookup
267 # we failed regular lookup, and by integer number lookup
268 if lookup_ok is False:
268 if lookup_ok is False:
269 raise CommitDoesNotExistError(commit_missing_err)
269 raise CommitDoesNotExistError(commit_missing_err)
270
270
271 # Ensure we return full id
271 # Ensure we return full id
272 if not SHA_PATTERN.match(str(commit_id_or_idx)):
272 if not SHA_PATTERN.match(str(commit_id_or_idx)):
273 raise CommitDoesNotExistError(
273 raise CommitDoesNotExistError(
274 "Given commit id %s not recognized" % commit_id_or_idx)
274 "Given commit id %s not recognized" % commit_id_or_idx)
275 return commit_id_or_idx
275 return commit_id_or_idx
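A hedged sketch of the resolution order implemented above, on a hypothetical non-empty repository: symbolic/sha lookup via the remote is tried first, then a numeric index into commit_ids, and only then CommitDoesNotExistError:

repo = GitRepository('/path/to/repo')        # hypothetical path
tip_sha = repo._lookup_commit('tip')         # newest commit id
assert repo._lookup_commit(-1) == tip_sha    # same commit, special-cased index
oldest = repo._lookup_commit(0)              # oldest commit, by numeric index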
276
276
277 def get_hook_location(self):
277 def get_hook_location(self):
278 """
278 """
279 returns absolute path to location where hooks are stored
279 returns absolute path to location where hooks are stored
280 """
280 """
281 loc = os.path.join(self.path, 'hooks')
281 loc = os.path.join(self.path, 'hooks')
282 if not self.bare:
282 if not self.bare:
283 loc = os.path.join(self.path, '.git', 'hooks')
283 loc = os.path.join(self.path, '.git', 'hooks')
284 return loc
284 return loc
285
285
286 @LazyProperty
286 @LazyProperty
287 def last_change(self):
287 def last_change(self):
288 """
288 """
289 Returns last change made on this repository as
289 Returns last change made on this repository as
290 `datetime.datetime` object.
290 `datetime.datetime` object.
291 """
291 """
292 try:
292 try:
293 return self.get_commit().date
293 return self.get_commit().date
294 except RepositoryError:
294 except RepositoryError:
295 tzoffset = makedate()[1]
295 tzoffset = makedate()[1]
296 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
296 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
297
297
298 def _get_fs_mtime(self):
298 def _get_fs_mtime(self):
299 idx_loc = '' if self.bare else '.git'
299 idx_loc = '' if self.bare else '.git'
300 # fallback to filesystem
300 # fallback to filesystem
301 in_path = os.path.join(self.path, idx_loc, "index")
301 in_path = os.path.join(self.path, idx_loc, "index")
302 he_path = os.path.join(self.path, idx_loc, "HEAD")
302 he_path = os.path.join(self.path, idx_loc, "HEAD")
303 if os.path.exists(in_path):
303 if os.path.exists(in_path):
304 return os.stat(in_path).st_mtime
304 return os.stat(in_path).st_mtime
305 else:
305 else:
306 return os.stat(he_path).st_mtime
306 return os.stat(he_path).st_mtime
307
307
308 @LazyProperty
308 @LazyProperty
309 def description(self):
309 def description(self):
310 description = self._remote.get_description()
310 description = self._remote.get_description()
311 return safe_str(description or self.DEFAULT_DESCRIPTION)
311 return safe_str(description or self.DEFAULT_DESCRIPTION)
312
312
313 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
313 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
314 if self.is_empty():
314 if self.is_empty():
315 return OrderedDict()
315 return OrderedDict()
316
316
317 result = []
317 result = []
318 for ref, sha in self._refs.items():
318 for ref, sha in self._refs.items():
319 if ref.startswith(prefix):
319 if ref.startswith(prefix):
320 ref_name = ref
320 ref_name = ref
321 if strip_prefix:
321 if strip_prefix:
322 ref_name = ref[len(prefix):]
322 ref_name = ref[len(prefix):]
323 result.append((safe_str(ref_name), sha))
323 result.append((safe_str(ref_name), sha))
324
324
325 def get_name(entry):
325 def get_name(entry):
326 return entry[0]
326 return entry[0]
327
327
328 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
328 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
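A standalone sketch of the prefix filtering above, on a made-up refs dict; branch names are the ref keys with the 'refs/heads/' prefix stripped, sorted by name:

from collections import OrderedDict

refs = {
    'refs/heads/master': 'aaa',
    'refs/heads/feature/x': 'bbb',
    'refs/tags/v1.0': 'ccc',
}
prefix = 'refs/heads/'
entries = sorted(
    (ref[len(prefix):], sha) for ref, sha in refs.items()
    if ref.startswith(prefix))
branches = OrderedDict(entries)
# OrderedDict([('feature/x', 'bbb'), ('master', 'aaa')])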
329
329
330 def _get_branches(self):
330 def _get_branches(self):
331 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
331 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
332
332
333 @CachedProperty
333 @CachedProperty
334 def branches(self):
334 def branches(self):
335 return self._get_branches()
335 return self._get_branches()
336
336
337 @CachedProperty
337 @CachedProperty
338 def branches_closed(self):
338 def branches_closed(self):
339 return {}
339 return {}
340
340
341 @CachedProperty
341 @CachedProperty
342 def bookmarks(self):
342 def bookmarks(self):
343 return {}
343 return {}
344
344
345 @CachedProperty
345 @CachedProperty
346 def branches_all(self):
346 def branches_all(self):
347 all_branches = {}
347 all_branches = {}
348 all_branches.update(self.branches)
348 all_branches.update(self.branches)
349 all_branches.update(self.branches_closed)
349 all_branches.update(self.branches_closed)
350 return all_branches
350 return all_branches
351
351
352 @CachedProperty
352 @CachedProperty
353 def tags(self):
353 def tags(self):
354 return self._get_tags()
354 return self._get_tags()
355
355
356 def _get_tags(self):
356 def _get_tags(self):
357 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
357 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
358
358
359 def tag(self, name, user, commit_id=None, message=None, date=None,
359 def tag(self, name, user, commit_id=None, message=None, date=None,
360 **kwargs):
360 **kwargs):
361 # TODO: fix this method to apply annotated tags correctly, with message
361 # TODO: fix this method to apply annotated tags correctly, with message
362 """
362 """
363 Creates and returns a tag for the given ``commit_id``.
363 Creates and returns a tag for the given ``commit_id``.
364
364
365 :param name: name for new tag
365 :param name: name for new tag
366 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
366 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
367 :param commit_id: commit id for which new tag would be created
367 :param commit_id: commit id for which new tag would be created
368 :param message: message of the tag's commit
368 :param message: message of the tag's commit
369 :param date: date of tag's commit
369 :param date: date of tag's commit
370
370
371 :raises TagAlreadyExistError: if tag with same name already exists
371 :raises TagAlreadyExistError: if tag with same name already exists
372 """
372 """
373 if name in self.tags:
373 if name in self.tags:
374 raise TagAlreadyExistError("Tag %s already exists" % name)
374 raise TagAlreadyExistError("Tag %s already exists" % name)
375 commit = self.get_commit(commit_id=commit_id)
375 commit = self.get_commit(commit_id=commit_id)
376 message = message or "Added tag {} for commit {}".format(name, commit.raw_id)
376 message = message or f"Added tag {name} for commit {commit.raw_id}"
377
377
378 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
378 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
379
379
380 self._invalidate_prop_cache('tags')
380 self._invalidate_prop_cache('tags')
381 self._invalidate_prop_cache('_refs')
381 self._invalidate_prop_cache('_refs')
382
382
383 return commit
383 return commit
384
384
385 def remove_tag(self, name, user, message=None, date=None):
385 def remove_tag(self, name, user, message=None, date=None):
386 """
386 """
387 Removes tag with the given ``name``.
387 Removes tag with the given ``name``.
388
388
389 :param name: name of the tag to be removed
389 :param name: name of the tag to be removed
390 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
390 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
391 :param message: message of the tag's removal commit
391 :param message: message of the tag's removal commit
392 :param date: date of tag's removal commit
392 :param date: date of tag's removal commit
393
393
394 :raises TagDoesNotExistError: if tag with given name does not exist
394 :raises TagDoesNotExistError: if tag with given name does not exist
395 """
395 """
396 if name not in self.tags:
396 if name not in self.tags:
397 raise TagDoesNotExistError("Tag %s does not exist" % name)
397 raise TagDoesNotExistError("Tag %s does not exist" % name)
398
398
399 self._remote.tag_remove(name)
399 self._remote.tag_remove(name)
400 self._invalidate_prop_cache('tags')
400 self._invalidate_prop_cache('tags')
401 self._invalidate_prop_cache('_refs')
401 self._invalidate_prop_cache('_refs')
402
402
403 def _get_refs(self):
403 def _get_refs(self):
404 return self._remote.get_refs()
404 return self._remote.get_refs()
405
405
406 @CachedProperty
406 @CachedProperty
407 def _refs(self):
407 def _refs(self):
408 return self._get_refs()
408 return self._get_refs()
409
409
410 @property
410 @property
411 def _ref_tree(self):
411 def _ref_tree(self):
412 node = tree = {}
412 node = tree = {}
413 for ref, sha in self._refs.items():
413 for ref, sha in self._refs.items():
414 path = ref.split('/')
414 path = ref.split('/')
415 for bit in path[:-1]:
415 for bit in path[:-1]:
416 node = node.setdefault(bit, {})
416 node = node.setdefault(bit, {})
417 node[path[-1]] = sha
417 node[path[-1]] = sha
418 node = tree
418 node = tree
419 return tree
419 return tree
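The _ref_tree nesting is easiest to follow standalone: each '/'-separated ref path becomes one level of a dict tree, with the sha stored at the leaf and `node` reset to the root before each ref:

refs = {'refs/heads/master': 'aaa', 'refs/tags/v1.0': 'bbb'}

node = tree = {}
for ref, sha in refs.items():
    path = ref.split('/')
    for bit in path[:-1]:
        node = node.setdefault(bit, {})  # descend, creating levels as needed
    node[path[-1]] = sha                 # store the sha at the leaf
    node = tree                          # reset to the root for the next ref

assert tree == {'refs': {'heads': {'master': 'aaa'},
                         'tags': {'v1.0': 'bbb'}}}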
420
420
421 def get_remote_ref(self, ref_name):
421 def get_remote_ref(self, ref_name):
422 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
422 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
423 try:
423 try:
424 return self._refs[ref_key]
424 return self._refs[ref_key]
425 except Exception:
425 except Exception:
426 return
426 return
427
427
428 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
428 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
429 translate_tag=True, maybe_unreachable=False, reference_obj=None):
429 translate_tag=True, maybe_unreachable=False, reference_obj=None):
430 """
430 """
431 Returns `GitCommit` object representing commit from git repository
431 Returns `GitCommit` object representing commit from git repository
432 at the given `commit_id` or head (most recent commit) if None given.
432 at the given `commit_id` or head (most recent commit) if None given.
433 """
433 """
434
434
435 if self.is_empty():
435 if self.is_empty():
436 raise EmptyRepositoryError("There are no commits yet")
436 raise EmptyRepositoryError("There are no commits yet")
437
437
438 if commit_id is not None:
438 if commit_id is not None:
439 self._validate_commit_id(commit_id)
439 self._validate_commit_id(commit_id)
440 try:
440 try:
441 # we have cached idx, use it without contacting the remote
441 # we have cached idx, use it without contacting the remote
442 idx = self._commit_ids[commit_id]
442 idx = self._commit_ids[commit_id]
443 return GitCommit(self, commit_id, idx, pre_load=pre_load)
443 return GitCommit(self, commit_id, idx, pre_load=pre_load)
444 except KeyError:
444 except KeyError:
445 pass
445 pass
446
446
447 elif commit_idx is not None:
447 elif commit_idx is not None:
448 self._validate_commit_idx(commit_idx)
448 self._validate_commit_idx(commit_idx)
449 try:
449 try:
450 _commit_id = self.commit_ids[commit_idx]
450 _commit_id = self.commit_ids[commit_idx]
451 if commit_idx < 0:
451 if commit_idx < 0:
452 commit_idx = self.commit_ids.index(_commit_id)
452 commit_idx = self.commit_ids.index(_commit_id)
453 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
453 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
454 except IndexError:
454 except IndexError:
455 commit_id = commit_idx
455 commit_id = commit_idx
456 else:
456 else:
457 commit_id = "tip"
457 commit_id = "tip"
458
458
459 if translate_tag:
459 if translate_tag:
460 commit_id = self._lookup_commit(
460 commit_id = self._lookup_commit(
461 commit_id, maybe_unreachable=maybe_unreachable,
461 commit_id, maybe_unreachable=maybe_unreachable,
462 reference_obj=reference_obj)
462 reference_obj=reference_obj)
463
463
464 try:
464 try:
465 idx = self._commit_ids[commit_id]
465 idx = self._commit_ids[commit_id]
466 except KeyError:
466 except KeyError:
467 idx = -1
467 idx = -1
468
468
469 return GitCommit(self, commit_id, idx, pre_load=pre_load)
469 return GitCommit(self, commit_id, idx, pre_load=pre_load)
470
470
471 def get_commits(
471 def get_commits(
472 self, start_id=None, end_id=None, start_date=None, end_date=None,
472 self, start_id=None, end_id=None, start_date=None, end_date=None,
473 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
473 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
474 """
474 """
475 Returns generator of `GitCommit` objects from start to end (both
475 Returns generator of `GitCommit` objects from start to end (both
476 are inclusive), in ascending date order.
476 are inclusive), in ascending date order.
477
477
478 :param start_id: None, str(commit_id)
478 :param start_id: None, str(commit_id)
479 :param end_id: None, str(commit_id)
479 :param end_id: None, str(commit_id)
480 :param start_date: if specified, commits with commit date less than
480 :param start_date: if specified, commits with commit date less than
481 ``start_date`` would be filtered out from returned set
481 ``start_date`` would be filtered out from returned set
482 :param end_date: if specified, commits with commit date greater than
482 :param end_date: if specified, commits with commit date greater than
483 ``end_date`` would be filtered out from returned set
483 ``end_date`` would be filtered out from returned set
484 :param branch_name: if specified, commits not reachable from given
484 :param branch_name: if specified, commits not reachable from given
485 branch would be filtered out from returned set
485 branch would be filtered out from returned set
486 :param show_hidden: Show hidden commits such as obsolete or hidden from
486 :param show_hidden: Show hidden commits such as obsolete or hidden from
487 Mercurial evolve
487 Mercurial evolve
488 :raise BranchDoesNotExistError: If given `branch_name` does not
488 :raise BranchDoesNotExistError: If given `branch_name` does not
489 exist.
489 exist.
490 :raise CommitDoesNotExistError: If commits for given `start` or
490 :raise CommitDoesNotExistError: If commits for given `start` or
491 `end` could not be found.
491 `end` could not be found.
492
492
493 """
493 """
494 if self.is_empty():
494 if self.is_empty():
495 raise EmptyRepositoryError("There are no commits yet")
495 raise EmptyRepositoryError("There are no commits yet")
496
496
497 self._validate_branch_name(branch_name)
497 self._validate_branch_name(branch_name)
498
498
499 if start_id is not None:
499 if start_id is not None:
500 self._validate_commit_id(start_id)
500 self._validate_commit_id(start_id)
501 if end_id is not None:
501 if end_id is not None:
502 self._validate_commit_id(end_id)
502 self._validate_commit_id(end_id)
503
503
504 start_raw_id = self._lookup_commit(start_id)
504 start_raw_id = self._lookup_commit(start_id)
505 start_pos = self._commit_ids[start_raw_id] if start_id else None
505 start_pos = self._commit_ids[start_raw_id] if start_id else None
506 end_raw_id = self._lookup_commit(end_id)
506 end_raw_id = self._lookup_commit(end_id)
507 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
507 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
508
508
509 if None not in [start_id, end_id] and start_pos > end_pos:
509 if None not in [start_id, end_id] and start_pos > end_pos:
510 raise RepositoryError(
510 raise RepositoryError(
511 "Start commit '%s' cannot be after end commit '%s'" %
511 "Start commit '%s' cannot be after end commit '%s'" %
512 (start_id, end_id))
512 (start_id, end_id))
513
513
514 if end_pos is not None:
514 if end_pos is not None:
515 end_pos += 1
515 end_pos += 1
516
516
517 filter_ = []
517 filter_ = []
518 if branch_name:
518 if branch_name:
519 filter_.append({'branch_name': branch_name})
519 filter_.append({'branch_name': branch_name})
520 if start_date and not end_date:
520 if start_date and not end_date:
521 filter_.append({'since': start_date})
521 filter_.append({'since': start_date})
522 if end_date and not start_date:
522 if end_date and not start_date:
523 filter_.append({'until': end_date})
523 filter_.append({'until': end_date})
524 if start_date and end_date:
524 if start_date and end_date:
525 filter_.append({'since': start_date})
525 filter_.append({'since': start_date})
526 filter_.append({'until': end_date})
526 filter_.append({'until': end_date})
527
527
528 # if start_pos or end_pos:
528 # if start_pos or end_pos:
529 # filter_.append({'start': start_pos})
529 # filter_.append({'start': start_pos})
530 # filter_.append({'end': end_pos})
530 # filter_.append({'end': end_pos})
531
531
532 if filter_:
532 if filter_:
533 revfilters = {
533 revfilters = {
534 'branch_name': branch_name,
534 'branch_name': branch_name,
535 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
535 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
536 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
536 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
537 'start': start_pos,
537 'start': start_pos,
538 'end': end_pos,
538 'end': end_pos,
539 }
539 }
540 commit_ids = self._get_commit_ids(filters=revfilters)
540 commit_ids = self._get_commit_ids(filters=revfilters)
541
541
542 else:
542 else:
543 commit_ids = self.commit_ids
543 commit_ids = self.commit_ids
544
544
545 if start_pos or end_pos:
545 if start_pos or end_pos:
546 commit_ids = commit_ids[start_pos: end_pos]
546 commit_ids = commit_ids[start_pos: end_pos]
547
547
548 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
548 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
549 translate_tag=translate_tags)
549 translate_tag=translate_tags)
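The revfilters built above end up as `git rev-list` flags inside _get_commit_ids; a hedged sketch of the roughly equivalent command line:

import datetime

start_date = datetime.datetime(2023, 1, 1)
end_date = datetime.datetime(2023, 6, 30)
cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags',
       '--since=%s' % start_date.strftime('%m/%d/%y %H:%M:%S'),
       '--until=%s' % end_date.strftime('%m/%d/%y %H:%M:%S')]
# repo.run_git_command(cmd) would then return the matching ids, oldest first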
550
550
551 def get_diff(
551 def get_diff(
552 self, commit1, commit2, path='', ignore_whitespace=False,
552 self, commit1, commit2, path='', ignore_whitespace=False,
553 context=3, path1=None):
553 context=3, path1=None):
554 """
554 """
555 Returns (git like) *diff*, as plain text. Shows changes introduced by
555 Returns (git like) *diff*, as plain text. Shows changes introduced by
556 ``commit2`` since ``commit1``.
556 ``commit2`` since ``commit1``.
557
557
558 :param commit1: Entry point from which diff is shown. Can be
558 :param commit1: Entry point from which diff is shown. Can be
559 ``self.EMPTY_COMMIT`` - in this case, patch showing all
559 ``self.EMPTY_COMMIT`` - in this case, patch showing all
560 the changes since empty state of the repository until ``commit2``
560 the changes since empty state of the repository until ``commit2``
561 :param commit2: The commit until which changes should be shown.
561 :param commit2: The commit until which changes should be shown.
562 :param path:
562 :param path:
563 :param ignore_whitespace: If set to ``True``, would not show whitespace
563 :param ignore_whitespace: If set to ``True``, would not show whitespace
564 changes. Defaults to ``False``.
564 changes. Defaults to ``False``.
565 :param context: How many lines before/after changed lines should be
565 :param context: How many lines before/after changed lines should be
566 shown. Defaults to ``3``.
566 shown. Defaults to ``3``.
567 :param path1:
567 :param path1:
568 """
568 """
569 self._validate_diff_commits(commit1, commit2)
569 self._validate_diff_commits(commit1, commit2)
570 if path1 is not None and path1 != path:
570 if path1 is not None and path1 != path:
571 raise ValueError("Diff of two different paths not supported.")
571 raise ValueError("Diff of two different paths not supported.")
572
572
573 if path:
573 if path:
574 file_filter = path
574 file_filter = path
575 else:
575 else:
576 file_filter = None
576 file_filter = None
577
577
578 diff = self._remote.diff(
578 diff = self._remote.diff(
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
580 opt_ignorews=ignore_whitespace,
580 opt_ignorews=ignore_whitespace,
581 context=context)
581 context=context)
582
582
583 return GitDiff(diff)
583 return GitDiff(diff)
584
584
585 def strip(self, commit_id, branch_name):
585 def strip(self, commit_id, branch_name):
586 commit = self.get_commit(commit_id=commit_id)
586 commit = self.get_commit(commit_id=commit_id)
587 if commit.merge:
587 if commit.merge:
588 raise Exception('Cannot reset to merge commit')
588 raise Exception('Cannot reset to merge commit')
589
589
590 # parent is going to be the new head now
590 # parent is going to be the new head now
591 commit = commit.parents[0]
591 commit = commit.parents[0]
592 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
592 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
593
593
594 # clear cached properties
594 # clear cached properties
595 self._invalidate_prop_cache('commit_ids')
595 self._invalidate_prop_cache('commit_ids')
596 self._invalidate_prop_cache('_refs')
596 self._invalidate_prop_cache('_refs')
597 self._invalidate_prop_cache('branches')
597 self._invalidate_prop_cache('branches')
598
598
599 return len(self.commit_ids)
599 return len(self.commit_ids)
600
600
601 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
601 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
602 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
602 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
603 self, commit_id1, repo2, commit_id2)
603 self, commit_id1, repo2, commit_id2)
604
604
605 if commit_id1 == commit_id2:
605 if commit_id1 == commit_id2:
606 return commit_id1
606 return commit_id1
607
607
608 if self != repo2:
608 if self != repo2:
609 commits = self._remote.get_missing_revs(
609 commits = self._remote.get_missing_revs(
610 commit_id1, commit_id2, repo2.path)
610 commit_id1, commit_id2, repo2.path)
611 if commits:
611 if commits:
612 commit = repo2.get_commit(commits[-1])
612 commit = repo2.get_commit(commits[-1])
613 if commit.parents:
613 if commit.parents:
614 ancestor_id = commit.parents[0].raw_id
614 ancestor_id = commit.parents[0].raw_id
615 else:
615 else:
616 ancestor_id = None
616 ancestor_id = None
617 else:
617 else:
618 # no commits from other repo, ancestor_id is the commit_id2
618 # no commits from other repo, ancestor_id is the commit_id2
619 ancestor_id = commit_id2
619 ancestor_id = commit_id2
620 else:
620 else:
621 output, __ = self.run_git_command(
621 output, __ = self.run_git_command(
622 ['merge-base', commit_id1, commit_id2])
622 ['merge-base', commit_id1, commit_id2])
623 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
623 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
624
624
625 log.debug('Found common ancestor with sha: %s', ancestor_id)
625 log.debug('Found common ancestor with sha: %s', ancestor_id)
626
626
627 return ancestor_id
627 return ancestor_id
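A hedged usage sketch of the same-repo branch above, on a hypothetical non-empty repository: it shells out to `git merge-base` and pulls the sha out of the output with COMMIT_ID_PAT:

repo = GitRepository('/path/to/repo')              # hypothetical path
c1, c2 = repo.commit_ids[-1], repo.commit_ids[0]   # any two commit ids
output, _ = repo.run_git_command(['merge-base', c1, c2])
ancestor_id = repo.COMMIT_ID_PAT.findall(output)[0]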
628
628
629 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
629 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
630 repo1 = self
630 repo1 = self
631 ancestor_id = None
631 ancestor_id = None
632
632
633 if commit_id1 == commit_id2:
633 if commit_id1 == commit_id2:
634 commits = []
634 commits = []
635 elif repo1 != repo2:
635 elif repo1 != repo2:
636 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
636 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
637 repo2.path)
637 repo2.path)
638 commits = [
638 commits = [
639 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
639 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
640 for commit_id in reversed(missing_ids)]
640 for commit_id in reversed(missing_ids)]
641 else:
641 else:
642 output, __ = repo1.run_git_command(
642 output, __ = repo1.run_git_command(
643 ['log', '--reverse', '--pretty=format: %H', '-s',
643 ['log', '--reverse', '--pretty=format: %H', '-s',
644 '{}..{}'.format(commit_id1, commit_id2)])
644 f'{commit_id1}..{commit_id2}'])
645 commits = [
645 commits = [
646 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
646 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
647 for commit_id in self.COMMIT_ID_PAT.findall(output)]
647 for commit_id in self.COMMIT_ID_PAT.findall(output)]
648
648
649 return commits
649 return commits
650
650
651 @LazyProperty
651 @LazyProperty
652 def in_memory_commit(self):
652 def in_memory_commit(self):
653 """
653 """
654 Returns ``GitInMemoryCommit`` object for this repository.
654 Returns ``GitInMemoryCommit`` object for this repository.
655 """
655 """
656 return GitInMemoryCommit(self)
656 return GitInMemoryCommit(self)
657
657
658 def pull(self, url, commit_ids=None, update_after=False):
658 def pull(self, url, commit_ids=None, update_after=False):
659 """
659 """
660 Pull changes from external location. In GIT, pull is different
660 Pull changes from external location. In GIT, pull is different
661 from fetch, since it also does a checkout
661 from fetch, since it also does a checkout
662
662
663 :param commit_ids: Optional. Can be set to a list of commit ids
663 :param commit_ids: Optional. Can be set to a list of commit ids
664 which shall be pulled from the other repository.
664 which shall be pulled from the other repository.
665 """
665 """
666 refs = None
666 refs = None
667 if commit_ids is not None:
667 if commit_ids is not None:
668 remote_refs = self._remote.get_remote_refs(url)
668 remote_refs = self._remote.get_remote_refs(url)
669 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
670 self._remote.pull(url, refs=refs, update_after=update_after)
670 self._remote.pull(url, refs=refs, update_after=update_after)
671 self._remote.invalidate_vcs_cache()
671 self._remote.invalidate_vcs_cache()
672
672
673 def fetch(self, url, commit_ids=None):
673 def fetch(self, url, commit_ids=None):
674 """
674 """
675 Fetch all git objects from external location.
675 Fetch all git objects from external location.
676 """
676 """
677 self._remote.sync_fetch(url, refs=commit_ids)
677 self._remote.sync_fetch(url, refs=commit_ids)
678 self._remote.invalidate_vcs_cache()
678 self._remote.invalidate_vcs_cache()
679
679
680 def push(self, url):
680 def push(self, url):
681 refs = None
681 refs = None
682 self._remote.sync_push(url, refs=refs)
682 self._remote.sync_push(url, refs=refs)
683
683
684 def set_refs(self, ref_name, commit_id):
684 def set_refs(self, ref_name, commit_id):
685 self._remote.set_refs(ref_name, commit_id)
685 self._remote.set_refs(ref_name, commit_id)
686 self._invalidate_prop_cache('_refs')
686 self._invalidate_prop_cache('_refs')
687
687
688 def remove_ref(self, ref_name):
688 def remove_ref(self, ref_name):
689 self._remote.remove_ref(ref_name)
689 self._remote.remove_ref(ref_name)
690 self._invalidate_prop_cache('_refs')
690 self._invalidate_prop_cache('_refs')
691
691
692 def run_gc(self, prune=True):
692 def run_gc(self, prune=True):
693 cmd = ['gc', '--aggressive']
693 cmd = ['gc', '--aggressive']
694 if prune:
694 if prune:
695 cmd += ['--prune=now']
695 cmd += ['--prune=now']
696 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
697 return stderr
697 return stderr
698
698
699 def _update_server_info(self):
699 def _update_server_info(self):
700 """
700 """
701 runs git's update-server-info command in this repo instance
701 runs git's update-server-info command in this repo instance
702 """
702 """
703 self._remote.update_server_info()
703 self._remote.update_server_info()
704
704
705 def _current_branch(self):
705 def _current_branch(self):
706 """
706 """
707 Return the name of the current branch.
707 Return the name of the current branch.
708
708
709 It only works for non-bare repositories (i.e. repositories with a
709 It only works for non-bare repositories (i.e. repositories with a
710 working copy)
710 working copy)
711 """
711 """
712 if self.bare:
712 if self.bare:
713 raise RepositoryError('Bare git repos do not have active branches')
713 raise RepositoryError('Bare git repos do not have active branches')
714
714
715 if self.is_empty():
715 if self.is_empty():
716 return None
716 return None
717
717
718 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
718 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
719 return stdout.strip()
719 return stdout.strip()
720
720
721 def _checkout(self, branch_name, create=False, force=False):
721 def _checkout(self, branch_name, create=False, force=False):
722 """
722 """
723 Checkout a branch in the working directory.
723 Checkout a branch in the working directory.
724
724
725 It tries to create the branch if create is True, failing if the branch
725 It tries to create the branch if create is True, failing if the branch
726 already exists.
726 already exists.
727
727
728 It only works for non-bare repositories (i.e. repositories with a
728 It only works for non-bare repositories (i.e. repositories with a
729 working copy)
729 working copy)
730 """
730 """
731 if self.bare:
731 if self.bare:
732 raise RepositoryError('Cannot checkout branches in a bare git repo')
732 raise RepositoryError('Cannot checkout branches in a bare git repo')
733
733
734 cmd = ['checkout']
734 cmd = ['checkout']
735 if force:
735 if force:
736 cmd.append('-f')
736 cmd.append('-f')
737 if create:
737 if create:
738 cmd.append('-b')
738 cmd.append('-b')
739 cmd.append(branch_name)
739 cmd.append(branch_name)
740 self.run_git_command(cmd, fail_on_stderr=False)
740 self.run_git_command(cmd, fail_on_stderr=False)
741
741
742 def _create_branch(self, branch_name, commit_id):
742 def _create_branch(self, branch_name, commit_id):
743 """
743 """
744 creates a branch in a GIT repo
744 creates a branch in a GIT repo
745 """
745 """
746 self._remote.create_branch(branch_name, commit_id)
746 self._remote.create_branch(branch_name, commit_id)
747
747
748 def _identify(self):
748 def _identify(self):
749 """
749 """
750 Return the current state of the working directory.
750 Return the current state of the working directory.
751 """
751 """
752 if self.bare:
752 if self.bare:
753 raise RepositoryError('Bare git repos do not have active branches')
753 raise RepositoryError('Bare git repos do not have active branches')
754
754
755 if self.is_empty():
755 if self.is_empty():
756 return None
756 return None
757
757
758 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
759 return stdout.strip()
759 return stdout.strip()
760
760
761 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 def _local_clone(self, clone_path, branch_name, source_branch=None):
762 """
762 """
763 Create a local clone of the current repo.
763 Create a local clone of the current repo.
764 """
764 """
765 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 # N.B.(skreft): the --branch option is required as otherwise the shallow
766 # clone will only fetch the active branch.
766 # clone will only fetch the active branch.
767 cmd = ['clone', '--branch', branch_name,
767 cmd = ['clone', '--branch', branch_name,
768 self.path, os.path.abspath(clone_path)]
768 self.path, os.path.abspath(clone_path)]
769
769
770 self.run_git_command(cmd, fail_on_stderr=False)
770 self.run_git_command(cmd, fail_on_stderr=False)
771
771
772 # if we get the different source branch, make sure we also fetch it for
772 # if we get the different source branch, make sure we also fetch it for
773 # merge conditions
773 # merge conditions
774 if source_branch and source_branch != branch_name:
774 if source_branch and source_branch != branch_name:
775 # check if the ref exists.
775 # check if the ref exists.
776 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 shadow_repo = GitRepository(os.path.abspath(clone_path))
777 if shadow_repo.get_remote_ref(source_branch):
777 if shadow_repo.get_remote_ref(source_branch):
778 cmd = ['fetch', self.path, source_branch]
778 cmd = ['fetch', self.path, source_branch]
779 self.run_git_command(cmd, fail_on_stderr=False)
779 self.run_git_command(cmd, fail_on_stderr=False)
780
780
781 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 def _local_fetch(self, repository_path, branch_name, use_origin=False):
782 """
782 """
783 Fetch a branch from a local repository.
783 Fetch a branch from a local repository.
784 """
784 """
785 repository_path = os.path.abspath(repository_path)
785 repository_path = os.path.abspath(repository_path)
786 if repository_path == self.path:
786 if repository_path == self.path:
787 raise ValueError('Cannot fetch from the same repository')
787 raise ValueError('Cannot fetch from the same repository')
788
788
789 if use_origin:
789 if use_origin:
790 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch_name = '+{branch}:refs/heads/{branch}'.format(
791 branch=branch_name)
791 branch=branch_name)
792
792
793 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 cmd = ['fetch', '--no-tags', '--update-head-ok',
794 repository_path, branch_name]
794 repository_path, branch_name]
795 self.run_git_command(cmd, fail_on_stderr=False)
795 self.run_git_command(cmd, fail_on_stderr=False)
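The use_origin refspec above is the force-update form: a leading '+' tells git to overwrite the local ref even on a non-fast-forward fetch. A one-line illustration:

branch_name = 'feature'
refspec = '+{branch}:refs/heads/{branch}'.format(branch=branch_name)
assert refspec == '+feature:refs/heads/feature'  # force-update refs/heads/feature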
796
796
797 def _local_reset(self, branch_name):
797 def _local_reset(self, branch_name):
798 branch_name = f'{branch_name}'
798 branch_name = f'{branch_name}'
799 cmd = ['reset', '--hard', branch_name, '--']
799 cmd = ['reset', '--hard', branch_name, '--']
800 self.run_git_command(cmd, fail_on_stderr=False)
800 self.run_git_command(cmd, fail_on_stderr=False)
801
801
802 def _last_fetch_heads(self):
802 def _last_fetch_heads(self):
803 """
803 """
804 Return the last fetched heads that need merging.
804 Return the last fetched heads that need merging.
805
805
806 The algorithm is defined at
806 The algorithm is defined at
807 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
808 """
808 """
809 if not self.bare:
809 if not self.bare:
810 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
811 else:
811 else:
812 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
813
813
814 heads = []
814 heads = []
815 with open(fetch_heads_path) as f:
815 with open(fetch_heads_path) as f:
816 for line in f:
816 for line in f:
817 if ' not-for-merge ' in line:
817 if ' not-for-merge ' in line:
818 continue
818 continue
819 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 line = re.sub('\t.*', '', line, flags=re.DOTALL)
820 heads.append(line)
820 heads.append(line)
821
821
822 return heads
822 return heads
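A standalone sketch of the FETCH_HEAD parsing above, on made-up sample content shaped to match the substring test used here (real FETCH_HEAD lines are tab-separated fields): 'not-for-merge' entries are skipped, and everything from the first tab onward is dropped, leaving only the sha:

import re

sample = (
    "aaa\tbranch 'master' of /src/repo\n"
    "bbb not-for-merge branch 'dev' of /src/repo\n"
)
heads = []
for line in sample.splitlines(keepends=True):
    if ' not-for-merge ' in line:
        continue  # this head was fetched but is not meant to be merged
    heads.append(re.sub('\t.*', '', line, flags=re.DOTALL))

assert heads == ['aaa']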
823
823
824 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
824 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
825 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
826
826
827 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 def _local_pull(self, repository_path, branch_name, ff_only=True):
828 """
828 """
829 Pull a branch from a local repository.
829 Pull a branch from a local repository.
830 """
830 """
831 if self.bare:
831 if self.bare:
832 raise RepositoryError('Cannot pull into a bare git repository')
832 raise RepositoryError('Cannot pull into a bare git repository')
833 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # N.B.(skreft): The --ff-only option is to make sure this is a
834 # fast-forward (i.e., we are only pulling new changes and there are no
834 # fast-forward (i.e., we are only pulling new changes and there are no
835 # conflicts with our current branch)
835 # conflicts with our current branch)
836 # Additionally, that option needs to go before --no-tags, otherwise git
836 # Additionally, that option needs to go before --no-tags, otherwise git
837 # pull complains about it being an unknown flag.
837 # pull complains about it being an unknown flag.
838 cmd = ['pull']
838 cmd = ['pull']
839 if ff_only:
839 if ff_only:
840 cmd.append('--ff-only')
840 cmd.append('--ff-only')
841 cmd.extend(['--no-tags', repository_path, branch_name])
841 cmd.extend(['--no-tags', repository_path, branch_name])
842 self.run_git_command(cmd, fail_on_stderr=False)
842 self.run_git_command(cmd, fail_on_stderr=False)
843
843
    def _local_merge(self, merge_message, user_name, user_email, heads):
        """
        Merge the given heads into the checked out branch.

        It will force a merge commit.

        Currently it raises an error if the repo is empty, as it is not
        possible to create a merge commit in an empty repo.

        :param merge_message: The message to use for the merge commit.
        :param heads: the heads to merge.
        """
        if self.bare:
            raise RepositoryError('Cannot merge into a bare git repository')

        if not heads:
            return

        if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError('Do not know how to merge into empty repositories yet')
        unresolved = None

        # N.B.(skreft): the --no-ff option is used to enforce the creation of a
        # merge commit. We also specify the user who is doing the merge.
        cmd = ['-c', f'user.name="{user_name}"',
               '-c', f'user.email={user_email}',
               'merge', '--no-ff', '-m', safe_str(merge_message)]

        merge_cmd = cmd + heads

        try:
            self.run_git_command(merge_cmd, fail_on_stderr=False)
        except RepositoryError:
            files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
                                         fail_on_stderr=False)[0].splitlines()
            # NOTE(marcink): we add the 'U ' prefix for consistency with the HG backend output
            unresolved = [f'U {f}' for f in files]

            # Cleanup any merge leftovers
            self._remote.invalidate_vcs_cache()
            self.run_git_command(['merge', '--abort'], fail_on_stderr=False)

            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            else:
                raise

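    # On a conflicted merge, `git diff --name-only --diff-filter U` lists one
    # unresolved path per line; the 'U ' prefix turns that into entries such
    # as 'U docs/index.rst', matching the Mercurial backend's output format.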
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently, if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits, as HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   f'{source_branch}:{target_branch}']
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)

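    # RC_SCM_DATA and RC_SKIP_HOOKS appear to be environment variables read by
    # RhodeCode's own git hook scripts in the target repository; exporting
    # RC_SKIP_HOOKS='1' is what makes enable_hooks=False take effect.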
    def _get_new_pr_branch(self, source_branch, target_branch):
        prefix = f'pr_{source_branch}-{target_branch}_'
        pr_branches = []
        for branch in self.branches:
            if branch.startswith(prefix):
                pr_branches.append(int(branch[len(prefix):]))

        if not pr_branches:
            branch_id = 0
        else:
            branch_id = max(pr_branches) + 1

        return '%s%d' % (prefix, branch_id)

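    # Example: merging 'feature' into 'master' with pr_feature-master_0 and
    # pr_feature-master_1 already present yields 'pr_feature-master_2'.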
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
            log.debug('Prepared %s shadow repository in %s',
                      self.alias, shadow_repository_path)

        return shadow_repository_path

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self.get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError as e:
            log.exception('Failure when doing local merge on git shadow repo')
            if isinstance(e, UnresolvedFilesInRepo):
                metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
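    # In short: the trial merge happens in a disposable shadow clone - sync it
    # with the target, create a fresh pr_* branch, fetch and merge the source,
    # and push back to the real target only when the merge succeeded and
    # dry_run is off.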
@@ -1,403 +1,403 b''
# Copyright (C) 2014-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
HG commit module
"""

import os

from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.lib.datelib import utcdate_fromtimestamp
from rhodecode.lib.str_utils import safe_bytes, safe_str
from rhodecode.lib.vcs import path as vcspath
from rhodecode.lib.vcs.backends import base
from rhodecode.lib.vcs.exceptions import CommitError
from rhodecode.lib.vcs.nodes import (
    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
    NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
    LargeFileNode)
from rhodecode.lib.vcs.utils.paths import get_dirs_for_path


class MercurialCommit(base.BaseCommit):
    """
    Represents the state of the repository at a single commit.
    """

    _filter_pre_load = [
        # git specific property not supported here
        "_commit",
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        raw_id = safe_str(raw_id)

        self.repository = repository
        self._remote = repository._remote

        self.raw_id = raw_id
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self.nodes = {}
        self._stat_modes = {}  # stat info for paths

    def _set_bulk_properties(self, pre_load):
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)

        for attr, value in result.items():
            if attr in ["author", "branch", "message"]:
                value = safe_str(value)
            elif attr == "affected_files":
                value = list(map(safe_str, value))
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr in ["children", "parents"]:
                value = self._make_commits(value)
            elif attr in ["phase"]:
                value = self._get_phase_text(value)
            self.__dict__[attr] = value

    @LazyProperty
    def tags(self):
        tags = [name for name, commit_id in self.repository.tags.items()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def branch(self):
        return safe_str(self._remote.ctx_branch(self.raw_id))

    @LazyProperty
    def bookmarks(self):
        bookmarks = [
            name for name, commit_id in self.repository.bookmarks.items()
            if commit_id == self.raw_id]
        return bookmarks

    @LazyProperty
    def message(self):
        return safe_str(self._remote.ctx_description(self.raw_id))

    @LazyProperty
    def committer(self):
        return safe_str(self.author)

    @LazyProperty
    def author(self):
        return safe_str(self._remote.ctx_user(self.raw_id))

    @LazyProperty
    def date(self):
        return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self._remote.ctx_status(self.raw_id)

    @LazyProperty
    def _file_paths(self):
        return self._remote.ctx_list(self.raw_id)

    @LazyProperty
    def _dir_paths(self):
        dir_paths = ['']
        dir_paths.extend(list(set(get_dirs_for_path(*self._file_paths))))

        return dir_paths

    @LazyProperty
    def _paths(self):
        return self._dir_paths + self._file_paths

    @LazyProperty
    def id(self):
        if self.last:
            return 'tip'
        return self.short_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    def _make_commits(self, commit_ids, pre_load=None):
        return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in commit_ids]

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parents = self._remote.ctx_parents(self.raw_id)
        return self._make_commits(parents)

    def _get_phase_text(self, phase_id):
        return {
            0: 'public',
            1: 'draft',
            2: 'secret',
        }.get(phase_id) or ''

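    # Mercurial phases describe how widely a changeset is known: 'public'
    # changesets are immutable and already shared, 'draft' is local work in
    # progress, and 'secret' changesets are excluded from push/pull.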
    @LazyProperty
    def phase(self):
        phase_id = self._remote.ctx_phase(self.raw_id)
        phase_text = self._get_phase_text(phase_id)

        return safe_str(phase_text)

    @LazyProperty
    def obsolete(self):
        obsolete = self._remote.ctx_obsolete(self.raw_id)
        return obsolete

    @LazyProperty
    def hidden(self):
        hidden = self._remote.ctx_hidden(self.raw_id)
        return hidden

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        children = self._remote.ctx_children(self.raw_id)
        return self._make_commits(children)

    def _get_kind(self, path):
        path = self._fix_path(path)
        if path in self._file_paths:
            return NodeKind.FILE
        elif path in self._dir_paths:
            return NodeKind.DIR
        else:
            raise CommitError(f"Node does not exist at the given path '{path}'")

    def _assert_is_path(self, path) -> str:
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(f"File does not exist for commit {self.raw_id} at '{path}'")

        return path

    def get_file_mode(self, path: bytes):
        """
        Returns stat mode of the file at the given ``path``.
        """
        path = self._assert_is_path(path)

        if path not in self._stat_modes:
            self._stat_modes[path] = self._remote.fctx_flags(self.raw_id, path)

        if 'x' in self._stat_modes[path]:
            return base.FILEMODE_EXECUTABLE
        return base.FILEMODE_DEFAULT

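    # fctx_flags() returns Mercurial's flag string for a file: it contains
    # 'x' for executable files and 'l' for symlinks, which is what
    # get_file_mode() above and is_link() below check for.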
    def is_link(self, path):
        path = self._assert_is_path(path)
        if path not in self._stat_modes:
            self._stat_modes[path] = self._remote.fctx_flags(self.raw_id, path)

        return 'l' in self._stat_modes[path]

    def is_node_binary(self, path):
        path = self._assert_is_path(path)
        return self._remote.is_binary(self.raw_id, path)

    def node_md5_hash(self, path):
        path = self._assert_is_path(path)
        return self._remote.md5_hash(self.raw_id, path)

    def get_file_content(self, path):
        """
        Returns content of the file at given ``path``.
        """
        path = self._assert_is_path(path)
        return self._remote.fctx_node_data(self.raw_id, path)

    def get_file_content_streamed(self, path):
        path = self._assert_is_path(path)
        stream_method = getattr(self._remote, 'stream:fctx_node_data')
        return stream_method(self.raw_id, path)

    def get_file_size(self, path):
        """
        Returns size of the file at given ``path``.
        """
        path = self._assert_is_path(path)
        return self._remote.fctx_size(self.raw_id, path)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `MercurialCommit` objects
        for which file at given ``path`` has been modified.
        """
        path = self._assert_is_path(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """
        result = self._remote.fctx_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            yield (
                ln_no, commit_id,
                # bind commit_id per iteration; a bare closure would otherwise
                # resolve to the loop's final value when called later
                lambda commit_id=commit_id: self.repository.get_commit(
                    commit_id=commit_id, pre_load=pre_load),
                content)

    def get_nodes(self, path, pre_load=None):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``. If node at the given ``path``
        is not an instance of ``DirNode``, a CommitError is raised.
        """

        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                f"Directory does not exist for idx {self.raw_id} at '{path}'")
        path = self._fix_path(path)

        filenodes = [
            FileNode(safe_bytes(f), commit=self, pre_load=pre_load) for f in self._file_paths
            if os.path.dirname(f) == path]
        # TODO: johbo: Check if this can be done in a more obvious way
        dirs = path == '' and '' or [
            d for d in self._dir_paths
            if d and vcspath.dirname(d) == path]
        dirnodes = [
            DirNode(safe_bytes(d), commit=self) for d in dirs
            if os.path.dirname(d) == path]

        alias = self.repository.alias
        for k, vals in self._submodules.items():
            if vcspath.dirname(k) == path:
                loc = vals[0]
                commit = vals[1]
                dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()

        return nodes

    def get_node(self, path, pre_load=None):
        """
        Returns `Node` object from the given `path`. If there is no node at
        the given `path`, `NodeDoesNotExistError` would be raised.
        """
        path = self._fix_path(path)

        if path not in self.nodes:
            if path in self._file_paths:
                node = FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
            elif path in self._dir_paths:
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(safe_bytes(path), commit=self)
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node
        return self.nodes[path]

    def get_largefile_node(self, path):
        pointer_spec = self._remote.is_large_file(self.raw_id, path)
        if pointer_spec:
            # the content of the regular FileNode for that file is the hash
            # of the largefile
            file_id = self.get_file_content(path).strip()

            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
            elif self._remote.in_user_cache(file_id):
                lf_path = self._remote.store_path(file_id)
                self._remote.link(file_id, path)
                return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)

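    # With the largefiles extension the tracked file is only a pointer: the
    # real content is resolved from the largefiles store, or linked in from
    # the per-user cache when it is not in the store yet.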
    @LazyProperty
    def _submodules(self):
        """
        Returns a dictionary with submodule information from the substate file
        of the hg repository.
        """
        return self._remote.ctx_substate(self.raw_id)

    @LazyProperty
    def affected_files(self):
        """
        Gets fast-accessible file changes for the given commit.
        """
        return self._remote.ctx_files(self.raw_id)

    @property
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        return AddedFileNodesGenerator(self.added_paths, self)

    @LazyProperty
    def added_paths(self):
        return [n for n in self.status[1]]

    @property
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        return ChangedFileNodesGenerator(self.changed_paths, self)

    @LazyProperty
    def changed_paths(self):
        return [n for n in self.status[0]]

    @property
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        return RemovedFileNodesGenerator(self.removed_paths, self)

    @LazyProperty
    def removed_paths(self):
        return [n for n in self.status[2]]
@@ -1,1013 +1,1013 b''
# Copyright (C) 2014-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
HG repository module
"""
import os
import logging
import binascii
import configparser
import urllib.request
import urllib.parse
import urllib.error

from zope.cachedescriptors.property import Lazy as LazyProperty

from collections import OrderedDict
from rhodecode.lib.datelib import (
    date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
from rhodecode.lib.str_utils import safe_str
from rhodecode.lib.utils2 import CachedProperty
from rhodecode.lib.vcs import connection, exceptions
from rhodecode.lib.vcs.backends.base import (
    BaseRepository, CollectionGenerator, Config, MergeResponse,
    MergeFailureReason, Reference, BasePathPermissionChecker)
from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
from rhodecode.lib.vcs.exceptions import (
    EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
    TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)

hexlify = binascii.hexlify
nullid = "\0" * 20

log = logging.getLogger(__name__)


class MercurialRepository(BaseRepository):
    """
    Mercurial repository backend
    """
    DEFAULT_BRANCH_NAME = 'default'

    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if a repository could not be found at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, tries to create the repository if
            it does not exist rather than raising an exception
        :param src_url=None: if given, tries to clone the repository from that
            location
        :param do_workspace_checkout=False: sets update of working copy after
            making a clone
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches
        self._commit_ids = {}

    @LazyProperty
    def _remote(self):
        repo_id = self.path
        return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)

    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being a lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(commit_ids)
        return commit_ids

    def _rebuild_cache(self, commit_ids):
        self._commit_ids = {commit_id: index
                            for index, commit_id in enumerate(commit_ids)}

    @CachedProperty
    def branches(self):
        return self._get_branches()

    @CachedProperty
    def branches_closed(self):
        return self._get_branches(active=False, closed=True)

    @CachedProperty
    def branches_all(self):
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    def _get_branches(self, active=True, closed=False):
        """
        Gets branches for this repository.
        Returns only active (not closed) branches by default.

        :param active: include active branches
        :param closed: include closed branches
        """
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _branches = [(n, h,) for n, h in
                     self._remote.branches(active, closed).items()]

        return OrderedDict(sorted(_branches, key=get_name, reverse=False))

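    # The result maps branch name -> head commit id, for example:
    #     OrderedDict([('default', '1e4bb...'), ('stable', '9f0c1...')])
    # sorted by name so callers get a deterministic ordering.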
    @CachedProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()

    def _get_tags(self):
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _tags = [(n, h,) for n, h in
                 self._remote.tags().items()]

        return OrderedDict(sorted(_tags, key=get_name, reverse=True))

    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = f"Added tag {name} for commit {commit.short_id}"

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)

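    # Unlike git, a non-local Mercurial tag is itself recorded as a new commit
    # (touching .hgtags), which is why both the vcs cache and the cached
    # 'tags' property must be invalidated before the tag commit is looked up.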
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (n, h) for n, h in
233 (n, h) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
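# Illustrative usage (editor's sketch, not part of this changeset; assumes
# an initialized MercurialRepository instance named `repo`):
#
#   c1 = repo.get_commit(commit_idx=0)   # first commit
#   c2 = repo.get_commit()               # defaults to tip
#   diff = repo.get_diff(c1, c2, path='README.rst', context=5)
#
# The returned MercurialDiff wraps the raw git-style patch text produced
# by the remote diff call above.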
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
301 self, commit_id1, repo2, commit_id2)
302
302
303 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
304 return commit_id1
304 return commit_id1
305
305
306 ancestors = self._remote.revs_from_revspec(
306 ancestors = self._remote.revs_from_revspec(
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 other_path=repo2.path)
308 other_path=repo2.path)
309
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
313 return ancestor_id
314
314
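# Editor's note (sketch of the revset evaluated above): for commits X and Y
# the remote effectively resolves
#
#   ancestor(id(X), id(Y))
#
# with repo2 mixed in via `other_path`, so the first result (if any) is the
# common ancestor, otherwise None is returned.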
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 if commit_id1 == commit_id2:
316 if commit_id1 == commit_id2:
317 commits = []
317 commits = []
318 else:
318 else:
319 if merge:
319 if merge:
320 indexes = self._remote.revs_from_revspec(
320 indexes = self._remote.revs_from_revspec(
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 else:
323 else:
324 indexes = self._remote.revs_from_revspec(
324 indexes = self._remote.revs_from_revspec(
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 commit_id1, other_path=repo2.path)
326 commit_id1, other_path=repo2.path)
327
327
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 for idx in indexes]
329 for idx in indexes]
330
330
331 return commits
331 return commits
332
332
333 @staticmethod
333 @staticmethod
334 def check_url(url, config):
334 def check_url(url, config):
335 """
335 """
336 Checks the given url and tries to verify that it's a valid
336 Checks the given url and tries to verify that it's a valid
337 link. Sometimes it may happen that Mercurial issues a basic
337 link. Sometimes it may happen that Mercurial issues a basic
338 auth request that can cause the whole API to hang when used from
338 auth request that can cause the whole API to hang when used from
339 Python or other external calls.
339 Python or other external calls.
340
340
341 On failure it raises urllib.error.HTTPError; the exception is
341 On failure it raises urllib.error.HTTPError; the exception is
342 also raised when the return code is not 200
342 also raised when the return code is not 200
343 """
343 """
344 # check first if it's not a local url
344 # check first if it's not a local url
345 if os.path.isdir(url) or url.startswith('file:'):
345 if os.path.isdir(url) or url.startswith('file:'):
346 return True
346 return True
347
347
348 # Request the _remote to verify the url
348 # Request the _remote to verify the url
349 return connection.Hg.check_url(url, config.serialize())
349 return connection.Hg.check_url(url, config.serialize())
350
350
351 @staticmethod
351 @staticmethod
352 def is_valid_repository(path):
352 def is_valid_repository(path):
353 return os.path.isdir(os.path.join(path, '.hg'))
353 return os.path.isdir(os.path.join(path, '.hg'))
354
354
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 """
356 """
357 Checks for a mercurial repository in the given path. If there
357 Checks for a mercurial repository in the given path. If there
358 is no repository in that path it raises an exception, unless the
358 is no repository in that path it raises an exception, unless the
359 `create` parameter is set to True - in that case the repository
359 `create` parameter is set to True - in that case the repository
360 is created.
360 is created.
361
361
362 If `src_url` is given, tries to clone the repository from that
362 If `src_url` is given, tries to clone the repository from that
363 location. Additionally, the working copy is updated according
363 location. Additionally, the working copy is updated according
364 to the `do_workspace_checkout` flag.
364 to the `do_workspace_checkout` flag.
365 """
365 """
366 if create and os.path.exists(self.path):
366 if create and os.path.exists(self.path):
367 raise RepositoryError(
367 raise RepositoryError(
368 f"Cannot create repository at {self.path}, location already exist")
368 f"Cannot create repository at {self.path}, location already exist")
369
369
370 if src_url:
370 if src_url:
371 url = str(self._get_url(src_url))
371 url = str(self._get_url(src_url))
372 MercurialRepository.check_url(url, self.config)
372 MercurialRepository.check_url(url, self.config)
373
373
374 self._remote.clone(url, self.path, do_workspace_checkout)
374 self._remote.clone(url, self.path, do_workspace_checkout)
375
375
376 # Don't try to create if we've already cloned repo
376 # Don't try to create if we've already cloned repo
377 create = False
377 create = False
378
378
379 if create:
379 if create:
380 os.makedirs(self.path, mode=0o755)
380 os.makedirs(self.path, mode=0o755)
381
381
382 self._remote.localrepository(create)
382 self._remote.localrepository(create)
383
383
384 @LazyProperty
384 @LazyProperty
385 def in_memory_commit(self):
385 def in_memory_commit(self):
386 return MercurialInMemoryCommit(self)
386 return MercurialInMemoryCommit(self)
387
387
388 @LazyProperty
388 @LazyProperty
389 def description(self):
389 def description(self):
390 description = self._remote.get_config_value(
390 description = self._remote.get_config_value(
391 'web', 'description', untrusted=True)
391 'web', 'description', untrusted=True)
392 return safe_str(description or self.DEFAULT_DESCRIPTION)
392 return safe_str(description or self.DEFAULT_DESCRIPTION)
393
393
394 @LazyProperty
394 @LazyProperty
395 def contact(self):
395 def contact(self):
396 contact = (
396 contact = (
397 self._remote.get_config_value("web", "contact") or
397 self._remote.get_config_value("web", "contact") or
398 self._remote.get_config_value("ui", "username"))
398 self._remote.get_config_value("ui", "username"))
399 return safe_str(contact or self.DEFAULT_CONTACT)
399 return safe_str(contact or self.DEFAULT_CONTACT)
400
400
401 @LazyProperty
401 @LazyProperty
402 def last_change(self):
402 def last_change(self):
403 """
403 """
404 Returns last change made on this repository as
404 Returns last change made on this repository as
405 `datetime.datetime` object.
405 `datetime.datetime` object.
406 """
406 """
407 try:
407 try:
408 return self.get_commit().date
408 return self.get_commit().date
409 except RepositoryError:
409 except RepositoryError:
410 tzoffset = makedate()[1]
410 tzoffset = makedate()[1]
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412
412
413 def _get_fs_mtime(self):
413 def _get_fs_mtime(self):
414 # fallback to filesystem
414 # fallback to filesystem
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 st_path = os.path.join(self.path, '.hg', "store")
416 st_path = os.path.join(self.path, '.hg', "store")
417 if os.path.exists(cl_path):
417 if os.path.exists(cl_path):
418 return os.stat(cl_path).st_mtime
418 return os.stat(cl_path).st_mtime
419 else:
419 else:
420 return os.stat(st_path).st_mtime
420 return os.stat(st_path).st_mtime
421
421
422 def _get_url(self, url):
422 def _get_url(self, url):
423 """
423 """
424 Returns the normalized url. If no scheme is given, falls back
424 Returns the normalized url. If no scheme is given, falls back
425 to the filesystem
425 to the filesystem
426 (``file:///``) scheme.
426 (``file:///``) scheme.
427 """
427 """
428 if url != 'default' and '://' not in url:
428 if url != 'default' and '://' not in url:
429 url = "file:" + urllib.request.pathname2url(url)
429 url = "file:" + urllib.request.pathname2url(url)
430 return url
430 return url
431
431
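# For example (editor's illustration, POSIX paths assumed):
#
#   repo._get_url('/srv/repos/foo')      # -> 'file:/srv/repos/foo'
#   repo._get_url('https://host/repo')   # -> unchanged, has a scheme
#   repo._get_url('default')             # -> left as-is for hgrc paths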
432 def get_hook_location(self):
432 def get_hook_location(self):
433 """
433 """
434 Returns the absolute path to the location where hooks are stored.
434 Returns the absolute path to the location where hooks are stored.
435 """
435 """
436 return os.path.join(self.path, '.hg', '.hgrc')
436 return os.path.join(self.path, '.hg', '.hgrc')
437
437
438 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
438 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 translate_tag=None, maybe_unreachable=False, reference_obj=None):
439 translate_tag=None, maybe_unreachable=False, reference_obj=None):
440 """
440 """
441 Returns ``MercurialCommit`` object representing repository's
441 Returns ``MercurialCommit`` object representing repository's
442 commit at the given `commit_id` or `commit_idx`.
442 commit at the given `commit_id` or `commit_idx`.
443 """
443 """
444 if self.is_empty():
444 if self.is_empty():
445 raise EmptyRepositoryError("There are no commits yet")
445 raise EmptyRepositoryError("There are no commits yet")
446
446
447 if commit_id is not None:
447 if commit_id is not None:
448 self._validate_commit_id(commit_id)
448 self._validate_commit_id(commit_id)
449 try:
449 try:
450 # we have cached idx, use it without contacting the remote
450 # we have cached idx, use it without contacting the remote
451 idx = self._commit_ids[commit_id]
451 idx = self._commit_ids[commit_id]
452 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
452 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 except KeyError:
453 except KeyError:
454 pass
454 pass
455
455
456 elif commit_idx is not None:
456 elif commit_idx is not None:
457 self._validate_commit_idx(commit_idx)
457 self._validate_commit_idx(commit_idx)
458 try:
458 try:
459 _commit_id = self.commit_ids[commit_idx]
459 _commit_id = self.commit_ids[commit_idx]
460 if commit_idx < 0:
460 if commit_idx < 0:
461 commit_idx = self.commit_ids.index(_commit_id)
461 commit_idx = self.commit_ids.index(_commit_id)
462
462
463 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
463 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 except IndexError:
464 except IndexError:
465 commit_id = commit_idx
465 commit_id = commit_idx
466 else:
466 else:
467 commit_id = "tip"
467 commit_id = "tip"
468
468
469 # case here is no cached version, do an actual lookup instead
469 # case here is no cached version, do an actual lookup instead
470 try:
470 try:
471 raw_id, idx = self._remote.lookup(commit_id, both=True)
471 raw_id, idx = self._remote.lookup(commit_id, both=True)
472 except CommitDoesNotExistError:
472 except CommitDoesNotExistError:
473 msg = "Commit {} does not exist for `{}`".format(
473 msg = "Commit {} does not exist for `{}`".format(
474 *map(safe_str, [commit_id, self.name]))
474 *map(safe_str, [commit_id, self.name]))
475 raise CommitDoesNotExistError(msg)
475 raise CommitDoesNotExistError(msg)
476
476
477 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
477 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
478
478
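# Illustrative lookups (editor's sketch; a real call needs a full
# 40-character hash for `commit_id`):
#
#   repo.get_commit()                          # resolves to "tip"
#   repo.get_commit(commit_idx=0)              # first commit
#   repo.get_commit(commit_idx=-1)             # negative idx normalized above
#   repo.get_commit(commit_id='deadbeef' * 5)  # hypothetical full sha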
479 def get_commits(
479 def get_commits(
480 self, start_id=None, end_id=None, start_date=None, end_date=None,
480 self, start_id=None, end_id=None, start_date=None, end_date=None,
481 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
481 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
482 """
482 """
483 Returns generator of ``MercurialCommit`` objects from start to end
483 Returns generator of ``MercurialCommit`` objects from start to end
484 (both are inclusive)
484 (both are inclusive)
485
485
486 :param start_id: None, str(commit_id)
486 :param start_id: None, str(commit_id)
487 :param end_id: None, str(commit_id)
487 :param end_id: None, str(commit_id)
488 :param start_date: if specified, commits with a commit date earlier
488 :param start_date: if specified, commits with a commit date earlier
489 than ``start_date`` are filtered out of the returned set
489 than ``start_date`` are filtered out of the returned set
490 :param end_date: if specified, commits with a commit date later
490 :param end_date: if specified, commits with a commit date later
491 than ``end_date`` are filtered out of the returned set
491 than ``end_date`` are filtered out of the returned set
492 :param branch_name: if specified, commits not reachable from the
492 :param branch_name: if specified, commits not reachable from the
493 given branch are filtered out of the returned set
493 given branch are filtered out of the returned set
494 :param show_hidden: Show hidden commits, such as commits made
494 :param show_hidden: Show hidden commits, such as commits made
495 obsolete or hidden by Mercurial evolve
495 obsolete or hidden by Mercurial evolve
496 :raise BranchDoesNotExistError: If given ``branch_name`` does not
496 :raise BranchDoesNotExistError: If given ``branch_name`` does not
497 exist.
497 exist.
498 :raise CommitDoesNotExistError: If commit for given ``start`` or
498 :raise CommitDoesNotExistError: If commit for given ``start`` or
499 ``end`` could not be found.
499 ``end`` could not be found.
500 """
500 """
501 # first check that this is not an empty repo
501 # first check that this is not an empty repo
502 if self.is_empty():
502 if self.is_empty():
503 raise EmptyRepositoryError("There are no commits yet")
503 raise EmptyRepositoryError("There are no commits yet")
504 self._validate_branch_name(branch_name)
504 self._validate_branch_name(branch_name)
505
505
506 branch_ancestors = False
506 branch_ancestors = False
507 if start_id is not None:
507 if start_id is not None:
508 self._validate_commit_id(start_id)
508 self._validate_commit_id(start_id)
509 c_start = self.get_commit(commit_id=start_id)
509 c_start = self.get_commit(commit_id=start_id)
510 start_pos = self._commit_ids[c_start.raw_id]
510 start_pos = self._commit_ids[c_start.raw_id]
511 else:
511 else:
512 start_pos = None
512 start_pos = None
513
513
514 if end_id is not None:
514 if end_id is not None:
515 self._validate_commit_id(end_id)
515 self._validate_commit_id(end_id)
516 c_end = self.get_commit(commit_id=end_id)
516 c_end = self.get_commit(commit_id=end_id)
517 end_pos = max(0, self._commit_ids[c_end.raw_id])
517 end_pos = max(0, self._commit_ids[c_end.raw_id])
518 else:
518 else:
519 end_pos = None
519 end_pos = None
520
520
521 if None not in [start_id, end_id] and start_pos > end_pos:
521 if None not in [start_id, end_id] and start_pos > end_pos:
522 raise RepositoryError(
522 raise RepositoryError(
523 "Start commit '%s' cannot be after end commit '%s'" %
523 "Start commit '%s' cannot be after end commit '%s'" %
524 (start_id, end_id))
524 (start_id, end_id))
525
525
526 if end_pos is not None:
526 if end_pos is not None:
527 end_pos += 1
527 end_pos += 1
528
528
529 commit_filter = []
529 commit_filter = []
530
530
531 if branch_name and not branch_ancestors:
531 if branch_name and not branch_ancestors:
532 commit_filter.append('branch("{}")'.format(branch_name))
532 commit_filter.append(f'branch("{branch_name}")')
533 elif branch_name and branch_ancestors:
533 elif branch_name and branch_ancestors:
534 commit_filter.append('ancestors(branch("{}"))'.format(branch_name))
534 commit_filter.append(f'ancestors(branch("{branch_name}"))')
535
535
536 if start_date and not end_date:
536 if start_date and not end_date:
537 commit_filter.append('date(">{}")'.format(start_date))
537 commit_filter.append(f'date(">{start_date}")')
538 if end_date and not start_date:
538 if end_date and not start_date:
539 commit_filter.append('date("<{}")'.format(end_date))
539 commit_filter.append(f'date("<{end_date}")')
540 if start_date and end_date:
540 if start_date and end_date:
541 commit_filter.append(
541 commit_filter.append(
542 'date(">{}") and date("<{}")'.format(start_date, end_date))
542 f'date(">{start_date}") and date("<{end_date}")')
543
543
544 if not show_hidden:
544 if not show_hidden:
545 commit_filter.append('not obsolete()')
545 commit_filter.append('not obsolete()')
546 commit_filter.append('not hidden()')
546 commit_filter.append('not hidden()')
547
547
548 # TODO: johbo: Figure out a simpler way for this solution
548 # TODO: johbo: Figure out a simpler way for this solution
549 collection_generator = CollectionGenerator
549 collection_generator = CollectionGenerator
550 if commit_filter:
550 if commit_filter:
551 commit_filter = ' and '.join(map(safe_str, commit_filter))
551 commit_filter = ' and '.join(map(safe_str, commit_filter))
552 revisions = self._remote.rev_range([commit_filter])
552 revisions = self._remote.rev_range([commit_filter])
553 collection_generator = MercurialIndexBasedCollectionGenerator
553 collection_generator = MercurialIndexBasedCollectionGenerator
554 else:
554 else:
555 revisions = self.commit_ids
555 revisions = self.commit_ids
556
556
557 if start_pos or end_pos:
557 if start_pos or end_pos:
558 revisions = revisions[start_pos:end_pos]
558 revisions = revisions[start_pos:end_pos]
559
559
560 return collection_generator(self, revisions, pre_load=pre_load)
560 return collection_generator(self, revisions, pre_load=pre_load)
561
561
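# Editor's note: with e.g. branch_name='default' and both dates set, the
# filters assembled above join into the single revset
#
#   branch("default") and date(">2023-01-01") and date("<2024-01-01")
#
# (dates shown are placeholders), which is then evaluated remotely via
# rev_range() and wrapped in MercurialIndexBasedCollectionGenerator.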
562 def pull(self, url, commit_ids=None):
562 def pull(self, url, commit_ids=None):
563 """
563 """
564 Pull changes from external location.
564 Pull changes from external location.
565
565
566 :param commit_ids: Optional. Can be set to a list of commit ids
566 :param commit_ids: Optional. Can be set to a list of commit ids
567 which shall be pulled from the other repository.
567 which shall be pulled from the other repository.
568 """
568 """
569 url = self._get_url(url)
569 url = self._get_url(url)
570 self._remote.pull(url, commit_ids=commit_ids)
570 self._remote.pull(url, commit_ids=commit_ids)
571 self._remote.invalidate_vcs_cache()
571 self._remote.invalidate_vcs_cache()
572
572
573 def fetch(self, url, commit_ids=None):
573 def fetch(self, url, commit_ids=None):
574 """
574 """
575 Backward compatibility with GIT fetch==pull
575 Backward compatibility with GIT fetch==pull
576 """
576 """
577 return self.pull(url, commit_ids=commit_ids)
577 return self.pull(url, commit_ids=commit_ids)
578
578
579 def push(self, url):
579 def push(self, url):
580 url = self._get_url(url)
580 url = self._get_url(url)
581 self._remote.sync_push(url)
581 self._remote.sync_push(url)
582
582
583 def _local_clone(self, clone_path):
583 def _local_clone(self, clone_path):
584 """
584 """
585 Create a local clone of the current repo.
585 Create a local clone of the current repo.
586 """
586 """
587 self._remote.clone(self.path, clone_path, update_after_clone=True,
587 self._remote.clone(self.path, clone_path, update_after_clone=True,
588 hooks=False)
588 hooks=False)
589
589
590 def _update(self, revision, clean=False):
590 def _update(self, revision, clean=False):
591 """
591 """
592 Update the working copy to the specified revision.
592 Update the working copy to the specified revision.
593 """
593 """
594 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
594 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
595 self._remote.update(revision, clean=clean)
595 self._remote.update(revision, clean=clean)
596
596
597 def _identify(self):
597 def _identify(self):
598 """
598 """
599 Return the current state of the working directory.
599 Return the current state of the working directory.
600 """
600 """
601 return self._remote.identify().strip().rstrip('+')
601 return self._remote.identify().strip().rstrip('+')
602
602
603 def _heads(self, branch=None):
603 def _heads(self, branch=None):
604 """
604 """
605 Return the commit ids of the repository heads.
605 Return the commit ids of the repository heads.
606 """
606 """
607 return self._remote.heads(branch=branch).strip().split(' ')
607 return self._remote.heads(branch=branch).strip().split(' ')
608
608
609 def _ancestor(self, revision1, revision2):
609 def _ancestor(self, revision1, revision2):
610 """
610 """
611 Return the common ancestor of the two revisions.
611 Return the common ancestor of the two revisions.
612 """
612 """
613 return self._remote.ancestor(revision1, revision2)
613 return self._remote.ancestor(revision1, revision2)
614
614
615 def _local_push(
615 def _local_push(
616 self, revision, repository_path, push_branches=False,
616 self, revision, repository_path, push_branches=False,
617 enable_hooks=False):
617 enable_hooks=False):
618 """
618 """
619 Push the given revision to the specified repository.
619 Push the given revision to the specified repository.
620
620
621 :param push_branches: allow creating branches in the target repo.
621 :param push_branches: allow creating branches in the target repo.
622 """
622 """
623 self._remote.push(
623 self._remote.push(
624 [revision], repository_path, hooks=enable_hooks,
624 [revision], repository_path, hooks=enable_hooks,
625 push_branches=push_branches)
625 push_branches=push_branches)
626
626
627 def _local_merge(self, target_ref, merge_message, user_name, user_email,
627 def _local_merge(self, target_ref, merge_message, user_name, user_email,
628 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
628 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
629 """
629 """
630 Merge the given source_revision into the checked out revision.
630 Merge the given source_revision into the checked out revision.
631
631
632 Returns the commit id of the merge and a boolean indicating if the
632 Returns the commit id of the merge and a boolean indicating if the
633 commit needs to be pushed.
633 commit needs to be pushed.
634 """
634 """
635 source_ref_commit_id = source_ref.commit_id
635 source_ref_commit_id = source_ref.commit_id
636 target_ref_commit_id = target_ref.commit_id
636 target_ref_commit_id = target_ref.commit_id
637
637
638 # update our workdir to target ref, for proper merge
638 # update our workdir to target ref, for proper merge
639 self._update(target_ref_commit_id, clean=True)
639 self._update(target_ref_commit_id, clean=True)
640
640
641 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
641 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
642 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
642 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
643
643
644 if close_commit_id:
644 if close_commit_id:
645 # NOTE(marcink): if we get the close commit, this is our new source
645 # NOTE(marcink): if we get the close commit, this is our new source
646 # which will include the close commit itself.
646 # which will include the close commit itself.
647 source_ref_commit_id = close_commit_id
647 source_ref_commit_id = close_commit_id
648
648
649 if ancestor == source_ref_commit_id:
649 if ancestor == source_ref_commit_id:
650 # Nothing to do, the changes were already integrated
650 # Nothing to do, the changes were already integrated
651 return target_ref_commit_id, False
651 return target_ref_commit_id, False
652
652
653 elif ancestor == target_ref_commit_id and is_the_same_branch:
653 elif ancestor == target_ref_commit_id and is_the_same_branch:
654 # In this case we should force a commit message
654 # In this case we should force a commit message
655 return source_ref_commit_id, True
655 return source_ref_commit_id, True
656
656
657 unresolved = None
657 unresolved = None
658 if use_rebase:
658 if use_rebase:
659 try:
659 try:
660 bookmark_name = 'rcbook{}{}'.format(source_ref_commit_id, target_ref_commit_id)
660 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
661 self.bookmark(bookmark_name, revision=source_ref.commit_id)
661 self.bookmark(bookmark_name, revision=source_ref.commit_id)
662 self._remote.rebase(
662 self._remote.rebase(
663 source=source_ref_commit_id, dest=target_ref_commit_id)
663 source=source_ref_commit_id, dest=target_ref_commit_id)
664 self._remote.invalidate_vcs_cache()
664 self._remote.invalidate_vcs_cache()
665 self._update(bookmark_name, clean=True)
665 self._update(bookmark_name, clean=True)
666 return self._identify(), True
666 return self._identify(), True
667 except RepositoryError as e:
667 except RepositoryError as e:
668 # The rebase-abort may raise another exception which 'hides'
668 # The rebase-abort may raise another exception which 'hides'
669 # the original one, therefore we log it here.
669 # the original one, therefore we log it here.
670 log.exception('Error while rebasing shadow repo during merge.')
670 log.exception('Error while rebasing shadow repo during merge.')
671 if 'unresolved conflicts' in safe_str(e):
671 if 'unresolved conflicts' in safe_str(e):
672 unresolved = self._remote.get_unresolved_files()
672 unresolved = self._remote.get_unresolved_files()
673 log.debug('unresolved files: %s', unresolved)
673 log.debug('unresolved files: %s', unresolved)
674
674
675 # Cleanup any rebase leftovers
675 # Cleanup any rebase leftovers
676 self._remote.invalidate_vcs_cache()
676 self._remote.invalidate_vcs_cache()
677 self._remote.rebase(abort=True)
677 self._remote.rebase(abort=True)
678 self._remote.invalidate_vcs_cache()
678 self._remote.invalidate_vcs_cache()
679 self._remote.update(clean=True)
679 self._remote.update(clean=True)
680 if unresolved:
680 if unresolved:
681 raise UnresolvedFilesInRepo(unresolved)
681 raise UnresolvedFilesInRepo(unresolved)
682 else:
682 else:
683 raise
683 raise
684 else:
684 else:
685 try:
685 try:
686 self._remote.merge(source_ref_commit_id)
686 self._remote.merge(source_ref_commit_id)
687 self._remote.invalidate_vcs_cache()
687 self._remote.invalidate_vcs_cache()
688 self._remote.commit(
688 self._remote.commit(
689 message=safe_str(merge_message),
689 message=safe_str(merge_message),
690 username=safe_str('{} <{}>'.format(user_name, user_email)))
690 username=safe_str(f'{user_name} <{user_email}>'))
691 self._remote.invalidate_vcs_cache()
691 self._remote.invalidate_vcs_cache()
692 return self._identify(), True
692 return self._identify(), True
693 except RepositoryError as e:
693 except RepositoryError as e:
694 # The merge-abort may raise another exception which 'hides'
694 # The merge-abort may raise another exception which 'hides'
695 # the original one, therefore we log it here.
695 # the original one, therefore we log it here.
696 log.exception('Error while merging shadow repo during merge.')
696 log.exception('Error while merging shadow repo during merge.')
697 if 'unresolved merge conflicts' in safe_str(e):
697 if 'unresolved merge conflicts' in safe_str(e):
698 unresolved = self._remote.get_unresolved_files()
698 unresolved = self._remote.get_unresolved_files()
699 log.debug('unresolved files: %s', unresolved)
699 log.debug('unresolved files: %s', unresolved)
700
700
701 # Cleanup any merge leftovers
701 # Cleanup any merge leftovers
702 self._remote.update(clean=True)
702 self._remote.update(clean=True)
703 if unresolved:
703 if unresolved:
704 raise UnresolvedFilesInRepo(unresolved)
704 raise UnresolvedFilesInRepo(unresolved)
705 else:
705 else:
706 raise
706 raise
707
707
708 def _local_close(self, target_ref, user_name, user_email,
708 def _local_close(self, target_ref, user_name, user_email,
709 source_ref, close_message=''):
709 source_ref, close_message=''):
710 """
710 """
711 Close the branch of the given source_revision
711 Close the branch of the given source_revision
712
712
713 Returns the commit id of the close and a boolean indicating if the
713 Returns the commit id of the close and a boolean indicating if the
714 commit needs to be pushed.
714 commit needs to be pushed.
715 """
715 """
716 self._update(source_ref.commit_id)
716 self._update(source_ref.commit_id)
717 message = close_message or f"Closing branch: `{source_ref.name}`"
717 message = close_message or f"Closing branch: `{source_ref.name}`"
718 try:
718 try:
719 self._remote.commit(
719 self._remote.commit(
720 message=safe_str(message),
720 message=safe_str(message),
721 username=safe_str('{} <{}>'.format(user_name, user_email)),
721 username=safe_str(f'{user_name} <{user_email}>'),
722 close_branch=True)
722 close_branch=True)
723 self._remote.invalidate_vcs_cache()
723 self._remote.invalidate_vcs_cache()
724 return self._identify(), True
724 return self._identify(), True
725 except RepositoryError:
725 except RepositoryError:
726 # Cleanup any commit leftovers
726 # Cleanup any commit leftovers
727 self._remote.update(clean=True)
727 self._remote.update(clean=True)
728 raise
728 raise
729
729
730 def _is_the_same_branch(self, target_ref, source_ref):
730 def _is_the_same_branch(self, target_ref, source_ref):
731 return (
731 return (
732 self._get_branch_name(target_ref) ==
732 self._get_branch_name(target_ref) ==
733 self._get_branch_name(source_ref))
733 self._get_branch_name(source_ref))
734
734
735 def _get_branch_name(self, ref):
735 def _get_branch_name(self, ref):
736 if ref.type == 'branch':
736 if ref.type == 'branch':
737 return ref.name
737 return ref.name
738 return self._remote.ctx_branch(ref.commit_id)
738 return self._remote.ctx_branch(ref.commit_id)
739
739
740 def _maybe_prepare_merge_workspace(
740 def _maybe_prepare_merge_workspace(
741 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
741 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
742 shadow_repository_path = self._get_shadow_repository_path(
742 shadow_repository_path = self._get_shadow_repository_path(
743 self.path, repo_id, workspace_id)
743 self.path, repo_id, workspace_id)
744 if not os.path.exists(shadow_repository_path):
744 if not os.path.exists(shadow_repository_path):
745 self._local_clone(shadow_repository_path)
745 self._local_clone(shadow_repository_path)
746 log.debug(
746 log.debug(
747 'Prepared shadow repository in %s', shadow_repository_path)
747 'Prepared shadow repository in %s', shadow_repository_path)
748
748
749 return shadow_repository_path
749 return shadow_repository_path
750
750
751 def _merge_repo(self, repo_id, workspace_id, target_ref,
751 def _merge_repo(self, repo_id, workspace_id, target_ref,
752 source_repo, source_ref, merge_message,
752 source_repo, source_ref, merge_message,
753 merger_name, merger_email, dry_run=False,
753 merger_name, merger_email, dry_run=False,
754 use_rebase=False, close_branch=False):
754 use_rebase=False, close_branch=False):
755
755
756 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
756 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
757 'rebase' if use_rebase else 'merge', dry_run)
757 'rebase' if use_rebase else 'merge', dry_run)
758 if target_ref.commit_id not in self._heads():
758 if target_ref.commit_id not in self._heads():
759 return MergeResponse(
759 return MergeResponse(
760 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
760 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
761 metadata={'target_ref': target_ref})
761 metadata={'target_ref': target_ref})
762
762
763 try:
763 try:
764 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
764 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
765 heads_all = self._heads(target_ref.name)
765 heads_all = self._heads(target_ref.name)
766 max_heads = 10
766 max_heads = 10
767 if len(heads_all) > max_heads:
767 if len(heads_all) > max_heads:
768 heads = ',\n'.join(
768 heads = ',\n'.join(
769 heads_all[:max_heads] +
769 heads_all[:max_heads] +
770 ['and {} more.'.format(len(heads_all)-max_heads)])
770 [f'and {len(heads_all)-max_heads} more.'])
771 else:
771 else:
772 heads = ',\n'.join(heads_all)
772 heads = ',\n'.join(heads_all)
773 metadata = {
773 metadata = {
774 'target_ref': target_ref,
774 'target_ref': target_ref,
775 'source_ref': source_ref,
775 'source_ref': source_ref,
776 'heads': heads
776 'heads': heads
777 }
777 }
778 return MergeResponse(
778 return MergeResponse(
779 False, False, None,
779 False, False, None,
780 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
780 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
781 metadata=metadata)
781 metadata=metadata)
782 except CommitDoesNotExistError:
782 except CommitDoesNotExistError:
783 log.exception('Failure when looking up branch heads on hg target')
783 log.exception('Failure when looking up branch heads on hg target')
784 return MergeResponse(
784 return MergeResponse(
785 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
785 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
786 metadata={'target_ref': target_ref})
786 metadata={'target_ref': target_ref})
787
787
788 shadow_repository_path = self._maybe_prepare_merge_workspace(
788 shadow_repository_path = self._maybe_prepare_merge_workspace(
789 repo_id, workspace_id, target_ref, source_ref)
789 repo_id, workspace_id, target_ref, source_ref)
790 shadow_repo = self.get_shadow_instance(shadow_repository_path)
790 shadow_repo = self.get_shadow_instance(shadow_repository_path)
791
791
792 log.debug('Pulling in target reference %s', target_ref)
792 log.debug('Pulling in target reference %s', target_ref)
793 self._validate_pull_reference(target_ref)
793 self._validate_pull_reference(target_ref)
794 shadow_repo._local_pull(self.path, target_ref)
794 shadow_repo._local_pull(self.path, target_ref)
795
795
796 try:
796 try:
797 log.debug('Pulling in source reference %s', source_ref)
797 log.debug('Pulling in source reference %s', source_ref)
798 source_repo._validate_pull_reference(source_ref)
798 source_repo._validate_pull_reference(source_ref)
799 shadow_repo._local_pull(source_repo.path, source_ref)
799 shadow_repo._local_pull(source_repo.path, source_ref)
800 except CommitDoesNotExistError:
800 except CommitDoesNotExistError:
801 log.exception('Failure when doing local pull on hg shadow repo')
801 log.exception('Failure when doing local pull on hg shadow repo')
802 return MergeResponse(
802 return MergeResponse(
803 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
803 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
804 metadata={'source_ref': source_ref})
804 metadata={'source_ref': source_ref})
805
805
806 merge_ref = None
806 merge_ref = None
807 merge_commit_id = None
807 merge_commit_id = None
808 close_commit_id = None
808 close_commit_id = None
809 merge_failure_reason = MergeFailureReason.NONE
809 merge_failure_reason = MergeFailureReason.NONE
810 metadata = {}
810 metadata = {}
811
811
812 # enforce that close branch should be used only in case we source from
812 # enforce that close branch should be used only in case we source from
813 # an actual Branch
813 # an actual Branch
814 close_branch = close_branch and source_ref.type == 'branch'
814 close_branch = close_branch and source_ref.type == 'branch'
815
815
816 # don't allow to close branch if source and target are the same
816 # don't allow to close branch if source and target are the same
817 close_branch = close_branch and source_ref.name != target_ref.name
817 close_branch = close_branch and source_ref.name != target_ref.name
818
818
819 needs_push_on_close = False
819 needs_push_on_close = False
820 if close_branch and not use_rebase and not dry_run:
820 if close_branch and not use_rebase and not dry_run:
821 try:
821 try:
822 close_commit_id, needs_push_on_close = shadow_repo._local_close(
822 close_commit_id, needs_push_on_close = shadow_repo._local_close(
823 target_ref, merger_name, merger_email, source_ref)
823 target_ref, merger_name, merger_email, source_ref)
824 merge_possible = True
824 merge_possible = True
825 except RepositoryError:
825 except RepositoryError:
826 log.exception('Failure when doing close branch on '
826 log.exception('Failure when doing close branch on '
827 'shadow repo: %s', shadow_repo)
827 'shadow repo: %s', shadow_repo)
828 merge_possible = False
828 merge_possible = False
829 merge_failure_reason = MergeFailureReason.MERGE_FAILED
829 merge_failure_reason = MergeFailureReason.MERGE_FAILED
830 else:
830 else:
831 merge_possible = True
831 merge_possible = True
832
832
833 needs_push = False
833 needs_push = False
834 if merge_possible:
834 if merge_possible:
835
835
836 try:
836 try:
837 merge_commit_id, needs_push = shadow_repo._local_merge(
837 merge_commit_id, needs_push = shadow_repo._local_merge(
838 target_ref, merge_message, merger_name, merger_email,
838 target_ref, merge_message, merger_name, merger_email,
839 source_ref, use_rebase=use_rebase,
839 source_ref, use_rebase=use_rebase,
840 close_commit_id=close_commit_id, dry_run=dry_run)
840 close_commit_id=close_commit_id, dry_run=dry_run)
841 merge_possible = True
841 merge_possible = True
842
842
843 # read the state of the close action, as it
843 # read the state of the close action, as it
844 # may have required a push
844 # may have required a push
845 needs_push = needs_push or needs_push_on_close
845 needs_push = needs_push or needs_push_on_close
846
846
847 # Set a bookmark pointing to the merge commit. This bookmark
847 # Set a bookmark pointing to the merge commit. This bookmark
848 # may be used to easily identify the last successful merge
848 # may be used to easily identify the last successful merge
849 # commit in the shadow repository.
849 # commit in the shadow repository.
850 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
850 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
851 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
851 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
852 except SubrepoMergeError:
852 except SubrepoMergeError:
853 log.exception(
853 log.exception(
854 'Subrepo merge error during local merge on hg shadow repo.')
854 'Subrepo merge error during local merge on hg shadow repo.')
855 merge_possible = False
855 merge_possible = False
856 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
856 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
857 needs_push = False
857 needs_push = False
858 except RepositoryError as e:
858 except RepositoryError as e:
859 log.exception('Failure when doing local merge on hg shadow repo')
859 log.exception('Failure when doing local merge on hg shadow repo')
860 if isinstance(e, UnresolvedFilesInRepo):
860 if isinstance(e, UnresolvedFilesInRepo):
861 all_conflicts = list(e.args[0])
861 all_conflicts = list(e.args[0])
862 max_conflicts = 20
862 max_conflicts = 20
863 if len(all_conflicts) > max_conflicts:
863 if len(all_conflicts) > max_conflicts:
864 conflicts = all_conflicts[:max_conflicts] \
864 conflicts = all_conflicts[:max_conflicts] \
865 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
865 + [f'and {len(all_conflicts)-max_conflicts} more.']
866 else:
866 else:
867 conflicts = all_conflicts
867 conflicts = all_conflicts
868 metadata['unresolved_files'] = \
868 metadata['unresolved_files'] = \
869 '\n* conflict: ' + \
869 '\n* conflict: ' + \
870 ('\n * conflict: '.join(conflicts))
870 ('\n * conflict: '.join(conflicts))
871
871
872 merge_possible = False
872 merge_possible = False
873 merge_failure_reason = MergeFailureReason.MERGE_FAILED
873 merge_failure_reason = MergeFailureReason.MERGE_FAILED
874 needs_push = False
874 needs_push = False
875
875
876 if merge_possible and not dry_run:
876 if merge_possible and not dry_run:
877 if needs_push:
877 if needs_push:
878 # In case the target is a bookmark, update it, so after pushing
878 # In case the target is a bookmark, update it, so after pushing
879 # the bookmark is also updated in the target.
879 # the bookmark is also updated in the target.
880 if target_ref.type == 'book':
880 if target_ref.type == 'book':
881 shadow_repo.bookmark(
881 shadow_repo.bookmark(
882 target_ref.name, revision=merge_commit_id)
882 target_ref.name, revision=merge_commit_id)
883 try:
883 try:
884 shadow_repo_with_hooks = self.get_shadow_instance(
884 shadow_repo_with_hooks = self.get_shadow_instance(
885 shadow_repository_path,
885 shadow_repository_path,
886 enable_hooks=True)
886 enable_hooks=True)
887 # This is the actual merge action, we push from shadow
887 # This is the actual merge action, we push from shadow
888 # into origin.
888 # into origin.
889 # Note: the push_branches option will push any new branch
889 # Note: the push_branches option will push any new branch
890 # defined in the source repository to the target. This may
890 # defined in the source repository to the target. This may
891 # be dangerous as branches are permanent in Mercurial.
891 # be dangerous as branches are permanent in Mercurial.
892 # This feature was requested in issue #441.
892 # This feature was requested in issue #441.
893 shadow_repo_with_hooks._local_push(
893 shadow_repo_with_hooks._local_push(
894 merge_commit_id, self.path, push_branches=True,
894 merge_commit_id, self.path, push_branches=True,
895 enable_hooks=True)
895 enable_hooks=True)
896
896
897 # maybe we also need to push the close_commit_id
897 # maybe we also need to push the close_commit_id
898 if close_commit_id:
898 if close_commit_id:
899 shadow_repo_with_hooks._local_push(
899 shadow_repo_with_hooks._local_push(
900 close_commit_id, self.path, push_branches=True,
900 close_commit_id, self.path, push_branches=True,
901 enable_hooks=True)
901 enable_hooks=True)
902 merge_succeeded = True
902 merge_succeeded = True
903 except RepositoryError:
903 except RepositoryError:
904 log.exception(
904 log.exception(
905 'Failure when doing local push from the shadow '
905 'Failure when doing local push from the shadow '
906 'repository to the target repository at %s.', self.path)
906 'repository to the target repository at %s.', self.path)
907 merge_succeeded = False
907 merge_succeeded = False
908 merge_failure_reason = MergeFailureReason.PUSH_FAILED
908 merge_failure_reason = MergeFailureReason.PUSH_FAILED
909 metadata['target'] = 'hg shadow repo'
909 metadata['target'] = 'hg shadow repo'
910 metadata['merge_commit'] = merge_commit_id
910 metadata['merge_commit'] = merge_commit_id
911 else:
911 else:
912 merge_succeeded = True
912 merge_succeeded = True
913 else:
913 else:
914 merge_succeeded = False
914 merge_succeeded = False
915
915
916 return MergeResponse(
916 return MergeResponse(
917 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
917 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
918 metadata=metadata)
918 metadata=metadata)
919
919
920 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
920 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
921 config = self.config.copy()
921 config = self.config.copy()
922 if not enable_hooks:
922 if not enable_hooks:
923 config.clear_section('hooks')
923 config.clear_section('hooks')
924 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
924 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
925
925
926 def _validate_pull_reference(self, reference):
926 def _validate_pull_reference(self, reference):
927 if not (reference.name in self.bookmarks or
927 if not (reference.name in self.bookmarks or
928 reference.name in self.branches or
928 reference.name in self.branches or
929 self.get_commit(reference.commit_id)):
929 self.get_commit(reference.commit_id)):
930 raise CommitDoesNotExistError(
930 raise CommitDoesNotExistError(
931 'Unknown branch, bookmark or commit id')
931 'Unknown branch, bookmark or commit id')
932
932
933 def _local_pull(self, repository_path, reference):
933 def _local_pull(self, repository_path, reference):
934 """
934 """
935 Fetch a branch, bookmark or commit from a local repository.
935 Fetch a branch, bookmark or commit from a local repository.
936 """
936 """
937 repository_path = os.path.abspath(repository_path)
937 repository_path = os.path.abspath(repository_path)
938 if repository_path == self.path:
938 if repository_path == self.path:
939 raise ValueError('Cannot pull from the same repository')
939 raise ValueError('Cannot pull from the same repository')
940
940
941 reference_type_to_option_name = {
941 reference_type_to_option_name = {
942 'book': 'bookmark',
942 'book': 'bookmark',
943 'branch': 'branch',
943 'branch': 'branch',
944 }
944 }
945 option_name = reference_type_to_option_name.get(
945 option_name = reference_type_to_option_name.get(
946 reference.type, 'revision')
946 reference.type, 'revision')
947
947
948 if option_name == 'revision':
948 if option_name == 'revision':
949 ref = reference.commit_id
949 ref = reference.commit_id
950 else:
950 else:
951 ref = reference.name
951 ref = reference.name
952
952
953 options = {option_name: [ref]}
953 options = {option_name: [ref]}
954 self._remote.pull_cmd(repository_path, hooks=False, **options)
954 self._remote.pull_cmd(repository_path, hooks=False, **options)
955 self._remote.invalidate_vcs_cache()
955 self._remote.invalidate_vcs_cache()
956
956
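# Mapping sketch (editor's illustration of the table above): a
# Reference('book', 'feature-x', <id>) pulls with
# {'bookmark': ['feature-x']}, a 'branch' ref with {'branch': [name]},
# and any other reference type falls back to {'revision': [commit_id]}.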
957 def bookmark(self, bookmark, revision=None):
957 def bookmark(self, bookmark, revision=None):
958 if isinstance(bookmark, str):
958 if isinstance(bookmark, str):
959 bookmark = safe_str(bookmark)
959 bookmark = safe_str(bookmark)
960 self._remote.bookmark(bookmark, revision=revision)
960 self._remote.bookmark(bookmark, revision=revision)
961 self._remote.invalidate_vcs_cache()
961 self._remote.invalidate_vcs_cache()
962
962
963 def get_path_permissions(self, username):
963 def get_path_permissions(self, username):
964 hgacl_file = os.path.join(self.path, '.hg/hgacl')
964 hgacl_file = os.path.join(self.path, '.hg/hgacl')
965
965
966 def read_patterns(suffix):
966 def read_patterns(suffix):
967 svalue = None
967 svalue = None
968 for section, option in [
968 for section, option in [
969 ('narrowacl', username + suffix),
969 ('narrowacl', username + suffix),
970 ('narrowacl', 'default' + suffix),
970 ('narrowacl', 'default' + suffix),
971 ('narrowhgacl', username + suffix),
971 ('narrowhgacl', username + suffix),
972 ('narrowhgacl', 'default' + suffix)
972 ('narrowhgacl', 'default' + suffix)
973 ]:
973 ]:
974 try:
974 try:
975 svalue = hgacl.get(section, option)
975 svalue = hgacl.get(section, option)
976 break # stop at the first value we find
976 break # stop at the first value we find
977 except configparser.NoOptionError:
977 except configparser.NoOptionError:
978 pass
978 pass
979 if not svalue:
979 if not svalue:
980 return None
980 return None
981 result = ['/']
981 result = ['/']
982 for pattern in svalue.split():
982 for pattern in svalue.split():
983 result.append(pattern)
983 result.append(pattern)
984 if '*' not in pattern and '?' not in pattern:
984 if '*' not in pattern and '?' not in pattern:
985 result.append(pattern + '/*')
985 result.append(pattern + '/*')
986 return result
986 return result
987
987
988 if os.path.exists(hgacl_file):
988 if os.path.exists(hgacl_file):
989 try:
989 try:
990 hgacl = configparser.RawConfigParser()
990 hgacl = configparser.RawConfigParser()
991 hgacl.read(hgacl_file)
991 hgacl.read(hgacl_file)
992
992
993 includes = read_patterns('.includes')
993 includes = read_patterns('.includes')
994 excludes = read_patterns('.excludes')
994 excludes = read_patterns('.excludes')
995 return BasePathPermissionChecker.create_from_patterns(
995 return BasePathPermissionChecker.create_from_patterns(
996 includes, excludes)
996 includes, excludes)
997 except BaseException as e:
997 except BaseException as e:
998 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
998 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
999 hgacl_file, self.name, e)
999 hgacl_file, self.name, e)
1000 raise exceptions.RepositoryRequirementError(msg)
1000 raise exceptions.RepositoryRequirementError(msg)
1001 else:
1001 else:
1002 return None
1002 return None
1003
1003
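# Example `.hg/hgacl` file read above (editor's hypothetical contents;
# options are '<username>.includes' / '<username>.excludes' with
# 'default' as fallback, and patterns are whitespace-separated):
#
#   [narrowacl]
#   john.includes = docs/* src/module-a
#   john.excludes = src/module-a/private
#   default.includes = docs/*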
1004
1004
1005 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1005 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1006
1006
1007 def _commit_factory(self, commit_id):
1007 def _commit_factory(self, commit_id):
1008 if isinstance(commit_id, int):
1008 if isinstance(commit_id, int):
1009 return self.repo.get_commit(
1009 return self.repo.get_commit(
1010 commit_idx=commit_id, pre_load=self.pre_load)
1010 commit_idx=commit_id, pre_load=self.pre_load)
1011 else:
1011 else:
1012 return self.repo.get_commit(
1012 return self.repo.get_commit(
1013 commit_id=commit_id, pre_load=self.pre_load)
1013 commit_id=commit_id, pre_load=self.pre_load)
@@ -1,254 +1,254 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 SVN commit module
20 SVN commit module
21 """
21 """
22
22
23
23
24 import dateutil.parser
24 import dateutil.parser
25 from zope.cachedescriptors.property import Lazy as LazyProperty
25 from zope.cachedescriptors.property import Lazy as LazyProperty
26
26
27 from rhodecode.lib.str_utils import safe_bytes, safe_str
27 from rhodecode.lib.str_utils import safe_bytes, safe_str
28 from rhodecode.lib.vcs import nodes, path as vcspath
28 from rhodecode.lib.vcs import nodes, path as vcspath
29 from rhodecode.lib.vcs.backends import base
29 from rhodecode.lib.vcs.backends import base
30 from rhodecode.lib.vcs.exceptions import CommitError
30 from rhodecode.lib.vcs.exceptions import CommitError
31
31
32
32
33 _SVN_PROP_TRUE = '*'
33 _SVN_PROP_TRUE = '*'
34
34
35
35
36 class SubversionCommit(base.BaseCommit):
36 class SubversionCommit(base.BaseCommit):
37 """
37 """
38 Subversion specific implementation of commits
38 Subversion specific implementation of commits
39
39
40 .. attribute:: branch
40 .. attribute:: branch
41
41
42 The Subversion backend does not support assigning branches to
42 The Subversion backend does not support assigning branches to
43 specific commits. This attribute always has the value `None`.
43 specific commits. This attribute always has the value `None`.
44
44
45 """
45 """
46
46
47 def __init__(self, repository, commit_id):
47 def __init__(self, repository, commit_id):
48 self.repository = repository
48 self.repository = repository
49 self.idx = self.repository._get_commit_idx(commit_id)
49 self.idx = self.repository._get_commit_idx(commit_id)
50 self._svn_rev = self.idx + 1
50 self._svn_rev = self.idx + 1
51 self._remote = repository._remote
51 self._remote = repository._remote
52 # TODO: handling of raw_id should be a method on repository itself,
52 # TODO: handling of raw_id should be a method on repository itself,
53 # which knows how to translate commit index and commit id
53 # which knows how to translate commit index and commit id
54 self.raw_id = commit_id
54 self.raw_id = commit_id
55 self.short_id = commit_id
55 self.short_id = commit_id
56 self.id = 'r{}'.format(commit_id)
56 self.id = f'r{commit_id}'
57
57
58 # TODO: Implement the following placeholder attributes
58 # TODO: Implement the following placeholder attributes
59 self.nodes = {}
59 self.nodes = {}
60 self.tags = []
60 self.tags = []
61
61
62 @property
62 @property
63 def author(self):
63 def author(self):
64 return safe_str(self._properties.get('svn:author'))
64 return safe_str(self._properties.get('svn:author'))
65
65
66 @property
66 @property
67 def date(self):
67 def date(self):
68 return _date_from_svn_properties(self._properties)
68 return _date_from_svn_properties(self._properties)
69
69
70 @property
70 @property
71 def message(self):
71 def message(self):
72 return safe_str(self._properties.get('svn:log'))
72 return safe_str(self._properties.get('svn:log'))
73
73
74 @LazyProperty
74 @LazyProperty
75 def _properties(self):
75 def _properties(self):
76 return self._remote.revision_properties(self._svn_rev)
76 return self._remote.revision_properties(self._svn_rev)
77
77
78 @LazyProperty
78 @LazyProperty
79 def parents(self):
79 def parents(self):
80 parent_idx = self.idx - 1
80 parent_idx = self.idx - 1
81 if parent_idx >= 0:
81 if parent_idx >= 0:
82 parent = self.repository.get_commit(commit_idx=parent_idx)
82 parent = self.repository.get_commit(commit_idx=parent_idx)
83 return [parent]
83 return [parent]
84 return []
84 return []
85
85
86 @LazyProperty
86 @LazyProperty
87 def children(self):
87 def children(self):
88 child_idx = self.idx + 1
88 child_idx = self.idx + 1
89 if child_idx < len(self.repository.commit_ids):
89 if child_idx < len(self.repository.commit_ids):
90 child = self.repository.get_commit(commit_idx=child_idx)
90 child = self.repository.get_commit(commit_idx=child_idx)
91 return [child]
91 return [child]
92 return []
92 return []
93
93
94 def get_file_mode(self, path: bytes):
94 def get_file_mode(self, path: bytes):
95 # Note: Subversion flags executable files with the special
95 # Note: Subversion flags executable files with the special
96 # property `svn:executable`, which is set to the value ``"*"``.
96 # property `svn:executable`, which is set to the value ``"*"``.
97 if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
97 if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
98 return base.FILEMODE_EXECUTABLE
98 return base.FILEMODE_EXECUTABLE
99 else:
99 else:
100 return base.FILEMODE_DEFAULT
100 return base.FILEMODE_DEFAULT
101
101
102 def is_link(self, path):
102 def is_link(self, path):
103 # Note: Subversion has a flag for special files; the content of
104 # such a file contains the type of that file.
103 # Note: Subversion has a flag for special files; the content of
104 # such a file contains the type of that file.
105 if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
105 if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
106 return self.get_file_content(path).startswith(b'link')
106 return self.get_file_content(path).startswith(b'link')
107 return False
107 return False
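# Illustrative sketch of how these two checks behave (hypothetical paths,
# assumes a SubversionCommit instance ``commit``):
#
#   commit.get_file_mode(b'scripts/run.sh')
#   # -> FILEMODE_EXECUTABLE when 'svn:executable' is set to '*'
#   commit.is_link('link-to-readme')
#   # -> True when 'svn:special' is '*' and the content starts with b'link'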
108
108
109 def is_node_binary(self, path):
109 def is_node_binary(self, path):
110 path = self._fix_path(path)
110 path = self._fix_path(path)
111 return self._remote.is_binary(self._svn_rev, safe_str(path))
111 return self._remote.is_binary(self._svn_rev, safe_str(path))
112
112
113 def node_md5_hash(self, path):
113 def node_md5_hash(self, path):
114 path = self._fix_path(path)
114 path = self._fix_path(path)
115 return self._remote.md5_hash(self._svn_rev, safe_str(path))
115 return self._remote.md5_hash(self._svn_rev, safe_str(path))
116
116
117 def _get_file_property(self, path, name):
117 def _get_file_property(self, path, name):
118 file_properties = self._remote.node_properties(
118 file_properties = self._remote.node_properties(
119 safe_str(path), self._svn_rev)
119 safe_str(path), self._svn_rev)
120 return file_properties.get(name)
120 return file_properties.get(name)
121
121
122 def get_file_content(self, path):
122 def get_file_content(self, path):
123 path = self._fix_path(path)
123 path = self._fix_path(path)
124 return self._remote.get_file_content(self._svn_rev, safe_str(path))
124 return self._remote.get_file_content(self._svn_rev, safe_str(path))
125
125
126 def get_file_content_streamed(self, path):
126 def get_file_content_streamed(self, path):
127 path = self._fix_path(path)
127 path = self._fix_path(path)
128
128
129 stream_method = getattr(self._remote, 'stream:get_file_content')
129 stream_method = getattr(self._remote, 'stream:get_file_content')
130 return stream_method(self._svn_rev, safe_str(path))
130 return stream_method(self._svn_rev, safe_str(path))
131
131
132 def get_file_size(self, path):
132 def get_file_size(self, path):
133 path = self._fix_path(path)
133 path = self._fix_path(path)
134 return self._remote.get_file_size(self._svn_rev, safe_str(path))
134 return self._remote.get_file_size(self._svn_rev, safe_str(path))
135
135
136 def get_path_history(self, path, limit=None, pre_load=None):
136 def get_path_history(self, path, limit=None, pre_load=None):
137 path = safe_str(self._fix_path(path))
137 path = safe_str(self._fix_path(path))
138 history = self._remote.node_history(path, self._svn_rev, limit)
138 history = self._remote.node_history(path, self._svn_rev, limit)
139 return [
139 return [
140 self.repository.get_commit(commit_id=str(svn_rev))
140 self.repository.get_commit(commit_id=str(svn_rev))
141 for svn_rev in history]
141 for svn_rev in history]
142
142
143 def get_file_annotate(self, path, pre_load=None):
143 def get_file_annotate(self, path, pre_load=None):
144 result = self._remote.file_annotate(safe_str(path), self._svn_rev)
144 result = self._remote.file_annotate(safe_str(path), self._svn_rev)
145
145
146 for zero_based_line_no, svn_rev, content in result:
146 for zero_based_line_no, svn_rev, content in result:
147 commit_id = str(svn_rev)
147 commit_id = str(svn_rev)
148 line_no = zero_based_line_no + 1
148 line_no = zero_based_line_no + 1
149 yield (
149 yield (
150 line_no,
150 line_no,
151 commit_id,
151 commit_id,
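# bind commit_id as a default argument so each yielded callable
# keeps its own value (avoids the late-binding closure pitfall)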
152 lambda commit_id=commit_id: self.repository.get_commit(commit_id=commit_id),
152 lambda commit_id=commit_id: self.repository.get_commit(commit_id=commit_id),
153 content)
153 content)
154
154
155 def get_node(self, path, pre_load=None):
155 def get_node(self, path, pre_load=None):
156 path = self._fix_path(path)
156 path = self._fix_path(path)
157 if path not in self.nodes:
157 if path not in self.nodes:
158
158
159 if path == '':
159 if path == '':
160 node = nodes.RootNode(commit=self)
160 node = nodes.RootNode(commit=self)
161 else:
161 else:
162 node_type = self._remote.get_node_type(self._svn_rev, safe_str(path))
162 node_type = self._remote.get_node_type(self._svn_rev, safe_str(path))
163 if node_type == 'dir':
163 if node_type == 'dir':
164 node = nodes.DirNode(safe_bytes(path), commit=self)
164 node = nodes.DirNode(safe_bytes(path), commit=self)
165 elif node_type == 'file':
165 elif node_type == 'file':
166 node = nodes.FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
166 node = nodes.FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
167 else:
167 else:
168 raise self.no_node_at_path(path)
168 raise self.no_node_at_path(path)
169
169
170 self.nodes[path] = node
170 self.nodes[path] = node
171 return self.nodes[path]
171 return self.nodes[path]
172
172
173 def get_nodes(self, path, pre_load=None):
173 def get_nodes(self, path, pre_load=None):
174 if self._get_kind(path) != nodes.NodeKind.DIR:
174 if self._get_kind(path) != nodes.NodeKind.DIR:
175 raise CommitError(
175 raise CommitError(
176 f"Directory does not exist for commit {self.raw_id} at '{path}'")
176 f"Directory does not exist for commit {self.raw_id} at '{path}'")
177 path = safe_str(self._fix_path(path))
177 path = safe_str(self._fix_path(path))
178
178
179 path_nodes = []
179 path_nodes = []
180 for name, kind in self._remote.get_nodes(self._svn_rev, path):
180 for name, kind in self._remote.get_nodes(self._svn_rev, path):
181 node_path = vcspath.join(path, name)
181 node_path = vcspath.join(path, name)
182 if kind == 'dir':
182 if kind == 'dir':
183 node = nodes.DirNode(safe_bytes(node_path), commit=self)
183 node = nodes.DirNode(safe_bytes(node_path), commit=self)
184 elif kind == 'file':
184 elif kind == 'file':
185 node = nodes.FileNode(safe_bytes(node_path), commit=self, pre_load=pre_load)
185 node = nodes.FileNode(safe_bytes(node_path), commit=self, pre_load=pre_load)
186 else:
186 else:
187 raise ValueError(f"Node kind {kind} not supported.")
187 raise ValueError(f"Node kind {kind} not supported.")
188 self.nodes[node_path] = node
188 self.nodes[node_path] = node
189 path_nodes.append(node)
189 path_nodes.append(node)
190
190
191 return path_nodes
191 return path_nodes
192
192
193 def _get_kind(self, path):
193 def _get_kind(self, path):
194 path = self._fix_path(path)
194 path = self._fix_path(path)
195 kind = self._remote.get_node_type(self._svn_rev, path)
195 kind = self._remote.get_node_type(self._svn_rev, path)
196 if kind == 'file':
196 if kind == 'file':
197 return nodes.NodeKind.FILE
197 return nodes.NodeKind.FILE
198 elif kind == 'dir':
198 elif kind == 'dir':
199 return nodes.NodeKind.DIR
199 return nodes.NodeKind.DIR
200 else:
200 else:
201 raise CommitError(
201 raise CommitError(
202 "Node does not exist at the given path '{}'".format(path))
202 f"Node does not exist at the given path '{path}'")
203
203
204 @LazyProperty
204 @LazyProperty
205 def _changes_cache(self):
205 def _changes_cache(self):
206 return self._remote.revision_changes(self._svn_rev)
206 return self._remote.revision_changes(self._svn_rev)
207
207
208 @LazyProperty
208 @LazyProperty
209 def affected_files(self):
209 def affected_files(self):
210 changed_files = set()
210 changed_files = set()
211 for files in self._changes_cache.values():
211 for files in self._changes_cache.values():
212 changed_files.update(files)
212 changed_files.update(files)
213 return list(changed_files)
213 return list(changed_files)
214
214
215 @LazyProperty
215 @LazyProperty
216 def id(self):
216 def id(self):
217 return self.raw_id
217 return self.raw_id
218
218
219 @property
219 @property
220 def added(self):
220 def added(self):
221 return nodes.AddedFileNodesGenerator(self.added_paths, self)
221 return nodes.AddedFileNodesGenerator(self.added_paths, self)
222
222
223 @LazyProperty
223 @LazyProperty
224 def added_paths(self):
224 def added_paths(self):
225 return list(self._changes_cache['added'])
225 return list(self._changes_cache['added'])
226
226
227 @property
227 @property
228 def changed(self):
228 def changed(self):
229 return nodes.ChangedFileNodesGenerator(self.changed_paths, self)
229 return nodes.ChangedFileNodesGenerator(self.changed_paths, self)
230
230
231 @LazyProperty
231 @LazyProperty
232 def changed_paths(self):
232 def changed_paths(self):
233 return list(self._changes_cache['changed'])
233 return list(self._changes_cache['changed'])
234
234
235 @property
235 @property
236 def removed(self):
236 def removed(self):
237 return nodes.RemovedFileNodesGenerator(self.removed_paths, self)
237 return nodes.RemovedFileNodesGenerator(self.removed_paths, self)
238
238
239 @LazyProperty
239 @LazyProperty
240 def removed_paths(self):
240 def removed_paths(self):
241 return list(self._changes_cache['removed'])
241 return list(self._changes_cache['removed'])
242
242
243
243
244 def _date_from_svn_properties(properties):
244 def _date_from_svn_properties(properties):
245 """
245 """
246 Parses the date out of the given svn properties.
246 Parses the date out of the given svn properties.
247
247
248 :return: :class:`datetime.datetime` instance. The object is naive.
248 :return: :class:`datetime.datetime` instance. The object is naive.
249 """
249 """
250
250
251 aware_date = dateutil.parser.parse(properties.get('svn:date'))
251 aware_date = dateutil.parser.parse(properties.get('svn:date'))
252 # final_date = aware_date.astimezone(dateutil.tz.tzlocal())
252 # final_date = aware_date.astimezone(dateutil.tz.tzlocal())
253 final_date = aware_date
253 final_date = aware_date
254 return final_date.replace(tzinfo=None)
254 return final_date.replace(tzinfo=None)
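# Illustrative example: Subversion stores 'svn:date' as ISO-8601 in UTC, so
#   _date_from_svn_properties({'svn:date': '2023-05-01T12:30:00.000000Z'})
# would return the naive datetime(2023, 5, 1, 12, 30) with tzinfo stripped.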
@@ -1,367 +1,367 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 SVN repository module
20 SVN repository module
21 """
21 """
22
22
23 import logging
23 import logging
24 import os
24 import os
25 import urllib.request
25 import urllib.request
26 import urllib.parse
26 import urllib.parse
27 import urllib.error
27 import urllib.error
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from collections import OrderedDict
31 from collections import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.str_utils import safe_str
33 from rhodecode.lib.str_utils import safe_str
34 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends.svn.commit import (
37 from rhodecode.lib.vcs.backends.svn.commit import (
38 SubversionCommit, _date_from_svn_properties)
38 SubversionCommit, _date_from_svn_properties)
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 VCSError, NodeDoesNotExistError)
44 VCSError, NodeDoesNotExistError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class SubversionRepository(base.BaseRepository):
50 class SubversionRepository(base.BaseRepository):
51 """
51 """
52 Subversion backend implementation
52 Subversion backend implementation
53
53
54 .. important::
54 .. important::
55
55
56 It is very important to distinguish the commit index and the commit id
56 It is very important to distinguish the commit index and the commit id
57 which is assigned by Subversion. The first one is always handled as an
57 which is assigned by Subversion. The first one is always handled as an
58 `int` by this implementation. The commit id assigned by Subversion, on
58 `int` by this implementation. The commit id assigned by Subversion, on
59 the other hand, will always be a `str`.
59 the other hand, will always be a `str`.
60
60
61 There is a specific trap since the first commit will have the index
61 There is a specific trap since the first commit will have the index
62 ``0`` but the svn id will be ``"1"``.
62 ``0`` but the svn id will be ``"1"``.
63
63
64 """
64 """
65
65
66 # Note: Subversion does not really have a default branch name.
66 # Note: Subversion does not really have a default branch name.
67 DEFAULT_BRANCH_NAME = None
67 DEFAULT_BRANCH_NAME = None
68
68
69 contact = base.BaseRepository.DEFAULT_CONTACT
69 contact = base.BaseRepository.DEFAULT_CONTACT
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
71
71
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
73 bare=False, **kwargs):
73 bare=False, **kwargs):
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 self.config = config if config else self.get_default_config()
75 self.config = config if config else self.get_default_config()
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
77
77
78 self._init_repo(create, src_url)
78 self._init_repo(create, src_url)
79
79
80 # caches
80 # caches
81 self._commit_ids = {}
81 self._commit_ids = {}
82
82
83 @LazyProperty
83 @LazyProperty
84 def _remote(self):
84 def _remote(self):
85 repo_id = self.path
85 repo_id = self.path
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
87
87
88 def _init_repo(self, create, src_url):
88 def _init_repo(self, create, src_url):
89 if create and os.path.exists(self.path):
89 if create and os.path.exists(self.path):
90 raise RepositoryError(
90 raise RepositoryError(
91 f"Cannot create repository at {self.path}, location already exist"
91 f"Cannot create repository at {self.path}, location already exist"
92 )
92 )
93
93
94 if create:
94 if create:
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
96 if src_url:
96 if src_url:
97 src_url = _sanitize_url(src_url)
97 src_url = _sanitize_url(src_url)
98 self._remote.import_remote_repository(src_url)
98 self._remote.import_remote_repository(src_url)
99 else:
99 else:
100 self._check_path()
100 self._check_path()
101
101
102 @CachedProperty
102 @CachedProperty
103 def commit_ids(self):
103 def commit_ids(self):
104 head = self._remote.lookup(None)
104 head = self._remote.lookup(None)
105 return [str(r) for r in range(1, head + 1)]
105 return [str(r) for r in range(1, head + 1)]
106
106
107 def _rebuild_cache(self, commit_ids):
107 def _rebuild_cache(self, commit_ids):
108 pass
108 pass
109
109
110 def run_svn_command(self, cmd, **opts):
110 def run_svn_command(self, cmd, **opts):
111 """
111 """
112 Runs the given ``cmd`` as an svn command and returns the tuple
112 Runs the given ``cmd`` as an svn command and returns the tuple
113 (stdout, stderr).
113 (stdout, stderr).
114
114
115 :param cmd: full svn command to be executed
115 :param cmd: full svn command to be executed
116 :param opts: env options to pass into Subprocess command
116 :param opts: env options to pass into Subprocess command
117 """
117 """
118 if not isinstance(cmd, list):
118 if not isinstance(cmd, list):
119 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
119 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
120
120
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 out, err = self._remote.run_svn_command(cmd, **opts)
122 out, err = self._remote.run_svn_command(cmd, **opts)
123 if err and not skip_stderr_log:
123 if err and not skip_stderr_log:
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 return out, err
125 return out, err
126
126
127 @LazyProperty
127 @LazyProperty
128 def branches(self):
128 def branches(self):
129 return self._tags_or_branches('vcs_svn_branch')
129 return self._tags_or_branches('vcs_svn_branch')
130
130
131 @LazyProperty
131 @LazyProperty
132 def branches_closed(self):
132 def branches_closed(self):
133 return {}
133 return {}
134
134
135 @LazyProperty
135 @LazyProperty
136 def bookmarks(self):
136 def bookmarks(self):
137 return {}
137 return {}
138
138
139 @LazyProperty
139 @LazyProperty
140 def branches_all(self):
140 def branches_all(self):
141 # TODO: johbo: Implement proper branch support
141 # TODO: johbo: Implement proper branch support
142 all_branches = {}
142 all_branches = {}
143 all_branches.update(self.branches)
143 all_branches.update(self.branches)
144 all_branches.update(self.branches_closed)
144 all_branches.update(self.branches_closed)
145 return all_branches
145 return all_branches
146
146
147 @LazyProperty
147 @LazyProperty
148 def tags(self):
148 def tags(self):
149 return self._tags_or_branches('vcs_svn_tag')
149 return self._tags_or_branches('vcs_svn_tag')
150
150
151 def _tags_or_branches(self, config_section):
151 def _tags_or_branches(self, config_section):
152 found_items = {}
152 found_items = {}
153
153
154 if self.is_empty():
154 if self.is_empty():
155 return {}
155 return {}
156
156
157 for pattern in self._patterns_from_section(config_section):
157 for pattern in self._patterns_from_section(config_section):
158 pattern = vcspath.sanitize(pattern)
158 pattern = vcspath.sanitize(pattern)
159 tip = self.get_commit()
159 tip = self.get_commit()
160 try:
160 try:
161 if pattern.endswith('*'):
161 if pattern.endswith('*'):
162 basedir = tip.get_node(vcspath.dirname(pattern))
162 basedir = tip.get_node(vcspath.dirname(pattern))
163 directories = basedir.dirs
163 directories = basedir.dirs
164 else:
164 else:
165 directories = (tip.get_node(pattern), )
165 directories = (tip.get_node(pattern), )
166 except NodeDoesNotExistError:
166 except NodeDoesNotExistError:
167 continue
167 continue
168 found_items.update((safe_str(n.path), self.commit_ids[-1]) for n in directories)
168 found_items.update((safe_str(n.path), self.commit_ids[-1]) for n in directories)
169
169
170 def get_name(item):
170 def get_name(item):
171 return item[0]
171 return item[0]
172
172
173 return OrderedDict(sorted(found_items.items(), key=get_name))
173 return OrderedDict(sorted(found_items.items(), key=get_name))
174
174
175 def _patterns_from_section(self, section):
175 def _patterns_from_section(self, section):
176 return (pattern for key, pattern in self.config.items(section))
176 return (pattern for key, pattern in self.config.items(section))
177
177
178 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
178 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
179 if self != repo2:
179 if self != repo2:
180 raise ValueError(
180 raise ValueError(
181 "Subversion does not support getting common ancestor of"
181 "Subversion does not support getting common ancestor of"
182 " different repositories.")
182 " different repositories.")
183
183
184 if int(commit_id1) < int(commit_id2):
184 if int(commit_id1) < int(commit_id2):
185 return commit_id1
185 return commit_id1
186 return commit_id2
186 return commit_id2
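# Since Subversion history is linear, the common ancestor within one
# repository is simply the lower revision. Illustrative example:
#
#   repo.get_common_ancestor('42', '7', repo)  # -> '7'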
187
187
188 def verify(self):
188 def verify(self):
189 verify = self._remote.verify()
189 verify = self._remote.verify()
190
190
191 self._remote.invalidate_vcs_cache()
191 self._remote.invalidate_vcs_cache()
192 return verify
192 return verify
193
193
194 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
194 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
195 # TODO: johbo: Implement better comparison, this is a very naive
195 # TODO: johbo: Implement better comparison, this is a very naive
196 # version which does not allow to compare branches, tags or folders
196 # version which does not allow to compare branches, tags or folders
197 # at all.
197 # at all.
198 if repo2 != self:
198 if repo2 != self:
199 raise ValueError(
199 raise ValueError(
200 "Subversion does not support comparison of of different "
200 "Subversion does not support comparison of of different "
201 "repositories.")
201 "repositories.")
202
202
203 if commit_id1 == commit_id2:
203 if commit_id1 == commit_id2:
204 return []
204 return []
205
205
206 commit_idx1 = self._get_commit_idx(commit_id1)
206 commit_idx1 = self._get_commit_idx(commit_id1)
207 commit_idx2 = self._get_commit_idx(commit_id2)
207 commit_idx2 = self._get_commit_idx(commit_id2)
208
208
209 commits = [
209 commits = [
210 self.get_commit(commit_idx=idx)
210 self.get_commit(commit_idx=idx)
211 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
211 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
212
212
213 return commits
213 return commits
214
214
215 def _get_commit_idx(self, commit_id):
215 def _get_commit_idx(self, commit_id):
216 try:
216 try:
217 svn_rev = int(commit_id)
217 svn_rev = int(commit_id)
218 except (ValueError, TypeError):
218 except (ValueError, TypeError):
219 # TODO: johbo: this might be only one case, HEAD, check this
219 # TODO: johbo: this might be only one case, HEAD, check this
220 svn_rev = self._remote.lookup(commit_id)
220 svn_rev = self._remote.lookup(commit_id)
221 commit_idx = svn_rev - 1
221 commit_idx = svn_rev - 1
222 if commit_idx >= len(self.commit_ids):
222 if commit_idx >= len(self.commit_ids):
223 raise CommitDoesNotExistError(
223 raise CommitDoesNotExistError(
224 "Commit at index {} does not exist.".format(commit_idx))
224 f"Commit at index {commit_idx} does not exist.")
225 return commit_idx
225 return commit_idx
226
226
227 @staticmethod
227 @staticmethod
228 def check_url(url, config):
228 def check_url(url, config):
229 """
229 """
230 Check if `url` is a valid source to import a Subversion repository.
230 Check if `url` is a valid source to import a Subversion repository.
231 """
231 """
232 # convert to URL if it's a local directory
232 # convert to URL if it's a local directory
233 if os.path.isdir(url):
233 if os.path.isdir(url):
234 url = 'file://' + urllib.request.pathname2url(url)
234 url = 'file://' + urllib.request.pathname2url(url)
235 return connection.Svn.check_url(url, config.serialize())
235 return connection.Svn.check_url(url, config.serialize())
236
236
237 @staticmethod
237 @staticmethod
238 def is_valid_repository(path):
238 def is_valid_repository(path):
239 try:
239 try:
240 SubversionRepository(path)
240 SubversionRepository(path)
241 return True
241 return True
242 except VCSError:
242 except VCSError:
243 pass
243 pass
244 return False
244 return False
245
245
246 def _check_path(self):
246 def _check_path(self):
247 if not os.path.exists(self.path):
247 if not os.path.exists(self.path):
248 raise VCSError('Path "{}" does not exist!'.format(self.path))
248 raise VCSError(f'Path "{self.path}" does not exist!')
249 if not self._remote.is_path_valid_repository(self.path):
249 if not self._remote.is_path_valid_repository(self.path):
250 raise VCSError(
250 raise VCSError(
251 'Path "%s" does not contain a Subversion repository' %
251 'Path "%s" does not contain a Subversion repository' %
252 (self.path, ))
252 (self.path, ))
253
253
254 @LazyProperty
254 @LazyProperty
255 def last_change(self):
255 def last_change(self):
256 """
256 """
257 Returns last change made on this repository as
257 Returns last change made on this repository as
258 `datetime.datetime` object.
258 `datetime.datetime` object.
259 """
259 """
260 # Subversion always has an initial revision 0, so ``len(commit_ids)``
260 # Subversion always has an initial revision 0, so ``len(commit_ids)``
261 # equals the latest revision number, whose properties hold the date.
261 # equals the latest revision number, whose properties hold the date.
262 last_id = len(self.commit_ids)
262 last_id = len(self.commit_ids)
263 properties = self._remote.revision_properties(last_id)
263 properties = self._remote.revision_properties(last_id)
264 return _date_from_svn_properties(properties)
264 return _date_from_svn_properties(properties)
265
265
266 @LazyProperty
266 @LazyProperty
267 def in_memory_commit(self):
267 def in_memory_commit(self):
268 return SubversionInMemoryCommit(self)
268 return SubversionInMemoryCommit(self)
269
269
270 def get_hook_location(self):
270 def get_hook_location(self):
271 """
271 """
272 Returns the absolute path to the location where hooks are stored
272 Returns the absolute path to the location where hooks are stored
273 """
273 """
274 return os.path.join(self.path, 'hooks')
274 return os.path.join(self.path, 'hooks')
275
275
276 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
276 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
277 translate_tag=None, maybe_unreachable=False, reference_obj=None):
277 translate_tag=None, maybe_unreachable=False, reference_obj=None):
278 if self.is_empty():
278 if self.is_empty():
279 raise EmptyRepositoryError("There are no commits yet")
279 raise EmptyRepositoryError("There are no commits yet")
280 if commit_id is not None:
280 if commit_id is not None:
281 self._validate_commit_id(commit_id)
281 self._validate_commit_id(commit_id)
282 elif commit_idx is not None:
282 elif commit_idx is not None:
283 self._validate_commit_idx(commit_idx)
283 self._validate_commit_idx(commit_idx)
284 try:
284 try:
285 commit_id = self.commit_ids[commit_idx]
285 commit_id = self.commit_ids[commit_idx]
286 except IndexError:
286 except IndexError:
287 raise CommitDoesNotExistError(f'No commit with idx: {commit_idx}')
287 raise CommitDoesNotExistError(f'No commit with idx: {commit_idx}')
288
288
289 commit_id = self._sanitize_commit_id(commit_id)
289 commit_id = self._sanitize_commit_id(commit_id)
290 commit = SubversionCommit(repository=self, commit_id=commit_id)
290 commit = SubversionCommit(repository=self, commit_id=commit_id)
291 return commit
291 return commit
292
292
293 def get_commits(
293 def get_commits(
294 self, start_id=None, end_id=None, start_date=None, end_date=None,
294 self, start_id=None, end_id=None, start_date=None, end_date=None,
295 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
295 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
296 if self.is_empty():
296 if self.is_empty():
297 raise EmptyRepositoryError("There are no commit_ids yet")
297 raise EmptyRepositoryError("There are no commit_ids yet")
298 self._validate_branch_name(branch_name)
298 self._validate_branch_name(branch_name)
299
299
300 if start_id is not None:
300 if start_id is not None:
301 self._validate_commit_id(start_id)
301 self._validate_commit_id(start_id)
302 if end_id is not None:
302 if end_id is not None:
303 self._validate_commit_id(end_id)
303 self._validate_commit_id(end_id)
304
304
305 start_raw_id = self._sanitize_commit_id(start_id)
305 start_raw_id = self._sanitize_commit_id(start_id)
306 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
306 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
307 end_raw_id = self._sanitize_commit_id(end_id)
307 end_raw_id = self._sanitize_commit_id(end_id)
308 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
308 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
309
309
310 if None not in [start_id, end_id] and start_pos > end_pos:
310 if None not in [start_id, end_id] and start_pos > end_pos:
311 raise RepositoryError(
311 raise RepositoryError(
312 "Start commit '%s' cannot be after end commit '%s'" %
312 "Start commit '%s' cannot be after end commit '%s'" %
313 (start_id, end_id))
313 (start_id, end_id))
314 if end_pos is not None:
314 if end_pos is not None:
315 end_pos += 1
315 end_pos += 1
316
316
317 # Date based filtering
317 # Date based filtering
318 if start_date or end_date:
318 if start_date or end_date:
319 start_raw_id, end_raw_id = self._remote.lookup_interval(
319 start_raw_id, end_raw_id = self._remote.lookup_interval(
320 date_astimestamp(start_date) if start_date else None,
320 date_astimestamp(start_date) if start_date else None,
321 date_astimestamp(end_date) if end_date else None)
321 date_astimestamp(end_date) if end_date else None)
322 start_pos = start_raw_id - 1
322 start_pos = start_raw_id - 1
323 end_pos = end_raw_id
323 end_pos = end_raw_id
324
324
325 commit_ids = self.commit_ids
325 commit_ids = self.commit_ids
326
326
327 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
327 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
328 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
328 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
329 svn_rev = int(self.commit_ids[-1])
329 svn_rev = int(self.commit_ids[-1])
330 commit_ids = self._remote.node_history(
330 commit_ids = self._remote.node_history(
331 path=branch_name, revision=svn_rev, limit=None)
331 path=branch_name, revision=svn_rev, limit=None)
332 commit_ids = [str(i) for i in reversed(commit_ids)]
332 commit_ids = [str(i) for i in reversed(commit_ids)]
333
333
334 if start_pos or end_pos:
334 if start_pos or end_pos:
335 commit_ids = commit_ids[start_pos:end_pos]
335 commit_ids = commit_ids[start_pos:end_pos]
336 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
336 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
337
337
338 def _sanitize_commit_id(self, commit_id):
338 def _sanitize_commit_id(self, commit_id):
339 if commit_id and commit_id.isdigit():
339 if commit_id and commit_id.isdigit():
340 if int(commit_id) <= len(self.commit_ids):
340 if int(commit_id) <= len(self.commit_ids):
341 return commit_id
341 return commit_id
342 else:
342 else:
343 raise CommitDoesNotExistError(
343 raise CommitDoesNotExistError(
344 "Commit {} does not exist.".format(commit_id))
344 f"Commit {commit_id} does not exist.")
345 if commit_id not in [
345 if commit_id not in [
346 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
346 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
347 raise CommitDoesNotExistError(
347 raise CommitDoesNotExistError(
348 "Commit id {} not understood.".format(commit_id))
348 f"Commit id {commit_id} not understood.")
349 svn_rev = self._remote.lookup('HEAD')
349 svn_rev = self._remote.lookup('HEAD')
350 return str(svn_rev)
350 return str(svn_rev)
351
351
352 def get_diff(
352 def get_diff(
353 self, commit1, commit2, path=None, ignore_whitespace=False,
353 self, commit1, commit2, path=None, ignore_whitespace=False,
354 context=3, path1=None):
354 context=3, path1=None):
355 self._validate_diff_commits(commit1, commit2)
355 self._validate_diff_commits(commit1, commit2)
356 svn_rev1 = int(commit1.raw_id)
356 svn_rev1 = int(commit1.raw_id)
357 svn_rev2 = int(commit2.raw_id)
357 svn_rev2 = int(commit2.raw_id)
358 diff = self._remote.diff(
358 diff = self._remote.diff(
359 svn_rev1, svn_rev2, path1=path1, path2=path,
359 svn_rev1, svn_rev2, path1=path1, path2=path,
360 ignore_whitespace=ignore_whitespace, context=context)
360 ignore_whitespace=ignore_whitespace, context=context)
361 return SubversionDiff(diff)
361 return SubversionDiff(diff)
362
362
363
363
364 def _sanitize_url(url):
364 def _sanitize_url(url):
365 if '://' not in url:
365 if '://' not in url:
366 url = 'file://' + urllib.request.pathname2url(url)
366 url = 'file://' + urllib.request.pathname2url(url)
367 return url
367 return url
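# Illustrative examples (hypothetical paths):
#   _sanitize_url('/srv/svn/my-repo')    -> 'file:///srv/svn/my-repo'
#   _sanitize_url('http://host/svn/r1')  -> returned unchanged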
@@ -1,159 +1,159 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Utilities that help with mostly basic tasks.
20 Utilities that help with mostly basic tasks.
21 """
21 """
22
22
23
23
24
24
25
25
26 import re
26 import re
27 import os
27 import os
28 import time
28 import time
29 import datetime
29 import datetime
30 import logging
30 import logging
31
31
32 from rhodecode.lib.vcs.conf import settings
32 from rhodecode.lib.vcs.conf import settings
33 from rhodecode.lib.vcs.exceptions import VCSError, VCSBackendNotSupportedError
33 from rhodecode.lib.vcs.exceptions import VCSError, VCSBackendNotSupportedError
34
34
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
39 def get_scm(path):
39 def get_scm(path):
40 """
40 """
41 Returns one alias from ``ALIASES`` (in the same order of precedence as
41 Returns one alias from ``ALIASES`` (in the same order of precedence as
42 the shortcuts given in ``ALIASES``) and the working dir path for the given
42 the shortcuts given in ``ALIASES``) and the working dir path for the given
43 argument. If no scm-specific directory is found, or more than one scm is
43 argument. If no scm-specific directory is found, or more than one scm is
44 found at that directory, ``VCSError`` is raised.
44 found at that directory, ``VCSError`` is raised.
45 """
45 """
46 if not os.path.isdir(path):
46 if not os.path.isdir(path):
47 raise VCSError("Given path %s is not a directory" % path)
47 raise VCSError("Given path %s is not a directory" % path)
48
48
49 found_scms = [(scm, path) for scm in get_scms_for_path(path)]
49 found_scms = [(scm, path) for scm in get_scms_for_path(path)]
50
50
51 if len(found_scms) > 1:
51 if len(found_scms) > 1:
52 found = ', '.join(x[0] for x in found_scms)
52 found = ', '.join(x[0] for x in found_scms)
53 raise VCSError(
53 raise VCSError(
54 'More than one [{}] scm found at given path {}'.format(found, path))
54 f'More than one [{found}] scm found at given path {path}')
55
55
56 if len(found_scms) == 0:
56 if len(found_scms) == 0:
57 raise VCSError('No scm found at given path %s' % path)
57 raise VCSError('No scm found at given path %s' % path)
58
58
59 return found_scms[0]
59 return found_scms[0]
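# Illustrative example (hypothetical path): for a directory holding exactly
# one git checkout this would return something like ('git', '/srv/checkout').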
60
60
61
61
62 def get_scm_backend(backend_type):
62 def get_scm_backend(backend_type):
63 from rhodecode.lib.vcs.backends import get_backend
63 from rhodecode.lib.vcs.backends import get_backend
64 return get_backend(backend_type)
64 return get_backend(backend_type)
65
65
66
66
67 def get_scms_for_path(path):
67 def get_scms_for_path(path):
68 """
68 """
69 Returns all scms found at the given path. If no scm is recognized,
69 Returns all scms found at the given path. If no scm is recognized,
70 an empty list is returned.
70 an empty list is returned.
71
71
72 :param path: path to directory which should be checked. May be callable.
72 :param path: path to directory which should be checked. May be callable.
73
73
74 :raises VCSError: if given ``path`` is not a directory
74 :raises VCSError: if given ``path`` is not a directory
75 """
75 """
76 from rhodecode.lib.vcs.backends import get_backend
76 from rhodecode.lib.vcs.backends import get_backend
77 if hasattr(path, '__call__'):
77 if hasattr(path, '__call__'):
78 path = path()
78 path = path()
79 if not os.path.isdir(path):
79 if not os.path.isdir(path):
80 raise VCSError("Given path %r is not a directory" % path)
80 raise VCSError("Given path %r is not a directory" % path)
81
81
82 result = []
82 result = []
83 for key in settings.available_aliases():
83 for key in settings.available_aliases():
84 try:
84 try:
85 backend = get_backend(key)
85 backend = get_backend(key)
86 except VCSBackendNotSupportedError:
86 except VCSBackendNotSupportedError:
87 log.warning('VCSBackendNotSupportedError: %s not supported', key)
87 log.warning('VCSBackendNotSupportedError: %s not supported', key)
88 continue
88 continue
89 if backend.is_valid_repository(path):
89 if backend.is_valid_repository(path):
90 result.append(key)
90 result.append(key)
91 return result
91 return result
92
92
93
93
94 def parse_datetime(text):
94 def parse_datetime(text):
95 """
95 """
96 Parses the given text and returns a ``datetime.datetime`` instance or raises
96 Parses the given text and returns a ``datetime.datetime`` instance or raises
97 ``ValueError``.
97 ``ValueError``.
98
98
99 :param text: string of desired date/datetime or something more verbose,
99 :param text: string of desired date/datetime or something more verbose,
100 like *yesterday*, *2weeks 3days*, etc.
100 like *yesterday*, *2weeks 3days*, etc.
101 """
101 """
102 if not text:
102 if not text:
103 raise ValueError('Wrong date: "%s"' % text)
103 raise ValueError('Wrong date: "%s"' % text)
104
104
105 if isinstance(text, datetime.datetime):
105 if isinstance(text, datetime.datetime):
106 return text
106 return text
107
107
108 # we limit the format to not include microseconds, e.g. 2017-10-17t17:48:23.XXXX
108 # we limit the format to not include microseconds, e.g. 2017-10-17t17:48:23.XXXX
109 text = text.strip().lower()[:19]
109 text = text.strip().lower()[:19]
110
110
111 input_formats = (
111 input_formats = (
112 '%Y-%m-%d %H:%M:%S',
112 '%Y-%m-%d %H:%M:%S',
113 '%Y-%m-%dt%H:%M:%S',
113 '%Y-%m-%dt%H:%M:%S',
114 '%Y-%m-%d %H:%M',
114 '%Y-%m-%d %H:%M',
115 '%Y-%m-%dt%H:%M',
115 '%Y-%m-%dt%H:%M',
116 '%Y-%m-%d',
116 '%Y-%m-%d',
117 '%m/%d/%Y %H:%M:%S',
117 '%m/%d/%Y %H:%M:%S',
118 '%m/%d/%Yt%H:%M:%S',
118 '%m/%d/%Yt%H:%M:%S',
119 '%m/%d/%Y %H:%M',
119 '%m/%d/%Y %H:%M',
120 '%m/%d/%Yt%H:%M',
120 '%m/%d/%Yt%H:%M',
121 '%m/%d/%Y',
121 '%m/%d/%Y',
122 '%m/%d/%y %H:%M:%S',
122 '%m/%d/%y %H:%M:%S',
123 '%m/%d/%yt%H:%M:%S',
123 '%m/%d/%yt%H:%M:%S',
124 '%m/%d/%y %H:%M',
124 '%m/%d/%y %H:%M',
125 '%m/%d/%yt%H:%M',
125 '%m/%d/%yt%H:%M',
126 '%m/%d/%y',
126 '%m/%d/%y',
127 )
127 )
128 for format_def in input_formats:
128 for format_def in input_formats:
129 try:
129 try:
130 return datetime.datetime(*time.strptime(text, format_def)[:6])
130 return datetime.datetime(*time.strptime(text, format_def)[:6])
131 except ValueError:
131 except ValueError:
132 pass
132 pass
133
133
134 # Try descriptive texts
134 # Try descriptive texts
135 if text == 'tomorrow':
135 if text == 'tomorrow':
136 future = datetime.datetime.now() + datetime.timedelta(days=1)
136 future = datetime.datetime.now() + datetime.timedelta(days=1)
137 args = future.timetuple()[:3] + (23, 59, 59)
137 args = future.timetuple()[:3] + (23, 59, 59)
138 return datetime.datetime(*args)
138 return datetime.datetime(*args)
139 elif text == 'today':
139 elif text == 'today':
140 return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
140 return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
141 elif text == 'now':
141 elif text == 'now':
142 return datetime.datetime.now()
142 return datetime.datetime.now()
143 elif text == 'yesterday':
143 elif text == 'yesterday':
144 past = datetime.datetime.now() - datetime.timedelta(days=1)
144 past = datetime.datetime.now() - datetime.timedelta(days=1)
145 return datetime.datetime(*past.timetuple()[:3])
145 return datetime.datetime(*past.timetuple()[:3])
146 else:
146 else:
147 days = 0
147 days = 0
148 matched = re.match(
148 matched = re.match(
149 r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
149 r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
150 if matched:
150 if matched:
151 groupdict = matched.groupdict()
151 groupdict = matched.groupdict()
152 if groupdict['days']:
152 if groupdict['days']:
153 days += int(groupdict['days'])
153 days += int(groupdict['days'])
154 if groupdict['weeks']:
154 if groupdict['weeks']:
155 days += int(groupdict['weeks']) * 7
155 days += int(groupdict['weeks']) * 7
156 past = datetime.datetime.now() - datetime.timedelta(days=days)
156 past = datetime.datetime.now() - datetime.timedelta(days=days)
157 return datetime.datetime(*past.timetuple()[:3])
157 return datetime.datetime(*past.timetuple()[:3])
158
158
159 raise ValueError('Wrong date: "%s"' % text)
159 raise ValueError('Wrong date: "%s"' % text)
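# Illustrative examples of accepted inputs (the relative forms depend on the
# current date):
#   parse_datetime('2017-10-17 17:48:23')  -> datetime(2017, 10, 17, 17, 48, 23)
#   parse_datetime('yesterday')            -> midnight of the previous day
#   parse_datetime('2weeks 3days')         -> midnight 17 days in the past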
@@ -1,88 +1,87 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 """
19 """
21 Utilities to be shared by multiple controllers.
20 Utilities to be shared by multiple controllers.
22
21
23 Should only contain utilities to be shared in the controller layer.
22 Should only contain utilities to be shared in the controller layer.
24 """
23 """
25
24
26 from rhodecode.lib import helpers as h
25 from rhodecode.lib import helpers as h
27 from rhodecode.lib.vcs.exceptions import RepositoryError
26 from rhodecode.lib.vcs.exceptions import RepositoryError
28
27
29
28
30 def parse_path_ref(ref, default_path=None):
29 def parse_path_ref(ref, default_path=None):
31 """
30 """
32 Parse out a path and reference combination and return both parts of it.
31 Parse out a path and reference combination and return both parts of it.
33
32
34 This is used to allow support of path-based comparisons for Subversion
33 This is used to allow support of path-based comparisons for Subversion
35 as an interim solution in parameter handling.
34 as an interim solution in parameter handling.
36 """
35 """
37 if '@' in ref:
36 if '@' in ref:
38 return ref.rsplit('@', 1)
37 return ref.rsplit('@', 1)
39 else:
38 else:
40 return default_path, ref
39 return default_path, ref
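# Illustrative examples (hypothetical values):
#   parse_path_ref('trunk/docs@42')             -> ['trunk/docs', '42']
#   parse_path_ref('42', default_path='trunk')  -> ('trunk', '42')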
41
40
42
41
43 def get_format_ref_id(repo):
42 def get_format_ref_id(repo):
44 """Returns a `repo` specific reference formatter function"""
43 """Returns a `repo` specific reference formatter function"""
45 if h.is_svn(repo):
44 if h.is_svn(repo):
46 return _format_ref_id_svn
45 return _format_ref_id_svn
47 else:
46 else:
48 return _format_ref_id
47 return _format_ref_id
49
48
50
49
51 def _format_ref_id(name, raw_id):
50 def _format_ref_id(name, raw_id):
52 """Default formatting of a given reference `name`"""
51 """Default formatting of a given reference `name`"""
53 return name
52 return name
54
53
55
54
56 def _format_ref_id_svn(name, raw_id):
55 def _format_ref_id_svn(name, raw_id):
57 """Special way of formatting a reference for Subversion including path"""
56 """Special way of formatting a reference for Subversion including path"""
58 return '%s@%s' % (name, raw_id)
57 return '{}@{}'.format(name, raw_id)
59
58
60
59
61 def get_commit_from_ref_name(repo, ref_name, ref_type=None):
60 def get_commit_from_ref_name(repo, ref_name, ref_type=None):
62 """
61 """
63 Gets the commit for a `ref_name` taking into account `ref_type`.
62 Gets the commit for a `ref_name` taking into account `ref_type`.
64 Needed in case a bookmark and a tag share the same name.
63 Needed in case a bookmark and a tag share the same name.
65
64
66 :param repo: the repo instance
65 :param repo: the repo instance
67 :param ref_name: the name of the ref to get
66 :param ref_name: the name of the ref to get
68 :param ref_type: optional, used to disambiguate colliding refs
67 :param ref_type: optional, used to disambiguate colliding refs
69 """
68 """
70 repo_scm = repo.scm_instance()
69 repo_scm = repo.scm_instance()
71 ref_type_mapping = {
70 ref_type_mapping = {
72 'book': repo_scm.bookmarks,
71 'book': repo_scm.bookmarks,
73 'bookmark': repo_scm.bookmarks,
72 'bookmark': repo_scm.bookmarks,
74 'tag': repo_scm.tags,
73 'tag': repo_scm.tags,
75 'branch': repo_scm.branches,
74 'branch': repo_scm.branches,
76 }
75 }
77
76
78 commit_id = ref_name
77 commit_id = ref_name
79 if repo_scm.alias != 'svn': # pass svn refs straight to backend until
78 if repo_scm.alias != 'svn': # pass svn refs straight to backend until
80 # the branch issue with svn is fixed
79 # the branch issue with svn is fixed
81 if ref_type and ref_type in ref_type_mapping:
80 if ref_type and ref_type in ref_type_mapping:
82 try:
81 try:
83 commit_id = ref_type_mapping[ref_type][ref_name]
82 commit_id = ref_type_mapping[ref_type][ref_name]
84 except KeyError:
83 except KeyError:
85 raise RepositoryError(
84 raise RepositoryError(
86 '%s "%s" does not exist' % (ref_type, ref_name))
85 '{} "{}" does not exist'.format(ref_type, ref_name))
87
86
88 return repo_scm.get_commit(commit_id)
87 return repo_scm.get_commit(commit_id)
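# Illustrative usage (hypothetical names): when a bookmark and a tag are both
# called 'v1.0', ref_type disambiguates the lookup:
#   get_commit_from_ref_name(repo, 'v1.0', ref_type='tag')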
@@ -1,141 +1,140 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20
19
21 import logging
20 import logging
22
21
23 import rhodecode
22 import rhodecode
24 from rhodecode.model import meta, db
23 from rhodecode.model import meta, db
25 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
24 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
26
25
27 log = logging.getLogger(__name__)
26 log = logging.getLogger(__name__)
28
27
29
28
30 def init_model(engine, encryption_key: bytes = b''):
29 def init_model(engine, encryption_key: bytes = b''):
31 """
30 """
32 Initializes db session, bind the engine with the metadata,
31 Initializes db session, bind the engine with the metadata,
33 Call this before using any of the tables or classes in the model,
32 Call this before using any of the tables or classes in the model,
34 preferably once in application start
33 preferably once in application start
35
34
36 :param engine: engine to bind to
35 :param engine: engine to bind to
37 :param encryption_key: key used for encryption
36 :param encryption_key: key used for encryption
38 """
37 """
39
38
40 engine_str = obfuscate_url_pw(str(engine.url))
39 engine_str = obfuscate_url_pw(str(engine.url))
41 log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str)
40 log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str)
42
41
43 meta.bind_engine_to_session(engine)
42 meta.bind_engine_to_session(engine)
44 init_model_encryption(db, enc_key=encryption_key)
43 init_model_encryption(db, enc_key=encryption_key)
45
44
46
45
47 def init_model_encryption(*db_models, enc_key: bytes = b'', config=None):
46 def init_model_encryption(*db_models, enc_key: bytes = b'', config=None):
48 if not enc_key:
47 if not enc_key:
49 from pyramid.threadlocal import get_current_registry
48 from pyramid.threadlocal import get_current_registry
50 config = config or get_current_registry().settings
49 config = config or get_current_registry().settings
51 enc_key = get_encryption_key(config)
50 enc_key = get_encryption_key(config)
52
51
53 for db_model in db_models:
52 for db_model in db_models:
54 log.debug('setting encryption key for model %s', db_model)
53 log.debug('setting encryption key for model %s', db_model)
55 db_model.ENCRYPTION_KEY = enc_key
54 db_model.ENCRYPTION_KEY = enc_key
56
55
57
56
58 class BaseModel(object):
57 class BaseModel(object):
59 """
58 """
60 Base Model for all RhodeCode models; it adds an SQLAlchemy session
59 Base Model for all RhodeCode models; it adds an SQLAlchemy session
61 to the model instance
60 to the model instance
62
61
63 :param sa: If passed, it reuses this session instead of creating a new one
62 :param sa: If passed, it reuses this session instead of creating a new one
64 """
63 """
65
64
66 cls = None # override in child class
65 cls = None # override in child class
67
66
68 def __init__(self, sa=None):
67 def __init__(self, sa=None):
69 if sa is not None:
68 if sa is not None:
70 self.sa = sa
69 self.sa = sa
71 else:
70 else:
72 self.sa = meta.Session()
71 self.sa = meta.Session()
73
72
74 def _get_instance(self, cls, instance, callback=None):
73 def _get_instance(self, cls, instance, callback=None):
75 """
74 """
76 Gets instance of given cls using some simple lookup mechanism.
75 Gets instance of given cls using some simple lookup mechanism.
77
76
78 :param cls: classes to fetch
77 :param cls: classes to fetch
79 :param instance: int or Instance
78 :param instance: int or Instance
80 :param callback: callback to call if all lookups failed
79 :param callback: callback to call if all lookups failed
81 """
80 """
82
81
83 if isinstance(instance, cls):
82 if isinstance(instance, cls):
84 return instance
83 return instance
85 elif isinstance(instance, int):
84 elif isinstance(instance, int):
86 if isinstance(cls, tuple):
85 if isinstance(cls, tuple):
87 # if we pass multi instances we pick first to .get()
86 # if we pass multi instances we pick first to .get()
88 cls = cls[0]
87 cls = cls[0]
89 return cls.get(instance)
88 return cls.get(instance)
90 else:
89 else:
91 if instance:
90 if instance:
92 if callback is None:
91 if callback is None:
93 raise Exception(
92 raise Exception(
94 'given object must be int or Instance of %s, '
93 'given object must be int or Instance of %s, '
95 'got %s; no callback provided' % (cls, type(instance))
94 'got %s; no callback provided' % (cls, type(instance))
96 )
95 )
97 else:
96 else:
98 return callback(instance)
97 return callback(instance)
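# Illustrative usage (hypothetical values):
#   self._get_instance(db.User, 42)        # looked up by primary key
#   self._get_instance(db.User, user_obj)  # returned as-is
#   self._get_instance(db.User, 'john', callback=db.User.get_by_username)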
99
98
100 def _get_user(self, user):
99 def _get_user(self, user):
101 """
100 """
102 Helper method to get user by ID, or username fallback
101 Helper method to get user by ID, or username fallback
103
102
104 :param user: UserID, username, or User instance
103 :param user: UserID, username, or User instance
105 """
104 """
106 return self._get_instance(
105 return self._get_instance(
107 db.User, user, callback=db.User.get_by_username)
106 db.User, user, callback=db.User.get_by_username)
108
107
109 def _get_user_group(self, user_group):
108 def _get_user_group(self, user_group):
110 """
109 """
111 Helper method to get user group by ID, or group name fallback
110 Helper method to get user group by ID, or group name fallback
112
111
113 :param user_group: UserGroupID, user_group_name, or UserGroup instance
112 :param user_group: UserGroupID, user_group_name, or UserGroup instance
114 """
113 """
115 return self._get_instance(
114 return self._get_instance(
116 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
115 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
117
116
118 def _get_repo(self, repository):
117 def _get_repo(self, repository):
119 """
118 """
120 Helper method to get repository by ID, or repository name
119 Helper method to get repository by ID, or repository name
121
120
122 :param repository: RepoID, repository name or Repository Instance
121 :param repository: RepoID, repository name or Repository Instance
123 """
122 """
124 return self._get_instance(
123 return self._get_instance(
125 db.Repository, repository, callback=db.Repository.get_by_repo_name)
124 db.Repository, repository, callback=db.Repository.get_by_repo_name)
126
125
127 def _get_perm(self, permission):
126 def _get_perm(self, permission):
128 """
127 """
129 Helper method to get permission by ID, or permission name
128 Helper method to get permission by ID, or permission name
130
129
131 :param permission: PermissionID, permission_name or Permission instance
130 :param permission: PermissionID, permission_name or Permission instance
132 """
131 """
133 return self._get_instance(
132 return self._get_instance(
134 db.Permission, permission, callback=db.Permission.get_by_key)
133 db.Permission, permission, callback=db.Permission.get_by_key)
135
134
136 @classmethod
135 @classmethod
137 def get_all(cls):
136 def get_all(cls):
138 """
137 """
139 Returns all instances of what is defined in `cls` class variable
138 Returns all instances of what is defined in `cls` class variable
140 """
139 """
141 return cls.cls.getAll()
140 return cls.cls.getAll()
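The `cls` attribute pattern deserves a small sketch: `get_all` delegates to whatever DB class a subclass binds, mirroring how `AuthTokenModel` below sets `cls = UserApiKeys`. The toy classes here are hypothetical:

    class FakeDBRow:
        @classmethod
        def getAll(cls):
            # stand-in for the SQLAlchemy-backed classmethod
            return ['row-1', 'row-2']

    class FakeModel:
        cls = FakeDBRow  # subclasses override this binding

        @classmethod
        def get_all(cls):
            return cls.cls.getAll()

    assert FakeModel.get_all() == ['row-1', 'row-2']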
@@ -1,124 +1,122 b''
1
2
3 # Copyright (C) 2013-2023 RhodeCode GmbH
1 # Copyright (C) 2013-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 """
19 """
22 authentication tokens model for RhodeCode
20 authentication tokens model for RhodeCode
23 """
21 """
24
22
25 import time
23 import time
26 import logging
24 import logging
27 import traceback
25 import traceback
28 from sqlalchemy import or_
26 from sqlalchemy import or_
29
27
30 from rhodecode.model import BaseModel
28 from rhodecode.model import BaseModel
31 from rhodecode.model.db import UserApiKeys
29 from rhodecode.model.db import UserApiKeys
32 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
33
31
34 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
35
33
36
34
37 class AuthTokenModel(BaseModel):
35 class AuthTokenModel(BaseModel):
38 cls = UserApiKeys
36 cls = UserApiKeys
39
37
40 @classmethod
38 @classmethod
41 def get_lifetime_values(cls, translator):
39 def get_lifetime_values(cls, translator):
42 from rhodecode.lib import helpers as h
40 from rhodecode.lib import helpers as h
43 _ = translator
41 _ = translator
44
42
45 def date_after_min(mins):
43 def date_after_min(mins):
46 after = time.time() + (60 * mins)
44 after = time.time() + (60 * mins)
47 return h.format_date(h.time_to_datetime(after))
45 return h.format_date(h.time_to_datetime(after))
48
46
49 return [
47 return [
50 (str(-1),
48 (str(-1),
51 _('forever')),
49 _('forever')),
52 (str(5),
50 (str(5),
53 _('5 minutes {end_date}').format(end_date=date_after_min(5))),
51 _('5 minutes {end_date}').format(end_date=date_after_min(5))),
54 (str(60),
52 (str(60),
55 _('1 hour {end_date}').format(end_date=date_after_min(60))),
53 _('1 hour {end_date}').format(end_date=date_after_min(60))),
56 (str(60 * 24),
54 (str(60 * 24),
57 _('1 day {end_date}').format(end_date=date_after_min(60 * 24))),
55 _('1 day {end_date}').format(end_date=date_after_min(60 * 24))),
58 (str(60 * 24 * 30),
56 (str(60 * 24 * 30),
59 _('1 month {end_date}').format(end_date=date_after_min(60 * 24 * 30))),
57 _('1 month {end_date}').format(end_date=date_after_min(60 * 24 * 30))),
60 ]
58 ]
61
59
62 def create(self, user, description, lifetime=-1, role=UserApiKeys.ROLE_ALL):
60 def create(self, user, description, lifetime=-1, role=UserApiKeys.ROLE_ALL):
63 """
61 """
64 :param user: user or user_id
62 :param user: user or user_id
65 :param description: description of ApiKey
63 :param description: description of ApiKey
66 :param lifetime: expiration time in minutes
64 :param lifetime: expiration time in minutes
67 :param role: role for the apikey
65 :param role: role for the apikey
68 """
66 """
69 from rhodecode.lib.auth import generate_auth_token
67 from rhodecode.lib.auth import generate_auth_token
70
68
71 user = self._get_user(user)
69 user = self._get_user(user)
72
70
73 new_auth_token = UserApiKeys()
71 new_auth_token = UserApiKeys()
74 new_auth_token.api_key = generate_auth_token(user.username)
72 new_auth_token.api_key = generate_auth_token(user.username)
75 new_auth_token.user_id = user.user_id
73 new_auth_token.user_id = user.user_id
76 new_auth_token.description = description
74 new_auth_token.description = description
77 new_auth_token.role = role
75 new_auth_token.role = role
78 new_auth_token.expires = time.time() + (lifetime * 60) \
76 new_auth_token.expires = time.time() + (lifetime * 60) \
79 if lifetime != -1 else -1
77 if lifetime != -1 else -1
80 Session().add(new_auth_token)
78 Session().add(new_auth_token)
81
79
82 return new_auth_token
80 return new_auth_token
83
81
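The expiry arithmetic in `create()` is the subtle part: `lifetime` is given in minutes, and `-1` means the token never expires. A standalone sketch of just that rule:

    import time

    def compute_expiry(lifetime_minutes):
        # mirrors create(): -1 keeps the token valid forever
        if lifetime_minutes == -1:
            return -1
        return time.time() + lifetime_minutes * 60

    assert compute_expiry(-1) == -1
    assert compute_expiry(60) > time.time()  # about one hour ahead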
84 def delete(self, auth_token_id, user=None):
82 def delete(self, auth_token_id, user=None):
85 """
83 """
86 Deletes the given api_key; if user is set, it also filters the object
84 Deletes the given api_key; if user is set, it also filters the object
87 for deletion by the given user.
85 for deletion by the given user.
88 """
86 """
89 auth_token = UserApiKeys.query().filter(
87 auth_token = UserApiKeys.query().filter(
90 UserApiKeys.user_api_key_id == auth_token_id)
88 UserApiKeys.user_api_key_id == auth_token_id)
91
89
92 if user:
90 if user:
93 user = self._get_user(user)
91 user = self._get_user(user)
94 auth_token = auth_token.filter(UserApiKeys.user_id == user.user_id)
92 auth_token = auth_token.filter(UserApiKeys.user_id == user.user_id)
95 auth_token = auth_token.scalar()
93 auth_token = auth_token.scalar()
96
94
97 if auth_token:
95 if auth_token:
98 try:
96 try:
99 Session().delete(auth_token)
97 Session().delete(auth_token)
100 except Exception:
98 except Exception:
101 log.error(traceback.format_exc())
99 log.error(traceback.format_exc())
102 raise
100 raise
103
101
104 def get_auth_tokens(self, user, show_expired=True):
102 def get_auth_tokens(self, user, show_expired=True):
105 user = self._get_user(user)
103 user = self._get_user(user)
106 user_auth_tokens = UserApiKeys.query()\
104 user_auth_tokens = UserApiKeys.query()\
107 .filter(UserApiKeys.user_id == user.user_id)
105 .filter(UserApiKeys.user_id == user.user_id)
108 if not show_expired:
106 if not show_expired:
109 user_auth_tokens = user_auth_tokens\
107 user_auth_tokens = user_auth_tokens\
110 .filter(or_(UserApiKeys.expires == -1,
108 .filter(or_(UserApiKeys.expires == -1,
111 UserApiKeys.expires >= time.time()))
109 UserApiKeys.expires >= time.time()))
112 user_auth_tokens = user_auth_tokens.order_by(
110 user_auth_tokens = user_auth_tokens.order_by(
113 UserApiKeys.user_api_key_id)
111 UserApiKeys.user_api_key_id)
114 return user_auth_tokens
112 return user_auth_tokens
115
113
116 def get_auth_token(self, auth_token):
114 def get_auth_token(self, auth_token):
117 auth_token = UserApiKeys.query().filter(
115 auth_token = UserApiKeys.query().filter(
118 UserApiKeys.api_key == auth_token)
116 UserApiKeys.api_key == auth_token)
119 auth_token = auth_token \
117 auth_token = auth_token \
120 .filter(or_(UserApiKeys.expires == -1,
118 .filter(or_(UserApiKeys.expires == -1,
121 UserApiKeys.expires >= time.time()))\
119 UserApiKeys.expires >= time.time()))\
122 .first()
120 .first()
123
121
124 return auth_token
122 return auth_token
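The same convention drives the lookups above: a token matches when `expires == -1` (forever) or `expires >= now`. As a plain predicate, under those stated assumptions:

    import time

    def token_is_valid(expires, now=None):
        now = time.time() if now is None else now
        return expires == -1 or expires >= now  # -1 encodes "forever"

    assert token_is_valid(-1)
    assert token_is_valid(time.time() + 3600)
    assert not token_is_valid(time.time() - 3600)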
@@ -1,403 +1,402 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
7 #
6 #
8 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
10 # GNU General Public License for more details.
12 #
11 #
13 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
14 #
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20
19
21 import itertools
20 import itertools
22 import logging
21 import logging
23 import collections
22 import collections
24
23
25 from rhodecode.model import BaseModel
24 from rhodecode.model import BaseModel
26 from rhodecode.model.db import (
25 from rhodecode.model.db import (
27 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
26 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
28 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
27 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
29 from rhodecode.lib.markup_renderer import (
28 from rhodecode.lib.markup_renderer import (
30 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
29 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
31
30
32 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
33
32
34
33
35 class ChangesetStatusModel(BaseModel):
34 class ChangesetStatusModel(BaseModel):
36
35
37 cls = ChangesetStatus
36 cls = ChangesetStatus
38
37
39 def __get_changeset_status(self, changeset_status):
38 def __get_changeset_status(self, changeset_status):
40 return self._get_instance(ChangesetStatus, changeset_status)
39 return self._get_instance(ChangesetStatus, changeset_status)
41
40
42 def __get_pull_request(self, pull_request):
41 def __get_pull_request(self, pull_request):
43 return self._get_instance(PullRequest, pull_request)
42 return self._get_instance(PullRequest, pull_request)
44
43
45 def _get_status_query(self, repo, revision, pull_request,
44 def _get_status_query(self, repo, revision, pull_request,
46 with_revisions=False):
45 with_revisions=False):
47 repo = self._get_repo(repo)
46 repo = self._get_repo(repo)
48
47
49 q = ChangesetStatus.query()\
48 q = ChangesetStatus.query()\
50 .filter(ChangesetStatus.repo == repo)
49 .filter(ChangesetStatus.repo == repo)
51 if not with_revisions:
50 if not with_revisions:
52 q = q.filter(ChangesetStatus.version == 0)
51 q = q.filter(ChangesetStatus.version == 0)
53
52
54 if revision:
53 if revision:
55 q = q.filter(ChangesetStatus.revision == revision)
54 q = q.filter(ChangesetStatus.revision == revision)
56 elif pull_request:
55 elif pull_request:
57 pull_request = self.__get_pull_request(pull_request)
56 pull_request = self.__get_pull_request(pull_request)
58 # TODO: johbo: Think about the impact of this join, there must
57 # TODO: johbo: Think about the impact of this join, there must
59 # be a reason why ChangesetStatus and ChangesetComment are linked
58 # be a reason why ChangesetStatus and ChangesetComment are linked
60 # to the pull request. Might be that we want to do the same for
59 # to the pull request. Might be that we want to do the same for
61 # the pull_request_version_id.
60 # the pull_request_version_id.
62 q = q.join(ChangesetComment).filter(
61 q = q.join(ChangesetComment).filter(
63 ChangesetStatus.pull_request == pull_request,
62 ChangesetStatus.pull_request == pull_request,
64 ChangesetComment.pull_request_version_id == None)
63 ChangesetComment.pull_request_version_id == None)
65 else:
64 else:
66 raise Exception('Please specify revision or pull_request')
65 raise Exception('Please specify revision or pull_request')
67 q = q.order_by(ChangesetStatus.version.asc())
66 q = q.order_by(ChangesetStatus.version.asc())
68 return q
67 return q
69
68
70 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
69 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
71 trim_votes=True):
70 trim_votes=True):
72 """
71 """
73 Calculate status based on given group members, and voting rule
72 Calculate status based on given group members, and voting rule
74
73
75
74
76 group1 - 4 members, 3 required for approval
75 group1 - 4 members, 3 required for approval
77 user1 - approved
76 user1 - approved
78 user2 - reject
77 user2 - reject
79 user3 - approved
78 user3 - approved
80 user4 - rejected
79 user4 - rejected
81
80
82 final_state: rejected, reason: fewer than the required 3 approvals
81 final_state: rejected, reason: fewer than the required 3 approvals
83
82
84
83
85 group1 - 4 members, 2 required for approval
84 group1 - 4 members, 2 required for approval
86 user1 - approved
85 user1 - approved
87 user2 - reject
86 user2 - reject
88 user3 - approved
87 user3 - approved
89 user4 - rejected
88 user4 - rejected
90
89
91 final_state: approved, reason: got at least 2 approvals
90 final_state: approved, reason: got at least 2 approvals
92
91
93 group1 - 4 members, ALL required for approval
92 group1 - 4 members, ALL required for approval
94 user1 - approved
93 user1 - approved
95 user2 - reject
94 user2 - reject
96 user3 - approved
95 user3 - approved
97 user4 - rejected
96 user4 - rejected
98
97
99 final_state: rejected, reason: not all members approved
98 final_state: rejected, reason: not all members approved
100
99
101
100
102 group1 - 4 members, ALL required for approval
101 group1 - 4 members, ALL required for approval
103 user1 - approved
102 user1 - approved
104 user2 - approved
103 user2 - approved
105 user3 - approved
104 user3 - approved
106 user4 - approved
105 user4 - approved
107
106
108 final_state: approved, reason: all approvals received
107 final_state: approved, reason: all approvals received
109
108
110 group1 - 4 members, 5 required for approval
109 group1 - 4 members, 5 required for approval
111 (the approval requirement is shortened to the number of actual members)
110 (the approval requirement is shortened to the number of actual members)
112
111
113 user1 - approved
112 user1 - approved
114 user2 - approved
113 user2 - approved
115 user3 - approved
114 user3 - approved
116 user4 - approved
115 user4 - approved
117
116
118 final_state: approved, reason: all approvals received
117 final_state: approved, reason: all approvals received
119
118
120 """
119 """
121 group_vote_data = {}
120 group_vote_data = {}
122 got_rule = False
121 got_rule = False
123 members = collections.OrderedDict()
122 members = collections.OrderedDict()
124 for review_obj, user, reasons, mandatory, statuses \
123 for review_obj, user, reasons, mandatory, statuses \
125 in group_statuses_by_reviewers:
124 in group_statuses_by_reviewers:
126
125
127 if not got_rule:
126 if not got_rule:
128 group_vote_data = review_obj.rule_user_group_data()
127 group_vote_data = review_obj.rule_user_group_data()
129 got_rule = bool(group_vote_data)
128 got_rule = bool(group_vote_data)
130
129
131 members[user.user_id] = statuses
130 members[user.user_id] = statuses
132
131
133 if not group_vote_data:
132 if not group_vote_data:
134 return []
133 return []
135
134
136 required_votes = group_vote_data['vote_rule']
135 required_votes = group_vote_data['vote_rule']
137 if required_votes == -1:
136 if required_votes == -1:
138 # -1 means all required, so we replace it with how many people
137 # -1 means all required, so we replace it with how many people
139 # are in the members
138 # are in the members
140 required_votes = len(members)
139 required_votes = len(members)
141
140
142 if trim_votes and required_votes > len(members):
141 if trim_votes and required_votes > len(members):
143 # we require more votes than we have members in the group
142 # we require more votes than we have members in the group
144 # in this case we trim the required votes to the number of members
143 # in this case we trim the required votes to the number of members
145 required_votes = len(members)
144 required_votes = len(members)
146
145
147 approvals = sum([
146 approvals = sum([
148 1 for statuses in members.values()
147 1 for statuses in members.values()
149 if statuses and
148 if statuses and
150 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
149 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
151
150
152 calculated_votes = []
151 calculated_votes = []
153 # we have all votes from users, now check if we have enough votes
152 # we have all votes from users, now check if we have enough votes
154 # to fill in the others
153 # to fill in the others
155 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
154 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
156 if approvals >= required_votes:
155 if approvals >= required_votes:
157 fill_in = ChangesetStatus.STATUS_APPROVED
156 fill_in = ChangesetStatus.STATUS_APPROVED
158
157
159 for member, statuses in members.items():
158 for member, statuses in members.items():
160 if statuses:
159 if statuses:
161 ver, latest = statuses[0]
160 ver, latest = statuses[0]
162 if fill_in == ChangesetStatus.STATUS_APPROVED:
161 if fill_in == ChangesetStatus.STATUS_APPROVED:
163 calculated_votes.append(fill_in)
162 calculated_votes.append(fill_in)
164 else:
163 else:
165 calculated_votes.append(latest.status)
164 calculated_votes.append(latest.status)
166 else:
165 else:
167 calculated_votes.append(fill_in)
166 calculated_votes.append(fill_in)
168
167
169 return calculated_votes
168 return calculated_votes
170
169
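A plain-Python sketch of the voting rule described in the docstring above, assuming every member has voted (illustrative only: the real inputs are review objects and status tuples, and the statuses are `ChangesetStatus` constants rather than strings). `-1` required votes means everyone, the requirement is trimmed to the group size, and an approved group overrides individual votes:

    def group_vote(member_votes, required_votes, trim_votes=True):
        # member_votes: list of 'approved' / 'rejected' / 'under_review'
        if required_votes == -1:
            required_votes = len(member_votes)          # ALL members required
        if trim_votes and required_votes > len(member_votes):
            required_votes = len(member_votes)          # can't require more than we have
        approvals = sum(1 for v in member_votes if v == 'approved')
        fill_in = 'approved' if approvals >= required_votes else 'under_review'
        # an approved group fills in every member's vote; otherwise keep each vote
        return [fill_in if fill_in == 'approved' else v for v in member_votes]

    # 4 members, 3 required, only 2 approvals -> individual votes stand
    assert group_vote(['approved', 'rejected', 'approved', 'rejected'], 3) == \
        ['approved', 'rejected', 'approved', 'rejected']
    # 4 members, 2 required -> the group approves as a whole
    assert group_vote(['approved', 'rejected', 'approved', 'rejected'], 2) == \
        ['approved'] * 4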
171 def calculate_status(self, statuses_by_reviewers):
170 def calculate_status(self, statuses_by_reviewers):
172 """
171 """
173 Given the approval statuses from reviewers, calculates final approval
172 Given the approval statuses from reviewers, calculates final approval
174 status. There can only be 3 results: all approved, all rejected, or,
173 status. There can only be 3 results: all approved, all rejected, or,
175 if there is no consensus, the PR stays under review.
174 if there is no consensus, the PR stays under review.
176
175
177 :param statuses_by_reviewers:
176 :param statuses_by_reviewers:
178 """
177 """
179
178
180 def group_rule(element):
179 def group_rule(element):
181 _review_obj = element[0]
180 _review_obj = element[0]
182 rule_data = _review_obj.rule_user_group_data()
181 rule_data = _review_obj.rule_user_group_data()
183 if rule_data and rule_data['id']:
182 if rule_data and rule_data['id']:
184 return rule_data['id']
183 return rule_data['id']
185 # don't return None, as we can't compare it
184 # don't return None, as we can't compare it
186 return 0
185 return 0
187
186
188 voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)
187 voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)
189
188
190 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
189 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
191
190
192 reviewers_number = len(statuses_by_reviewers)
191 reviewers_number = len(statuses_by_reviewers)
193 votes = collections.defaultdict(int)
192 votes = collections.defaultdict(int)
194 for group, group_statuses_by_reviewers in voting_by_groups:
193 for group, group_statuses_by_reviewers in voting_by_groups:
195 if group:
194 if group:
196 # calculate how the "group" voted
195 # calculate how the "group" voted
197 for vote_status in self.calculate_group_vote(
196 for vote_status in self.calculate_group_vote(
198 group, group_statuses_by_reviewers):
197 group, group_statuses_by_reviewers):
199 votes[vote_status] += 1
198 votes[vote_status] += 1
200 else:
199 else:
201
200
202 for review_obj, user, reasons, mandatory, statuses \
201 for review_obj, user, reasons, mandatory, statuses \
203 in group_statuses_by_reviewers:
202 in group_statuses_by_reviewers:
204 # individual vote
203 # individual vote
205 if statuses:
204 if statuses:
206 ver, latest = statuses[0]
205 ver, latest = statuses[0]
207 votes[latest.status] += 1
206 votes[latest.status] += 1
208
207
209 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
208 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
210 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
209 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
211
210
212 # TODO(marcink): with group voting, how does rejected work,
211 # TODO(marcink): with group voting, how does rejected work,
213 # do we ever get rejected state ?
212 # do we ever get rejected state ?
214
213
215 if approved_votes_count and (approved_votes_count == reviewers_number):
214 if approved_votes_count and (approved_votes_count == reviewers_number):
216 return ChangesetStatus.STATUS_APPROVED
215 return ChangesetStatus.STATUS_APPROVED
217
216
218 if rejected_votes_count and (rejected_votes_count == reviewers_number):
217 if rejected_votes_count and (rejected_votes_count == reviewers_number):
219 return ChangesetStatus.STATUS_REJECTED
218 return ChangesetStatus.STATUS_REJECTED
220
219
221 return ChangesetStatus.STATUS_UNDER_REVIEW
220 return ChangesetStatus.STATUS_UNDER_REVIEW
222
221
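The final aggregation is strict unanimity, per the docstring above. A compact sketch, again with strings standing in for the `ChangesetStatus` constants and assuming one resolved vote per reviewer:

    def final_status(votes):
        # votes: per-reviewer statuses after group resolution
        if votes and all(v == 'approved' for v in votes):
            return 'approved'
        if votes and all(v == 'rejected' for v in votes):
            return 'rejected'
        return 'under_review'

    assert final_status(['approved', 'approved']) == 'approved'
    assert final_status(['approved', 'rejected']) == 'under_review'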
223 def get_statuses(self, repo, revision=None, pull_request=None,
222 def get_statuses(self, repo, revision=None, pull_request=None,
224 with_revisions=False):
223 with_revisions=False):
225 q = self._get_status_query(repo, revision, pull_request,
224 q = self._get_status_query(repo, revision, pull_request,
226 with_revisions)
225 with_revisions)
227 return q.all()
226 return q.all()
228
227
229 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
228 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
230 """
229 """
231 Returns latest status of changeset for given revision or for given
230 Returns latest status of changeset for given revision or for given
232 pull request. Statuses are versioned inside the table itself, and
231 pull request. Statuses are versioned inside the table itself, and
233 version == 0 is always the current one
232 version == 0 is always the current one
234
233
235 :param repo:
234 :param repo:
236 :param revision: 40-char hash or None
235 :param revision: 40-char hash or None
237 :param pull_request: pull_request reference
236 :param pull_request: pull_request reference
238 :param as_str: return status as string not object
237 :param as_str: return status as string not object
239 """
238 """
240 q = self._get_status_query(repo, revision, pull_request)
239 q = self._get_status_query(repo, revision, pull_request)
241
240
242 # need to use first here since there can be multiple statuses
241 # need to use first here since there can be multiple statuses
243 # returned from pull_request
242 # returned from pull_request
244 status = q.first()
243 status = q.first()
245 if as_str:
244 if as_str:
246 status = status.status if status else status
245 status = status.status if status else status
247 st = status or ChangesetStatus.DEFAULT
246 st = status or ChangesetStatus.DEFAULT
248 return str(st)
247 return str(st)
249 return status
248 return status
250
249
251 def _render_auto_status_message(
250 def _render_auto_status_message(
252 self, status, commit_id=None, pull_request=None):
251 self, status, commit_id=None, pull_request=None):
253 """
252 """
254 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
253 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
255 so it always looks the same regardless of which default
254 so it always looks the same regardless of which default
256 renderer the system is using.
255 renderer the system is using.
257
256
258 :param status: status text to change into
257 :param status: status text to change into
259 :param commit_id: the commit_id we change the status for
258 :param commit_id: the commit_id we change the status for
260 :param pull_request: the pull request we change the status for
259 :param pull_request: the pull request we change the status for
261 """
260 """
262
261
263 new_status = ChangesetStatus.get_status_lbl(status)
262 new_status = ChangesetStatus.get_status_lbl(status)
264
263
265 params = {
264 params = {
266 'new_status_label': new_status,
265 'new_status_label': new_status,
267 'pull_request': pull_request,
266 'pull_request': pull_request,
268 'commit_id': commit_id,
267 'commit_id': commit_id,
269 }
268 }
270 renderer = RstTemplateRenderer()
269 renderer = RstTemplateRenderer()
271 return renderer.render('auto_status_change.mako', **params)
270 return renderer.render('auto_status_change.mako', **params)
272
271
273 def set_status(self, repo, status, user, comment=None, revision=None,
272 def set_status(self, repo, status, user, comment=None, revision=None,
274 pull_request=None, dont_allow_on_closed_pull_request=False):
273 pull_request=None, dont_allow_on_closed_pull_request=False):
275 """
274 """
276 Creates new status for changeset or updates the old ones bumping their
275 Creates new status for changeset or updates the old ones bumping their
277 version, leaving the current status at version 0
276 version, leaving the current status at version 0
278
277
279 :param repo:
278 :param repo:
280 :param revision:
279 :param revision:
281 :param status:
280 :param status:
282 :param user:
281 :param user:
283 :param comment:
282 :param comment:
284 :param dont_allow_on_closed_pull_request: don't allow a status change
283 :param dont_allow_on_closed_pull_request: don't allow a status change
285 if the last status was for a pull request and it's closed. We shouldn't
284 if the last status was for a pull request and it's closed. We shouldn't
286 mess with this manually
285 mess with this manually
287 """
286 """
288 repo = self._get_repo(repo)
287 repo = self._get_repo(repo)
289
288
290 q = ChangesetStatus.query()
289 q = ChangesetStatus.query()
291
290
292 if revision:
291 if revision:
293 q = q.filter(ChangesetStatus.repo == repo)
292 q = q.filter(ChangesetStatus.repo == repo)
294 q = q.filter(ChangesetStatus.revision == revision)
293 q = q.filter(ChangesetStatus.revision == revision)
295 elif pull_request:
294 elif pull_request:
296 pull_request = self.__get_pull_request(pull_request)
295 pull_request = self.__get_pull_request(pull_request)
297 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
296 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
298 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
297 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
299 cur_statuses = q.all()
298 cur_statuses = q.all()
300
299
301 # if statuses exist and the last one is associated with a closed pull request
300 # if statuses exist and the last one is associated with a closed pull request
302 # we need to check if we can allow this status change
301 # we need to check if we can allow this status change
303 if (dont_allow_on_closed_pull_request and cur_statuses
302 if (dont_allow_on_closed_pull_request and cur_statuses
304 and getattr(cur_statuses[0].pull_request, 'status', '')
303 and getattr(cur_statuses[0].pull_request, 'status', '')
305 == PullRequest.STATUS_CLOSED):
304 == PullRequest.STATUS_CLOSED):
306 raise StatusChangeOnClosedPullRequestError(
305 raise StatusChangeOnClosedPullRequestError(
307 'Changing status on closed pull request is not allowed'
306 'Changing status on closed pull request is not allowed'
308 )
307 )
309
308
310 # update all current statuses with older version
309 # update all current statuses with older version
311 if cur_statuses:
310 if cur_statuses:
312 for st in cur_statuses:
311 for st in cur_statuses:
313 st.version += 1
312 st.version += 1
314 Session().add(st)
313 Session().add(st)
315 Session().flush()
314 Session().flush()
316
315
317 def _create_status(user, repo, status, comment, revision, pull_request):
316 def _create_status(user, repo, status, comment, revision, pull_request):
318 new_status = ChangesetStatus()
317 new_status = ChangesetStatus()
319 new_status.author = self._get_user(user)
318 new_status.author = self._get_user(user)
320 new_status.repo = self._get_repo(repo)
319 new_status.repo = self._get_repo(repo)
321 new_status.status = status
320 new_status.status = status
322 new_status.comment = comment
321 new_status.comment = comment
323 new_status.revision = revision
322 new_status.revision = revision
324 new_status.pull_request = pull_request
323 new_status.pull_request = pull_request
325 return new_status
324 return new_status
326
325
327 if not comment:
326 if not comment:
328 from rhodecode.model.comment import CommentsModel
327 from rhodecode.model.comment import CommentsModel
329 comment = CommentsModel().create(
328 comment = CommentsModel().create(
330 text=self._render_auto_status_message(
329 text=self._render_auto_status_message(
331 status, commit_id=revision, pull_request=pull_request),
330 status, commit_id=revision, pull_request=pull_request),
332 repo=repo,
331 repo=repo,
333 user=user,
332 user=user,
334 pull_request=pull_request,
333 pull_request=pull_request,
335 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
334 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
336 )
335 )
337
336
338 if revision:
337 if revision:
339 new_status = _create_status(
338 new_status = _create_status(
340 user=user, repo=repo, status=status, comment=comment,
339 user=user, repo=repo, status=status, comment=comment,
341 revision=revision, pull_request=pull_request)
340 revision=revision, pull_request=pull_request)
342 Session().add(new_status)
341 Session().add(new_status)
343 return new_status
342 return new_status
344 elif pull_request:
343 elif pull_request:
345 # pull request can have more than one revision associated with it
344 # pull request can have more than one revision associated with it
346 # we need to create a new version for each one
345 # we need to create a new version for each one
347 new_statuses = []
346 new_statuses = []
348 repo = pull_request.source_repo
347 repo = pull_request.source_repo
349 for rev in pull_request.revisions:
348 for rev in pull_request.revisions:
350 new_status = _create_status(
349 new_status = _create_status(
351 user=user, repo=repo, status=status, comment=comment,
350 user=user, repo=repo, status=status, comment=comment,
352 revision=rev, pull_request=pull_request)
351 revision=rev, pull_request=pull_request)
353 new_statuses.append(new_status)
352 new_statuses.append(new_status)
354 Session().add(new_status)
353 Session().add(new_status)
355 return new_statuses
354 return new_statuses
356
355
357 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
356 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
358
357
359 commit_statuses_map = collections.defaultdict(list)
358 commit_statuses_map = collections.defaultdict(list)
360 for st in commit_statuses:
359 for st in commit_statuses:
361 commit_statuses_map[st.author.username] += [st]
360 commit_statuses_map[st.author.username] += [st]
362
361
363 reviewers = []
362 reviewers = []
364
363
365 def version(commit_status):
364 def version(commit_status):
366 return commit_status.version
365 return commit_status.version
367
366
368 for obj in reviewers_data:
367 for obj in reviewers_data:
369 if not obj.user:
368 if not obj.user:
370 continue
369 continue
371 if user and obj.user.username != user.username:
370 if user and obj.user.username != user.username:
372 # single user filter
371 # single user filter
373 continue
372 continue
374
373
375 statuses = commit_statuses_map.get(obj.user.username, None)
374 statuses = commit_statuses_map.get(obj.user.username, None)
376 if statuses:
375 if statuses:
377 status_groups = itertools.groupby(
376 status_groups = itertools.groupby(
378 sorted(statuses, key=version), version)
377 sorted(statuses, key=version), version)
379 statuses = [(x, list(y)[0]) for x, y in status_groups]
378 statuses = [(x, list(y)[0]) for x, y in status_groups]
380
379
381 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
380 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
382
381
383 if user:
382 if user:
384 return reviewers[0] if reviewers else reviewers
383 return reviewers[0] if reviewers else reviewers
385 else:
384 else:
386 return reviewers
385 return reviewers
387
386
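The per-reviewer aggregation above keeps one status per version, relying on version 0 being the current vote. A sketch with plain `(version, status)` tuples in place of the real ORM status objects:

    import itertools

    # hypothetical (version, status) pairs for one reviewer; version 0 is current
    statuses = [(1, 'under_review'), (0, 'approved'), (1, 'approved')]

    groups = itertools.groupby(sorted(statuses, key=lambda s: s[0]),
                               key=lambda s: s[0])
    # keep the first entry per version, as the model code does with list(y)[0]
    latest = [(ver, list(grp)[0]) for ver, grp in groups]

    assert latest[0] == (0, (0, 'approved'))  # current status comes first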
388 def reviewers_statuses(self, pull_request, user=None):
387 def reviewers_statuses(self, pull_request, user=None):
389 _commit_statuses = self.get_statuses(
388 _commit_statuses = self.get_statuses(
390 pull_request.source_repo,
389 pull_request.source_repo,
391 pull_request=pull_request,
390 pull_request=pull_request,
392 with_revisions=True)
391 with_revisions=True)
393 reviewers = pull_request.get_pull_request_reviewers(
392 reviewers = pull_request.get_pull_request_reviewers(
394 role=PullRequestReviewers.ROLE_REVIEWER)
393 role=PullRequestReviewers.ROLE_REVIEWER)
395 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
394 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
396
395
397 def calculated_review_status(self, pull_request):
396 def calculated_review_status(self, pull_request):
398 """
397 """
399 calculate pull request status based on reviewers; the reviewer data
398 calculate pull request status based on reviewers; the reviewer data
400 should be a list of two-element lists.
399 should be a list of two-element lists.
401 """
400 """
402 reviewers = self.reviewers_statuses(pull_request)
401 reviewers = self.reviewers_statuses(pull_request)
403 return self.calculate_status(reviewers)
402 return self.calculate_status(reviewers)
@@ -1,855 +1,852 b''
1
2
3 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
8 #
6 #
9 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
10 # GNU General Public License for more details.
13 #
11 #
14 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
14 #
17 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
18
21 """
19 """
22 comments model for RhodeCode
20 comments model for RhodeCode
23 """
21 """
24 import datetime
22 import datetime
25
23
26 import logging
24 import logging
27 import traceback
25 import traceback
28 import collections
26 import collections
29
27
30 from pyramid.threadlocal import get_current_registry, get_current_request
28 from pyramid.threadlocal import get_current_registry, get_current_request
31 from sqlalchemy.sql.expression import null
29 from sqlalchemy.sql.expression import null
32 from sqlalchemy.sql.functions import coalesce
30 from sqlalchemy.sql.functions import coalesce
33
31
34 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
32 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
35 from rhodecode.lib import audit_logger
33 from rhodecode.lib import audit_logger
36 from rhodecode.lib.exceptions import CommentVersionMismatch
34 from rhodecode.lib.exceptions import CommentVersionMismatch
37 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
35 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
38 from rhodecode.model import BaseModel
36 from rhodecode.model import BaseModel
39 from rhodecode.model.db import (
37 from rhodecode.model.db import (
40 false, true,
38 false, true,
41 ChangesetComment,
39 ChangesetComment,
42 User,
40 User,
43 Notification,
41 Notification,
44 PullRequest,
42 PullRequest,
45 AttributeDict,
43 AttributeDict,
46 ChangesetCommentHistory,
44 ChangesetCommentHistory,
47 )
45 )
48 from rhodecode.model.notification import NotificationModel
46 from rhodecode.model.notification import NotificationModel
49 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
50 from rhodecode.model.settings import VcsSettingsModel
48 from rhodecode.model.settings import VcsSettingsModel
51 from rhodecode.model.notification import EmailNotificationModel
49 from rhodecode.model.notification import EmailNotificationModel
52 from rhodecode.model.validation_schema.schemas import comment_schema
50 from rhodecode.model.validation_schema.schemas import comment_schema
53
51
54
52
55 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
56
54
57
55
58 class CommentsModel(BaseModel):
56 class CommentsModel(BaseModel):
59
57
60 cls = ChangesetComment
58 cls = ChangesetComment
61
59
62 DIFF_CONTEXT_BEFORE = 3
60 DIFF_CONTEXT_BEFORE = 3
63 DIFF_CONTEXT_AFTER = 3
61 DIFF_CONTEXT_AFTER = 3
64
62
65 def __get_commit_comment(self, changeset_comment):
63 def __get_commit_comment(self, changeset_comment):
66 return self._get_instance(ChangesetComment, changeset_comment)
64 return self._get_instance(ChangesetComment, changeset_comment)
67
65
68 def __get_pull_request(self, pull_request):
66 def __get_pull_request(self, pull_request):
69 return self._get_instance(PullRequest, pull_request)
67 return self._get_instance(PullRequest, pull_request)
70
68
71 def _extract_mentions(self, s):
69 def _extract_mentions(self, s):
72 user_objects = []
70 user_objects = []
73 for username in extract_mentioned_users(s):
71 for username in extract_mentioned_users(s):
74 user_obj = User.get_by_username(username, case_insensitive=True)
72 user_obj = User.get_by_username(username, case_insensitive=True)
75 if user_obj:
73 if user_obj:
76 user_objects.append(user_obj)
74 user_objects.append(user_obj)
77 return user_objects
75 return user_objects
78
76
79 def _get_renderer(self, global_renderer='rst', request=None):
77 def _get_renderer(self, global_renderer='rst', request=None):
80 request = request or get_current_request()
78 request = request or get_current_request()
81
79
82 try:
80 try:
83 global_renderer = request.call_context.visual.default_renderer
81 global_renderer = request.call_context.visual.default_renderer
84 except AttributeError:
82 except AttributeError:
85 log.debug("Renderer not set, falling back "
83 log.debug("Renderer not set, falling back "
86 "to default renderer '%s'", global_renderer)
84 "to default renderer '%s'", global_renderer)
87 except Exception:
85 except Exception:
88 log.error(traceback.format_exc())
86 log.error(traceback.format_exc())
89 return global_renderer
87 return global_renderer
90
88
91 def aggregate_comments(self, comments, versions, show_version, inline=False):
89 def aggregate_comments(self, comments, versions, show_version, inline=False):
92 # group comments by version; build 'at', cumulative 'until', and 'display' lists
90 # group comments by version; build 'at', cumulative 'until', and 'display' lists
93
91
94 comment_groups = collections.defaultdict(list)
92 comment_groups = collections.defaultdict(list)
95 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
93 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
96
94
97 def yield_comments(pos):
95 def yield_comments(pos):
98 for co in comment_groups[pos]:
96 yield from comment_groups[pos]
99 yield co
100
97
101 comment_versions = collections.defaultdict(
98 comment_versions = collections.defaultdict(
102 lambda: collections.defaultdict(list))
99 lambda: collections.defaultdict(list))
103 prev_prvid = -1
100 prev_prvid = -1
104 # fake last entry with None, to aggregate on "latest" version which
101 # fake last entry with None, to aggregate on "latest" version which
105 # doesn't have a pull_request_version_id
102 # doesn't have a pull_request_version_id
106 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
103 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
107 prvid = ver.pull_request_version_id
104 prvid = ver.pull_request_version_id
108 if prev_prvid == -1:
105 if prev_prvid == -1:
109 prev_prvid = prvid
106 prev_prvid = prvid
110
107
111 for co in yield_comments(prvid):
108 for co in yield_comments(prvid):
112 comment_versions[prvid]['at'].append(co)
109 comment_versions[prvid]['at'].append(co)
113
110
114 # save until
111 # save until
115 current = comment_versions[prvid]['at']
112 current = comment_versions[prvid]['at']
116 prev_until = comment_versions[prev_prvid]['until']
113 prev_until = comment_versions[prev_prvid]['until']
117 cur_until = prev_until + current
114 cur_until = prev_until + current
118 comment_versions[prvid]['until'].extend(cur_until)
115 comment_versions[prvid]['until'].extend(cur_until)
119
116
120 # save outdated
117 # save outdated
121 if inline:
118 if inline:
122 outdated = [x for x in cur_until
119 outdated = [x for x in cur_until
123 if x.outdated_at_version(show_version)]
120 if x.outdated_at_version(show_version)]
124 else:
121 else:
125 outdated = [x for x in cur_until
122 outdated = [x for x in cur_until
126 if x.older_than_version(show_version)]
123 if x.older_than_version(show_version)]
127 display = [x for x in cur_until if x not in outdated]
124 display = [x for x in cur_until if x not in outdated]
128
125
129 comment_versions[prvid]['outdated'] = outdated
126 comment_versions[prvid]['outdated'] = outdated
130 comment_versions[prvid]['display'] = display
127 comment_versions[prvid]['display'] = display
131
128
132 prev_prvid = prvid
129 prev_prvid = prvid
133
130
134 return comment_versions
131 return comment_versions
135
132
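A toy version of the 'at'/'until' bookkeeping in `aggregate_comments`: comments are bucketed by the version they were made at, and each version's 'until' list is the running total of everything seen so far. The data below is hypothetical plain values rather than comment objects, and the outdated/display split is omitted:

    import collections

    # comments keyed by the PR version they were made at; None = latest version
    comments_by_version = {1: ['c1'], 2: ['c2', 'c3'], None: ['c4']}

    agg = collections.defaultdict(lambda: {'at': [], 'until': []})
    prev = -1
    for ver in [1, 2, None]:  # oldest version -> latest
        agg[ver]['at'] = list(comments_by_version.get(ver, []))
        prev_until = agg[prev]['until'] if prev != -1 else []
        agg[ver]['until'] = prev_until + agg[ver]['at']  # running total
        prev = ver

    assert agg[2]['until'] == ['c1', 'c2', 'c3']
    assert agg[None]['until'] == ['c1', 'c2', 'c3', 'c4']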
136 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
133 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
137 qry = Session().query(ChangesetComment) \
134 qry = Session().query(ChangesetComment) \
138 .filter(ChangesetComment.repo == repo)
135 .filter(ChangesetComment.repo == repo)
139
136
140 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
137 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
141 qry = qry.filter(ChangesetComment.comment_type == comment_type)
138 qry = qry.filter(ChangesetComment.comment_type == comment_type)
142
139
143 if user:
140 if user:
144 user = self._get_user(user)
141 user = self._get_user(user)
145 if user:
142 if user:
146 qry = qry.filter(ChangesetComment.user_id == user.user_id)
143 qry = qry.filter(ChangesetComment.user_id == user.user_id)
147
144
148 if commit_id:
145 if commit_id:
149 qry = qry.filter(ChangesetComment.revision == commit_id)
146 qry = qry.filter(ChangesetComment.revision == commit_id)
150
147
151 qry = qry.order_by(ChangesetComment.created_on)
148 qry = qry.order_by(ChangesetComment.created_on)
152 return qry.all()
149 return qry.all()
153
150
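The pattern in `get_repository_comments` — start from a base query and conditionally narrow it — is easy to mirror with plain data. A hedged sketch of the same optional-filter chaining over a list of dicts (the real code chains SQLAlchemy query objects instead):

    def filter_comments(comments, comment_type=None, user_id=None, commit_id=None):
        # each argument, when given, narrows the result, mirroring the query chain
        result = comments
        if comment_type is not None:
            result = [c for c in result if c['type'] == comment_type]
        if user_id is not None:
            result = [c for c in result if c['user_id'] == user_id]
        if commit_id is not None:
            result = [c for c in result if c['revision'] == commit_id]
        return sorted(result, key=lambda c: c['created_on'])

    rows = [{'type': 'todo', 'user_id': 1, 'revision': 'abc', 'created_on': 2},
            {'type': 'note', 'user_id': 1, 'revision': 'abc', 'created_on': 1}]
    assert [c['type'] for c in filter_comments(rows, user_id=1)] == ['note', 'todo']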
154 def get_repository_unresolved_todos(self, repo):
151 def get_repository_unresolved_todos(self, repo):
155 todos = Session().query(ChangesetComment) \
152 todos = Session().query(ChangesetComment) \
156 .filter(ChangesetComment.repo == repo) \
153 .filter(ChangesetComment.repo == repo) \
157 .filter(ChangesetComment.resolved_by == None) \
154 .filter(ChangesetComment.resolved_by == None) \
158 .filter(ChangesetComment.comment_type
155 .filter(ChangesetComment.comment_type
159 == ChangesetComment.COMMENT_TYPE_TODO)
156 == ChangesetComment.COMMENT_TYPE_TODO)
160 todos = todos.all()
157 todos = todos.all()
161
158
162 return todos
159 return todos
163
160
164 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
161 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
165
162
166 todos = Session().query(ChangesetComment) \
163 todos = Session().query(ChangesetComment) \
167 .filter(ChangesetComment.pull_request == pull_request) \
164 .filter(ChangesetComment.pull_request == pull_request) \
168 .filter(ChangesetComment.resolved_by == None) \
165 .filter(ChangesetComment.resolved_by == None) \
169 .filter(ChangesetComment.comment_type
166 .filter(ChangesetComment.comment_type
170 == ChangesetComment.COMMENT_TYPE_TODO)
167 == ChangesetComment.COMMENT_TYPE_TODO)
171
168
172 if not include_drafts:
169 if not include_drafts:
173 todos = todos.filter(ChangesetComment.draft == false())
170 todos = todos.filter(ChangesetComment.draft == false())
174
171
175 if not show_outdated:
172 if not show_outdated:
176 todos = todos.filter(
173 todos = todos.filter(
177 coalesce(ChangesetComment.display_state, '') !=
174 coalesce(ChangesetComment.display_state, '') !=
178 ChangesetComment.COMMENT_OUTDATED)
175 ChangesetComment.COMMENT_OUTDATED)
179
176
180 todos = todos.all()
177 todos = todos.all()
181
178
182 return todos
179 return todos
183
180
184 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
181 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
185
182
186 todos = Session().query(ChangesetComment) \
183 todos = Session().query(ChangesetComment) \
187 .filter(ChangesetComment.pull_request == pull_request) \
184 .filter(ChangesetComment.pull_request == pull_request) \
188 .filter(ChangesetComment.resolved_by != None) \
185 .filter(ChangesetComment.resolved_by != None) \
189 .filter(ChangesetComment.comment_type
186 .filter(ChangesetComment.comment_type
190 == ChangesetComment.COMMENT_TYPE_TODO)
187 == ChangesetComment.COMMENT_TYPE_TODO)
191
188
192 if not include_drafts:
189 if not include_drafts:
193 todos = todos.filter(ChangesetComment.draft == false())
190 todos = todos.filter(ChangesetComment.draft == false())
194
191
195 if not show_outdated:
192 if not show_outdated:
196 todos = todos.filter(
193 todos = todos.filter(
197 coalesce(ChangesetComment.display_state, '') !=
194 coalesce(ChangesetComment.display_state, '') !=
198 ChangesetComment.COMMENT_OUTDATED)
195 ChangesetComment.COMMENT_OUTDATED)
199
196
200 todos = todos.all()
197 todos = todos.all()
201
198
202 return todos
199 return todos
203
200
204 def get_pull_request_drafts(self, user_id, pull_request):
201 def get_pull_request_drafts(self, user_id, pull_request):
205 drafts = Session().query(ChangesetComment) \
202 drafts = Session().query(ChangesetComment) \
206 .filter(ChangesetComment.pull_request == pull_request) \
203 .filter(ChangesetComment.pull_request == pull_request) \
207 .filter(ChangesetComment.user_id == user_id) \
204 .filter(ChangesetComment.user_id == user_id) \
208 .filter(ChangesetComment.draft == true())
205 .filter(ChangesetComment.draft == true())
209 return drafts.all()
206 return drafts.all()
210
207
211 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
208 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
212
209
213 todos = Session().query(ChangesetComment) \
210 todos = Session().query(ChangesetComment) \
214 .filter(ChangesetComment.revision == commit_id) \
211 .filter(ChangesetComment.revision == commit_id) \
215 .filter(ChangesetComment.resolved_by == None) \
212 .filter(ChangesetComment.resolved_by == None) \
216 .filter(ChangesetComment.comment_type
213 .filter(ChangesetComment.comment_type
217 == ChangesetComment.COMMENT_TYPE_TODO)
214 == ChangesetComment.COMMENT_TYPE_TODO)
218
215
219 if not include_drafts:
216 if not include_drafts:
220 todos = todos.filter(ChangesetComment.draft == false())
217 todos = todos.filter(ChangesetComment.draft == false())
221
218
222 if not show_outdated:
219 if not show_outdated:
223 todos = todos.filter(
220 todos = todos.filter(
224 coalesce(ChangesetComment.display_state, '') !=
221 coalesce(ChangesetComment.display_state, '') !=
225 ChangesetComment.COMMENT_OUTDATED)
222 ChangesetComment.COMMENT_OUTDATED)
226
223
227 todos = todos.all()
224 todos = todos.all()
228
225
229 return todos
226 return todos
230
227
231 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
228 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
232
229
233 todos = Session().query(ChangesetComment) \
230 todos = Session().query(ChangesetComment) \
234 .filter(ChangesetComment.revision == commit_id) \
231 .filter(ChangesetComment.revision == commit_id) \
235 .filter(ChangesetComment.resolved_by != None) \
232 .filter(ChangesetComment.resolved_by != None) \
236 .filter(ChangesetComment.comment_type
233 .filter(ChangesetComment.comment_type
237 == ChangesetComment.COMMENT_TYPE_TODO)
234 == ChangesetComment.COMMENT_TYPE_TODO)
238
235
239 if not include_drafts:
236 if not include_drafts:
240 todos = todos.filter(ChangesetComment.draft == false())
237 todos = todos.filter(ChangesetComment.draft == false())
241
238
242 if not show_outdated:
239 if not show_outdated:
243 todos = todos.filter(
240 todos = todos.filter(
244 coalesce(ChangesetComment.display_state, '') !=
241 coalesce(ChangesetComment.display_state, '') !=
245 ChangesetComment.COMMENT_OUTDATED)
242 ChangesetComment.COMMENT_OUTDATED)
246
243
247 todos = todos.all()
244 todos = todos.all()
248
245
249 return todos
246 return todos
250
247
251 def get_commit_inline_comments(self, commit_id, include_drafts=True):
248 def get_commit_inline_comments(self, commit_id, include_drafts=True):
252 inline_comments = Session().query(ChangesetComment) \
249 inline_comments = Session().query(ChangesetComment) \
253 .filter(ChangesetComment.line_no != None) \
250 .filter(ChangesetComment.line_no != None) \
254 .filter(ChangesetComment.f_path != None) \
251 .filter(ChangesetComment.f_path != None) \
255 .filter(ChangesetComment.revision == commit_id)
252 .filter(ChangesetComment.revision == commit_id)
256
253
257 if not include_drafts:
254 if not include_drafts:
258 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
255 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
259
256
260 inline_comments = inline_comments.all()
257 inline_comments = inline_comments.all()
261 return inline_comments
258 return inline_comments
262
259
263 def _log_audit_action(self, action, action_data, auth_user, comment):
260 def _log_audit_action(self, action, action_data, auth_user, comment):
264 audit_logger.store(
261 audit_logger.store(
265 action=action,
262 action=action,
266 action_data=action_data,
263 action_data=action_data,
267 user=auth_user,
264 user=auth_user,
268 repo=comment.repo)
265 repo=comment.repo)
269
266
270 def create(self, text, repo, user, commit_id=None, pull_request=None,
267 def create(self, text, repo, user, commit_id=None, pull_request=None,
271 f_path=None, line_no=None, status_change=None,
268 f_path=None, line_no=None, status_change=None,
272 status_change_type=None, comment_type=None, is_draft=False,
269 status_change_type=None, comment_type=None, is_draft=False,
273 resolves_comment_id=None, closing_pr=False, send_email=True,
270 resolves_comment_id=None, closing_pr=False, send_email=True,
274 renderer=None, auth_user=None, extra_recipients=None):
271 renderer=None, auth_user=None, extra_recipients=None):
275 """
272 """
276 Creates new comment for commit or pull request.
273 Creates new comment for commit or pull request.
277 If status_change is not None, this comment is associated with a
274 If status_change is not None, this comment is associated with a
278 status change of a commit, or of a commit associated with a pull request
275 status change of a commit, or of a commit associated with a pull request
279
276
280 :param text:
277 :param text:
281 :param repo:
278 :param repo:
282 :param user:
279 :param user:
283 :param commit_id:
280 :param commit_id:
284 :param pull_request:
281 :param pull_request:
285 :param f_path:
282 :param f_path:
286 :param line_no:
283 :param line_no:
287 :param status_change: Label for status change
284 :param status_change: Label for status change
288 :param comment_type: Type of comment
285 :param comment_type: Type of comment
289 :param is_draft: is comment a draft only
286 :param is_draft: is comment a draft only
290 :param resolves_comment_id: id of comment which this one will resolve
287 :param resolves_comment_id: id of comment which this one will resolve
291 :param status_change_type: type of status change
288 :param status_change_type: type of status change
292 :param closing_pr:
289 :param closing_pr:
293 :param send_email:
290 :param send_email:
294 :param renderer: pick renderer for this comment
291 :param renderer: pick renderer for this comment
295 :param auth_user: current authenticated user calling this method
292 :param auth_user: current authenticated user calling this method
296 :param extra_recipients: list of extra users to be added to recipients
293 :param extra_recipients: list of extra users to be added to recipients
297 """
294 """

        request = get_current_request()
        _ = request.translate

        if not renderer:
            renderer = self._get_renderer(request=request)

        repo = self._get_repo(repo)
        user = self._get_user(user)
        auth_user = auth_user or user

        schema = comment_schema.CommentSchema()
        validated_kwargs = schema.deserialize(dict(
            comment_body=text,
            comment_type=comment_type,
            is_draft=is_draft,
            comment_file=f_path,
            comment_line=line_no,
            renderer_type=renderer,
            status_change=status_change_type,
            resolves_comment_id=resolves_comment_id,
            repo=repo.repo_id,
            user=user.user_id,
        ))

        is_draft = validated_kwargs['is_draft']

        comment = ChangesetComment()
        comment.renderer = validated_kwargs['renderer_type']
        comment.text = validated_kwargs['comment_body']
        comment.f_path = validated_kwargs['comment_file']
        comment.line_no = validated_kwargs['comment_line']
        comment.comment_type = validated_kwargs['comment_type']
        comment.draft = is_draft

        comment.repo = repo
        comment.author = user
        resolved_comment = self.__get_commit_comment(
            validated_kwargs['resolves_comment_id'])

        # check if the comment actually belongs to this PR
        if resolved_comment and resolved_comment.pull_request and \
                resolved_comment.pull_request != pull_request:
            log.warning('Comment tried to resolve an unrelated todo comment: %s',
                        resolved_comment)
            # comment not bound to this pull request, forbid
            resolved_comment = None

        elif resolved_comment and resolved_comment.repo and \
                resolved_comment.repo != repo:
            log.warning('Comment tried to resolve an unrelated todo comment: %s',
                        resolved_comment)
            # comment not bound to this repo, forbid
            resolved_comment = None

        if resolved_comment and resolved_comment.resolved_by:
            # if this comment is already resolved, don't mark it again!
            resolved_comment = None

        comment.resolved_comment = resolved_comment

        pull_request_id = pull_request

        commit_obj = None
        pull_request_obj = None

        if commit_id:
            notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
            # do a lookup, so we don't pass something bad here
            commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
            comment.revision = commit_obj.raw_id

        elif pull_request_id:
            notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
            pull_request_obj = self.__get_pull_request(pull_request_id)
            comment.pull_request = pull_request_obj
        else:
            raise Exception('Please specify commit or pull_request_id')

        Session().add(comment)
        Session().flush()
        kwargs = {
            'user': user,
            'renderer_type': renderer,
            'repo_name': repo.repo_name,
            'status_change': status_change,
            'status_change_type': status_change_type,
            'comment_body': text,
            'comment_file': f_path,
            'comment_line': line_no,
            'comment_type': comment_type or 'note',
            'comment_id': comment.comment_id
        }

        if commit_obj:
            recipients = ChangesetComment.get_users(
                revision=commit_obj.raw_id)
            # add commit author if it's in RhodeCode system
            cs_author = User.get_from_cs_author(commit_obj.author)
            if not cs_author:
                # use repo owner if we cannot extract the author correctly
                cs_author = repo.user
            recipients += [cs_author]

            commit_comment_url = self.get_url(comment, request=request)
            commit_comment_reply_url = self.get_url(
                comment, request=request,
                anchor=f'comment-{comment.comment_id}/?/ReplyToComment')

            target_repo_url = h.link_to(
                repo.repo_name,
                h.route_url('repo_summary', repo_name=repo.repo_name))

            commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
                                     commit_id=commit_id)

            # commit specifics
            kwargs.update({
                'commit': commit_obj,
                'commit_message': commit_obj.message,
                'commit_target_repo_url': target_repo_url,
                'commit_comment_url': commit_comment_url,
                'commit_comment_reply_url': commit_comment_reply_url,
                'commit_url': commit_url,
                'thread_ids': [commit_url, commit_comment_url],
            })

        elif pull_request_obj:
            # get the current participants of this pull request
            recipients = ChangesetComment.get_users(
                pull_request_id=pull_request_obj.pull_request_id)
            # add pull request author
            recipients += [pull_request_obj.author]

            # add the reviewers to notification
            recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]

            pr_target_repo = pull_request_obj.target_repo
            pr_source_repo = pull_request_obj.source_repo

            pr_comment_url = self.get_url(comment, request=request)
            pr_comment_reply_url = self.get_url(
                comment, request=request,
                anchor=f'comment-{comment.comment_id}/?/ReplyToComment')

            pr_url = h.route_url(
                'pullrequest_show',
                repo_name=pr_target_repo.repo_name,
                pull_request_id=pull_request_obj.pull_request_id, )

            # set some variables for email notification
            pr_target_repo_url = h.route_url(
                'repo_summary', repo_name=pr_target_repo.repo_name)

            pr_source_repo_url = h.route_url(
                'repo_summary', repo_name=pr_source_repo.repo_name)

            # pull request specifics
            kwargs.update({
                'pull_request': pull_request_obj,
                'pr_id': pull_request_obj.pull_request_id,
                'pull_request_url': pr_url,
                'pull_request_target_repo': pr_target_repo,
                'pull_request_target_repo_url': pr_target_repo_url,
                'pull_request_source_repo': pr_source_repo,
                'pull_request_source_repo_url': pr_source_repo_url,
                'pr_comment_url': pr_comment_url,
                'pr_comment_reply_url': pr_comment_reply_url,
                'pr_closing': closing_pr,
                'thread_ids': [pr_url, pr_comment_url],
            })

        if send_email:
            recipients += [self._get_user(u) for u in (extra_recipients or [])]

            mention_recipients = set(
                self._extract_mentions(text)).difference(recipients)

            # create notification objects, and emails
            NotificationModel().create(
                created_by=user,
                notification_subject='',  # Filled in based on the notification_type
                notification_body='',  # Filled in based on the notification_type
                notification_type=notification_type,
                recipients=recipients,
                mention_recipients=mention_recipients,
                email_kwargs=kwargs,
            )

        Session().flush()
        if comment.pull_request:
            action = 'repo.pull_request.comment.create'
        else:
            action = 'repo.commit.comment.create'

        if not is_draft:
            comment_data = comment.get_api_data()

            self._log_audit_action(
                action, {'data': comment_data}, auth_user, comment)

        return comment

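    # A minimal usage sketch (values are hypothetical, shown as a comment
    # only): a plain note on a commit could be created with
    #
    #   comment = CommentsModel().create(
    #       text='looks good to me', repo='some-repo', user='some-user',
    #       commit_id='<full-sha>', comment_type='note')
    #
    # An inline comment additionally passes f_path and a line_no such as
    # 'n10' (line 10 of the new file) or 'o7' (line 7 of the old file).
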
    def edit(self, comment_id, text, auth_user, version):
        """
        Changes an existing comment for a commit or pull request.

        :param comment_id: id of the comment to change
        :param text: new comment body
        :param auth_user: current authenticated user calling this method
        :param version: last comment version known to the caller
        """
        if not text:
            log.warning('Missing text for comment, skipping...')
            return

        comment = ChangesetComment.get(comment_id)
        old_comment_text = comment.text
        comment.text = text
        comment.modified_at = datetime.datetime.now()
        version = safe_int(version)

        # NOTE(marcink): this returns initial comment + edits, so v2 from ui
        # would return 3 here
        comment_version = ChangesetCommentHistory.get_version(comment_id)

        if isinstance(version, int) and (comment_version - version) != 1:
            log.warning(
                'Version mismatch comment_version {} submitted {}, skipping'.format(
                    comment_version-1,  # -1 since note above
                    version
                )
            )
            raise CommentVersionMismatch()

        comment_history = ChangesetCommentHistory()
        comment_history.comment_id = comment_id
        comment_history.version = comment_version
        comment_history.created_by_user_id = auth_user.user_id
        comment_history.text = old_comment_text
        # TODO add email notification
        Session().add(comment_history)
        Session().add(comment)
        Session().flush()

        if comment.pull_request:
            action = 'repo.pull_request.comment.edit'
        else:
            action = 'repo.commit.comment.edit'

        comment_data = comment.get_api_data()
        comment_data['old_comment_text'] = old_comment_text
        self._log_audit_action(
            action, {'data': comment_data}, auth_user, comment)

        return comment_history

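    # Versioning note: get_version() counts the initial comment plus all
    # edits, so after one edit it returns 2 while the UI submits version=1.
    # The (comment_version - version) != 1 check therefore works as an
    # optimistic lock and raises CommentVersionMismatch when a concurrent
    # edit has landed in between.
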
    def delete(self, comment, auth_user):
        """
        Deletes the given comment
        """
        comment = self.__get_commit_comment(comment)
        old_data = comment.get_api_data()
        Session().delete(comment)

        if comment.pull_request:
            action = 'repo.pull_request.comment.delete'
        else:
            action = 'repo.commit.comment.delete'

        self._log_audit_action(
            action, {'old_data': old_data}, auth_user, comment)

        return comment

    def get_all_comments(self, repo_id, revision=None, pull_request=None,
                         include_drafts=True, count_only=False):
        q = ChangesetComment.query()\
            .filter(ChangesetComment.repo_id == repo_id)
        if revision:
            q = q.filter(ChangesetComment.revision == revision)
        elif pull_request:
            pull_request = self.__get_pull_request(pull_request)
            q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
        else:
            raise Exception('Please specify commit or pull_request')
        if not include_drafts:
            q = q.filter(ChangesetComment.draft == false())
        q = q.order_by(ChangesetComment.created_on)
        if count_only:
            return q.count()

        return q.all()

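    # Usage sketch (hypothetical ids): counting non-draft comments on a
    # commit could look like
    #
    #   num = CommentsModel().get_all_comments(
    #       repo_id, revision='<full-sha>',
    #       include_drafts=False, count_only=True)
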
    def get_url(self, comment, request=None, permalink=False, anchor=None):
        if not request:
            request = get_current_request()

        comment = self.__get_commit_comment(comment)
        if anchor is None:
            anchor = f'comment-{comment.comment_id}'

        if comment.pull_request:
            pull_request = comment.pull_request
            if permalink:
                return request.route_url(
                    'pull_requests_global',
                    pull_request_id=pull_request.pull_request_id,
                    _anchor=anchor)
            else:
                return request.route_url(
                    'pullrequest_show',
                    repo_name=safe_str(pull_request.target_repo.repo_name),
                    pull_request_id=pull_request.pull_request_id,
                    _anchor=anchor)

        else:
            repo = comment.repo
            commit_id = comment.revision

            if permalink:
                return request.route_url(
                    'repo_commit', repo_name=safe_str(repo.repo_id),
                    commit_id=commit_id,
                    _anchor=anchor)

            else:
                return request.route_url(
                    'repo_commit', repo_name=safe_str(repo.repo_name),
                    commit_id=commit_id,
                    _anchor=anchor)

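    # The URLs built above carry a '#comment-<id>' anchor, so e.g. a pull
    # request comment resolves to something like (hypothetical values)
    # https://<server>/<repo>/pull-request/42#comment-7. Permalinks use the
    # 'pull_requests_global' route (or the repo id for commits), presumably
    # so links survive repository renames.
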
    def get_comments(self, repo_id, revision=None, pull_request=None):
        """
        Gets main comments based on revision or pull_request_id

        :param repo_id: database id of the repository
        :param revision: commit id (full sha) to filter by
        :param pull_request: pull request, or anything resolvable to one
        """

        q = ChangesetComment.query()\
            .filter(ChangesetComment.repo_id == repo_id)\
            .filter(ChangesetComment.line_no == None)\
            .filter(ChangesetComment.f_path == None)
        if revision:
            q = q.filter(ChangesetComment.revision == revision)
        elif pull_request:
            pull_request = self.__get_pull_request(pull_request)
            q = q.filter(ChangesetComment.pull_request == pull_request)
        else:
            raise Exception('Please specify commit or pull_request')
        q = q.order_by(ChangesetComment.created_on)
        return q.all()

    def get_inline_comments(self, repo_id, revision=None, pull_request=None):
        q = self._get_inline_comments_query(repo_id, revision, pull_request)
        return self._group_comments_by_path_and_line_number(q)

    def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
                                    version=None):
        inline_comms = []
        for fname, per_line_comments in inline_comments.items():
            for lno, comments in per_line_comments.items():
                for comm in comments:
                    # keep the comment unless we're asked to skip outdated
                    # ones and this comment is outdated at the given version
                    if not (skip_outdated and comm.outdated_at_version(version)):
                        inline_comms.append(comm)

        return inline_comms

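    # Shape note: `inline_comments` is the nested mapping produced by
    # get_inline_comments(), e.g. {'path/file.py': {'n10': [<comment>, ...]}},
    # and this method flattens it into a plain list of comment objects.
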
    def get_outdated_comments(self, repo_id, pull_request):
        # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
        # of a pull request.
        q = self._all_inline_comments_of_pull_request(pull_request)
        q = q.filter(
            ChangesetComment.display_state ==
            ChangesetComment.COMMENT_OUTDATED
        ).order_by(ChangesetComment.comment_id.asc())

        return self._group_comments_by_path_and_line_number(q)

    def _get_inline_comments_query(self, repo_id, revision, pull_request):
        # TODO: johbo: Split this into two methods: One for PR and one for
        # commit.
        if revision:
            q = Session().query(ChangesetComment).filter(
                ChangesetComment.repo_id == repo_id,
                ChangesetComment.line_no != null(),
                ChangesetComment.f_path != null(),
                ChangesetComment.revision == revision)

        elif pull_request:
            pull_request = self.__get_pull_request(pull_request)
            if not CommentsModel.use_outdated_comments(pull_request):
                q = self._visible_inline_comments_of_pull_request(pull_request)
            else:
                q = self._all_inline_comments_of_pull_request(pull_request)

        else:
            raise Exception('Please specify commit or pull_request_id')
        q = q.order_by(ChangesetComment.comment_id.asc())
        return q

    def _group_comments_by_path_and_line_number(self, q):
        comments = q.all()
        paths = collections.defaultdict(lambda: collections.defaultdict(list))
        for co in comments:
            paths[co.f_path][co.line_no].append(co)
        return paths

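    # The returned structure is a two-level defaultdict keyed by file path
    # and then by comment line_no, e.g. paths['path/file.py']['n10'] is the
    # list of comments on new line 10 of that file.
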
    @classmethod
    def needed_extra_diff_context(cls):
        return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)

    def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
        if not CommentsModel.use_outdated_comments(pull_request):
            return

        comments = self._visible_inline_comments_of_pull_request(pull_request)
        comments_to_outdate = comments.all()

        for comment in comments_to_outdate:
            self._outdate_one_comment(comment, old_diff_data, new_diff_data)

    def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
        diff_line = _parse_comment_line_number(comment.line_no)

        try:
            old_context = old_diff_proc.get_context_of_line(
                path=comment.f_path, diff_line=diff_line)
            new_context = new_diff_proc.get_context_of_line(
                path=comment.f_path, diff_line=diff_line)
        except (diffs.LineNotInDiffException,
                diffs.FileNotInDiffException):
            if not comment.draft:
                comment.display_state = ChangesetComment.COMMENT_OUTDATED
            return

        if old_context == new_context:
            return

        if self._should_relocate_diff_line(diff_line):
            new_diff_lines = new_diff_proc.find_context(
                path=comment.f_path, context=old_context,
                offset=self.DIFF_CONTEXT_BEFORE)
            if not new_diff_lines:
                # no context found: mark outdated, but leave drafts untouched
                if not comment.draft:
                    comment.display_state = ChangesetComment.COMMENT_OUTDATED
            else:
                new_diff_line = self._choose_closest_diff_line(
                    diff_line, new_diff_lines)
                comment.line_no = _diff_to_comment_line_number(new_diff_line)
        else:
            if not comment.draft:
                comment.display_state = ChangesetComment.COMMENT_OUTDATED

    def _should_relocate_diff_line(self, diff_line):
        """
        Checks if relocation shall be tried for the given `diff_line`.

        If a comment points into the first lines of a file, an update may
        add new lines on top; we would then still find the old context and
        move the comment around, which would be wrong.
        """
        should_relocate = (
            (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
            (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
        return should_relocate

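    # Numeric sketch (the DIFF_CONTEXT_BEFORE value here is assumed; the
    # real constant is defined on the class elsewhere): with
    # DIFF_CONTEXT_BEFORE == 3, a comment at DiffLineNumber(old=None, new=2)
    # is not relocated, while one at DiffLineNumber(old=None, new=10) is a
    # relocation candidate.
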
    def _choose_closest_diff_line(self, diff_line, new_diff_lines):
        candidate = new_diff_lines[0]
        best_delta = _diff_line_delta(diff_line, candidate)
        for new_diff_line in new_diff_lines[1:]:
            delta = _diff_line_delta(diff_line, new_diff_line)
            if delta < best_delta:
                candidate = new_diff_line
                best_delta = delta
        return candidate

    def _visible_inline_comments_of_pull_request(self, pull_request):
        comments = self._all_inline_comments_of_pull_request(pull_request)
        comments = comments.filter(
            coalesce(ChangesetComment.display_state, '') !=
            ChangesetComment.COMMENT_OUTDATED)
        return comments

    def _all_inline_comments_of_pull_request(self, pull_request):
        comments = Session().query(ChangesetComment)\
            .filter(ChangesetComment.line_no != None)\
            .filter(ChangesetComment.f_path != None)\
            .filter(ChangesetComment.pull_request == pull_request)
        return comments

    def _all_general_comments_of_pull_request(self, pull_request):
        comments = Session().query(ChangesetComment)\
            .filter(ChangesetComment.line_no == None)\
            .filter(ChangesetComment.f_path == None)\
            .filter(ChangesetComment.pull_request == pull_request)

        return comments

    @staticmethod
    def use_outdated_comments(pull_request):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get('rhodecode_use_outdated_comments', False)

    def trigger_commit_comment_hook(self, repo, user, action, data=None):
        repo = self._get_repo(repo)
        target_scm = repo.scm_instance()
        if action == 'create':
            trigger_hook = hooks_utils.trigger_comment_commit_hooks
        elif action == 'edit':
            trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
        else:
            return

        log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
                  repo, action, trigger_hook)
        trigger_hook(
            username=user.username,
            repo_name=repo.repo_name,
            repo_type=target_scm.alias,
            repo=repo,
            data=data)


def _parse_comment_line_number(line_no):
    r"""
    Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
    """
    old_line = None
    new_line = None
    if line_no.startswith('o'):
        old_line = int(line_no[1:])
    elif line_no.startswith('n'):
        new_line = int(line_no[1:])
    else:
        raise ValueError("Comment lines have to start with either 'o' or 'n'.")
    return diffs.DiffLineNumber(old_line, new_line)


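# Example outputs (illustrative; assumes diffs.DiffLineNumber is a
# namedtuple with (old, new) fields, as its usage here suggests):
#   _parse_comment_line_number('n10') -> DiffLineNumber(old=None, new=10)
#   _parse_comment_line_number('o7')  -> DiffLineNumber(old=7, new=None)
# Any other prefix raises ValueError.
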
def _diff_to_comment_line_number(diff_line):
    if diff_line.new is not None:
        return f'n{diff_line.new}'
    elif diff_line.old is not None:
        return f'o{diff_line.old}'
    return ''


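# This is the inverse of _parse_comment_line_number for the common cases:
#   DiffLineNumber(old=None, new=10) -> 'n10'
#   DiffLineNumber(old=7, new=None)  -> 'o7'
# The new side wins when both are set, and a fully-empty line number maps
# to '' rather than raising.
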
def _diff_line_delta(a, b):
    if None not in (a.new, b.new):
        return abs(a.new - b.new)
    elif None not in (a.old, b.old):
        return abs(a.old - b.old)
    else:
        raise ValueError(
            f"Cannot compute delta between {a} and {b}")