##// END OF EJS Templates
caches: introduce new conditional cache function.
marcink -
r2891:63610fd1 default
parent child Browse files
Show More
@@ -1,75 +1,74 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 from dogpile.cache import register_backend
23 from dogpile.cache import make_region
24 23
25 24 register_backend(
26 25 "dogpile.cache.rc.memory_lru", "rhodecode.lib.rc_cache.backends",
27 26 "LRUMemoryBackend")
28 27
29 28 register_backend(
30 29 "dogpile.cache.rc.file_namespace", "rhodecode.lib.rc_cache.backends",
31 30 "FileNamespaceBackend")
32 31
33 32 register_backend(
34 33 "dogpile.cache.rc.redis", "rhodecode.lib.rc_cache.backends",
35 34 "RedisPickleBackend")
36 35
37 36
38 37 log = logging.getLogger(__name__)
39 38
40 39 from . import region_meta
41 40 from .utils import (
42 41 get_default_cache_settings, key_generator, get_or_create_region,
43 clear_cache_namespace)
42 clear_cache_namespace, make_region)
44 43
45 44
def configure_dogpile_cache(settings):
    """
    Set up all dogpile cache regions declared in the application settings.

    Every ``rc_cache.<region>.*`` key declares a cache region; each
    discovered region is instantiated, configured from its settings prefix
    and registered in ``region_meta.dogpile_cache_regions``.

    :param settings: application settings dict (may contain ``cache_dir``)
    """
    cache_dir = settings.get('cache_dir')
    if cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

    rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # region name is the first dotted component of each stripped setting key
    region_names = {setting_key.split('.', 1)[0] for setting_key in rc_cache_data}
    log.debug('dogpile: found following cache regions: %s', region_names)

    # instantiate, configure and register every discovered region
    for region_name in region_names:
        region = make_region(
            name=region_name, function_key_generator=key_generator)
        region.configure_from_config(
            settings, 'rc_cache.{}.'.format(region_name))
        log.debug('dogpile: registering a new region %s[%s]',
                  region_name, region.__dict__)
        region_meta.dogpile_cache_regions[region_name] = region
72 71
73 72
def includeme(config):
    """Pyramid ``config.include`` hook: configure cache regions from registry settings."""
    configure_dogpile_cache(config.registry.settings)
@@ -1,106 +1,185 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import os
21 21 import logging
22 from dogpile.cache import make_region
22 import functools
23
24 from dogpile.cache import CacheRegion
25 from dogpile.cache.util import compat
23 26
24 27 from rhodecode.lib.utils import safe_str, sha1
25 28 from . import region_meta
26 29
27 30 log = logging.getLogger(__name__)
28 31
29 32
class RhodeCodeCacheRegion(CacheRegion):
    """dogpile ``CacheRegion`` extended with a conditional caching decorator."""

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=compat.string_type,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't meet. This works a bit different than should_cache_fn
        And it's faster in cases we don't ever want to compute cached values

        :param namespace: optional key namespace, passed to the key generator
        :param expiration_time: timeout in seconds, or a zero-arg callable
            evaluated on every call
        :param should_cache_fn: dogpile hook deciding whether a computed
            value gets stored (still computes through dogpile machinery)
        :param to_str: argument stringifier for key generation
        :param function_key_generator: overrides the region's key generator
        :param condition: when falsy, bypass dogpile completely and call the
            wrapped function directly
        """
        # expiration_time may be a callable producing the timeout per call
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def decorator(fn):
            if to_str is compat.string_type:
                # backwards compatible
                key_generator = function_key_generator(namespace, fn)
            else:
                key_generator = function_key_generator(namespace, fn, to_str=to_str)

            @functools.wraps(fn)
            def decorate(*arg, **kw):
                key = key_generator(*arg, **kw)

                @functools.wraps(fn)
                def creator():
                    return fn(*arg, **kw)

                # fast path: skip all cache machinery when condition is false
                if not condition:
                    return creator()

                timeout = expiration_time() if expiration_time_is_callable \
                    else expiration_time

                return self.get_or_create(key, creator, timeout, should_cache_fn)

            # management helpers mirroring dogpile's cache_on_arguments API
            def invalidate(*arg, **kw):
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                key = key_generator(*arg, **kw)
                return self.get(key)

            def refresh(*arg, **kw):
                # recompute unconditionally and store the fresh value
                key = key_generator(*arg, **kw)
                value = fn(*arg, **kw)
                self.set(key, value)
                return value

            decorate.set = set_
            decorate.invalidate = invalidate
            decorate.refresh = refresh
            decorate.get = get
            decorate.original = fn
            decorate.key_generator = key_generator

            return decorate

        return decorator
103
104
def make_region(*arg, **kw):
    """Drop-in replacement for dogpile's ``make_region`` returning our region class."""
    return RhodeCodeCacheRegion(*arg, **kw)
107
108
30 109 def get_default_cache_settings(settings, prefixes=None):
31 110 prefixes = prefixes or []
32 111 cache_settings = {}
33 112 for key in settings.keys():
34 113 for prefix in prefixes:
35 114 if key.startswith(prefix):
36 115 name = key.split(prefix)[1].strip()
37 116 val = settings[key]
38 117 if isinstance(val, basestring):
39 118 val = val.strip()
40 119 cache_settings[name] = val
41 120 return cache_settings
42 121
43 122
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    normalized = [safe_str(arg) for arg in args]
    return sha1("_".join(normalized))
49 128
50 129
def key_generator(namespace, fn):
    """Build a dogpile key generator scoped by ``namespace`` and the function name."""
    func_name = fn.__name__
    key_namespace = namespace or 'default'

    def generate_key(*args):
        # key layout: <namespace>:<func_name>_<sha1 of stringified args>
        return "{}:{}_{}".format(
            key_namespace, func_name, compute_key_from_params(*args))

    return generate_key
62 141
63 142
def get_or_create_region(region_name, region_namespace=None):
    """
    Return the configured dogpile region ``region_name``.

    For file-namespace backends a dedicated region (one dbm file per
    ``region_namespace``) is lazily created, cached under the namespace key
    in ``region_meta.dogpile_cache_regions`` and returned instead.

    :raises EnvironmentError: if ``region_name`` was never configured
    """
    from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        raise EnvironmentError(
            'Region `{}` not in configured: {}.'.format(
                region_name, region_meta.dogpile_cache_regions.keys()))

    region_uid_name = '{}:{}'.format(region_name, region_namespace)
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        # reuse a per-namespace region if one was already registered
        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist
        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        # the per-namespace region inherits the parent region's expiration
        expiration_time = region_obj.expiration_time

        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
        new_region = make_region(
            name=region_uid_name, function_key_generator=key_generator
        )
        namespace_filename = os.path.join(
            cache_dir, "{}.cache.dbm".format(region_namespace))
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s',region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    return region_obj
100 179
101 180
def clear_cache_namespace(cache_region, cache_namespace_uid):
    """Delete every key of ``cache_region`` under ``cache_namespace_uid``; return the count."""
    region = get_or_create_region(cache_region, cache_namespace_uid)
    namespace_keys = region.backend.list_keys(prefix=cache_namespace_uid)
    region.delete_multi(namespace_keys)
    return len(namespace_keys)
@@ -1,779 +1,778 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 31 import shutil
32 32 import tempfile
33 33 import traceback
34 34 import tarfile
35 35 import warnings
36 36 import hashlib
37 37 from os.path import join as jn
38 38
39 39 import paste
40 40 import pkg_resources
41 41 from webhelpers.text import collapse, remove_formatting, strip_tags
42 42 from mako import exceptions
43 43 from pyramid.threadlocal import get_current_registry
44 44 from rhodecode.lib.request import Request
45 45
46 46 from rhodecode.lib.fakemod import create_module
47 47 from rhodecode.lib.vcs.backends.base import Config
48 48 from rhodecode.lib.vcs.exceptions import VCSError
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 50 from rhodecode.lib.utils2 import (
51 51 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import (
54 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 55 from rhodecode.model.meta import Session
56 56
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 # String which contains characters that are not allowed in slug names for
63 63 # repositories or repository groups. It is properly escaped to use it in
64 64 # regular expressions.
65 65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66 66
67 67 # Regex that matches forbidden characters in repo/group slugs.
68 68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches allowed characters in repo/group slugs.
71 71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72 72
73 73 # Regex that matches whole repo/group slugs.
74 74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75 75
76 76 _license_cache = None
77 77
78 78
def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """
    replacement_char = '-'

    # strip formatting, drop forbidden characters, then normalize whitespace
    slug = SLUG_BAD_CHAR_RE.sub('', remove_formatting(value))
    slug = re.sub('[\s]+', replacement_char, slug)
    return collapse(slug, replacement_char)
92 92
93 93
94 94 #==============================================================================
95 95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 96 #==============================================================================
def get_repo_slug(request):
    """Extract the repository name from a request, without any trailing slash."""
    repo_name = ''

    if hasattr(request, 'db_repo'):
        # a request carrying a db reference resolves example.com/_<id>
        # style urls into the real repository name
        repo_name = request.db_repo.repo_name
    elif getattr(request, 'matchdict', None):
        # plain pyramid route matching
        repo_name = request.matchdict.get('repo_name')

    return repo_name.rstrip('/') if repo_name else repo_name
111 111
112 112
def get_repo_group_slug(request):
    """Extract the repository group name from a request, without any trailing slash."""
    group_name = ''

    if hasattr(request, 'db_repo_group'):
        # a request carrying a db reference resolves example.com/_<id>
        # style urls into the real repo group name
        group_name = request.db_repo_group.group_name
    elif getattr(request, 'matchdict', None):
        # plain pyramid route matching
        group_name = request.matchdict.get('repo_group_name')

    return group_name.rstrip('/') if group_name else group_name
127 126
128 127
def get_user_group_slug(request):
    """
    Extract the user group name from a request.

    Accepts either a pre-resolved ``db_user_group`` on the request or a
    pyramid matchdict carrying ``user_group_id`` / ``user_group_name``.
    Returns '' when nothing matched, or None when the db lookup failed.
    """
    _user_group = ''

    if hasattr(request, 'db_user_group'):
        _user_group = request.db_user_group.users_group_name
    elif getattr(request, 'matchdict', None):
        # pyramid
        _user_group = request.matchdict.get('user_group_id')
        _user_group_name = request.matchdict.get('user_group_name')
        try:
            # resolve the group either by numeric id or by its name
            if _user_group:
                _user_group = UserGroup.get(_user_group)
            elif _user_group_name:
                _user_group = UserGroup.get_by_group_name(_user_group_name)

            if _user_group:
                _user_group = _user_group.users_group_name
        except Exception:
            log.exception('Failed to get user group by id and name')
            # catch all failures here
            return None

    return _user_group
152 151
153 152
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    :param skip_removed_repos: skip directories renamed by the repo removal
        procedure (matching REMOVED_REPO_PAT)
    :return: generator of (relative_name, scm_info) tuples
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _get_repos(p):
        dirpaths = _get_dirpaths(p)
        if not _is_dir_writable(p):
            log.warning('repo path without write access: %s', p)

        for dirpath in dirpaths:
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            #skip .<somethin> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                # yield the repo name relative to the scan root
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                #check if this dir containts other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)
197 196
198 197
199 198 def _get_dirpaths(p):
200 199 try:
201 200 # OS-independable way of checking if we have at least read-only
202 201 # access or not.
203 202 dirpaths = os.listdir(p)
204 203 except OSError:
205 204 log.warning('ignoring repo path without read access: %s', p)
206 205 return []
207 206
208 207 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
209 208 # decode paths and suddenly returns unicode objects itself. The items it
210 209 # cannot decode are returned as strings and cause issues.
211 210 #
212 211 # Those paths are ignored here until a solid solution for path handling has
213 212 # been built.
214 213 expected_type = type(p)
215 214
216 215 def _has_correct_type(item):
217 216 if type(item) is not expected_type:
218 217 log.error(
219 218 u"Ignoring path %s since it cannot be decoded into unicode.",
220 219 # Using "repr" to make sure that we see the byte value in case
221 220 # of support.
222 221 repr(item))
223 222 return False
224 223 return True
225 224
226 225 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
227 226
228 227 return dirpaths
229 228
230 229
231 230 def _is_dir_writable(path):
232 231 """
233 232 Probe if `path` is writable.
234 233
235 234 Due to trouble on Cygwin / Windows, this is actually probing if it is
236 235 possible to create a file inside of `path`, stat does not produce reliable
237 236 results in this case.
238 237 """
239 238 try:
240 239 with tempfile.TemporaryFile(dir=path):
241 240 pass
242 241 except OSError:
243 242 return False
244 243 return True
245 244
246 245
def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If expect_scm param is given also, compare if given scm is the same
    as expected from scm parameter. If explicit_scm is given don't try to
    detect the scm, just use the given one to check if repo is valid

    :param repo_name: repository name relative to base_path
    :param base_path: filesystem root where repositories live
    :param expect_scm: scm alias the repository must match
    :param explicit_scm: skip detection; validate against this backend
    :param config: vcs config passed to the explicit backend

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository. '
              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            # instantiating the backend raises VCSError on an invalid path
            detected_scms = [get_scm_backend(explicit_scm)(
                full_path, config=config).alias]
        else:
            detected_scms = get_scm(full_path)

        if expect_scm:
            return detected_scms[0] == expect_scm
        log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False
280 279
281 280
def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group, False otherwise

    :param repo_group_name: name of the group, relative to base_path
    :param base_path: filesystem root where repositories live
    :param skip_path_check: accept the group without checking the directory
        exists on disk
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        # pass message args lazily instead of eager %-formatting, so the
        # string is only built when DEBUG logging is actually enabled
        log.debug('Repo called %s exist, it is not a valid '
                  'repo group', repo_group_name)
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        scm_ = get_scm(os.path.dirname(full_path))
        log.debug('path: %s is a vcs object:%s, not valid '
                  'repo group', full_path, scm_)
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False
317 316
318 317
def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    """
    Interactively ask a yes/no question on stdin.

    :param prompt: text shown to the user
    :param retries: number of invalid answers tolerated before giving up
    :param complaint: message printed after each invalid answer
    :return: True for yes-like answers, False for no-like ones
    :raises IOError: when the user gives too many invalid answers
    """
    while True:
        ok = raw_input(prompt)
        if ok.lower() in ('y', 'ye', 'yes'):
            return True
        if ok.lower() in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print(complaint)
330 329
# propagated from mercurial documentation
# hgrc config sections handled by RhodeCode's database-backed ui settings
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]
343 342
344 343
def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.

    :param clear_session: remove the SQLAlchemy session after reading
    :param repo: optional repository to read per-repo settings for
    :return: list of (section, key, value) tuples with disabled hook
        entries filtered out
    """
    from rhodecode.model.settings import VcsSettingsModel

    config = []

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)

    ui_settings = settings_model.get_ui_settings()

    # ui_data is only collected for the debug log below
    ui_data = []
    for setting in ui_settings:
        if setting.active:
            ui_data.append((setting.section, setting.key, setting.value))
            config.append((
                safe_str(setting.section), safe_str(setting.key),
                safe_str(setting.value)))
            if setting.key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                config.append((
                    safe_str(setting.section), safe_str(setting.key), False))
    log.debug(
        'settings ui from db: %s',
        ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))

    config = [entry for entry in config if entry[:2] not in skip_entries]

    return config
391 390
392 391
def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance based on the values in the database.
    """
    config = Config()
    for section, option, value in config_data_from_db(
            clear_session=clear_session, repo=repo):
        config.set(section, option, value)
    return config
402 401
403 402
def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size',
    }

    enabled_hooks = []
    for section, key, value, active in ui_settings:
        # only active entries of the 'hooks' section count
        if section != 'hooks' or not active:
            continue
        hook = hook_names.get(key)
        if hook:
            enabled_hooks.append(hook)

    return enabled_hooks
431 430
432 431
def set_rhodecode_config(config):
    """
    Updates pyramid config with new settings from database

    :param config: mutable mapping that receives the settings
    """
    from rhodecode.model.settings import SettingsModel

    for setting_name, setting_value in SettingsModel().get_all_settings().items():
        config[setting_name] = setting_value
444 443
445 444
def get_rhodecode_realm():
    """
    Return the rhodecode realm from database.
    """
    from rhodecode.model.settings import SettingsModel
    realm_setting = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm_setting.app_settings_value)
453 452
454 453
def get_rhodecode_base_path():
    """
    Returns the base path. The base path is the filesystem path which points
    to the repository store.
    """
    from rhodecode.model.settings import SettingsModel
    paths_setting = SettingsModel().get_ui_by_section_and_key('paths', '/')
    return safe_str(paths_setting.ui_value)
463 462
464 463
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param paths: full path to repository
    :return: the innermost RepoGroup created/found, or None for a top-level
        repo
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        # rebuild the full group path down to this nesting level
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            # flush so the new group gets an id before its children reference it
            sa.flush()

        parent = group
    return group
505 504
506 505
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :return: tuple of (added, removed) repository/group names
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.repo_group import RepoGroupModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_super_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        # ensure all parent groups for this repo exist in the db
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()

            db_repo.update_commit_cache()

        # (re)install hooks for every scanned repo, new or pre-existing
        config = db_repo._config
        config.set('extensions', 'largefiles', '')
        repo = db_repo.scm_instance(config=config)
        repo.install_hooks()

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

        def splitter(full_repo_name):
            # return the parent group path of a repo name, or None at top level
            _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
            gr_name = None
            if len(_parts) == 2:
                gr_name = _parts[0]
            return gr_name

        initial_repo_group_list = [splitter(x) for x in
                                   initial_repo_list.keys() if splitter(x)]

        # remove from database those repository groups that are not in the
        # filesystem due to parent child relationships we need to delete them
        # in a specific order of most nested first
        all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
        nested_sort = lambda gr: len(gr.split('/'))
        for group_name in sorted(all_groups, key=nested_sort, reverse=True):
            if group_name not in initial_repo_group_list:
                repo_group = RepoGroup.get_by_group_name(group_name)
                if (repo_group.children.all() or
                    not RepoGroupModel().check_exist_filesystem(
                        group_name=group_name, exc_on_failure=False)):
                    continue

                log.info(
                    'Removing non-existing repository group found in db `%s`',
                    group_name)
                try:
                    RepoGroupModel(sa).delete(group_name, fs_remove=False)
                    sa.commit()
                    removed.append(group_name)
                except Exception:
                    # don't hold further removals on error
                    log.exception(
                        'Unable to remove repository group `%s`',
                        group_name)
                    sa.rollback()
                    raise

    return added, removed
626 625
627 626
def load_rcextensions(root_path):
    """
    Load the optional ``rcextensions`` package found under ``root_path`` and
    install it as ``rhodecode.EXTENSIONS``; merges its extra language
    mappings into the pygments extension map.
    """
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...', rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        # auto check if the module is not missing any data, set to default if is
        # this will help autoupdate new feature of rcext module
        #from rhodecode.config import rcextensions
        #for k in dir(rcextensions):
        #    if not k.startswith('_') and not hasattr(EXT, k):
        #        setattr(EXT, k, getattr(rcextensions, k))
647 646
648 647
def get_custom_lexer(extension):
    """
    returns a custom lexer if it is defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers

    # custom override made by RhodeCode: mako always renders via html+mako
    if extension == 'mako':
        return lexers.get_lexer_by_name('html+mako')

    # check if this extension was mapped to another lexer via rcextensions
    extra_lexers = (getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
                    if rhodecode.EXTENSIONS else None)
    if extra_lexers and extension in extra_lexers:
        return lexers.get_lexer_by_name(extra_lexers[extension])
666 665
667 666
668 667 #==============================================================================
669 668 # TEST FUNCTIONS AND CREATORS
670 669 #==============================================================================
def create_test_index(repo_location, config):
    """
    Makes default test index.

    NOTE(review): ``repo_location`` is unused here but kept for call-site
    compatibility.
    """
    import rc_testdata

    index_destination = os.path.dirname(config['search.location'])
    rc_testdata.extract_search_index('vcs_search_index', index_destination)
679 678
680 679
def create_test_directory(test_path):
    """
    Create test directory if it doesn't exist.
    """
    if os.path.isdir(test_path):
        return
    log.debug('Creating testdir %s', test_path)
    os.makedirs(test_path)
688 687
689 688
def create_test_database(test_path, config):
    """
    Makes a fresh database.
    """
    from rhodecode.lib.db_manage import DbManage

    # PART ONE create db
    db_url = config['sqlalchemy.db1.url']
    log.debug('making test db %s', db_url)

    manage = DbManage(
        log_sql=False, dbconf=db_url, root=config['here'],
        tests=True, cli_args={'force_ask': True})
    manage.create_tables(override=True)
    manage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    manage.create_settings(manage.config_prompt(test_path))
    manage.create_default_user()
    manage.create_test_admin_and_users()
    manage.create_permissions()
    manage.populate_default_permissions()
    Session().commit()
711 710
712 711
def create_test_repositories(test_path, config):
    """
    Creates test repositories in the temporary directory. Repositories are
    extracted from archives within the rc_testdata package.
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # wipe stale search index and cache data from previous runs
    # (index first, then cache dir, matching the configured locations)
    for stale in (idx_path, data_path):
        if stale and os.path.exists(stale):
            log.debug('remove %s', stale)
            shutil.rmtree(stale)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_archive = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_archive) as tar:
        tar.extractall(jn(test_path, SVN_REPO))
744 743
745 744
def password_changed(auth_user, session):
    """
    Report whether the current password of *auth_user* differs from the
    password hash remembered in the web *session*.

    Returns False for the default user and for anonymous users; otherwise
    True when the md5 of the current password no longer matches the one
    stored in the session.
    """
    # Never report password change in case of default user or anonymous user.
    if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
        return False

    current_hash = md5(auth_user.password) if auth_user.password else None
    session_hash = session.get('rhodecode_user', {}).get('password', '')
    return current_hash != session_hash
755 754
756 755
def read_opensource_licenses():
    """
    Return the parsed ``config/licenses.json`` bundled with rhodecode,
    loading it lazily and caching the result in a module-level global.
    """
    global _license_cache

    if not _license_cache:
        raw = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(raw)

    return _license_cache
766 765
767 766
def generate_platform_uuid():
    """
    Generates platform UUID based on it's name.

    :return: sha256 hex digest of the platform description string, or the
        literal ``'UNDEFINED'`` if it could not be computed.
    """
    import platform

    try:
        uuid_list = [platform.platform()]
        # encode explicitly: sha256 requires bytes on python3, and this is a
        # no-op re-encode for the ascii str that platform.platform() returns
        # on python2
        return hashlib.sha256(':'.join(uuid_list).encode('utf-8')).hexdigest()
    except Exception as e:
        # lazy %-style logging args so formatting only happens when emitted
        log.error('Failed to generate host uuid: %s', e)
        return 'UNDEFINED'
General Comments 0
You need to be logged in to leave comments. Login now