##// END OF EJS Templates
caches: introduce new conditional cache function.
marcink -
r2891:63610fd1 default
parent child Browse files
Show More
@@ -1,75 +1,74 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2018 RhodeCode GmbH
3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 from dogpile.cache import register_backend
22 from dogpile.cache import register_backend
23 from dogpile.cache import make_region
24
23
# Register RhodeCode's custom dogpile backends under the
# "dogpile.cache.rc.*" aliases so cache regions can refer to them by name.
for _alias, _backend_class in [
        ("dogpile.cache.rc.memory_lru", "LRUMemoryBackend"),
        ("dogpile.cache.rc.file_namespace", "FileNamespaceBackend"),
        ("dogpile.cache.rc.redis", "RedisPickleBackend")]:
    register_backend(_alias, "rhodecode.lib.rc_cache.backends", _backend_class)
37
36
38 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
39
38
40 from . import region_meta
39 from . import region_meta
41 from .utils import (
40 from .utils import (
42 get_default_cache_settings, key_generator, get_or_create_region,
41 get_default_cache_settings, key_generator, get_or_create_region,
43 clear_cache_namespace)
42 clear_cache_namespace, make_region)
44
43
45
44
def configure_dogpile_cache(settings):
    """
    Read ``rc_cache.*`` entries from *settings*, build a dogpile cache
    region for every namespace found, and store each region in
    ``region_meta.dogpile_cache_regions``.
    """
    cache_dir = settings.get('cache_dir')
    if cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

    rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # the first dotted segment of every ``rc_cache.`` key names a region
    avail_regions = set(
        key.split('.', 1)[0] for key in rc_cache_data.keys())
    log.debug('dogpile: found following cache regions: %s', avail_regions)

    # build and register a region object for each discovered namespace
    for region_name in avail_regions:
        new_region = make_region(
            name=region_name, function_key_generator=key_generator)
        new_region.configure_from_config(
            settings, 'rc_cache.{}.'.format(region_name))
        log.debug('dogpile: registering a new region %s[%s]',
                  region_name, new_region.__dict__)
        region_meta.dogpile_cache_regions[region_name] = new_region
72
71
73
72
def includeme(config):
    """Pyramid include hook: set up cache regions from the app settings."""
    configure_dogpile_cache(config.registry.settings)
@@ -1,106 +1,185 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2018 RhodeCode GmbH
3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import os
20 import os
21 import logging
21 import logging
22 from dogpile.cache import make_region
22 import functools
23
24 from dogpile.cache import CacheRegion
25 from dogpile.cache.util import compat
23
26
24 from rhodecode.lib.utils import safe_str, sha1
27 from rhodecode.lib.utils import safe_str, sha1
25 from . import region_meta
28 from . import region_meta
26
29
27 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
28
31
29
32
class RhodeCodeCacheRegion(CacheRegion):
    """CacheRegion subclass adding a cheaper, conditional caching decorator."""

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=compat.string_type,
            function_key_generator=None,
            condition=True):
        """
        Like ``cache_on_arguments``, but when *condition* is falsy the
        decorated function is executed directly, bypassing dogpile entirely.
        Unlike ``should_cache_fn`` this never computes or stores cached
        values at all, which is faster when caching is known to be unwanted
        up front.
        """
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def decorator(fn):
            # build the cache-key function; the two-argument form is kept
            # for backwards compatibility with older key generators
            if to_str is compat.string_type:
                key_generator = function_key_generator(namespace, fn)
            else:
                key_generator = function_key_generator(
                    namespace, fn, to_str=to_str)

            @functools.wraps(fn)
            def decorate(*arg, **kw):
                key = key_generator(*arg, **kw)

                @functools.wraps(fn)
                def creator():
                    return fn(*arg, **kw)

                # caching disabled: call straight through, no dogpile work
                if not condition:
                    return creator()

                if expiration_time_is_callable:
                    timeout = expiration_time()
                else:
                    timeout = expiration_time

                return self.get_or_create(key, creator, timeout, should_cache_fn)

            def invalidate(*arg, **kw):
                # drop the cached value for these arguments
                self.delete(key_generator(*arg, **kw))

            def set_(value, *arg, **kw):
                # store *value* under the key for these arguments
                self.set(key_generator(*arg, **kw), value)

            def get(*arg, **kw):
                # fetch the cached value (no creation) for these arguments
                return self.get(key_generator(*arg, **kw))

            def refresh(*arg, **kw):
                # recompute, overwrite the cached value, return the fresh one
                key = key_generator(*arg, **kw)
                value = fn(*arg, **kw)
                self.set(key, value)
                return value

            decorate.set = set_
            decorate.invalidate = invalidate
            decorate.refresh = refresh
            decorate.get = get
            decorate.original = fn
            decorate.key_generator = key_generator

            return decorate

        return decorator
103
104
def make_region(*arg, **kw):
    """Drop-in replacement for dogpile's factory, returning our region class."""
    return RhodeCodeCacheRegion(*arg, **kw)
107
108
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache-related options from *settings*.

    Returns a dict with every entry whose key starts with one of
    *prefixes*, keyed by the remainder of the key after the matched prefix.
    String values are whitespace-stripped.

    :param settings: mapping of configuration options
    :param prefixes: list of key prefixes to pick up (default: none)
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # slice instead of key.split(prefix)[1]: split breaks when
                # the prefix text occurs again later inside the key
                name = key[len(prefix):].strip()
                val = settings[key]
                if isinstance(val, basestring):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
42
121
43
122
def compute_key_from_params(*args):
    """
    Derive a deterministic cache key: sha1 of all params joined by ``_``.
    """
    return sha1("_".join(safe_str(arg) for arg in args))
49
128
50
129
def key_generator(namespace, fn):
    """
    Dogpile ``function_key_generator``; produced keys look like
    ``<namespace or 'default'>:<function name>_<sha1 of args>``.
    """
    fname = fn.__name__

    def generate_key(*args):
        prefix = namespace or 'default'
        arg_key = compute_key_from_params(*args)
        return "{}:{}_{}".format(prefix, fname, arg_key)

    return generate_key
62
141
63
142
def get_or_create_region(region_name, region_namespace=None):
    """
    Look up a configured dogpile region by name.

    For regions backed by ``FileNamespaceBackend`` a dedicated
    per-namespace region (one dbm file per namespace) is created on first
    use, cached under *region_namespace* and reused on later calls.

    :param region_name: name of a region registered at configure time
    :param region_namespace: optional namespace for file-based regions
    :raises EnvironmentError: if *region_name* was never configured
    """
    from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        raise EnvironmentError(
            'Region `{}` not in configured: {}.'.format(
                region_name, region_meta.dogpile_cache_regions.keys()))

    region_uid_name = '{}:{}'.format(region_name, region_namespace)
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist
        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        expiration_time = region_obj.expiration_time

        if not os.path.isdir(cache_dir):
            try:
                os.makedirs(cache_dir)
            except OSError:
                # a concurrent worker may have created the dir between the
                # check and the call; only re-raise if it still isn't there
                if not os.path.isdir(cache_dir):
                    raise
        new_region = make_region(
            name=region_uid_name, function_key_generator=key_generator
        )
        namespace_filename = os.path.join(
            cache_dir, "{}.cache.dbm".format(region_namespace))
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    return region_obj
100
179
101
180
def clear_cache_namespace(cache_region, cache_namespace_uid):
    """
    Delete every cached key in the given namespace.

    :return: number of keys removed
    """
    region = get_or_create_region(cache_region, cache_namespace_uid)
    keys = region.backend.list_keys(prefix=cache_namespace_uid)
    region.delete_multi(keys)
    return len(keys)
@@ -1,779 +1,778 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Utilities library for RhodeCode
22 Utilities library for RhodeCode
23 """
23 """
24
24
25 import datetime
25 import datetime
26 import decorator
26 import decorator
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30 import re
30 import re
31 import shutil
31 import shutil
32 import tempfile
32 import tempfile
33 import traceback
33 import traceback
34 import tarfile
34 import tarfile
35 import warnings
35 import warnings
36 import hashlib
36 import hashlib
37 from os.path import join as jn
37 from os.path import join as jn
38
38
39 import paste
39 import paste
40 import pkg_resources
40 import pkg_resources
41 from webhelpers.text import collapse, remove_formatting, strip_tags
41 from webhelpers.text import collapse, remove_formatting, strip_tags
42 from mako import exceptions
42 from mako import exceptions
43 from pyramid.threadlocal import get_current_registry
43 from pyramid.threadlocal import get_current_registry
44 from rhodecode.lib.request import Request
44 from rhodecode.lib.request import Request
45
45
46 from rhodecode.lib.fakemod import create_module
46 from rhodecode.lib.fakemod import create_module
47 from rhodecode.lib.vcs.backends.base import Config
47 from rhodecode.lib.vcs.backends.base import Config
48 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.exceptions import VCSError
49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 from rhodecode.lib.utils2 import (
50 from rhodecode.lib.utils2 import (
51 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
51 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
52 from rhodecode.model import meta
52 from rhodecode.model import meta
53 from rhodecode.model.db import (
53 from rhodecode.model.db import (
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56
56
57
57
58 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
59
59
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61
61
62 # String which contains characters that are not allowed in slug names for
62 # String which contains characters that are not allowed in slug names for
63 # repositories or repository groups. It is properly escaped to use it in
63 # repositories or repository groups. It is properly escaped to use it in
64 # regular expressions.
64 # regular expressions.
65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66
66
67 # Regex that matches forbidden characters in repo/group slugs.
67 # Regex that matches forbidden characters in repo/group slugs.
68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69
69
70 # Regex that matches allowed characters in repo/group slugs.
70 # Regex that matches allowed characters in repo/group slugs.
71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72
72
73 # Regex that matches whole repo/group slugs.
73 # Regex that matches whole repo/group slugs.
74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75
75
76 _license_cache = None
76 _license_cache = None
77
77
78
78
def repo_name_slug(value):
    """
    Return the slug form of a repository name.

    Called on every repository create/modify to keep bad characters out of
    repo names: formatting is removed, forbidden characters dropped,
    whitespace runs become dashes, and repeated dashes are collapsed.
    """
    slug = remove_formatting(value)
    slug = SLUG_BAD_CHAR_RE.sub('', slug)
    slug = re.sub('[\s]+', '-', slug)
    return collapse(slug, '-')
92
92
93
93
94 #==============================================================================
94 #==============================================================================
95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 #==============================================================================
96 #==============================================================================
def get_repo_slug(request):
    """
    Extract the repository name from *request*.

    Prefers the resolved DB object (``request.db_repo``) so that
    ``example.com/_<id>`` URLs translate to proper repo names; otherwise
    falls back to the pyramid ``matchdict``. Trailing slashes are stripped.
    """
    if hasattr(request, 'db_repo'):
        # request resolved a db object; use its canonical name
        repo_name = request.db_repo.repo_name
    elif getattr(request, 'matchdict', None):
        # plain pyramid route match
        repo_name = request.matchdict.get('repo_name')
    else:
        repo_name = ''

    if repo_name:
        repo_name = repo_name.rstrip('/')
    return repo_name
111
111
112
112
def get_repo_group_slug(request):
    """
    Extract the repository-group name from *request*.

    Prefers the resolved DB object (``request.db_repo_group``) so that
    ``example.com/_<id>`` URLs translate to proper group names; otherwise
    falls back to the pyramid ``matchdict``. Trailing slashes are stripped.
    """
    if hasattr(request, 'db_repo_group'):
        # request resolved a db object; use its canonical name
        group_name = request.db_repo_group.group_name
    elif getattr(request, 'matchdict', None):
        # plain pyramid route match
        group_name = request.matchdict.get('repo_group_name')
    else:
        group_name = ''

    if group_name:
        group_name = group_name.rstrip('/')
    return group_name
127
126
128
127
def get_user_group_slug(request):
    """
    Extract the user-group name from *request*.

    Prefers the resolved DB object (``request.db_user_group``); otherwise
    tries the pyramid matchdict, resolving ``user_group_id`` or
    ``user_group_name`` through the database. Returns ``None`` when the
    database lookup fails.
    """
    user_group = ''

    if hasattr(request, 'db_user_group'):
        user_group = request.db_user_group.users_group_name
    elif getattr(request, 'matchdict', None):
        # pyramid route: may carry either an id or a name
        group_id = request.matchdict.get('user_group_id')
        group_name = request.matchdict.get('user_group_name')
        user_group = group_id
        try:
            if group_id:
                user_group = UserGroup.get(group_id)
            elif group_name:
                user_group = UserGroup.get_by_group_name(group_name)

            if user_group:
                user_group = user_group.users_group_name
        except Exception:
            # catch all lookup failures here and signal them as None
            log.exception('Failed to get user group by id and name')
            return None

    return user_group
152
151
153
152
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scan *path* for repositories, yielding ``(name, (type, path))`` tuples.

    :param path: path to scan for repositories
    :param recursive: also descend into subdirs that are not repos,
        prefixing names with their subdirectory
    :param skip_removed_repos: ignore dirs matching the removed-repo
        naming pattern
    """
    # drop the trailing separator for cleaner relative names
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _get_repos(scan_path):
        entries = _get_dirpaths(scan_path)
        if not _is_dir_writable(scan_path):
            log.warning('repo path without write access: %s', scan_path)

        for entry in entries:
            cur_path = os.path.join(scan_path, entry)
            if os.path.isfile(cur_path):
                continue

            # skip repos scheduled for removal
            if skip_removed_repos and REMOVED_REPO_PAT.match(entry):
                continue

            # skip dot-dirs (.git, .hg, hidden dirs, ...)
            if entry.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # not a repo itself; descend looking for nested repos
                if os.path.isdir(cur_path):
                    for inner_scm in _get_repos(cur_path):
                        yield inner_scm

    return _get_repos(path)
197
196
198
197
199 def _get_dirpaths(p):
198 def _get_dirpaths(p):
200 try:
199 try:
201 # OS-independable way of checking if we have at least read-only
200 # OS-independable way of checking if we have at least read-only
202 # access or not.
201 # access or not.
203 dirpaths = os.listdir(p)
202 dirpaths = os.listdir(p)
204 except OSError:
203 except OSError:
205 log.warning('ignoring repo path without read access: %s', p)
204 log.warning('ignoring repo path without read access: %s', p)
206 return []
205 return []
207
206
208 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
207 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
209 # decode paths and suddenly returns unicode objects itself. The items it
208 # decode paths and suddenly returns unicode objects itself. The items it
210 # cannot decode are returned as strings and cause issues.
209 # cannot decode are returned as strings and cause issues.
211 #
210 #
212 # Those paths are ignored here until a solid solution for path handling has
211 # Those paths are ignored here until a solid solution for path handling has
213 # been built.
212 # been built.
214 expected_type = type(p)
213 expected_type = type(p)
215
214
216 def _has_correct_type(item):
215 def _has_correct_type(item):
217 if type(item) is not expected_type:
216 if type(item) is not expected_type:
218 log.error(
217 log.error(
219 u"Ignoring path %s since it cannot be decoded into unicode.",
218 u"Ignoring path %s since it cannot be decoded into unicode.",
220 # Using "repr" to make sure that we see the byte value in case
219 # Using "repr" to make sure that we see the byte value in case
221 # of support.
220 # of support.
222 repr(item))
221 repr(item))
223 return False
222 return False
224 return True
223 return True
225
224
226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
225 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
227
226
228 return dirpaths
227 return dirpaths
229
228
230
229
231 def _is_dir_writable(path):
230 def _is_dir_writable(path):
232 """
231 """
233 Probe if `path` is writable.
232 Probe if `path` is writable.
234
233
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
234 Due to trouble on Cygwin / Windows, this is actually probing if it is
236 possible to create a file inside of `path`, stat does not produce reliable
235 possible to create a file inside of `path`, stat does not produce reliable
237 results in this case.
236 results in this case.
238 """
237 """
239 try:
238 try:
240 with tempfile.TemporaryFile(dir=path):
239 with tempfile.TemporaryFile(dir=path):
241 pass
240 pass
242 except OSError:
241 except OSError:
243 return False
242 return False
244 return True
243 return True
245
244
246
245
def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
    """
    Return True if *repo_name* under *base_path* is a valid repository,
    False otherwise.

    With *expect_scm*, additionally require that the detected scm matches
    it. With *explicit_scm*, skip detection and validate against that
    backend only.

    :param repo_name:
    :param base_path:
    :param expect_scm:
    :param explicit_scm:
    :param config:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository. '
              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            backend = get_scm_backend(explicit_scm)
            detected_scms = [backend(full_path, config=config).alias]
        else:
            detected_scms = get_scm(full_path)

        if expect_scm:
            return detected_scms[0] == expect_scm
        log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False
280
279
281
280
def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group, False otherwise.

    A path qualifies as a repository group when it is not itself a
    repository (nor a directory nested inside a bare git repo) and,
    unless ``skip_path_check`` is set, exists as a directory on disk.

    :param repo_group_name: group name relative to ``base_path``
    :param base_path: filesystem root of the repository store
    :param skip_path_check: when True, don't require the directory to exist
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        # NOTE: use lazy %-args instead of eager `%` interpolation, so the
        # message is only formatted when debug logging is actually enabled;
        # this matches every other log call in this module
        log.debug('Repo called %s exist, it is not a valid repo group',
                  repo_group_name)
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        scm_ = get_scm(os.path.dirname(full_path))
        log.debug('path: %s is a vcs object:%s, not valid repo group',
                  full_path, scm_)
        return False
    except VCSError:
        # not a repo at the parent level either -- good, keep checking
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False
317
316
318
317
def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    """
    Interactively ask a yes/no question on stdin.

    Re-prompts with ``complaint`` on unrecognized input; after ``retries``
    bad answers an IOError is raised.

    :param prompt: text shown to the user for every attempt
    :param retries: number of bad answers tolerated before giving up
    :param complaint: message printed after each unrecognized answer
    """
    attempts_left = retries
    while True:
        answer = raw_input(prompt).lower()
        if answer in ('y', 'ye', 'yes'):
            return True
        if answer in ('n', 'no', 'nop', 'nope'):
            return False
        attempts_left -= 1
        if attempts_left < 0:
            raise IOError
        print(complaint)
330
329
# propagated from mercurial documentation
# sections of a mercurial config (hgrc) that this module recognizes;
# see `hg help config` for the meaning of each section name
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]
343
342
344
343
def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.

    :param clear_session: remove the db session after reading (default True)
    :param repo: optional repository to scope the vcs settings to; when
        None the global settings are used
    :return: list of ``(section, key, value)`` tuples (values coerced via
        ``safe_str``, except the forced ``push_ssl`` override below)
    """
    from rhodecode.model.settings import VcsSettingsModel

    config = []

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)

    ui_settings = settings_model.get_ui_settings()

    # ui_data is only collected for the debug log line below
    ui_data = []
    for setting in ui_settings:
        if setting.active:
            ui_data.append((setting.section, setting.key, setting.value))
            config.append((
                safe_str(setting.section), safe_str(setting.key),
                safe_str(setting.value)))
            if setting.key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                config.append((
                    safe_str(setting.section), safe_str(setting.key), False))
    log.debug(
        'settings ui from db: %s',
        ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    # drop hook entries whose hook class is not enabled in the settings
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))

    config = [entry for entry in config if entry[:2] not in skip_entries]

    return config
391
390
392
391
def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance based on the values in the database.

    :param clear_session: remove the db session once the data is read
    :param repo: optional repository to scope the settings to
    """
    config = Config()
    db_entries = config_data_from_db(clear_session=clear_session, repo=repo)
    for section, option, value in db_entries:
        config.set(section, option, value)
    return config
402
401
403
402
def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    # map ui hook keys to their short class names
    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
    }

    # only active entries from the 'hooks' section that we know how to name
    return [
        hook_names[key]
        for section, key, value, active in ui_settings
        if section == 'hooks' and active and key in hook_names]
431
430
432
431
def set_rhodecode_config(config):
    """
    Updates pyramid config with new settings from database.

    :param config: mutable mapping (pyramid settings) updated in place
    """
    from rhodecode.model.settings import SettingsModel

    for key, value in SettingsModel().get_all_settings().items():
        config[key] = value
444
443
445
444
def get_rhodecode_realm():
    """
    Return the rhodecode realm from database, coerced to a str.
    """
    from rhodecode.model.settings import SettingsModel

    realm_setting = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm_setting.app_settings_value)
453
452
454
453
def get_rhodecode_base_path():
    """
    Returns the base path. The base path is the filesystem path which points
    to the repository store.
    """
    from rhodecode.model.settings import SettingsModel

    # the repository store root is kept in the ui table as paths -> '/'
    root_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
    return safe_str(root_ui.ui_value)
463
462
464
463
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository (separated by Repository.NAME_SEP)
    :return: the innermost RepoGroup created/found, or None when the repo
        has no parent groups
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        # rebuild the fully-qualified group name for this nesting level
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            # flush (not commit) so the group gets an id usable as a parent
            sa.flush()

        parent = group
    return group
505
504
506
505
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: dict of repositories found by scanning methods,
        mapping repo name -> scm instance
    :param remove_obsolete: check for obsolete entries in database
    :return: tuple of (added, removed) lists of repo/group names
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.repo_group import RepoGroupModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_super_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        # make sure all parent groups exist before creating the repo row
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()

            db_repo.update_commit_cache()

        # (re)install hooks for every scanned repo, not only newly added ones
        config = db_repo._config
        config.set('extensions', 'largefiles', '')
        repo = db_repo.scm_instance(config=config)
        repo.install_hooks()

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

        def splitter(full_repo_name):
            # return the parent group name of a repo, or None for top-level
            _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
            gr_name = None
            if len(_parts) == 2:
                gr_name = _parts[0]
            return gr_name

        initial_repo_group_list = [splitter(x) for x in
                                   initial_repo_list.keys() if splitter(x)]

        # remove from database those repository groups that are not in the
        # filesystem due to parent child relationships we need to delete them
        # in a specific order of most nested first
        all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
        nested_sort = lambda gr: len(gr.split('/'))
        for group_name in sorted(all_groups, key=nested_sort, reverse=True):
            if group_name not in initial_repo_group_list:
                repo_group = RepoGroup.get_by_group_name(group_name)
                # keep groups that still have children, or that DO exist on
                # the filesystem -- only orphaned, childless groups go away
                if (repo_group.children.all() or
                        not RepoGroupModel().check_exist_filesystem(
                            group_name=group_name, exc_on_failure=False)):
                    continue

                log.info(
                    'Removing non-existing repository group found in db `%s`',
                    group_name)
                try:
                    RepoGroupModel(sa).delete(group_name, fs_remove=False)
                    sa.commit()
                    removed.append(group_name)
                except Exception:
                    # don't hold further removals on error
                    log.exception(
                        'Unable to remove repository group `%s`',
                        group_name)
                    sa.rollback()
                    raise

    return added, removed
626
625
627
626
def load_rcextensions(root_path):
    """
    Load an optional `rcextensions` package from ``root_path`` and register
    it as ``rhodecode.EXTENSIONS``, merging any extra language mappings it
    declares into the pygments extension map.

    :param root_path: directory expected to contain
        ``rcextensions/__init__.py``; silently does nothing when absent
    """
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...', rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        # auto check if the module is not missing any data, set to default if is
        # this will help autoupdate new feature of rcext module
        #from rhodecode.config import rcextensions
        #for k in dir(rcextensions):
        #    if not k.startswith('_') and not hasattr(EXT, k):
        #        setattr(EXT, k, getattr(rcextensions, k))
647
646
648
647
def get_custom_lexer(extension):
    """
    returns a custom lexer if it is defined in rcextensions module, or None
    if there's no custom lexer defined

    :param extension: file extension (without the dot) to look up
    """
    import rhodecode
    from pygments import lexers

    # custom override made by RhodeCode
    if extension in ['mako']:
        return lexers.get_lexer_by_name('html+mako')

    # check if we didn't define this extension as other lexer
    extra_lexers = rhodecode.EXTENSIONS and getattr(
        rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
    if extra_lexers and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        custom_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(custom_name)
666
665
667
666
668 #==============================================================================
667 #==============================================================================
669 # TEST FUNCTIONS AND CREATORS
668 # TEST FUNCTIONS AND CREATORS
670 #==============================================================================
669 #==============================================================================
def create_test_index(repo_location, config):
    """
    Makes default test index.

    :param repo_location: unused, kept for call-site compatibility
    :param config: mapping providing 'search.location'
    """
    import rc_testdata

    index_dest = os.path.dirname(config['search.location'])
    rc_testdata.extract_search_index('vcs_search_index', index_dest)
679
678
680
679
def create_test_directory(test_path):
    """
    Create test directory if it doesn't exist.
    """
    if os.path.isdir(test_path):
        return
    log.debug('Creating testdir %s', test_path)
    os.makedirs(test_path)
688
687
689
688
def create_test_database(test_path, config):
    """
    Makes a fresh database.

    Creates tables, stamps the schema version, writes settings derived
    from ``test_path``, and seeds default/test users and permissions.

    :param test_path: root used to generate the test settings paths
    :param config: mapping providing 'sqlalchemy.db1.url' and 'here'
    """
    from rhodecode.lib.db_manage import DbManage

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    # force_ask bypasses interactive confirmation prompts in tests
    dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
                        tests=True, cli_args={'force_ask': True})
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
711
710
712
711
def create_test_repositories(test_path, config):
    """
    Creates test repositories in the temporary directory. Repositories are
    extracted from archives within the rc_testdata package.

    :param test_path: destination directory for the extracted repositories
    :param config: mapping providing 'search.location' and 'cache_dir',
        both of which are wiped before extraction
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    # NOTE(review): extractall on an archive is only safe because the
    # archive ships with our own rc_testdata package (trusted input)
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        tar.extractall(jn(test_path, SVN_REPO))
744
743
745
744
def password_changed(auth_user, session):
    """
    Tell whether the password of ``auth_user`` differs from the password
    hash stored in the web session.

    :param auth_user: authenticated user object
    :param session: web session mapping holding 'rhodecode_user'
    :return: True when the hashes differ, False otherwise
    """
    # Never report password change in case of default user or anonymous user.
    if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
        return False

    current_hash = md5(auth_user.password) if auth_user.password else None
    session_user = session.get('rhodecode_user', {})
    stored_hash = session_user.get('password', '')
    return current_hash != stored_hash
755
754
756
755
def read_opensource_licenses():
    """
    Return the parsed contents of config/licenses.json, lazily loaded and
    memoized in the module-level ``_license_cache``.
    """
    global _license_cache

    if not _license_cache:
        raw_licenses = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(raw_licenses)

    return _license_cache
766
765
767
766
def generate_platform_uuid():
    """
    Generates platform UUID based on its name.

    :return: sha256 hexdigest of the platform description, or the string
        'UNDEFINED' when platform detection fails
    """
    import platform

    try:
        uuid_list = [platform.platform()]
        # encode explicitly: hashlib requires bytes on python3, and
        # .encode('utf-8') is a safe no-op for the ascii str python2 returns
        return hashlib.sha256(':'.join(uuid_list).encode('utf-8')).hexdigest()
    except Exception as e:
        # lazy %-args, consistent with the other log calls in this module
        log.error('Failed to generate host uuid: %s', e)
        return 'UNDEFINED'
General Comments 0
You need to be logged in to leave comments. Login now