fix(vcs-operations): fix handling of locked repositories and branch-permission reporting; these handled cases previously surfaced as error 500
super-admin
r5541:8026ecb9 default
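For orientation: the hook layer in this diff converts the handled VCS errors (locked repository, protected branch) into a regular status/output answer for the client instead of letting them escape as an unhandled exception. A minimal sketch of that pattern, reduced from `Hooks._call_hook` in the second file below (the function name here is illustrative; the real method also gives `HTTPLockedRC` separate treatment for pull operations and removes the DB session in a `finally` block):

from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRC
from rhodecode.lib.utils2 import AttributeDict

def call_hook_safely(hook, extras):
    # Handled cases (locked repo, protected branch) become a normal response;
    # any other exception would still be reported as a failed hook.
    try:
        result = hook(extras)
    except (HTTPBranchProtected, HTTPLockedRC) as error:
        result = AttributeDict({'status': error.code, 'output': error.explanation})
    return {'status': result.status, 'output': result.output}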
@@ -1,98 +1,99 @@
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging

import rhodecode
import collections

from rhodecode.config import utils

from rhodecode.lib.utils import load_rcextensions
from rhodecode.lib.utils2 import str2bool
from rhodecode.lib.vcs import connect_vcs

log = logging.getLogger(__name__)


def propagate_rhodecode_config(global_config, settings, config):
    # Store the settings to make them available to other modules.
    settings_merged = global_config.copy()
    settings_merged.update(settings)
    if config:
        settings_merged.update(config)

    rhodecode.PYRAMID_SETTINGS = settings_merged
    rhodecode.CONFIG = settings_merged

    if 'default_user_id' not in rhodecode.CONFIG:
        rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
    log.debug('set rhodecode.CONFIG data')


def load_pyramid_environment(global_config, settings):
    # Some parts of the code expect a merge of global and app settings.
    settings_merged = global_config.copy()
    settings_merged.update(settings)

    # TODO(marcink): probably not required anymore
    # configure channelstream,
    settings_merged['channelstream_config'] = {
        'enabled': str2bool(settings_merged.get('channelstream.enabled', False)),
        'server': settings_merged.get('channelstream.server'),
        'secret': settings_merged.get('channelstream.secret')
    }

    # If this is a test run we prepare the test environment like
    # creating a test database, test search index and test repositories.
    # This has to be done before the database connection is initialized.
    if rhodecode.is_test:
        rhodecode.disable_error_handler = True
        from rhodecode import authentication
        authentication.plugin_default_auth_ttl = 0

        utils.initialize_test_environment(settings_merged)

    # Initialize the database connection.
    utils.initialize_database(settings_merged)

    load_rcextensions(root_path=settings_merged['here'])

    # Limit backends to `vcs.backends` from configuration, and preserve the order
    for alias in list(rhodecode.BACKENDS.keys()):
        if alias not in settings['vcs.backends']:
            del rhodecode.BACKENDS[alias]

    _sorted_backend = sorted(rhodecode.BACKENDS.items(),
                             key=lambda item: settings['vcs.backends'].index(item[0]))
    rhodecode.BACKENDS = collections.OrderedDict(_sorted_backend)

    log.info('Enabled VCS backends: %s', list(rhodecode.BACKENDS.keys()))

    # initialize vcs client and optionally run the server if enabled
    vcs_server_uri = settings['vcs.server']
    vcs_server_enabled = settings['vcs.server.enable']

    utils.configure_vcs(settings)

    # first run, to store data...
    propagate_rhodecode_config(global_config, settings, {})

    if vcs_server_enabled:
        connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
    else:
        log.warning('vcs-server not enabled, vcs connection unavailable')
@@ -1,104 +1,104 @@
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import traceback

from rhodecode.model import meta

from rhodecode.lib import hooks_base
from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
from rhodecode.lib.utils2 import AttributeDict

log = logging.getLogger(__name__)


class Hooks(object):
    """
    Exposes the hooks for remote callbacks
    """
    def __init__(self, request=None, log_prefix=''):
        self.log_prefix = log_prefix
        self.request = request

    def repo_size(self, extras):
        log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
        return self._call_hook(hooks_base.repo_size, extras)

    def pre_pull(self, extras):
        log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
        return self._call_hook(hooks_base.pre_pull, extras)

    def post_pull(self, extras):
        log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
        return self._call_hook(hooks_base.post_pull, extras)

    def pre_push(self, extras):
        log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
        return self._call_hook(hooks_base.pre_push, extras)

    def post_push(self, extras):
        log.debug("%sCalled post_push of %s object", self.log_prefix, self)
        return self._call_hook(hooks_base.post_push, extras)

    def _call_hook(self, hook, extras):
        extras = AttributeDict(extras)
        _server_url = extras['server_url']

        extras.request = self.request

        try:
            result = hook(extras)
            if result is None:
                raise Exception(f'Failed to obtain hook result from func: {hook}')
-       except HTTPBranchProtected as handled_error:
+       except HTTPBranchProtected as error:
            # Those special cases don't need error reporting. It's a case of
            # locked repo or protected branch
            result = AttributeDict({
-               'status': handled_error.code,
+               'status': error.code,
-               'output': handled_error.explanation
+               'output': error.explanation
            })
        except (HTTPLockedRC, Exception) as error:
            # locked needs different handling since we need to also
            # handle PULL operations
            exc_tb = ''
            if not isinstance(error, HTTPLockedRC):
                exc_tb = traceback.format_exc()
                log.exception('%sException when handling hook %s', self.log_prefix, hook)
            error_args = error.args
            return {
                'status': 128,
                'output': '',
                'exception': type(error).__name__,
                'exception_traceback': exc_tb,
                'exception_args': error_args,
            }
        finally:
            meta.Session.remove()

        log.debug('%sGot hook call response %s', self.log_prefix, result)
        return {
            'status': result.status,
            'output': result.output,
        }

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
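For context, `Hooks` is meant to be driven by remote callbacks (see the hook-daemon import in the middleware below) and supports use as a context manager. A hypothetical call site; the real `extras` payload is the full vcs_operation_context dict carrying repository, username, locked_by, commit_ids and more:

# Illustrative only: an incomplete extras dict would simply be reported
# through the generic exception branch of _call_hook.
with Hooks(request=None, log_prefix='hook-daemon:') as hooks:
    response = hooks.pre_push(extras)
    # response is a plain dict: {'status': <int>, 'output': <str>}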
@@ -1,548 +1,550 @@
# Copyright (C) 2013-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


"""
Set of hooks run by RhodeCode Enterprise
"""

import os
import logging

import rhodecode
from rhodecode import events
from rhodecode.lib import helpers as h
from rhodecode.lib import audit_logger
from rhodecode.lib.utils2 import safe_str, user_agent_normalizer
from rhodecode.lib.exceptions import (
    HTTPLockedRC, HTTPBranchProtected, UserCreationError, ClientNotSupportedError)
from rhodecode.model.db import Repository, User
from rhodecode.lib.statsd_client import StatsdClient

log = logging.getLogger(__name__)


class HookResponse(object):
    def __init__(self, status, output):
        self.status = status
        self.output = output

    def __add__(self, other):
        other_status = getattr(other, 'status', 0)
        new_status = max(self.status, other_status)
        other_output = getattr(other, 'output', '')
        new_output = self.output + other_output

        return HookResponse(new_status, new_output)

    def __bool__(self):
        return self.status == 0

    def to_json(self):
        return {'status': self.status, 'output': self.output}


def is_shadow_repo(extras):
    """
    Returns ``True`` if this is an action executed against a shadow repository.
    """
    return extras['is_shadow_repo']


def check_vcs_client(extras):
    """
    Checks if vcs client is allowed (Only works in enterprise edition)
    """
    try:
        from rc_ee.lib.security.utils import is_vcs_client_whitelisted
    except ModuleNotFoundError:
        is_vcs_client_whitelisted = lambda *x: True
    backend = extras.get('scm')
    if not is_vcs_client_whitelisted(extras.get('user_agent'), backend):
        raise ClientNotSupportedError(f"Your {backend} client is forbidden")

def _get_scm_size(alias, root_path):

    if not alias.startswith('.'):
        alias += '.'

    size_scm, size_root = 0, 0
    for path, unused_dirs, files in os.walk(safe_str(root_path)):
        if path.find(alias) != -1:
            for f in files:
                try:
                    size_scm += os.path.getsize(os.path.join(path, f))
                except OSError:
                    pass
        else:
            for f in files:
                try:
                    size_root += os.path.getsize(os.path.join(path, f))
                except OSError:
                    pass

    size_scm_f = h.format_byte_size_binary(size_scm)
    size_root_f = h.format_byte_size_binary(size_root)
    size_total_f = h.format_byte_size_binary(size_root + size_scm)

    return size_scm_f, size_root_f, size_total_f


# actual hooks called by Mercurial internally, and GIT by our Python Hooks
def repo_size(extras):
    """Present size of repository after push."""
    repo = Repository.get_by_repo_name(extras.repository)
    vcs_part = f'.{repo.repo_type}'
    size_vcs, size_root, size_total = _get_scm_size(vcs_part, repo.repo_full_path)
    msg = (f'RhodeCode: `{repo.repo_name}` size summary {vcs_part}:{size_vcs} repo:{size_root} total:{size_total}\n')
    return HookResponse(0, msg)


def pre_push(extras):
    """
    Hook executed before pushing code.

    It bans pushing when the repository is locked.
    """

    check_vcs_client(extras)
    user = User.get_by_username(extras.username)
    output = ''
    if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
        locked_by = User.get(extras.locked_by[0]).username
        reason = extras.locked_by[2]
        # this exception is interpreted in git/hg middlewares and based
        # on that proper return code is server to client
        _http_ret = HTTPLockedRC(
            _locked_by_explanation(extras.repository, locked_by, reason))
        if str(_http_ret.code).startswith('2'):
            # 2xx Codes don't raise exceptions
            output = _http_ret.title
        else:
            raise _http_ret

    hook_response = ''
    if not is_shadow_repo(extras):

        if extras.commit_ids and extras.check_branch_perms:
            auth_user = user.AuthUser()
            repo = Repository.get_by_repo_name(extras.repository)
+           if not repo:
+               raise ValueError(f'Repo for {extras.repository} not found')
            affected_branches = []
            if repo.repo_type == 'hg':
                for entry in extras.commit_ids:
                    if entry['type'] == 'branch':
                        is_forced = bool(entry['multiple_heads'])
                        affected_branches.append([entry['name'], is_forced])
            elif repo.repo_type == 'git':
                for entry in extras.commit_ids:
                    if entry['type'] == 'heads':
                        is_forced = bool(entry['pruned_sha'])
                        affected_branches.append([entry['name'], is_forced])

            for branch_name, is_forced in affected_branches:

                rule, branch_perm = auth_user.get_rule_and_branch_permission(
                    extras.repository, branch_name)
                if not branch_perm:
                    # no branch permission found for this branch, just keep checking
                    continue

                if branch_perm == 'branch.push_force':
                    continue
                elif branch_perm == 'branch.push' and is_forced is False:
                    continue
                elif branch_perm == 'branch.push' and is_forced is True:
                    halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}. ' \
                                   f'FORCE PUSH FORBIDDEN.'
                else:
                    halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}.'

                if halt_message:
                    _http_ret = HTTPBranchProtected(halt_message)
                    raise _http_ret

        # Propagate to external components. This is done after checking the
        # lock, for consistent behavior.
        hook_response = pre_push_extension(
            repo_store_path=Repository.base_path(), **extras)
        events.trigger(events.RepoPrePushEvent(
            repo_name=extras.repository, extras=extras))

    return HookResponse(0, output) + hook_response


def pre_pull(extras):
    """
    Hook executed before pulling the code.

    It bans pulling when the repository is locked.
    """

    check_vcs_client(extras)
    output = ''
    if extras.locked_by[0]:
        locked_by = User.get(extras.locked_by[0]).username
        reason = extras.locked_by[2]
        # this exception is interpreted in git/hg middlewares and based
        # on that proper return code is server to client
        _http_ret = HTTPLockedRC(
            _locked_by_explanation(extras.repository, locked_by, reason))
        if str(_http_ret.code).startswith('2'):
            # 2xx Codes don't raise exceptions
            output = _http_ret.title
        else:
            raise _http_ret

    # Propagate to external components. This is done after checking the
    # lock, for consistent behavior.
    hook_response = ''
    if not is_shadow_repo(extras):
        extras.hook_type = extras.hook_type or 'pre_pull'
        hook_response = pre_pull_extension(
            repo_store_path=Repository.base_path(), **extras)
        events.trigger(events.RepoPrePullEvent(
            repo_name=extras.repository, extras=extras))

    return HookResponse(0, output) + hook_response


def post_pull(extras):
    """Hook executed after client pulls the code."""

    audit_user = audit_logger.UserWrap(
        username=extras.username,
        ip_addr=extras.ip)
    repo = audit_logger.RepoWrap(repo_name=extras.repository)
    audit_logger.store(
        'user.pull', action_data={'user_agent': extras.user_agent},
        user=audit_user, repo=repo, commit=True)

    statsd = StatsdClient.statsd
    if statsd:
        statsd.incr('rhodecode_pull_total', tags=[
            f'user-agent:{user_agent_normalizer(extras.user_agent)}',
        ])
    output = ''
    # make lock is a tri state False, True, None. We only make lock on True
    if extras.make_lock is True and not is_shadow_repo(extras):
        user = User.get_by_username(extras.username)
        Repository.lock(Repository.get_by_repo_name(extras.repository),
                        user.user_id,
                        lock_reason=Repository.LOCK_PULL)
        msg = 'Made lock on repo `{}`'.format(extras.repository)
        output += msg

    if extras.locked_by[0]:
        locked_by = User.get(extras.locked_by[0]).username
        reason = extras.locked_by[2]
        _http_ret = HTTPLockedRC(
            _locked_by_explanation(extras.repository, locked_by, reason))
        if str(_http_ret.code).startswith('2'):
            # 2xx Codes don't raise exceptions
            output += _http_ret.title

    # Propagate to external components.
    hook_response = ''
    if not is_shadow_repo(extras):
        extras.hook_type = extras.hook_type or 'post_pull'
        hook_response = post_pull_extension(
            repo_store_path=Repository.base_path(), **extras)
        events.trigger(events.RepoPullEvent(
            repo_name=extras.repository, extras=extras))

    return HookResponse(0, output) + hook_response


def post_push(extras):
    """Hook executed after user pushes to the repository."""
    commit_ids = extras.commit_ids

    # log the push call
    audit_user = audit_logger.UserWrap(
        username=extras.username, ip_addr=extras.ip)
    repo = audit_logger.RepoWrap(repo_name=extras.repository)
    audit_logger.store(
        'user.push', action_data={
            'user_agent': extras.user_agent,
            'commit_ids': commit_ids[:400]},
        user=audit_user, repo=repo, commit=True)

    statsd = StatsdClient.statsd
    if statsd:
        statsd.incr('rhodecode_push_total', tags=[
            f'user-agent:{user_agent_normalizer(extras.user_agent)}',
        ])

    # Propagate to external components.
    output = ''
    # make lock is a tri state False, True, None. We only release lock on False
    if extras.make_lock is False and not is_shadow_repo(extras):
        Repository.unlock(Repository.get_by_repo_name(extras.repository))
        msg = f'Released lock on repo `{extras.repository}`\n'
        output += msg

    if extras.locked_by[0]:
        locked_by = User.get(extras.locked_by[0]).username
        reason = extras.locked_by[2]
        _http_ret = HTTPLockedRC(
            _locked_by_explanation(extras.repository, locked_by, reason))
        # TODO: johbo: if not?
        if str(_http_ret.code).startswith('2'):
            # 2xx Codes don't raise exceptions
            output += _http_ret.title

    if extras.new_refs:
        tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
            safe_str(extras.server_url), safe_str(extras.repository))

        for branch_name in extras.new_refs['branches']:
            pr_link = tmpl.format(ref_type='branch', ref_name=safe_str(branch_name))
            output += f'RhodeCode: open pull request link: {pr_link}\n'

        for book_name in extras.new_refs['bookmarks']:
            pr_link = tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name))
            output += f'RhodeCode: open pull request link: {pr_link}\n'

    hook_response = ''
    if not is_shadow_repo(extras):
        hook_response = post_push_extension(
            repo_store_path=Repository.base_path(),
            **extras)
        events.trigger(events.RepoPushEvent(
            repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))

    output += 'RhodeCode: push completed\n'
    return HookResponse(0, output) + hook_response


def _locked_by_explanation(repo_name, user_name, reason):
    message = f'Repository `{repo_name}` locked by user `{user_name}`. Reason:`{reason}`'
    return message


def check_allowed_create_user(user_dict, created_by, **kwargs):
    # pre create hooks
    if pre_create_user.is_active():
        hook_result = pre_create_user(created_by=created_by, **user_dict)
        allowed = hook_result.status == 0
        if not allowed:
            reason = hook_result.output
            raise UserCreationError(reason)


class ExtensionCallback(object):
    """
    Forwards a given call to rcextensions, sanitizes keyword arguments.

    Does check if there is an extension active for that hook. If it is
    there, it will forward all `kwargs_keys` keyword arguments to the
    extension callback.
    """

    def __init__(self, hook_name, kwargs_keys):
        self._hook_name = hook_name
        self._kwargs_keys = set(kwargs_keys)

    def __call__(self, *args, **kwargs):
        log.debug('Calling extension callback for `%s`', self._hook_name)
        callback = self._get_callback()
        if not callback:
            log.debug('extension callback `%s` not found, skipping...', self._hook_name)
            return

        kwargs_to_pass = {}
        for key in self._kwargs_keys:
            try:
                kwargs_to_pass[key] = kwargs[key]
            except KeyError:
                log.error('Failed to fetch %s key from given kwargs. '
                          'Expected keys: %s', key, self._kwargs_keys)
                raise

        # backward compat for removed api_key for old hooks. This was it works
        # with older rcextensions that require api_key present
        if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
            kwargs_to_pass['api_key'] = '_DEPRECATED_'
        return callback(**kwargs_to_pass)

    def is_active(self):
        return hasattr(rhodecode.EXTENSIONS, self._hook_name)

    def _get_callback(self):
        return getattr(rhodecode.EXTENSIONS, self._hook_name, None)


pre_pull_extension = ExtensionCallback(
    hook_name='PRE_PULL_HOOK',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'hook_type', 'user_agent', 'repo_store_path',))


post_pull_extension = ExtensionCallback(
    hook_name='PULL_HOOK',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'hook_type', 'user_agent', 'repo_store_path',))


pre_push_extension = ExtensionCallback(
    hook_name='PRE_PUSH_HOOK',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))


post_push_extension = ExtensionCallback(
    hook_name='PUSH_HOOK',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))


pre_create_user = ExtensionCallback(
    hook_name='PRE_CREATE_USER_HOOK',
    kwargs_keys=(
        'username', 'password', 'email', 'firstname', 'lastname', 'active',
        'admin', 'created_by'))


create_pull_request = ExtensionCallback(
    hook_name='CREATE_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


merge_pull_request = ExtensionCallback(
    hook_name='MERGE_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


close_pull_request = ExtensionCallback(
    hook_name='CLOSE_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


review_pull_request = ExtensionCallback(
    hook_name='REVIEW_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


comment_pull_request = ExtensionCallback(
    hook_name='COMMENT_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


comment_edit_pull_request = ExtensionCallback(
    hook_name='COMMENT_EDIT_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


update_pull_request = ExtensionCallback(
    hook_name='UPDATE_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


create_user = ExtensionCallback(
    hook_name='CREATE_USER_HOOK',
    kwargs_keys=(
        'username', 'full_name_or_username', 'full_contact', 'user_id',
        'name', 'firstname', 'short_contact', 'admin', 'lastname',
        'ip_addresses', 'extern_type', 'extern_name',
        'email', 'api_keys', 'last_login',
        'full_name', 'active', 'password', 'emails',
        'inherit_default_permissions', 'created_by', 'created_on'))


delete_user = ExtensionCallback(
    hook_name='DELETE_USER_HOOK',
    kwargs_keys=(
        'username', 'full_name_or_username', 'full_contact', 'user_id',
        'name', 'firstname', 'short_contact', 'admin', 'lastname',
        'ip_addresses',
        'email', 'last_login',
        'full_name', 'active', 'password', 'emails',
        'inherit_default_permissions', 'deleted_by'))


create_repository = ExtensionCallback(
    hook_name='CREATE_REPO_HOOK',
    kwargs_keys=(
        'repo_name', 'repo_type', 'description', 'private', 'created_on',
        'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
        'clone_uri', 'fork_id', 'group_id', 'created_by'))


delete_repository = ExtensionCallback(
    hook_name='DELETE_REPO_HOOK',
    kwargs_keys=(
        'repo_name', 'repo_type', 'description', 'private', 'created_on',
        'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
        'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))


comment_commit_repository = ExtensionCallback(
    hook_name='COMMENT_COMMIT_REPO_HOOK',
    kwargs_keys=(
        'repo_name', 'repo_type', 'description', 'private', 'created_on',
        'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
        'clone_uri', 'fork_id', 'group_id',
        'repository', 'created_by', 'comment', 'commit'))

comment_edit_commit_repository = ExtensionCallback(
    hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK',
    kwargs_keys=(
        'repo_name', 'repo_type', 'description', 'private', 'created_on',
        'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
        'clone_uri', 'fork_id', 'group_id',
        'repository', 'created_by', 'comment', 'commit'))


create_repository_group = ExtensionCallback(
    hook_name='CREATE_REPO_GROUP_HOOK',
    kwargs_keys=(
        'group_name', 'group_parent_id', 'group_description',
        'group_id', 'user_id', 'created_by', 'created_on',
        'enable_locking'))
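The `ExtensionCallback` instances above forward only the listed `kwargs_keys` to whatever callables an installation exposes on `rhodecode.EXTENSIONS` (see `is_active`/`_get_callback`). A sketch of a matching extension-side callback for `PRE_PUSH_HOOK`; the function name, carrier class and policy are illustrative assumptions, not taken from this commit, and anything returned with `status`/`output` attributes is folded into the final result by `HookResponse.__add__`:

class HookResult:
    # minimal carrier object: HookResponse.__add__ above only reads the
    # optional `status` and `output` attributes of whatever is returned
    def __init__(self, status, output):
        self.status = status
        self.output = output


def _pre_push_hook(server_url, config, scm, username, ip, action, repository,
                   repo_store_path, commit_ids, hook_type, user_agent):
    # hypothetical policy: refuse pushes that arrive without a user agent
    if not user_agent:
        return HookResult(1, 'push rejected: missing user agent\n')
    return HookResult(0, '')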
@@ -1,662 +1,662 @@


# Copyright (C) 2014-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
SimpleVCS middleware for handling protocol request (push/clone etc.)
It's implemented with basic auth function
"""

import os
import re
import logging
import importlib
from functools import wraps

import time
from paste.httpheaders import REMOTE_USER, AUTH_TYPE

from pyramid.httpexceptions import (
    HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
from rhodecode.lib import rc_cache
from rhodecode.lib.svn_txn_utils import store_txn_id_data
from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
from rhodecode.lib.base import (
    BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
from rhodecode.lib.middleware import appenlight
from rhodecode.lib.middleware.utils import scm_app_http
from rhodecode.lib.str_utils import safe_bytes, safe_int
from rhodecode.lib.utils import is_valid_repo, SLUG_RE
from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
from rhodecode.lib.vcs.conf import settings as vcs_settings
from rhodecode.lib.vcs.backends import base

from rhodecode.model import meta
from rhodecode.model.db import User, Repository, PullRequest
from rhodecode.model.scm import ScmModel
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.settings import SettingsModel, VcsSettingsModel

log = logging.getLogger(__name__)


def initialize_generator(factory):
    """
    Initializes the returned generator by draining its first element.

    This can be used to give a generator an initializer, which is the code
    up to the first yield statement. This decorator enforces that the first
    produced element has the value ``"__init__"`` to make its special
    purpose very explicit in the using code.
    """

    @wraps(factory)
    def wrapper(*args, **kwargs):
        gen = factory(*args, **kwargs)
        try:
            init = next(gen)
        except StopIteration:
            raise ValueError('Generator must yield at least one element.')
        if init != "__init__":
83 raise ValueError('First yielded element must be "__init__".')
83 raise ValueError('First yielded element must be "__init__".')
84 return gen
84 return gen
85 return wrapper
85 return wrapper
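
A minimal usage sketch (illustrative names only), assuming the initialize_generator decorator defined above: the body up to the first yield acts as an eager initializer, and the "__init__" sentinel is consumed by the decorator before the generator reaches the caller.

    @initialize_generator
    def stream_chunks(chunks):
        # everything up to the first yield runs eagerly when the function is called
        prepared = list(chunks)
        yield "__init__"        # sentinel drained by the decorator
        for chunk in prepared:  # only these values reach the caller
            yield chunk

    assert list(stream_chunks(["a", "b"])) == ["a", "b"]
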
86
86
87
87
88 class SimpleVCS(object):
88 class SimpleVCS(object):
89 """Common functionality for SCM HTTP handlers."""
89 """Common functionality for SCM HTTP handlers."""
90
90
91 SCM = 'unknown'
91 SCM = 'unknown'
92
92
93 acl_repo_name = None
93 acl_repo_name = None
94 url_repo_name = None
94 url_repo_name = None
95 vcs_repo_name = None
95 vcs_repo_name = None
96 rc_extras = {}
96 rc_extras = {}
97
97
98 # We have to handle requests to shadow repositories differently from requests
98 # We have to handle requests to shadow repositories differently from requests
99 # to normal repositories. Therefore we have to distinguish them. To do this
99 # to normal repositories. Therefore we have to distinguish them. To do this
100 # we use this regex which will match only on URLs pointing to shadow
100 # we use this regex which will match only on URLs pointing to shadow
101 # repositories.
101 # repositories.
102 shadow_repo_re = re.compile(
102 shadow_repo_re = re.compile(
103 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
103 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
104 '(?P<target>{slug_pat})/' # target repo
104 '(?P<target>{slug_pat})/' # target repo
105 'pull-request/(?P<pr_id>\\d+)/' # pull request
105 'pull-request/(?P<pr_id>\\d+)/' # pull request
106 'repository$' # shadow repo
106 'repository$' # shadow repo
107 .format(slug_pat=SLUG_RE.pattern))
107 .format(slug_pat=SLUG_RE.pattern))
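
For illustration, a sketch of what the pattern above is meant to match, with a simplified '[^/]+' slug expression standing in for SLUG_RE and an invented pull-request URL:

    import re

    slug_pat = r'[^/]+'  # simplified stand-in for SLUG_RE.pattern
    shadow_repo_re = re.compile(
        '(?P<groups>(?:{slug_pat}/)*)'
        '(?P<target>{slug_pat})/'
        'pull-request/(?P<pr_id>\\d+)/'
        'repository$'.format(slug_pat=slug_pat))

    match = shadow_repo_re.match('RepoGroup/MyRepo/pull-request/3/repository')
    assert match.groupdict() == {'groups': 'RepoGroup/', 'target': 'MyRepo', 'pr_id': '3'}
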
108
108
109 def __init__(self, config, registry):
109 def __init__(self, config, registry):
110 self.registry = registry
110 self.registry = registry
111 self.config = config
111 self.config = config
112 # re-populated by specialized middleware
112 # re-populated by specialized middleware
113 self.repo_vcs_config = base.Config()
113 self.repo_vcs_config = base.Config()
114
114
115 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
115 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
116 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
116 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
117
117
118 # authenticate this VCS request using authfunc
118 # authenticate this VCS request using authfunc
119 auth_ret_code_detection = \
119 auth_ret_code_detection = \
120 str2bool(self.config.get('auth_ret_code_detection', False))
120 str2bool(self.config.get('auth_ret_code_detection', False))
121 self.authenticate = BasicAuth(
121 self.authenticate = BasicAuth(
122 '', authenticate, registry, config.get('auth_ret_code'),
122 '', authenticate, registry, config.get('auth_ret_code'),
123 auth_ret_code_detection, rc_realm=realm)
123 auth_ret_code_detection, rc_realm=realm)
124 self.ip_addr = '0.0.0.0'
124 self.ip_addr = '0.0.0.0'
125
125
126 @LazyProperty
126 @LazyProperty
127 def global_vcs_config(self):
127 def global_vcs_config(self):
128 try:
128 try:
129 return VcsSettingsModel().get_ui_settings_as_config_obj()
129 return VcsSettingsModel().get_ui_settings_as_config_obj()
130 except Exception:
130 except Exception:
131 return base.Config()
131 return base.Config()
132
132
133 @property
133 @property
134 def base_path(self):
134 def base_path(self):
135 settings_path = self.config.get('repo_store.path')
135 settings_path = self.config.get('repo_store.path')
136
136
137 if not settings_path:
137 if not settings_path:
138 raise ValueError('FATAL: repo_store.path is empty')
138 raise ValueError('FATAL: repo_store.path is empty')
139 return settings_path
139 return settings_path
140
140
141 def set_repo_names(self, environ):
141 def set_repo_names(self, environ):
142 """
142 """
143 This will populate the attributes acl_repo_name, url_repo_name,
143 This will populate the attributes acl_repo_name, url_repo_name,
144 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
144 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
145 shadow) repositories all names are equal. In case of requests to a
145 shadow) repositories all names are equal. In case of requests to a
146 shadow repository the acl-name points to the target repo of the pull
146 shadow repository the acl-name points to the target repo of the pull
147 request and the vcs-name points to the shadow repo file system path.
147 request and the vcs-name points to the shadow repo file system path.
148 The url-name is always the URL used by the vcs client program.
148 The url-name is always the URL used by the vcs client program.
149
149
150 Example in case of a shadow repo:
150 Example in case of a shadow repo:
151 acl_repo_name = RepoGroup/MyRepo
151 acl_repo_name = RepoGroup/MyRepo
152 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
152 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
153 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
153 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
154 """
154 """
155 # First we set the repo name from URL for all attributes. This is the
155 # First we set the repo name from URL for all attributes. This is the
156 # default if handling normal (non shadow) repo requests.
156 # default if handling normal (non shadow) repo requests.
157 self.url_repo_name = self._get_repository_name(environ)
157 self.url_repo_name = self._get_repository_name(environ)
158 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
158 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
159 self.is_shadow_repo = False
159 self.is_shadow_repo = False
160
160
161 # Check if this is a request to a shadow repository.
161 # Check if this is a request to a shadow repository.
162 match = self.shadow_repo_re.match(self.url_repo_name)
162 match = self.shadow_repo_re.match(self.url_repo_name)
163 if match:
163 if match:
164 match_dict = match.groupdict()
164 match_dict = match.groupdict()
165
165
166 # Build acl repo name from regex match.
166 # Build acl repo name from regex match.
167 acl_repo_name = safe_str('{groups}{target}'.format(
167 acl_repo_name = safe_str('{groups}{target}'.format(
168 groups=match_dict['groups'] or '',
168 groups=match_dict['groups'] or '',
169 target=match_dict['target']))
169 target=match_dict['target']))
170
170
171 # Retrieve pull request instance by ID from regex match.
171 # Retrieve pull request instance by ID from regex match.
172 pull_request = PullRequest.get(match_dict['pr_id'])
172 pull_request = PullRequest.get(match_dict['pr_id'])
173
173
174 # Only proceed if we got a pull request and if acl repo name from
174 # Only proceed if we got a pull request and if acl repo name from
175 # URL equals the target repo name of the pull request.
175 # URL equals the target repo name of the pull request.
176 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
176 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
177
177
178 # Get file system path to shadow repository.
178 # Get file system path to shadow repository.
179 workspace_id = PullRequestModel()._workspace_id(pull_request)
179 workspace_id = PullRequestModel()._workspace_id(pull_request)
180 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
180 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
181
181
182 # Store names for later usage.
182 # Store names for later usage.
183 self.vcs_repo_name = vcs_repo_name
183 self.vcs_repo_name = vcs_repo_name
184 self.acl_repo_name = acl_repo_name
184 self.acl_repo_name = acl_repo_name
185 self.is_shadow_repo = True
185 self.is_shadow_repo = True
186
186
187 log.debug('Setting all VCS repository names: %s', {
187 log.debug('Setting all VCS repository names: %s', {
188 'acl_repo_name': self.acl_repo_name,
188 'acl_repo_name': self.acl_repo_name,
189 'url_repo_name': self.url_repo_name,
189 'url_repo_name': self.url_repo_name,
190 'vcs_repo_name': self.vcs_repo_name,
190 'vcs_repo_name': self.vcs_repo_name,
191 })
191 })
192
192
193 @property
193 @property
194 def scm_app(self):
194 def scm_app(self):
195 custom_implementation = self.config['vcs.scm_app_implementation']
195 custom_implementation = self.config['vcs.scm_app_implementation']
196 if custom_implementation == 'http':
196 if custom_implementation == 'http':
197 log.debug('Using HTTP implementation of scm app.')
197 log.debug('Using HTTP implementation of scm app.')
198 scm_app_impl = scm_app_http
198 scm_app_impl = scm_app_http
199 else:
199 else:
200 log.debug('Using custom implementation of scm_app: "{}"'.format(
200 log.debug('Using custom implementation of scm_app: "{}"'.format(
201 custom_implementation))
201 custom_implementation))
202 scm_app_impl = importlib.import_module(custom_implementation)
202 scm_app_impl = importlib.import_module(custom_implementation)
203 return scm_app_impl
203 return scm_app_impl
204
204
205 def _get_by_id(self, repo_name):
205 def _get_by_id(self, repo_name):
206 """
206 """
207 Gets the special pattern _<ID> from the clone URL and tries to replace it
207 Gets the special pattern _<ID> from the clone URL and tries to replace it
208 with a repository name, to support non-changeable _<ID> URLs
208 with a repository name, to support non-changeable _<ID> URLs
209 """
209 """
210
210
211 data = repo_name.split('/')
211 data = repo_name.split('/')
212 if len(data) >= 2:
212 if len(data) >= 2:
213 from rhodecode.model.repo import RepoModel
213 from rhodecode.model.repo import RepoModel
214 by_id_match = RepoModel().get_repo_by_id(repo_name)
214 by_id_match = RepoModel().get_repo_by_id(repo_name)
215 if by_id_match:
215 if by_id_match:
216 data[1] = by_id_match.repo_name
216 data[1] = by_id_match.repo_name
217
217
218 # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO
218 # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO
219 # and we use this data
219 # and we use this data
220 maybe_new_path = '/'.join(data)
220 maybe_new_path = '/'.join(data)
221 return safe_bytes(maybe_new_path).decode('latin1')
221 return safe_bytes(maybe_new_path).decode('latin1')
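
A hedged sketch of the _<ID> translation above, with the RepoModel database lookup replaced by a plain dict and invented names; only the split/replace/join mechanics are mirrored:

    def translate_by_id(repo_name, id_to_name):
        # stand-in for RepoModel().get_repo_by_id(); keys and names are made up
        data = repo_name.split('/')
        if len(data) >= 2 and data[1] in id_to_name:
            data[1] = id_to_name[data[1]]
        return '/'.join(data)

    assert translate_by_id('/_42/info/refs', {'_42': 'group/my-repo'}) == '/group/my-repo/info/refs'
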
222
222
223 def _invalidate_cache(self, repo_name):
223 def _invalidate_cache(self, repo_name):
224 """
224 """
225 Sets cache for this repository for invalidation on next access
225 Sets cache for this repository for invalidation on next access
226
226
227 :param repo_name: full repo name, also a cache key
227 :param repo_name: full repo name, also a cache key
228 """
228 """
229 ScmModel().mark_for_invalidation(repo_name)
229 ScmModel().mark_for_invalidation(repo_name)
230
230
231 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
231 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
232 db_repo = Repository.get_by_repo_name(repo_name)
232 db_repo = Repository.get_by_repo_name(repo_name)
233 if not db_repo:
233 if not db_repo:
234 log.debug('Repository `%s` not found inside the database.',
234 log.debug('Repository `%s` not found inside the database.',
235 repo_name)
235 repo_name)
236 return False
236 return False
237
237
238 if db_repo.repo_type != scm_type:
238 if db_repo.repo_type != scm_type:
239 log.warning(
239 log.warning(
240 'Repository `%s` have incorrect scm_type, expected %s got %s',
240 'Repository `%s` have incorrect scm_type, expected %s got %s',
241 repo_name, db_repo.repo_type, scm_type)
241 repo_name, db_repo.repo_type, scm_type)
242 return False
242 return False
243
243
244 config = db_repo._config
244 config = db_repo._config
245 config.set('extensions', 'largefiles', '')
245 config.set('extensions', 'largefiles', '')
246 return is_valid_repo(
246 return is_valid_repo(
247 repo_name, base_path,
247 repo_name, base_path,
248 explicit_scm=scm_type, expect_scm=scm_type, config=config)
248 explicit_scm=scm_type, expect_scm=scm_type, config=config)
249
249
250 def valid_and_active_user(self, user):
250 def valid_and_active_user(self, user):
251 """
251 """
252 Checks that the user is not empty, and if it is actually an object, checks
252 Checks that the user is not empty, and if it is actually an object, checks
253 whether the user is active.
253 whether the user is active.
254
254
255 :param user: user object or None
255 :param user: user object or None
256 :return: boolean
256 :return: boolean
257 """
257 """
258 if user is None:
258 if user is None:
259 return False
259 return False
260
260
261 elif user.active:
261 elif user.active:
262 return True
262 return True
263
263
264 return False
264 return False
265
265
266 @property
266 @property
267 def is_shadow_repo_dir(self):
267 def is_shadow_repo_dir(self):
268 return os.path.isdir(self.vcs_repo_name)
268 return os.path.isdir(self.vcs_repo_name)
269
269
270 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
270 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
271 plugin_id='', plugin_cache_active=False, cache_ttl=0):
271 plugin_id='', plugin_cache_active=False, cache_ttl=0):
272 """
272 """
273 Checks permissions using the action (push/pull), user and repository
273 Checks permissions using the action (push/pull), user and repository
274 name. If plugin_cache and TTL are set, it will use the plugin which
274 name. If plugin_cache and TTL are set, it will use the plugin which
275 authenticated the user to store the cached permissions result for N
275 authenticated the user to store the cached permissions result for N
276 seconds, as given by cache_ttl
276 seconds, as given by cache_ttl
277
277
278 :param action: push or pull action
278 :param action: push or pull action
279 :param user: user instance
279 :param user: user instance
280 :param repo_name: repository name
280 :param repo_name: repository name
281 """
281 """
282
282
283 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
283 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
284 plugin_id, plugin_cache_active, cache_ttl)
284 plugin_id, plugin_cache_active, cache_ttl)
285
285
286 user_id = user.user_id
286 user_id = user.user_id
287 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
287 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
288 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
288 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
289
289
290 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
290 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
291 expiration_time=cache_ttl,
291 expiration_time=cache_ttl,
292 condition=plugin_cache_active)
292 condition=plugin_cache_active)
293 def compute_perm_vcs(
293 def compute_perm_vcs(
294 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
294 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
295
295
296 log.debug('auth: calculating permission access now...')
296 log.debug('auth: calculating permission access now for vcs operation: %s', action)
297 # check IP
297 # check IP
298 inherit = user.inherit_default_permissions
298 inherit = user.inherit_default_permissions
299 ip_allowed = AuthUser.check_ip_allowed(
299 ip_allowed = AuthUser.check_ip_allowed(
300 user_id, ip_addr, inherit_from_default=inherit)
300 user_id, ip_addr, inherit_from_default=inherit)
301 if ip_allowed:
301 if ip_allowed:
302 log.info('Access for IP:%s allowed', ip_addr)
302 log.info('Access for IP:%s allowed', ip_addr)
303 else:
303 else:
304 return False
304 return False
305
305
306 if action == 'push':
306 if action == 'push':
307 perms = ('repository.write', 'repository.admin')
307 perms = ('repository.write', 'repository.admin')
308 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
308 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
309 return False
309 return False
310
310
311 else:
311 else:
312 # any other action need at least read permission
312 # any other action need at least read permission
313 perms = (
313 perms = (
314 'repository.read', 'repository.write', 'repository.admin')
314 'repository.read', 'repository.write', 'repository.admin')
315 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
315 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
316 return False
316 return False
317
317
318 return True
318 return True
319
319
320 start = time.time()
320 start = time.time()
321 log.debug('Running plugin `%s` permissions check', plugin_id)
321 log.debug('Running plugin `%s` permissions check', plugin_id)
322
322
323 # for environ based auth, password can be empty, but then the validation is
323 # for environ based auth, password can be empty, but then the validation is
324 # on the server that fills in the env data needed for authentication
324 # on the server that fills in the env data needed for authentication
325 perm_result = compute_perm_vcs(
325 perm_result = compute_perm_vcs(
326 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
326 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
327
327
328 auth_time = time.time() - start
328 auth_time = time.time() - start
329 log.debug('Permissions for plugin `%s` completed in %.4fs, '
329 log.debug('Permissions for plugin `%s` completed in %.4fs, '
330 'expiration time of fetched cache %.1fs.',
330 'expiration time of fetched cache %.1fs.',
331 plugin_id, auth_time, cache_ttl)
331 plugin_id, auth_time, cache_ttl)
332
332
333 return perm_result
333 return perm_result
334
334
335 def _get_http_scheme(self, environ):
335 def _get_http_scheme(self, environ):
336 try:
336 try:
337 return environ['wsgi.url_scheme']
337 return environ['wsgi.url_scheme']
338 except Exception:
338 except Exception:
339 log.exception('Failed to read http scheme')
339 log.exception('Failed to read http scheme')
340 return 'http'
340 return 'http'
341
341
342 def _get_default_cache_ttl(self):
342 def _get_default_cache_ttl(self):
343 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
343 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
344 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
344 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
345 plugin_settings = plugin.get_settings()
345 plugin_settings = plugin.get_settings()
346 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
346 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
347 plugin_settings) or (False, 0)
347 plugin_settings) or (False, 0)
348 return plugin_cache_active, cache_ttl
348 return plugin_cache_active, cache_ttl
349
349
350 def __call__(self, environ, start_response):
350 def __call__(self, environ, start_response):
351 try:
351 try:
352 return self._handle_request(environ, start_response)
352 return self._handle_request(environ, start_response)
353 except Exception:
353 except Exception:
354 log.exception("Exception while handling request")
354 log.exception("Exception while handling request")
355 appenlight.track_exception(environ)
355 appenlight.track_exception(environ)
356 return HTTPInternalServerError()(environ, start_response)
356 return HTTPInternalServerError()(environ, start_response)
357 finally:
357 finally:
358 meta.Session.remove()
358 meta.Session.remove()
359
359
360 def _handle_request(self, environ, start_response):
360 def _handle_request(self, environ, start_response):
361 if not self.url_repo_name:
361 if not self.url_repo_name:
362 log.warning('Repository name is empty: %s', self.url_repo_name)
362 log.warning('Repository name is empty: %s', self.url_repo_name)
363 # failed to get repo name, we fail now
363 # failed to get repo name, we fail now
364 return HTTPNotFound()(environ, start_response)
364 return HTTPNotFound()(environ, start_response)
365 log.debug('Extracted repo name is %s', self.url_repo_name)
365 log.debug('Extracted repo name is %s', self.url_repo_name)
366
366
367 ip_addr = get_ip_addr(environ)
367 ip_addr = get_ip_addr(environ)
368 user_agent = get_user_agent(environ)
368 user_agent = get_user_agent(environ)
369 username = None
369 username = None
370
370
371 # skip passing error to error controller
371 # skip passing error to error controller
372 environ['pylons.status_code_redirect'] = True
372 environ['pylons.status_code_redirect'] = True
373
373
374 # ======================================================================
374 # ======================================================================
375 # GET ACTION PULL or PUSH
375 # GET ACTION PULL or PUSH
376 # ======================================================================
376 # ======================================================================
377 action = self._get_action(environ)
377 action = self._get_action(environ)
378
378
379 # ======================================================================
379 # ======================================================================
380 # Check if this is a request to a shadow repository of a pull request.
380 # Check if this is a request to a shadow repository of a pull request.
381 # In this case only pull action is allowed.
381 # In this case only pull action is allowed.
382 # ======================================================================
382 # ======================================================================
383 if self.is_shadow_repo and action != 'pull':
383 if self.is_shadow_repo and action != 'pull':
384 reason = 'Only pull action is allowed for shadow repositories.'
384 reason = 'Only pull action is allowed for shadow repositories.'
385 log.debug('User not allowed to proceed, %s', reason)
385 log.debug('User not allowed to proceed, %s', reason)
386 return HTTPNotAcceptable(reason)(environ, start_response)
386 return HTTPNotAcceptable(reason)(environ, start_response)
387
387
388 # Check if the shadow repo actually exists, in case someone refers
388 # Check if the shadow repo actually exists, in case someone refers
389 # to it after it has been deleted because of a successful merge.
389 # to it after it has been deleted because of a successful merge.
390 if self.is_shadow_repo and not self.is_shadow_repo_dir:
390 if self.is_shadow_repo and not self.is_shadow_repo_dir:
391 log.debug(
391 log.debug(
392 'Shadow repo detected, and shadow repo dir `%s` is missing',
392 'Shadow repo detected, and shadow repo dir `%s` is missing',
393 self.vcs_repo_name)
393 self.vcs_repo_name)
394 return HTTPNotFound()(environ, start_response)
394 return HTTPNotFound()(environ, start_response)
395
395
396 # ======================================================================
396 # ======================================================================
397 # CHECK ANONYMOUS PERMISSION
397 # CHECK ANONYMOUS PERMISSION
398 # ======================================================================
398 # ======================================================================
399 detect_force_push = False
399 detect_force_push = False
400 check_branch_perms = False
400 check_branch_perms = False
401 if action in ['pull', 'push']:
401 if action in ['pull', 'push']:
402 user_obj = anonymous_user = User.get_default_user()
402 user_obj = anonymous_user = User.get_default_user()
403 auth_user = user_obj.AuthUser()
403 auth_user = user_obj.AuthUser()
404 username = anonymous_user.username
404 username = anonymous_user.username
405 if anonymous_user.active:
405 if anonymous_user.active:
406 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
406 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
407 # ONLY check permissions if the user is activated
407 # ONLY check permissions if the user is activated
408 anonymous_perm = self._check_permission(
408 anonymous_perm = self._check_permission(
409 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
409 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
410 plugin_id='anonymous_access',
410 plugin_id='anonymous_access',
411 plugin_cache_active=plugin_cache_active,
411 plugin_cache_active=plugin_cache_active,
412 cache_ttl=cache_ttl,
412 cache_ttl=cache_ttl,
413 )
413 )
414 else:
414 else:
415 anonymous_perm = False
415 anonymous_perm = False
416
416
417 if not anonymous_user.active or not anonymous_perm:
417 if not anonymous_user.active or not anonymous_perm:
418 if not anonymous_user.active:
418 if not anonymous_user.active:
419 log.debug('Anonymous access is disabled, running '
419 log.debug('Anonymous access is disabled, running '
420 'authentication')
420 'authentication')
421
421
422 if not anonymous_perm:
422 if not anonymous_perm:
423 log.debug('Not enough credentials to access repo: `%s` '
423 log.debug('Not enough credentials to access repo: `%s` '
424 'repository as anonymous user', self.acl_repo_name)
424 'repository as anonymous user', self.acl_repo_name)
425
425
426 username = None
426 username = None
427 # ==============================================================
427 # ==============================================================
428 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
428 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
429 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
429 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
430 # ==============================================================
430 # ==============================================================
431
431
432 # try to auth based on environ, container auth methods
432 # try to auth based on environ, container auth methods
433 log.debug('Running PRE-AUTH for container|headers based authentication')
433 log.debug('Running PRE-AUTH for container|headers based authentication')
434
434
435 # headers auth, by just reading special headers and bypass the auth with user/passwd
435 # headers auth, by just reading special headers and bypass the auth with user/passwd
436 pre_auth = authenticate(
436 pre_auth = authenticate(
437 '', '', environ, VCS_TYPE, registry=self.registry,
437 '', '', environ, VCS_TYPE, registry=self.registry,
438 acl_repo_name=self.acl_repo_name)
438 acl_repo_name=self.acl_repo_name)
439
439
440 if pre_auth and pre_auth.get('username'):
440 if pre_auth and pre_auth.get('username'):
441 username = pre_auth['username']
441 username = pre_auth['username']
442 log.debug('PRE-AUTH got `%s` as username', username)
442 log.debug('PRE-AUTH got `%s` as username', username)
443 if pre_auth:
443 if pre_auth:
444 log.debug('PRE-AUTH successful from %s',
444 log.debug('PRE-AUTH successful from %s',
445 pre_auth.get('auth_data', {}).get('_plugin'))
445 pre_auth.get('auth_data', {}).get('_plugin'))
446
446
447 # If not authenticated by the container, run basic auth;
447 # If not authenticated by the container, run basic auth;
448 # before that, inject the calling repo_name for special scope checks
448 # before that, inject the calling repo_name for special scope checks
449 self.authenticate.acl_repo_name = self.acl_repo_name
449 self.authenticate.acl_repo_name = self.acl_repo_name
450
450
451 plugin_cache_active, cache_ttl = False, 0
451 plugin_cache_active, cache_ttl = False, 0
452 plugin = None
452 plugin = None
453
453
454 # regular auth chain
454 # regular auth chain
455 if not username:
455 if not username:
456 self.authenticate.realm = self.authenticate.get_rc_realm()
456 self.authenticate.realm = self.authenticate.get_rc_realm()
457
457
458 try:
458 try:
459 auth_result = self.authenticate(environ)
459 auth_result = self.authenticate(environ)
460 except (UserCreationError, NotAllowedToCreateUserError) as e:
460 except (UserCreationError, NotAllowedToCreateUserError) as e:
461 log.error(e)
461 log.error(e)
462 reason = safe_str(e)
462 reason = safe_str(e)
463 return HTTPNotAcceptable(reason)(environ, start_response)
463 return HTTPNotAcceptable(reason)(environ, start_response)
464
464
465 if isinstance(auth_result, dict):
465 if isinstance(auth_result, dict):
466 AUTH_TYPE.update(environ, 'basic')
466 AUTH_TYPE.update(environ, 'basic')
467 REMOTE_USER.update(environ, auth_result['username'])
467 REMOTE_USER.update(environ, auth_result['username'])
468 username = auth_result['username']
468 username = auth_result['username']
469 plugin = auth_result.get('auth_data', {}).get('_plugin')
469 plugin = auth_result.get('auth_data', {}).get('_plugin')
470 log.info(
470 log.info(
471 'MAIN-AUTH successful for user `%s` from %s plugin',
471 'MAIN-AUTH successful for user `%s` from %s plugin',
472 username, plugin)
472 username, plugin)
473
473
474 plugin_cache_active, cache_ttl = auth_result.get(
474 plugin_cache_active, cache_ttl = auth_result.get(
475 'auth_data', {}).get('_ttl_cache') or (False, 0)
475 'auth_data', {}).get('_ttl_cache') or (False, 0)
476 else:
476 else:
477 return auth_result.wsgi_application(environ, start_response)
477 return auth_result.wsgi_application(environ, start_response)
478
478
479 # ==============================================================
479 # ==============================================================
480 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
480 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
481 # ==============================================================
481 # ==============================================================
482 user = User.get_by_username(username)
482 user = User.get_by_username(username)
483 if not self.valid_and_active_user(user):
483 if not self.valid_and_active_user(user):
484 return HTTPForbidden()(environ, start_response)
484 return HTTPForbidden()(environ, start_response)
485 username = user.username
485 username = user.username
486 user_id = user.user_id
486 user_id = user.user_id
487
487
488 # check user attributes for password change flag
488 # check user attributes for password change flag
489 user_obj = user
489 user_obj = user
490 auth_user = user_obj.AuthUser()
490 auth_user = user_obj.AuthUser()
491 if user_obj and user_obj.username != User.DEFAULT_USER and \
491 if user_obj and user_obj.username != User.DEFAULT_USER and \
492 user_obj.user_data.get('force_password_change'):
492 user_obj.user_data.get('force_password_change'):
493 reason = 'password change required'
493 reason = 'password change required'
494 log.debug('User not allowed to authenticate, %s', reason)
494 log.debug('User not allowed to authenticate, %s', reason)
495 return HTTPNotAcceptable(reason)(environ, start_response)
495 return HTTPNotAcceptable(reason)(environ, start_response)
496
496
497 # check permissions for this repository
497 # check permissions for this repository
498 perm = self._check_permission(
498 perm = self._check_permission(
499 action, user, auth_user, self.acl_repo_name, ip_addr,
499 action, user, auth_user, self.acl_repo_name, ip_addr,
500 plugin, plugin_cache_active, cache_ttl)
500 plugin, plugin_cache_active, cache_ttl)
501 if not perm:
501 if not perm:
502 return HTTPForbidden()(environ, start_response)
502 return HTTPForbidden()(environ, start_response)
503 environ['rc_auth_user_id'] = str(user_id)
503 environ['rc_auth_user_id'] = str(user_id)
504
504
505 if action == 'push':
505 if action == 'push':
506 perms = auth_user.get_branch_permissions(self.acl_repo_name)
506 perms = auth_user.get_branch_permissions(self.acl_repo_name)
507 if perms:
507 if perms:
508 check_branch_perms = True
508 check_branch_perms = True
509 detect_force_push = True
509 detect_force_push = True
510
510
511 # extras are injected into UI object and later available
511 # extras are injected into UI object and later available
512 # in hooks executed by RhodeCode
512 # in hooks executed by RhodeCode
513 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
513 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
514
514
515 extras = vcs_operation_context(
515 extras = vcs_operation_context(
516 environ, repo_name=self.acl_repo_name, username=username,
516 environ, repo_name=self.acl_repo_name, username=username,
517 action=action, scm=self.SCM, check_locking=check_locking,
517 action=action, scm=self.SCM, check_locking=check_locking,
518 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
518 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
519 detect_force_push=detect_force_push
519 detect_force_push=detect_force_push
520 )
520 )
521
521
522 # ======================================================================
522 # ======================================================================
523 # REQUEST HANDLING
523 # REQUEST HANDLING
524 # ======================================================================
524 # ======================================================================
525 repo_path = os.path.join(
525 repo_path = os.path.join(
526 safe_str(self.base_path), safe_str(self.vcs_repo_name))
526 safe_str(self.base_path), safe_str(self.vcs_repo_name))
527 log.debug('Repository path is %s', repo_path)
527 log.debug('Repository path is %s', repo_path)
528
528
529 fix_PATH()
529 fix_PATH()
530
530
531 log.info(
531 log.info(
532 '%s action on %s repo "%s" by "%s" from %s %s',
532 '%s action on %s repo "%s" by "%s" from %s %s',
533 action, self.SCM, safe_str(self.url_repo_name),
533 action, self.SCM, safe_str(self.url_repo_name),
534 safe_str(username), ip_addr, user_agent)
534 safe_str(username), ip_addr, user_agent)
535
535
536 return self._generate_vcs_response(
536 return self._generate_vcs_response(
537 environ, start_response, repo_path, extras, action)
537 environ, start_response, repo_path, extras, action)
538
538
539 def _get_txn_id(self, environ):
539 def _get_txn_id(self, environ):
540
540
541 for k in ['RAW_URI', 'HTTP_DESTINATION']:
541 for k in ['RAW_URI', 'HTTP_DESTINATION']:
542 url = environ.get(k)
542 url = environ.get(k)
543 if not url:
543 if not url:
544 continue
544 continue
545
545
546 # regex to search for svn-txn-id
546 # regex to search for svn-txn-id
547 pattern = r'/!svn/txr/([^/]+)/'
547 pattern = r'/!svn/txr/([^/]+)/'
548
548
549 # Search for the pattern in the URL
549 # Search for the pattern in the URL
550 match = re.search(pattern, url)
550 match = re.search(pattern, url)
551
551
552 # Check if a match is found and extract the captured group
552 # Check if a match is found and extract the captured group
553 if match:
553 if match:
554 txn_id = match.group(1)
554 txn_id = match.group(1)
555 return txn_id
555 return txn_id
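
A self-contained illustration of the extraction above, using an invented SVN transaction (txr) URL:

    import re

    url = '/myrepo/!svn/txr/123-abcdef/trunk/file.txt'  # hypothetical RAW_URI
    match = re.search(r'/!svn/txr/([^/]+)/', url)
    assert match and match.group(1) == '123-abcdef'
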
556
556
557 @initialize_generator
557 @initialize_generator
558 def _generate_vcs_response(
558 def _generate_vcs_response(
559 self, environ, start_response, repo_path, extras, action):
559 self, environ, start_response, repo_path, extras, action):
560 """
560 """
561 Returns a generator for the response content.
561 Returns a generator for the response content.
562
562
563 This method is implemented as a generator, so that it can trigger
563 This method is implemented as a generator, so that it can trigger
564 the cache validation after all content has been sent back to the client. It
564 the cache validation after all content has been sent back to the client. It
565 also handles the locking exceptions which will be triggered when
565 also handles the locking exceptions which will be triggered when
566 the first chunk is produced by the underlying WSGI application.
566 the first chunk is produced by the underlying WSGI application.
567 """
567 """
568 svn_txn_id = ''
568 svn_txn_id = ''
569 if action == 'push':
569 if action == 'push':
570 svn_txn_id = self._get_txn_id(environ)
570 svn_txn_id = self._get_txn_id(environ)
571
571
572 callback_daemon, extras = self._prepare_callback_daemon(
572 callback_daemon, extras = self._prepare_callback_daemon(
573 extras, environ, action, txn_id=svn_txn_id)
573 extras, environ, action, txn_id=svn_txn_id)
574
574
575 if svn_txn_id:
575 if svn_txn_id:
576
576
577 port = safe_int(extras['hooks_uri'].split(':')[-1])
577 port = safe_int(extras['hooks_uri'].split(':')[-1])
578 txn_id_data = extras.copy()
578 txn_id_data = extras.copy()
579 txn_id_data.update({'port': port})
579 txn_id_data.update({'port': port})
580 txn_id_data.update({'req_method': environ['REQUEST_METHOD']})
580 txn_id_data.update({'req_method': environ['REQUEST_METHOD']})
581
581
582 full_repo_path = repo_path
582 full_repo_path = repo_path
583 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
583 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
584
584
585 log.debug('HOOKS extras is %s', extras)
585 log.debug('HOOKS extras is %s', extras)
586
586
587 http_scheme = self._get_http_scheme(environ)
587 http_scheme = self._get_http_scheme(environ)
588
588
589 config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
589 config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
590 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
590 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
591 with callback_daemon:
591 with callback_daemon:
592 app.rc_extras = extras
592 app.rc_extras = extras
593
593
594 try:
594 try:
595 response = app(environ, start_response)
595 response = app(environ, start_response)
596 finally:
596 finally:
597 # This statement works together with the decorator
597 # This statement works together with the decorator
598 # "initialize_generator" above. The decorator ensures that
598 # "initialize_generator" above. The decorator ensures that
599 # we hit the first yield statement before the generator is
599 # we hit the first yield statement before the generator is
600 # returned back to the WSGI server. This is needed to
600 # returned back to the WSGI server. This is needed to
601 # ensure that the call to "app" above triggers the
601 # ensure that the call to "app" above triggers the
602 # needed callback to "start_response" before the
602 # needed callback to "start_response" before the
603 # generator is actually used.
603 # generator is actually used.
604 yield "__init__"
604 yield "__init__"
605
605
606 # iter content
606 # iter content
607 for chunk in response:
607 for chunk in response:
608 yield chunk
608 yield chunk
609
609
610 try:
610 try:
611 # invalidate cache on push
611 # invalidate cache on push
612 if action == 'push':
612 if action == 'push':
613 self._invalidate_cache(self.url_repo_name)
613 self._invalidate_cache(self.url_repo_name)
614 finally:
614 finally:
615 meta.Session.remove()
615 meta.Session.remove()
616
616
617 def _get_repository_name(self, environ):
617 def _get_repository_name(self, environ):
618 """Get repository name out of the environmnent
618 """Get repository name out of the environmnent
619
619
620 :param environ: WSGI environment
620 :param environ: WSGI environment
621 """
621 """
622 raise NotImplementedError()
622 raise NotImplementedError()
623
623
624 def _get_action(self, environ):
624 def _get_action(self, environ):
625 """Map request commands into a pull or push command.
625 """Map request commands into a pull or push command.
626
626
627 :param environ: WSGI environment
627 :param environ: WSGI environment
628 """
628 """
629 raise NotImplementedError()
629 raise NotImplementedError()
630
630
631 def _create_wsgi_app(self, repo_path, repo_name, config):
631 def _create_wsgi_app(self, repo_path, repo_name, config):
632 """Return the WSGI app that will finally handle the request."""
632 """Return the WSGI app that will finally handle the request."""
633 raise NotImplementedError()
633 raise NotImplementedError()
634
634
635 def _create_config(self, extras, repo_name, scheme='http'):
635 def _create_config(self, extras, repo_name, scheme='http'):
636 """Create a safe config representation."""
636 """Create a safe config representation."""
637 raise NotImplementedError()
637 raise NotImplementedError()
638
638
639 def _should_use_callback_daemon(self, extras, environ, action):
639 def _should_use_callback_daemon(self, extras, environ, action):
640 if extras.get('is_shadow_repo'):
640 if extras.get('is_shadow_repo'):
641 # we don't want to execute hooks, and callback daemon for shadow repos
641 # we don't want to execute hooks, and callback daemon for shadow repos
642 return False
642 return False
643 return True
643 return True
644
644
645 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
645 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
646 protocol = vcs_settings.HOOKS_PROTOCOL
646 protocol = vcs_settings.HOOKS_PROTOCOL
647
647
648 if not self._should_use_callback_daemon(extras, environ, action):
648 if not self._should_use_callback_daemon(extras, environ, action):
649 # disable callback daemon for actions that don't require it
649 # disable callback daemon for actions that don't require it
650 protocol = 'local'
650 protocol = 'local'
651
651
652 return prepare_callback_daemon(
652 return prepare_callback_daemon(
653 extras, protocol=protocol,
653 extras, protocol=protocol,
654 host=vcs_settings.HOOKS_HOST, txn_id=txn_id)
654 host=vcs_settings.HOOKS_HOST, txn_id=txn_id)
655
655
656
656
657 def _should_check_locking(query_string):
657 def _should_check_locking(query_string):
658 # this is kind of hacky, but due to how mercurial handles client-server
658 # this is kind of hacky, but due to how mercurial handles client-server
659 # communication, the server sees operations on commits, bookmarks, phases and
659 # communication, the server sees operations on commits, bookmarks, phases and
660 # obsolescence markers in separate transactions, so we don't want to check
660 # obsolescence markers in separate transactions, so we don't want to check
661 # locking on those
661 # locking on those
662 return query_string not in ['cmd=listkeys']
662 return query_string not in ['cmd=listkeys']
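
For example, with the helper above ('cmd=unbundle' is Mercurial's push command):

    assert _should_check_locking('cmd=listkeys') is False
    assert _should_check_locking('cmd=unbundle') is True
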
@@ -1,835 +1,864 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Utilities library for RhodeCode
20 Utilities library for RhodeCode
21 """
21 """
22
22
23 import datetime
23 import datetime
24
24
25 import decorator
25 import decorator
26 import logging
26 import logging
27 import os
27 import os
28 import re
28 import re
29 import sys
29 import sys
30 import shutil
30 import shutil
31 import socket
31 import socket
32 import tempfile
32 import tempfile
33 import traceback
33 import traceback
34 import tarfile
34 import tarfile
35
35
36 from functools import wraps
36 from functools import wraps
37 from os.path import join as jn
37 from os.path import join as jn
38
38
39 import paste
39 import paste
40 import pkg_resources
40 import pkg_resources
41 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
41 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
42
42
43 from mako import exceptions
43 from mako import exceptions
44
44
45 from rhodecode import ConfigGet
45 from rhodecode import ConfigGet
46 from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRC
46 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
47 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
47 from rhodecode.lib.type_utils import AttributeDict
48 from rhodecode.lib.type_utils import AttributeDict
48 from rhodecode.lib.str_utils import safe_bytes, safe_str
49 from rhodecode.lib.str_utils import safe_bytes, safe_str
49 from rhodecode.lib.vcs.backends.base import Config
50 from rhodecode.lib.vcs.backends.base import Config
50 from rhodecode.lib.vcs.exceptions import VCSError
51 from rhodecode.lib.vcs.exceptions import VCSError
51 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
52 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
52 from rhodecode.lib.ext_json import sjson as json
53 from rhodecode.lib.ext_json import sjson as json
53 from rhodecode.model import meta
54 from rhodecode.model import meta
54 from rhodecode.model.db import (
55 from rhodecode.model.db import (
55 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
56 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
56 from rhodecode.model.meta import Session
57 from rhodecode.model.meta import Session
57
58
58
59
59 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
60
61
61 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
62 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
62
63
63 # String which contains characters that are not allowed in slug names for
64 # String which contains characters that are not allowed in slug names for
64 # repositories or repository groups. It is properly escaped to use it in
65 # repositories or repository groups. It is properly escaped to use it in
65 # regular expressions.
66 # regular expressions.
66 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
67 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
67
68
68 # Regex that matches forbidden characters in repo/group slugs.
69 # Regex that matches forbidden characters in repo/group slugs.
69 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
70 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
70
71
71 # Regex that matches allowed characters in repo/group slugs.
72 # Regex that matches allowed characters in repo/group slugs.
72 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
73 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
73
74
74 # Regex that matches whole repo/group slugs.
75 # Regex that matches whole repo/group slugs.
75 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
76 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
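
A small self-contained check of how the bad-character pattern above behaves on an illustrative name:

    import re

    SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
    SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))

    assert SLUG_BAD_CHAR_RE.sub('', 'my repo!?') == 'my repo'
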
76
77
77 _license_cache = None
78 _license_cache = None
78
79
79
80
80 def adopt_for_celery(func):
81 def adopt_for_celery(func):
81 """
82 """
82 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
83 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
83 for further usage as celery tasks.
84 for further usage as celery tasks.
84 """
85 """
85 @wraps(func)
86 @wraps(func)
86 def wrapper(extras):
87 def wrapper(extras):
87 extras = AttributeDict(extras)
88 extras = AttributeDict(extras)
88 try:
89 try:
89 # HooksResponse implements to_json method which must be used there.
90 # HooksResponse implements to_json method which must be used there.
90 return func(extras).to_json()
91 return func(extras).to_json()
92 except HTTPBranchProtected as error:
93 # Those special cases don't need error reporting. It's a case of
94 # locked repo or protected branch
95 error_args = error.args
96 return {
97 'status': error.code,
98 'output': error.explanation,
99 'exception': type(error).__name__,
100 'exception_args': error_args,
101 'exception_traceback': '',
102 }
103 except HTTPLockedRC as error:
104 # Those special cases don't need error reporting. It's a case of
105 # locked repo or protected branch
106 error_args = error.args
107 return {
108 'status': error.code,
109 'output': error.explanation,
110 'exception': type(error).__name__,
111 'exception_args': error_args,
112 'exception_traceback': '',
113 }
91 except Exception as e:
114 except Exception as e:
92 return {'status': 128, 'exception': type(e).__name__, 'exception_args': e.args}
115 return {
116 'status': 128,
117 'output': '',
118 'exception': type(e).__name__,
119 'exception_args': e.args,
120 'exception_traceback': '',
121 }
93 return wrapper
122 return wrapper
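
An illustrative example (invented values) of the dict the wrapper now returns when a hook raises HTTPLockedRC, instead of falling through to the generic status-128 branch; 423 is assumed as the default locked-repo return code:

    # hypothetical result for a push into a locked repository
    locked_repo_response = {
        'status': 423,                                      # assumed default lock return code
        'output': 'Repository `group/my-repo` is locked',   # invented message
        'exception': 'HTTPLockedRC',
        'exception_args': (),
        'exception_traceback': '',
    }
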
94
123
95
124
96 def repo_name_slug(value):
125 def repo_name_slug(value):
97 """
126 """
98 Return slug of name of repository
127 Return slug of name of repository
99 This function is called on each creation/modification
128 This function is called on each creation/modification
100 of repository to prevent bad names in repo
129 of repository to prevent bad names in repo
101 """
130 """
102
131
103 replacement_char = '-'
132 replacement_char = '-'
104
133
105 slug = strip_tags(value)
134 slug = strip_tags(value)
106 slug = convert_accented_entities(slug)
135 slug = convert_accented_entities(slug)
107 slug = convert_misc_entities(slug)
136 slug = convert_misc_entities(slug)
108
137
109 slug = SLUG_BAD_CHAR_RE.sub('', slug)
138 slug = SLUG_BAD_CHAR_RE.sub('', slug)
110 slug = re.sub(r'[\s]+', '-', slug)
139 slug = re.sub(r'[\s]+', '-', slug)
111 slug = collapse(slug, replacement_char)
140 slug = collapse(slug, replacement_char)
112
141
113 return slug
142 return slug
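
A usage sketch of the helper above (assumes webhelpers2 is available); for a plain ASCII name the forbidden character is stripped and whitespace becomes the replacement character:

    assert repo_name_slug('My Repo!') == 'My-Repo'
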
114
143
115
144
116 #==============================================================================
145 #==============================================================================
117 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
146 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
118 #==============================================================================
147 #==============================================================================
119 def get_repo_slug(request):
148 def get_repo_slug(request):
120 _repo = ''
149 _repo = ''
121
150
122 if hasattr(request, 'db_repo_name'):
151 if hasattr(request, 'db_repo_name'):
123 # if our request has a db reference set, use it for the name; this
152 # if our request has a db reference set, use it for the name; this
124 # translates the example.com/_<id> into proper repo names
153 # translates the example.com/_<id> into proper repo names
125 _repo = request.db_repo_name
154 _repo = request.db_repo_name
126 elif getattr(request, 'matchdict', None):
155 elif getattr(request, 'matchdict', None):
127 # pyramid
156 # pyramid
128 _repo = request.matchdict.get('repo_name')
157 _repo = request.matchdict.get('repo_name')
129
158
130 if _repo:
159 if _repo:
131 _repo = _repo.rstrip('/')
160 _repo = _repo.rstrip('/')
132 return _repo
161 return _repo
133
162
134
163
135 def get_repo_group_slug(request):
164 def get_repo_group_slug(request):
136 _group = ''
165 _group = ''
137 if hasattr(request, 'db_repo_group'):
166 if hasattr(request, 'db_repo_group'):
138 # if our request has a db reference set, use it for the name; this
167 # if our request has a db reference set, use it for the name; this
139 # translates the example.com/_<id> into proper repo group names
168 # translates the example.com/_<id> into proper repo group names
140 _group = request.db_repo_group.group_name
169 _group = request.db_repo_group.group_name
141 elif getattr(request, 'matchdict', None):
170 elif getattr(request, 'matchdict', None):
142 # pyramid
171 # pyramid
143 _group = request.matchdict.get('repo_group_name')
172 _group = request.matchdict.get('repo_group_name')
144
173
145 if _group:
174 if _group:
146 _group = _group.rstrip('/')
175 _group = _group.rstrip('/')
147 return _group
176 return _group


def get_user_group_slug(request):
    _user_group = ''

    if hasattr(request, 'db_user_group'):
        _user_group = request.db_user_group.users_group_name
    elif getattr(request, 'matchdict', None):
        # pyramid
        _user_group = request.matchdict.get('user_group_id')
        _user_group_name = request.matchdict.get('user_group_name')
        try:
            if _user_group:
                _user_group = UserGroup.get(_user_group)
            elif _user_group_name:
                _user_group = UserGroup.get_by_group_name(_user_group_name)

            if _user_group:
                _user_group = _user_group.users_group_name
        except Exception:
            log.exception('Failed to get user group by id and name')
            # catch all failures here
            return None

    return _user_group
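
# Usage sketch (illustrative, not part of the original module): the permission
# decorators mentioned in the section header above resolve names roughly like
# this inside a request handler:
#
#   repo_name = get_repo_slug(request)
#   repo_group_name = get_repo_group_slug(request)
#   user_group_name = get_user_group_slug(request)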


def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans the given path for repos and returns (name, (type, path)) tuples

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _get_repos(p):
        dirpaths = get_dirpaths(p)
        if not _is_dir_writable(p):
            log.warning('repo path without write access: %s', p)

        for dirpath in dirpaths:
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            # skip .<something> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # check if this dir contains other repos for a recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    yield from _get_repos(rec_path)

    return _get_repos(path)
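
# Usage sketch (illustrative, not part of the original module; the repo store
# path is a placeholder). get_filesystem_repos() returns a generator of
# (relative_name, scm_info) tuples, where scm_info[0] is the detected backend:
#
#   for repo_name, scm_info in get_filesystem_repos('/srv/repo_store', recursive=True):
#       log.debug('found %s (%s)', repo_name, scm_info[0])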


def get_dirpaths(p: str) -> list:
    try:
        # OS-independent way of checking if we have at least read-only
        # access or not.
        dirpaths = os.listdir(p)
    except OSError:
        log.warning('ignoring repo path without read access: %s', p)
        return []

    # os.listdir has a tweak: if a unicode is passed into it, then it tries to
    # decode paths and suddenly returns unicode objects itself. The items it
    # cannot decode are returned as strings and cause issues.
    #
    # Those paths are ignored here until a solid solution for path handling has
    # been built.
    expected_type = type(p)

    def _has_correct_type(item):
        if type(item) is not expected_type:
            log.error(
                "Ignoring path %s since it cannot be decoded into str.",
                # Using "repr" to make sure that we see the byte value in case
                # of support issues.
                repr(item))
            return False
        return True

    dirpaths = [item for item in dirpaths if _has_correct_type(item)]

    return dirpaths


def _is_dir_writable(path):
    """
    Probe if `path` is writable.

    Due to trouble on Cygwin / Windows, this is actually probing if it is
    possible to create a file inside of `path`; stat does not produce reliable
    results in this case.
    """
    try:
        with tempfile.TemporaryFile(dir=path):
            pass
    except OSError:
        return False
    return True


def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
    """
    Returns True if the given path is a valid repository, False otherwise.
    If expect_scm is given, also compare whether the detected scm matches the
    expected one. If explicit_scm is given, don't try to detect the scm, just
    use the given one to check if the repo is valid.

    :param repo_name:
    :param base_path:
    :param expect_scm:
    :param explicit_scm:
    :param config:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository. '
              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            detected_scms = [get_scm_backend(explicit_scm)(
                full_path, config=config).alias]
        else:
            detected_scms = get_scm(full_path)

        if expect_scm:
            return detected_scms[0] == expect_scm
        log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False
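
# Usage sketch (illustrative, not part of the original module; paths are
# placeholders). Passing expect_scm narrows the check to a single backend:
#
#   if is_valid_repo('group/my-repo', '/srv/repo_store', expect_scm='git'):
#       ...  # safe to treat the path as a git repository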


def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if a given path is a repository group, False otherwise

    :param repo_group_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
        return False

    try:
        # we need to check bare git repos at a higher level, since we might
        # match branches/hooks/info/objects or possibly other things inside
        # a bare git repo
        maybe_repo = os.path.dirname(full_path)
        if maybe_repo == base_path:
            # skip root level repo check; we know root location CANNOT BE a repo group
            return False

        scm_ = get_scm(maybe_repo)
        log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False


def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    while True:
        ok = input(prompt)
        if ok.lower() in ('y', 'ye', 'yes'):
            return True
        if ok.lower() in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise OSError
        print(complaint)

# propagated from mercurial documentation
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]


def prepare_config_data(clear_session=True, repo=None):
    """
    Read the configuration data from the database and *.ini files and return
    configuration tuples.
    """
    from rhodecode.model.settings import VcsSettingsModel

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)

    ui_settings = settings_model.get_ui_settings()

    ui_data = []
    config = [
        ('web', 'push_ssl', 'false'),
    ]
    for setting in ui_settings:
        # TODO: remove this section once the transition to *.ini files is completed
        if setting.section in ('largefiles', 'vcs_git_lfs'):
            if setting.key != 'enabled':
                continue
        if setting.active:
            ui_data.append((setting.section, setting.key, setting.value))
            config.append((
                safe_str(setting.section), safe_str(setting.key),
                safe_str(setting.value)))
        if setting.key == 'push_ssl':
            # force the push_ssl requirement to False; this setting is deprecated, and we must force it to False
            config.append((
                safe_str(setting.section), safe_str(setting.key), False))
    config_getter = ConfigGet()
    config.append(('vcs_git_lfs', 'store_location', config_getter.get_str('vcs.git.lfs.storage_location')))
    config.append(('largefiles', 'usercache', config_getter.get_str('vcs.hg.largefiles.storage_location')))
    log.debug(
        'settings ui from db@repo[%s]: %s',
        repo,
        ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))

    config = [entry for entry in config if entry[:2] not in skip_entries]

    return config


def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance based on the values in the database.
    """
    config = Config()
    config_data = prepare_config_data(clear_session=clear_session, repo=repo)
    for section, option, value in config_data:
        config.set(section, option, value)
    return config
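
# Usage sketch (illustrative, not part of the original module; the repo name is
# a placeholder). The returned Config carries the (section, option, value)
# tuples produced by prepare_config_data():
#
#   vcs_config = make_db_config(clear_session=False, repo='group/my-repo')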


def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    enabled_hooks = []
    active_hook_keys = [
        key for section, key, value, active in ui_settings
        if section == 'hooks' and active]

    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
    }

    for key in active_hook_keys:
        hook = hook_names.get(key)
        if hook:
            enabled_hooks.append(hook)

    return enabled_hooks
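
# Usage sketch (illustrative, not part of the original module), assuming the
# ui_settings entries unpack into (section, key, value, active) as above:
#
#   hooks = get_enabled_hook_classes(ui_settings)
#   if 'push' in hooks:
#       ...  # push hooks are active for this context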


def set_rhodecode_config(config):
    """
    Updates pyramid config with new settings from database

    :param config:
    """
    from rhodecode.model.settings import SettingsModel
    app_settings = SettingsModel().get_all_settings()

    for k, v in list(app_settings.items()):
        config[k] = v


def get_rhodecode_realm():
    """
    Return the rhodecode realm from database.
    """
    from rhodecode.model.settings import SettingsModel
    realm = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm.app_settings_value)


def get_rhodecode_repo_store_path():
    """
    Returns the base path. The base path is the filesystem path which points
    to the repository store.
    """

    import rhodecode
    return rhodecode.CONFIG['repo_store.path']


def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            sa.flush()

        parent = group
    return group
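
# Usage sketch (illustrative, not part of the original module; the path is a
# placeholder). For 'projects/backend/my-repo' this ensures the groups
# 'projects' and 'projects/backend' exist and returns the innermost one:
#
#   group = map_groups('projects/backend/my-repo')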


def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
    """
    Maps all repos given in initial_repo_list; non-existing repositories are
    created. If remove_obsolete is True, it also checks for db entries that
    are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.repo_group import RepoGroupModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_super_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in list(initial_repo_list.items()):
        group = map_groups(name)
        str_name = safe_str(name)
        db_repo = repo_model.get_by_repo_name(str_name)

        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository `%s` not found in the database, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info(force=True)

            db_repo.update_commit_cache(recursive=False)

        config = db_repo._config
        config.set('extensions', 'largefiles', '')
        repo = db_repo.scm_instance(config=config)
        repo.install_hooks(force=force_hooks_rebuild)

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in list(initial_repo_list.keys()):
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

        def splitter(full_repo_name):
            _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
            gr_name = None
            if len(_parts) == 2:
                gr_name = _parts[0]
            return gr_name

        initial_repo_group_list = [splitter(x) for x in
                                   list(initial_repo_list.keys()) if splitter(x)]

        # remove from database those repository groups that are not in the
        # filesystem; due to parent-child relationships we need to delete them
        # in a specific order, most nested first
        all_groups = [x.group_name for x in sa.query(RepoGroup).all()]

        def nested_sort(gr):
            return len(gr.split('/'))

        for group_name in sorted(all_groups, key=nested_sort, reverse=True):
            if group_name not in initial_repo_group_list:
                repo_group = RepoGroup.get_by_group_name(group_name)
                if (repo_group.children.all() or
                        not RepoGroupModel().check_exist_filesystem(
                            group_name=group_name, exc_on_failure=False)):
                    continue

                log.info(
                    'Removing non-existing repository group found in db `%s`',
                    group_name)
                try:
                    RepoGroupModel(sa).delete(group_name, fs_remove=False)
                    sa.commit()
                    removed.append(group_name)
                except Exception:
                    # don't hold further removals on error
                    log.exception(
                        'Unable to remove repository group `%s`',
                        group_name)
                    sa.rollback()
                    raise

    return added, removed
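
# Usage sketch (illustrative, not part of the original module). A rescan feeds
# a mapping of repo name -> scm instance (as produced by a filesystem scan)
# into the mapper and optionally prunes stale database entries:
#
#   added, removed = repo2db_mapper(scanned_repos, remove_obsolete=True)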


def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path)
    sys.path.append(path)

    try:
        rcextensions = __import__('rcextensions')
    except ImportError:
        if os.path.isdir(os.path.join(path, 'rcextensions')):
            log.warning('Unable to load rcextensions from %s', path)
        rcextensions = None

    if rcextensions:
        log.info('Loaded rcextensions from %s...', rcextensions)
        rhodecode.EXTENSIONS = rcextensions

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(
            getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))


def get_custom_lexer(extension):
    """
    returns a custom lexer if it is defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers

    # custom override made by RhodeCode
    if extension in ['mako']:
        return lexers.get_lexer_by_name('html+mako')

    # check if this extension wasn't already mapped to another lexer
    extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
    if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(_lexer_name)
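
# Usage sketch (illustrative, not part of the original module). With an
# rcextensions module defining e.g. EXTRA_LEXERS = {'jinja': 'html+jinja'},
# a caller could fall back to a plain-text lexer when nothing is registered:
#
#   lexer = get_custom_lexer('jinja') or lexers.get_lexer_by_name('text')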


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config):
    """
    Makes default test index.
    """
    try:
        import rc_testdata
    except ImportError:
        raise ImportError('Failed to import rc_testdata, '
                          'please make sure this package is installed from requirements_test.txt')
    rc_testdata.extract_search_index(
        'vcs_search_index', os.path.dirname(config['search.location']))


def create_test_directory(test_path):
    """
    Create test directory if it doesn't exist.
    """
    if not os.path.isdir(test_path):
        log.debug('Creating testdir %s', test_path)
        os.makedirs(test_path)


def create_test_database(test_path, config):
    """
    Makes a fresh database.
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.lib.utils2 import get_encryption_key

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    enc_key = get_encryption_key(config)

    log.debug('making test db %s', dbconf)

    dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
                        tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()


def create_test_repositories(test_path, config):
    """
    Creates test repositories in the temporary directory. Repositories are
    extracted from archives within the rc_testdata package.
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        tar.extractall(jn(test_path, SVN_REPO))


def password_changed(auth_user, session):
    # Never report password change in case of default user or anonymous user.
    if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
        return False

    password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
    rhodecode_user = session.get('rhodecode_user', {})
    session_password_hash = rhodecode_user.get('password', '')
    return password_hash != session_password_hash


def read_opensource_licenses():
    global _license_cache

    if not _license_cache:
        licenses = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(licenses)

    return _license_cache


def generate_platform_uuid():
    """
    Generates a platform UUID based on its name
    """
    import platform

    try:
        uuid_list = [platform.platform()]
        return sha256_safe(':'.join(uuid_list))
    except Exception as e:
        log.error('Failed to generate host uuid: %s', e)
        return 'UNDEFINED'


def send_test_email(recipients, email_body='TEST EMAIL'):
    """
    Simple code for generating test emails.
    Usage::

        from rhodecode.lib import utils
        utils.send_test_email(['test@example.com'])
    """
    from rhodecode.lib.celerylib import tasks, run_task

    email_body = email_body_plaintext = email_body
    subject = f'SUBJECT FROM: {socket.gethostname()}'
    tasks.send_email(recipients, subject, email_body_plaintext, email_body)