fix(svn): svn event fixes and change how we handle the events
super-admin -
r5459:7f730862 default
@@ -0,0 +1,132 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import logging
20 import redis
21
22 from ..lib import rc_cache
23 from ..lib.ext_json import json
24
25
26 log = logging.getLogger(__name__)
27
28 redis_client = None
29
30
31 class RedisTxnClient:
32
33 def __init__(self, url):
34 self.url = url
35 self._create_client(url)
36
37 def _create_client(self, url):
38 connection_pool = redis.ConnectionPool.from_url(url)
39 self.writer_client = redis.StrictRedis(
40 connection_pool=connection_pool
41 )
42 self.reader_client = self.writer_client
43
44 def set(self, key, value, expire=24 * 60000):
45 self.writer_client.set(key, value, ex=expire)
46
47 def get(self, key):
48 return self.reader_client.get(key)
49
50 def delete(self, key):
51 self.writer_client.delete(key)
52
53
54 def get_redis_client(url=''):
55
56 global redis_client
57 if redis_client is not None:
58 return redis_client
59 if not url:
60 from rhodecode import CONFIG
61 url = CONFIG['vcs.svn.redis_conn']
62 redis_client = RedisTxnClient(url)
63 return redis_client
64
65
66 def extract_svn_txn_id(data: bytes):
67 """
68 Helper method for extraction of svn txn_id from submitted XML data during
69 POST operations
70 """
71 import re
72 from lxml import etree
73
74 try:
75 root = etree.fromstring(data)
76 pat = re.compile(r'/txn/(?P<txn_id>.*)')
77 for el in root:
78 if el.tag == '{DAV:}source':
79 for sub_el in el:
80 if sub_el.tag == '{DAV:}href':
81 match = pat.search(sub_el.text)
82 if match:
83 svn_tx_id = match.groupdict()['txn_id']
84 return svn_tx_id
85 except Exception:
86 log.exception('Failed to extract txn_id')
87
88
89 def get_txn_id_data_key(repo_path, svn_txn_id):
90 log.debug('svn-txn-id: %s, obtaining data path', svn_txn_id)
91 repo_key = rc_cache.utils.compute_key_from_params(repo_path)
92 final_key = f'{repo_key}.{svn_txn_id}.svn_txn_id'
93 log.debug('computed final key: %s', final_key)
94
95 return final_key
96
97
98 def store_txn_id_data(repo_path, svn_txn_id, data_dict):
99 log.debug('svn-txn-id: %s, storing data', svn_txn_id)
100
101 if not svn_txn_id:
102 log.warning('Cannot store txn_id because it is empty')
103 return
104
105 redis_conn = get_redis_client()
106
107 store_key = get_txn_id_data_key(repo_path, svn_txn_id)
108 store_data = json.dumps(data_dict)
109 redis_conn.set(store_key, store_data)
110
111
112 def get_txn_id_from_store(repo_path, svn_txn_id, rm_on_read=False):
113 """
114     Reads the txn_id data from the store and, if present, returns it for the callback manager
115 """
116 log.debug('svn-txn-id: %s, retrieving data', svn_txn_id)
117 redis_conn = get_redis_client()
118
119 store_key = get_txn_id_data_key(repo_path, svn_txn_id)
120 data = {}
121
122 try:
123 raw_data = redis_conn.get(store_key)
124 data = json.loads(raw_data)
125 except Exception:
126 log.exception('Failed to get txn_id metadata')
127
128 if rm_on_read:
129 log.debug('Cleaning up txn_id at %s', store_key)
130 redis_conn.delete(store_key)
131
132 return data
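
A note on the helper above: the `{DAV:}source`/`{DAV:}href` walk plus the `/txn/...` regex is easiest to follow with a concrete request body in front of you. Below is a minimal, self-contained sketch of the same extraction, using the standard-library ElementTree instead of lxml; the MERGE body, repository name, and transaction id are made up for illustration.

```python
import re
import xml.etree.ElementTree as ET

# Hypothetical MERGE request body, illustrating the shape of the XML that
# extract_svn_txn_id() parses: a DAV:href containing ".../txn/<id>".
SAMPLE_MERGE_BODY = b"""<?xml version="1.0" encoding="utf-8"?>
<D:merge xmlns:D="DAV:">
  <D:source>
    <D:href>/my-repo/!svn/txn/123-abc</D:href>
  </D:source>
</D:merge>"""


def extract_txn_id(data: bytes):
    # Same walk as extract_svn_txn_id: find {DAV:}source/{DAV:}href and pull
    # the transaction id that follows "/txn/".
    root = ET.fromstring(data)
    pat = re.compile(r'/txn/(?P<txn_id>.*)')
    for el in root:
        if el.tag == '{DAV:}source':
            for sub_el in el:
                if sub_el.tag == '{DAV:}href':
                    match = pat.search(sub_el.text or '')
                    if match:
                        return match.groupdict()['txn_id']
    return None


print(extract_txn_id(SAMPLE_MERGE_BODY))  # -> 123-abc
```

Running this prints `123-abc`, the same kind of value that `extract_svn_txn_id()` returns and that `get_txn_id_data_key()` later combines with the repository path.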
@@ -0,0 +1,226 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
21 Test suite for making push/pull operations on specially modified INI files
22
23 .. important::
24
25     You must have git >= 1.8.5 for the tests to work correctly. Since commit 68b939b, git
26     redirects some of its output to stderr instead of stdout.
27 """
28
29
30 import time
31
32 import pytest
33
34 from rhodecode.lib import rc_cache
35 from rhodecode.model.db import Repository, UserIpMap, CacheKey
36 from rhodecode.model.meta import Session
37 from rhodecode.model.repo import RepoModel
38 from rhodecode.model.user import UserModel
39 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
40
41 from rhodecode.tests.vcs_operations import (
42 Command, _check_proper_clone, _add_files_and_push, HG_REPO_WITH_GROUP)
43
44
45 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
46 class TestVCSOperations(object):
47
48 def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
49 clone_url = rc_web_server.repo_clone_url(HG_REPO)
50 stdout, stderr = Command('/tmp').execute(
51 'hg clone', clone_url, tmpdir.strpath)
52 _check_proper_clone(stdout, stderr, 'hg')
53
54 def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
55 clone_url = rc_web_server.repo_clone_url(HG_REPO)
56 stdout, stderr = Command('/tmp').execute(
57 'hg clone --pull', clone_url, tmpdir.strpath)
58 _check_proper_clone(stdout, stderr, 'hg')
59
60 def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
61 clone_url = rc_web_server.repo_clone_url(HG_REPO)
62 stdout, stderr = Command('/tmp').execute(
63 'hg clone --pull --stream', clone_url, tmpdir.strpath)
64 assert 'files to transfer,' in stdout
65 assert 'transferred 1.' in stdout
66 assert '114 files updated,' in stdout
67
68 def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
69 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
70 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
71 stdout, stderr = Command('/tmp').execute(
72 'hg clone', clone_url, tmpdir.strpath)
73 _check_proper_clone(stdout, stderr, 'hg')
74
75 def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
76 clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
77 stdout, stderr = Command('/tmp').execute(
78 'hg clone', clone_url, tmpdir.strpath)
79 _check_proper_clone(stdout, stderr, 'hg')
80
81 def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
82 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
83 stdout, stderr = Command('/tmp').execute(
84 'hg clone', clone_url, tmpdir.strpath)
85 assert 'abort: authorization failed' in stderr
86
87 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
88 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
89 stdout, stderr = Command('/tmp').execute(
90 'hg clone', clone_url, tmpdir.strpath)
91 assert 'HTTP Error 404: Not Found' in stderr
92
93 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
94 clone_url = rc_web_server.repo_clone_url('trololo')
95 stdout, stderr = Command('/tmp').execute(
96 'hg clone', clone_url, tmpdir.strpath)
97 assert 'HTTP Error 404: Not Found' in stderr
98
99 def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
100 clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
101 stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
102 assert 'HTTP Error 404: Not Found' in stderr
103
104 def test_clone_existing_path_hg_not_in_database(
105 self, rc_web_server, tmpdir, fs_repo_only):
106
107 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
108 clone_url = rc_web_server.repo_clone_url(db_name)
109 stdout, stderr = Command('/tmp').execute(
110 'hg clone', clone_url, tmpdir.strpath)
111 assert 'HTTP Error 404: Not Found' in stderr
112
113 def test_clone_existing_path_hg_not_in_database_different_scm(
114 self, rc_web_server, tmpdir, fs_repo_only):
115 db_name = fs_repo_only('not-in-db-git', repo_type='git')
116 clone_url = rc_web_server.repo_clone_url(db_name)
117 stdout, stderr = Command('/tmp').execute(
118 'hg clone', clone_url, tmpdir.strpath)
119 assert 'HTTP Error 404: Not Found' in stderr
120
121 def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
122 repo = user_util.create_repo()
123 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
124
125         # Damage the repo by removing its folder
126 RepoModel()._delete_filesystem_repo(repo)
127
128 stdout, stderr = Command('/tmp').execute(
129 'hg clone', clone_url, tmpdir.strpath)
130 assert 'HTTP Error 404: Not Found' in stderr
131
132 def test_push_new_file_hg(self, rc_web_server, tmpdir):
133 clone_url = rc_web_server.repo_clone_url(HG_REPO)
134 stdout, stderr = Command('/tmp').execute(
135 'hg clone', clone_url, tmpdir.strpath)
136
137 stdout, stderr = _add_files_and_push(
138 'hg', tmpdir.strpath, clone_url=clone_url)
139
140 assert 'pushing to' in stdout
141 assert 'size summary' in stdout
142
143 def test_push_invalidates_cache(self, rc_web_server, tmpdir):
144 hg_repo = Repository.get_by_repo_name(HG_REPO)
145
146 # init cache objects
147 CacheKey.delete_all_cache()
148
149 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id)
150
151 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
152
153 with inv_context_manager as invalidation_context:
154 # __enter__ will create and register cache objects
155 pass
156
157 cache_keys = hg_repo.cache_keys
158 assert cache_keys != []
159 old_ids = [x.cache_state_uid for x in cache_keys]
160
161 # clone to init cache
162 clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
163 stdout, stderr = Command('/tmp').execute(
164 'hg clone', clone_url, tmpdir.strpath)
165
166 cache_keys = hg_repo.cache_keys
167 assert cache_keys != []
168 for key in cache_keys:
169 assert key.cache_active is True
170
171         # PUSH that should trigger cache invalidation
172 stdout, stderr = _add_files_and_push(
173 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
174
175 # flush...
176 Session().commit()
177 hg_repo = Repository.get_by_repo_name(HG_REPO)
178 cache_keys = hg_repo.cache_keys
179 assert cache_keys != []
180 new_ids = [x.cache_state_uid for x in cache_keys]
181 assert new_ids != old_ids
182
183 def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
184 clone_url = rc_web_server.repo_clone_url(HG_REPO)
185 stdout, stderr = Command('/tmp').execute(
186 'hg clone', clone_url, tmpdir.strpath)
187
188 push_url = rc_web_server.repo_clone_url(
189 HG_REPO, user='bad', passwd='name')
190 stdout, stderr = _add_files_and_push(
191 'hg', tmpdir.strpath, clone_url=push_url)
192
193 assert 'abort: authorization failed' in stderr
194
195 def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
196 clone_url = rc_web_server.repo_clone_url(HG_REPO)
197 stdout, stderr = Command('/tmp').execute(
198 'hg clone', clone_url, tmpdir.strpath)
199
200 stdout, stderr = _add_files_and_push(
201 'hg', tmpdir.strpath,
202 clone_url=rc_web_server.repo_clone_url('not-existing'))
203
204 assert 'HTTP Error 404: Not Found' in stderr
205
206 def test_ip_restriction_hg(self, rc_web_server, tmpdir):
207 user_model = UserModel()
208 try:
209 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
210 Session().commit()
211 time.sleep(2)
212 clone_url = rc_web_server.repo_clone_url(HG_REPO)
213 stdout, stderr = Command('/tmp').execute(
214 'hg clone', clone_url, tmpdir.strpath)
215 assert 'abort: HTTP Error 403: Forbidden' in stderr
216 finally:
217 # release IP restrictions
218 for ip in UserIpMap.getAll():
219 UserIpMap.delete(ip.ip_id)
220 Session().commit()
221
222 time.sleep(2)
223
224 stdout, stderr = Command('/tmp').execute(
225 'hg clone', clone_url, tmpdir.strpath)
226 _check_proper_clone(stdout, stderr, 'hg')
@@ -0,0 +1,197 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 """
20 Test suite for making push/pull operations on specially modified INI files
21
22 .. important::
23
24     You must have git >= 1.8.5 for the tests to work correctly. Since commit 68b939b, git
25     redirects some of its output to stderr instead of stdout.
26 """
27
28
29 import time
30 import pytest
31
32 from rhodecode.model.db import Repository, UserIpMap
33 from rhodecode.model.meta import Session
34 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import (SVN_REPO, TEST_USER_ADMIN_LOGIN)
37
38
39 from rhodecode.tests.vcs_operations import (
40 Command, _check_proper_clone, _check_proper_svn_push,
41 _add_files_and_push, SVN_REPO_WITH_GROUP)
42
43
44 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
45 class TestVCSOperations(object):
46
47 def test_clone_svn_repo_by_admin(self, rc_web_server, tmpdir):
48 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
49 username, password = rc_web_server.repo_clone_credentials()
50
51 cmd = Command('/tmp')
52
53 auth = f'--non-interactive --username={username} --password={password}'
54 stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
55 _check_proper_clone(stdout, stderr, 'svn')
56 cmd.assert_returncode_success()
57
58 def test_clone_svn_repo_by_id_by_admin(self, rc_web_server, tmpdir):
59 repo_id = Repository.get_by_repo_name(SVN_REPO).repo_id
60 username, password = rc_web_server.repo_clone_credentials()
61
62 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
63 cmd = Command('/tmp')
64 auth = f'--non-interactive --username={username} --password={password}'
65 stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
66 _check_proper_clone(stdout, stderr, 'svn')
67 cmd.assert_returncode_success()
68
69 def test_clone_svn_repo_with_group_by_admin(self, rc_web_server, tmpdir):
70 clone_url = rc_web_server.repo_clone_url(SVN_REPO_WITH_GROUP)
71 username, password = rc_web_server.repo_clone_credentials()
72
73 cmd = Command('/tmp')
74 auth = f'--non-interactive --username={username} --password={password}'
75 stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
76 _check_proper_clone(stdout, stderr, 'svn')
77 cmd.assert_returncode_success()
78
79 def test_clone_wrong_credentials_svn(self, rc_web_server, tmpdir):
80 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
81 username, password = rc_web_server.repo_clone_credentials()
82 password = 'bad-password'
83
84 auth = f'--non-interactive --username={username} --password={password}'
85 stdout, stderr = Command('/tmp').execute(
86 f'svn checkout {auth}', clone_url, tmpdir.strpath)
87 assert 'fatal: Authentication failed' in stderr
88
89 def test_clone_svn_with_slashes(self, rc_web_server, tmpdir):
90 clone_url = rc_web_server.repo_clone_url('//' + SVN_REPO)
91 stdout, stderr = Command('/tmp').execute('svn checkout', clone_url)
92 assert 'not found' in stderr
93
94 def test_clone_existing_path_svn_not_in_database(
95 self, rc_web_server, tmpdir, fs_repo_only):
96 db_name = fs_repo_only('not-in-db-git', repo_type='git')
97 clone_url = rc_web_server.repo_clone_url(db_name)
98 username, password = '', ''
99 auth = f'--non-interactive --username={username} --password={password}'
100
101 stdout, stderr = Command('/tmp').execute(
102 f'svn checkout {auth}', clone_url, tmpdir.strpath)
103 assert 'not found' in stderr
104
105 def test_clone_existing_path_svn_not_in_database_different_scm(
106 self, rc_web_server, tmpdir, fs_repo_only):
107 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
108 clone_url = rc_web_server.repo_clone_url(db_name)
109
110 username, password = '', ''
111 auth = f'--non-interactive --username={username} --password={password}'
112 stdout, stderr = Command('/tmp').execute(
113 f'svn checkout {auth}', clone_url, tmpdir.strpath)
114 assert 'not found' in stderr
115
116 def test_clone_non_existing_store_path_svn(self, rc_web_server, tmpdir, user_util):
117 repo = user_util.create_repo(repo_type='git')
118 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
119
120         # Damage the repo by removing its folder
121 RepoModel()._delete_filesystem_repo(repo)
122
123 username, password = '', ''
124 auth = f'--non-interactive --username={username} --password={password}'
125 stdout, stderr = Command('/tmp').execute(
126 f'svn checkout {auth}', clone_url, tmpdir.strpath)
127 assert 'not found' in stderr
128
129 def test_push_new_file_svn(self, rc_web_server, tmpdir):
130 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
131 username, password = '', ''
132 auth = f'--non-interactive --username={username} --password={password}'
133
134 stdout, stderr = Command('/tmp').execute(
135 f'svn checkout {auth}', clone_url, tmpdir.strpath)
136
137 # commit some stuff into this repo
138 stdout, stderr = _add_files_and_push(
139 'svn', tmpdir.strpath, clone_url=clone_url)
140
141 _check_proper_svn_push(stdout, stderr)
142
143 def test_push_wrong_credentials_svn(self, rc_web_server, tmpdir):
144 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
145
146 username, password = '', ''
147 auth = f'--non-interactive --username={username} --password={password}'
148 stdout, stderr = Command('/tmp').execute(
149 f'svn checkout {auth}', clone_url, tmpdir.strpath)
150
151 push_url = rc_web_server.repo_clone_url(
152 SVN_REPO, user='bad', passwd='name')
153 stdout, stderr = _add_files_and_push(
154 'svn', tmpdir.strpath, clone_url=push_url)
155
156 assert 'fatal: Authentication failed' in stderr
157
158 def test_push_back_to_wrong_url_svn(self, rc_web_server, tmpdir):
159 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
160 username, password = '', ''
161 auth = f'--non-interactive --username={username} --password={password}'
162 Command('/tmp').execute(
163 f'svn checkout {auth}', clone_url, tmpdir.strpath)
164
165 stdout, stderr = _add_files_and_push(
166 'svn', tmpdir.strpath,
167 clone_url=rc_web_server.repo_clone_url('not-existing'))
168
169 assert 'not found' in stderr
170
171 def test_ip_restriction_svn(self, rc_web_server, tmpdir):
172 user_model = UserModel()
173 username, password = '', ''
174 auth = f'--non-interactive --username={username} --password={password}'
175
176 try:
177 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
178 Session().commit()
179 time.sleep(2)
180 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
181
182 stdout, stderr = Command('/tmp').execute(
183 f'svn checkout {auth}', clone_url, tmpdir.strpath)
184 msg = "The requested URL returned error: 403"
185 assert msg in stderr
186 finally:
187 # release IP restrictions
188 for ip in UserIpMap.getAll():
189 UserIpMap.delete(ip.ip_id)
190 Session().commit()
191
192 time.sleep(2)
193
194 cmd = Command('/tmp')
195 stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
196 cmd.assert_returncode_success()
197 _check_proper_clone(stdout, stderr, 'svn')
@@ -657,6 +657,10 b' vcs.methods.cache = true'
657 657 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
658 658 #vcs.svn.compatible_version = 1.8
659 659
660 ; Redis connection settings for the svn integration logic
661 ; This connection string needs to be the same on both CE and the VCSServer
662 vcs.svn.redis_conn = redis://redis:6379/0
663
660 664 ; Enable SVN proxy of requests over HTTP
661 665 vcs.svn.proxy.enabled = true
662 666
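
For context on the new `vcs.svn.redis_conn` setting: `get_redis_client()` passes the value straight to `redis.ConnectionPool.from_url()`, so any URL form that redis-py accepts should work here. A few illustrative examples (hosts and passwords are made up):

```python
import redis

# Plain host/port/db, as in the default shipped above.
pool = redis.ConnectionPool.from_url('redis://redis:6379/0')

# With a password (the username part may be left empty).
pool = redis.ConnectionPool.from_url('redis://:s3cret@redis:6379/0')

# TLS connection.
pool = redis.ConnectionPool.from_url('rediss://redis:6380/0')

client = redis.StrictRedis(connection_pool=pool)
```

Whatever form is chosen, the comment above applies: CE and the VCSServer must point at the same Redis database, presumably because one side writes the txn-id data and the other reads it.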
@@ -625,6 +625,10 b' vcs.methods.cache = true'
625 625 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
626 626 #vcs.svn.compatible_version = 1.8
627 627
628 ; Redis connection settings for the svn integration logic
629 ; This connection string needs to be the same on both CE and the VCSServer
630 vcs.svn.redis_conn = redis://redis:6379/0
631
628 632 ; Enable SVN proxy of requests over HTTP
629 633 vcs.svn.proxy.enabled = true
630 634
@@ -103,6 +103,7 b' def sanitize_settings_and_apply_defaults'
103 103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
104 104
105 105 settings_maker.make_setting('vcs.svn.compatible_version', '')
106 settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0')
106 107 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
107 108 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
108 109 settings_maker.make_setting('vcs.hooks.protocol', 'http')
@@ -258,8 +258,7 b' class ActionParser(object):'
258 258 commit = repo.get_commit(commit_id=commit_id)
259 259 commits.append(commit)
260 260 except CommitDoesNotExistError:
261 log.error(
262 'cannot find commit id %s in this repository',
261 log.error('cannot find commit id %s in this repository',
263 262 commit_id)
264 263 commits.append(commit_id)
265 264 continue
@@ -15,13 +15,14 b''
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18 19 import os
19 20 import time
20 21 import logging
21 import tempfile
22 22
23 23 from rhodecode.lib.config_utils import get_config
24 from rhodecode.lib.ext_json import json
24
25 from rhodecode.lib.svn_txn_utils import get_txn_id_from_store
25 26
26 27 log = logging.getLogger(__name__)
27 28
@@ -47,49 +48,22 b' class HooksModuleCallbackDaemon(BaseHook'
47 48 super().__init__()
48 49 self.hooks_module = module
49 50
50
51 def get_txn_id_data_path(txn_id):
52 import rhodecode
53
54 root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
55 final_dir = os.path.join(root, 'svn_txn_id')
56
57 if not os.path.isdir(final_dir):
58 os.makedirs(final_dir)
59 return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
60
61
62 def store_txn_id_data(txn_id, data_dict):
63 if not txn_id:
64 log.warning('Cannot store txn_id because it is empty')
65 return
66
67 path = get_txn_id_data_path(txn_id)
68 try:
69 with open(path, 'wb') as f:
70 f.write(json.dumps(data_dict))
71 except Exception:
72 log.exception('Failed to write txn_id metadata')
73
74
75 def get_txn_id_from_store(txn_id):
76 """
77 Reads txn_id from store and if present returns the data for callback manager
78 """
79 path = get_txn_id_data_path(txn_id)
80 try:
81 with open(path, 'rb') as f:
82 return json.loads(f.read())
83 except Exception:
84 return {}
51 def __repr__(self):
52 return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
85 53
86 54
87 55 def prepare_callback_daemon(extras, protocol, host, txn_id=None):
88 txn_details = get_txn_id_from_store(txn_id)
89 port = txn_details.get('port', 0)
56
90 57 match protocol:
91 58 case 'http':
92 59 from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
60 port = 0
61 if txn_id:
62 # read txn-id to re-use the PORT for callback daemon
63 repo_path = os.path.join(extras['repo_store'], extras['repository'])
64 txn_details = get_txn_id_from_store(repo_path, txn_id)
65 port = txn_details.get('port', 0)
66
93 67 callback_daemon = HttpHooksCallbackDaemon(
94 68 txn_id=txn_id, host=host, port=port)
95 69 case 'celery':
@@ -28,3 +28,6 b' class CeleryHooksCallbackDaemon(BaseHook'
28 28 # TODO: replace this with settings bootstrapped...
29 29 self.task_queue = config.get('app:main', 'celery.broker_url')
30 30 self.task_backend = config.get('app:main', 'celery.result_backend')
31
32 def __repr__(self):
33 return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
@@ -30,7 +30,7 b' from socketserver import TCPServer'
30 30 from rhodecode.model import meta
31 31 from rhodecode.lib.ext_json import json
32 32 from rhodecode.lib import rc_cache
33 from rhodecode.lib.hook_daemon.base import get_txn_id_data_path
33 from rhodecode.lib.svn_txn_utils import get_txn_id_data_key
34 34 from rhodecode.lib.hook_daemon.hook_module import Hooks
35 35
36 36 log = logging.getLogger(__name__)
@@ -185,9 +185,12 b' class HttpHooksCallbackDaemon(ThreadedHo'
185 185
186 186 use_gevent = False
187 187
188 def __repr__(self):
189 return f'HttpHooksCallbackDaemon(hooks_uri={self.hooks_uri})'
190
188 191 @property
189 192 def _hook_prefix(self):
190 return 'HOOKS: {} '.format(self.hooks_uri)
193 return f'HOOKS: {self.hooks_uri} '
191 194
192 195 def get_hostname(self):
193 196 return socket.gethostname() or '127.0.0.1'
@@ -205,7 +208,7 b' class HttpHooksCallbackDaemon(ThreadedHo'
205 208 port = self.get_available_port()
206 209
207 210 server_address = (host, port)
208 self.hooks_uri = '{}:{}'.format(host, port)
211 self.hooks_uri = f'{host}:{port}'
209 212 self.txn_id = txn_id
210 213 self._done = False
211 214
@@ -249,7 +252,9 b' class HttpHooksCallbackDaemon(ThreadedHo'
249 252 self._daemon = None
250 253 self._callback_thread = None
251 254 if self.txn_id:
252 txn_id_file = get_txn_id_data_path(self.txn_id)
255 #TODO: figure out the repo_path...
256 repo_path = ''
257 txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
253 258 log.debug('Cleaning up TXN ID %s', txn_id_file)
254 259 if os.path.isfile(txn_id_file):
255 260 os.remove(txn_id_file)
@@ -272,7 +277,9 b' class HttpHooksCallbackDaemon(ThreadedHo'
272 277 self._callback_greenlet = None
273 278
274 279 if self.txn_id:
275 txn_id_file = get_txn_id_data_path(self.txn_id)
280 #TODO: figure out the repo_path...
281 repo_path = ''
282 txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
276 283 log.debug('Cleaning up TXN ID %s', txn_id_file)
277 284 if os.path.isfile(txn_id_file):
278 285 os.remove(txn_id_file)
@@ -17,7 +17,8 b''
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 import base64
20 import re
21 import os
21 22 import logging
22 23 import urllib.request
23 24 import urllib.parse
@@ -28,14 +29,10 b' import requests'
28 29 from pyramid.httpexceptions import HTTPNotAcceptable
29 30
30 31 from rhodecode import ConfigGet
31 from rhodecode.lib import rc_cache
32 32 from rhodecode.lib.middleware import simplevcs
33 33 from rhodecode.lib.middleware.utils import get_path_info
34 34 from rhodecode.lib.utils import is_valid_repo
35 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
36 from rhodecode.lib.type_utils import str2bool
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.hook_daemon.base import store_txn_id_data
35 from rhodecode.lib.str_utils import safe_str
39 36
40 37 log = logging.getLogger(__name__)
41 38
@@ -63,28 +60,11 b' class SimpleSvnApp(object):'
63 60
64 61 # stream control flag, based on request and content type...
65 62 stream = False
66
67 63 if req_method in ['MKCOL'] or has_content_length:
68 data_processed = False
69 # read chunk to check if we have txn-with-props
70 initial_data: bytes = data_io.read(1024)
71 if initial_data.startswith(b'(create-txn-with-props'):
72 data_io = initial_data + data_io.read()
73 # store on-the-fly our rc_extra using svn revision properties
74 # those can be read later on in hooks executed so we have a way
75 # to pass in the data into svn hooks
76 rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras))
77 rc_data_len = str(len(rc_data))
78 # header defines data length, and serialized data
79 skel = b' rc-scm-extras %b %b' % (safe_bytes(rc_data_len), safe_bytes(rc_data))
80 data_io = data_io[:-2] + skel + b'))'
81 data_processed = True
82
83 if not data_processed:
84 # NOTE(johbo): Avoid that we end up with sending the request in chunked
85 # transfer encoding (mainly on Gunicorn). If we know the content
86 # length, then we should transfer the payload in one request.
87 data_io = initial_data + data_io.read()
64 # NOTE(johbo): Avoid that we end up with sending the request in chunked
65 # transfer encoding (mainly on Gunicorn). If we know the content
66 # length, then we should transfer the payload in one request.
67 data_io = data_io.read()
88 68
89 69 if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked':
90 70 # NOTE(marcink): when getting/uploading files, we want to STREAM content
@@ -101,6 +81,7 b' class SimpleSvnApp(object):'
101 81 stream=stream
102 82 )
103 83 if req_method in ['HEAD', 'DELETE']:
84 # NOTE(marcink): HEAD might be deprecated for SVN 1.14+ protocol
104 85 del call_kwargs['data']
105 86
106 87 try:
@@ -120,14 +101,6 b' class SimpleSvnApp(object):'
120 101 log.debug('got response code: %s', response.status_code)
121 102
122 103 response_headers = self._get_response_headers(response.headers)
123
124 if response.headers.get('SVN-Txn-name'):
125 svn_tx_id = response.headers.get('SVN-Txn-name')
126 txn_id = rc_cache.utils.compute_key_from_params(
127 self.config['repository'], svn_tx_id)
128 port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1])
129 store_txn_id_data(txn_id, {'port': port})
130
131 104 start_response(f'{response.status_code} {response.reason}', response_headers)
132 105 return response.iter_content(chunk_size=1024)
133 106
@@ -137,6 +110,20 b' class SimpleSvnApp(object):'
137 110 url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'")
138 111 return url_path
139 112
113 def _get_txn_id(self, environ):
114 url = environ['RAW_URI']
115
116 # Define the regex pattern
117 pattern = r'/txr/([^/]+)/'
118
119 # Search for the pattern in the URL
120 match = re.search(pattern, url)
121
122 # Check if a match is found and extract the captured group
123 if match:
124 txn_id = match.group(1)
125 return txn_id
126
140 127 def _get_request_headers(self, environ):
141 128 headers = {}
142 129 whitelist = {
@@ -182,10 +169,39 b' class DisabledSimpleSvnApp(object):'
182 169
183 170
184 171 class SimpleSvn(simplevcs.SimpleVCS):
172 """
173 details: https://svn.apache.org/repos/asf/subversion/trunk/notes/http-and-webdav/webdav-protocol
174
175 Read Commands : (OPTIONS, PROPFIND, GET, REPORT)
176
177     GET: Used to fetch information about resources.
178     PROPFIND: Used to retrieve properties of resources.
179     REPORT: Used for specialized queries to the repository, e.g. history.
180     OPTIONS: The server responds with information about the available HTTP
181     methods and other server capabilities.
182
183 Write Commands : (MKACTIVITY, PROPPATCH, PUT, CHECKOUT, MKCOL, MOVE,
184 -------------- COPY, DELETE, LOCK, UNLOCK, MERGE)
185
186 With the exception of LOCK/UNLOCK, every write command performs some
187 sort of DeltaV commit operation. In DeltaV, a commit always starts
188 by creating a transaction (MKACTIVITY), applies a log message
189 (PROPPATCH), does some other write methods, and then ends by
190 committing the transaction (MERGE). If the MERGE fails, the client
191 may try to remove the transaction with a DELETE.
192
193 PROPPATCH: Used to set and/or remove properties on resources.
194 MKCOL: Creates a new collection (directory).
195 DELETE: Removes a resource.
196 COPY and MOVE: Used for copying and moving resources.
197 MERGE: Used to merge changes from different branches.
198 CHECKOUT, CHECKIN, UNCHECKOUT: DeltaV methods for managing working resources and versions.
199 """
185 200
186 201 SCM = 'svn'
187 202 READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')
188 DEFAULT_HTTP_SERVER = 'http://localhost:8090'
203 WRITE_COMMANDS = ('MERGE', 'POST', 'PUT', 'COPY', 'MOVE', 'DELETE', 'MKCOL')
204 DEFAULT_HTTP_SERVER = 'http://svn:8090'
189 205
190 206 def _get_repository_name(self, environ):
191 207 """
@@ -218,10 +234,10 b' class SimpleSvn(simplevcs.SimpleVCS):'
218 234 else 'push')
219 235
220 236 def _should_use_callback_daemon(self, extras, environ, action):
221 # only MERGE command triggers hooks, so we don't want to start
237         # only PUT & MERGE commands trigger hooks, so we don't want to start
222 238 # hooks server too many times. POST however starts the svn transaction
223 239 # so we also need to run the init of callback daemon of POST
224 if environ['REQUEST_METHOD'] in ['MERGE', 'POST']:
240 if environ['REQUEST_METHOD'] not in self.READ_ONLY_COMMANDS:
225 241 return True
226 242 return False
227 243
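
The change above widens the callback-daemon check from the hard-coded `['MERGE', 'POST']` list to "anything that is not read-only", matching the WebDAV command split described in the new class docstring. A standalone sketch of that rule (not RhodeCode's actual class, just the classification):

```python
# Read-only WebDAV verbs, as listed in SimpleSvn.READ_ONLY_COMMANDS.
READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')


def needs_callback_daemon(request_method: str) -> bool:
    # Every non read-only verb may be part of a DeltaV commit, so it gets
    # a hooks callback daemon; pure read verbs never do.
    return request_method.upper() not in READ_ONLY_COMMANDS


assert needs_callback_daemon('POST')        # starts the txn
assert needs_callback_daemon('PUT')         # uploads file content into the txn
assert needs_callback_daemon('MERGE')       # commits the txn, fires the hooks
assert not needs_callback_daemon('REPORT')  # read-only, e.g. log/history
```

Note that under this rule other write verbs such as LOCK or COPY also start a daemon, not only PUT, POST and MERGE.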
@@ -25,11 +25,9 b" It's implemented with basic auth functio"
25 25
26 26 import os
27 27 import re
28 import io
29 28 import logging
30 29 import importlib
31 30 from functools import wraps
32 from lxml import etree
33 31
34 32 import time
35 33 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
@@ -41,6 +39,7 b' from zope.cachedescriptors.property impo'
41 39 import rhodecode
42 40 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
43 41 from rhodecode.lib import rc_cache
42 from rhodecode.lib.svn_txn_utils import store_txn_id_data
44 43 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 44 from rhodecode.lib.base import (
46 45 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
@@ -48,7 +47,7 b' from rhodecode.lib.exceptions import (Us'
48 47 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
49 48 from rhodecode.lib.middleware import appenlight
50 49 from rhodecode.lib.middleware.utils import scm_app_http
51 from rhodecode.lib.str_utils import safe_bytes
50 from rhodecode.lib.str_utils import safe_bytes, safe_int
52 51 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 52 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
54 53 from rhodecode.lib.vcs.conf import settings as vcs_settings
@@ -63,29 +62,6 b' from rhodecode.model.settings import Set'
63 62 log = logging.getLogger(__name__)
64 63
65 64
66 def extract_svn_txn_id(acl_repo_name, data: bytes):
67 """
68 Helper method for extraction of svn txn_id from submitted XML data during
69 POST operations
70 """
71
72 try:
73 root = etree.fromstring(data)
74 pat = re.compile(r'/txn/(?P<txn_id>.*)')
75 for el in root:
76 if el.tag == '{DAV:}source':
77 for sub_el in el:
78 if sub_el.tag == '{DAV:}href':
79 match = pat.search(sub_el.text)
80 if match:
81 svn_tx_id = match.groupdict()['txn_id']
82 txn_id = rc_cache.utils.compute_key_from_params(
83 acl_repo_name, svn_tx_id)
84 return txn_id
85 except Exception:
86 log.exception('Failed to extract txn_id')
87
88
89 65 def initialize_generator(factory):
90 66 """
91 67 Initializes the returned generator by draining its first element.
@@ -468,7 +444,6 b' class SimpleVCS(object):'
468 444 log.debug('Not enough credentials to access repo: `%s` '
469 445 'repository as anonymous user', self.acl_repo_name)
470 446
471
472 447 username = None
473 448 # ==============================================================
474 449 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
@@ -582,6 +557,24 b' class SimpleVCS(object):'
582 557 return self._generate_vcs_response(
583 558 environ, start_response, repo_path, extras, action)
584 559
560 def _get_txn_id(self, environ):
561
562 for k in ['RAW_URI', 'HTTP_DESTINATION']:
563 url = environ.get(k)
564 if not url:
565 continue
566
567 # regex to search for svn-txn-id
568 pattern = r'/!svn/txr/([^/]+)/'
569
570 # Search for the pattern in the URL
571 match = re.search(pattern, url)
572
573 # Check if a match is found and extract the captured group
574 if match:
575 txn_id = match.group(1)
576 return txn_id
577
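
The `/!svn/txr/...` pattern used here targets the transaction-root URLs that an SVN client requests while uploading content for an in-progress commit over HTTP. A quick illustrative check of the regex against made-up request URLs:

```python
import re

# Same pattern as _get_txn_id above.
pattern = r'/!svn/txr/([^/]+)/'

# Hypothetical URLs as they might appear in RAW_URI / HTTP_DESTINATION
# during an SVN commit over HTTP (repo name and txn id are made up).
urls = [
    '/my-repo/!svn/txr/123-abc/trunk/file.txt',   # PUT of file content
    '/my-repo/!svn/me',                           # no txn id here
]

for url in urls:
    match = re.search(pattern, url)
    print(url, '->', match.group(1) if match else None)
# /my-repo/!svn/txr/123-abc/trunk/file.txt -> 123-abc
# /my-repo/!svn/me -> None
```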
585 578 @initialize_generator
586 579 def _generate_vcs_response(
587 580 self, environ, start_response, repo_path, extras, action):
@@ -593,28 +586,23 b' class SimpleVCS(object):'
593 586 also handles the locking exceptions which will be triggered when
594 587 the first chunk is produced by the underlying WSGI application.
595 588 """
596
597 txn_id = ''
598 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
599 # case for SVN, we want to re-use the callback daemon port
600 # so we use the txn_id, for this we peek the body, and still save
601 # it as wsgi.input
602
603 stream = environ['wsgi.input']
604
605 if isinstance(stream, io.BytesIO):
606 data: bytes = stream.getvalue()
607 elif hasattr(stream, 'buf'): # most likely gunicorn.http.body.Body
608 data: bytes = stream.buf.getvalue()
609 else:
610 # fallback to the crudest way, copy the iterator
611 data = safe_bytes(stream.read())
612 environ['wsgi.input'] = io.BytesIO(data)
613
614 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
589 svn_txn_id = ''
590 if action == 'push':
591 svn_txn_id = self._get_txn_id(environ)
615 592
616 593 callback_daemon, extras = self._prepare_callback_daemon(
617 extras, environ, action, txn_id=txn_id)
594 extras, environ, action, txn_id=svn_txn_id)
595
596 if svn_txn_id:
597
598 port = safe_int(extras['hooks_uri'].split(':')[-1])
599 txn_id_data = extras.copy()
600 txn_id_data.update({'port': port})
601 txn_id_data.update({'req_method': environ['REQUEST_METHOD']})
602
603 full_repo_path = repo_path
604 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
605
618 606 log.debug('HOOKS extras is %s', extras)
619 607
620 608 http_scheme = self._get_http_scheme(environ)
@@ -677,6 +665,7 b' class SimpleVCS(object):'
677 665
678 666 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
679 667 protocol = vcs_settings.HOOKS_PROTOCOL
668
680 669 if not self._should_use_callback_daemon(extras, environ, action):
681 670 # disable callback daemon for actions that don't require it
682 671 protocol = 'local'
@@ -12,16 +12,14 b''
12 12 ******************************************************************************/
13 13 function registerRCRoutes() {
14 14 // routes registration
15 pyroutes.register('admin_artifacts', '/_admin/artifacts', []);
16 pyroutes.register('admin_artifacts_data', '/_admin/artifacts-data', []);
17 pyroutes.register('admin_artifacts_delete', '/_admin/artifacts/%(uid)s/delete', ['uid']);
18 pyroutes.register('admin_artifacts_show_all', '/_admin/artifacts', []);
19 pyroutes.register('admin_artifacts_show_info', '/_admin/artifacts/%(uid)s', ['uid']);
20 pyroutes.register('admin_artifacts_update', '/_admin/artifacts/%(uid)s/update', ['uid']);
15 pyroutes.register('admin_artifacts', '/_admin/_admin/artifacts', []);
16 pyroutes.register('admin_artifacts_delete', '/_admin/_admin/artifacts/%(uid)s/delete', ['uid']);
17 pyroutes.register('admin_artifacts_show_all', '/_admin/_admin/artifacts', []);
18 pyroutes.register('admin_artifacts_show_info', '/_admin/_admin/artifacts/%(uid)s', ['uid']);
19 pyroutes.register('admin_artifacts_update', '/_admin/_admin/artifacts/%(uid)s/update', ['uid']);
21 20 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
22 21 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
23 pyroutes.register('admin_automation', '/_admin/automation', []);
24 pyroutes.register('admin_automation_update', '/_admin/automation/%(entry_id)s/update', ['entry_id']);
22 pyroutes.register('admin_automation', '/_admin/_admin/automation', []);
25 23 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
26 24 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
27 25 pyroutes.register('admin_home', '/_admin', []);
@@ -29,7 +27,6 b' function registerRCRoutes() {'
29 27 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
30 28 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
31 29 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
32 pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []);
33 30 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
34 31 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
35 32 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
@@ -39,8 +36,7 b' function registerRCRoutes() {'
39 36 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
40 37 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
41 38 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
42 pyroutes.register('admin_scheduler', '/_admin/scheduler', []);
43 pyroutes.register('admin_scheduler_show_tasks', '/_admin/scheduler/_tasks', []);
39 pyroutes.register('admin_scheduler', '/_admin/_admin/scheduler', []);
44 40 pyroutes.register('admin_settings', '/_admin/settings', []);
45 41 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
46 42 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
@@ -59,8 +55,6 b' function registerRCRoutes() {'
59 55 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
60 56 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
61 57 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
62 pyroutes.register('admin_settings_license', '/_admin/settings/license', []);
63 pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []);
64 58 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
65 59 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
66 60 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
@@ -68,12 +62,6 b' function registerRCRoutes() {'
68 62 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
69 63 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
70 64 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
71 pyroutes.register('admin_settings_scheduler_create', '/_admin/scheduler/create', []);
72 pyroutes.register('admin_settings_scheduler_delete', '/_admin/scheduler/%(schedule_id)s/delete', ['schedule_id']);
73 pyroutes.register('admin_settings_scheduler_edit', '/_admin/scheduler/%(schedule_id)s', ['schedule_id']);
74 pyroutes.register('admin_settings_scheduler_execute', '/_admin/scheduler/%(schedule_id)s/execute', ['schedule_id']);
75 pyroutes.register('admin_settings_scheduler_new', '/_admin/scheduler/new', []);
76 pyroutes.register('admin_settings_scheduler_update', '/_admin/scheduler/%(schedule_id)s/update', ['schedule_id']);
77 65 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
78 66 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
79 67 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
@@ -97,7 +85,6 b' function registerRCRoutes() {'
97 85 pyroutes.register('channelstream_proxy', '/_channelstream', []);
98 86 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
99 87 pyroutes.register('check_2fa', '/_admin/check_2fa', []);
100 pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']);
101 88 pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']);
102 89 pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']);
103 90 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
@@ -222,8 +209,6 b' function registerRCRoutes() {'
222 209 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
223 210 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
224 211 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
225 pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []);
226 pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []);
227 212 pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']);
228 213 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
229 214 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
@@ -254,7 +239,6 b' function registerRCRoutes() {'
254 239 pyroutes.register('ops_healthcheck', '/_admin/ops/status', []);
255 240 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
256 241 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
257 pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']);
258 242 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
259 243 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
260 244 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
@@ -264,7 +248,6 b' function registerRCRoutes() {'
264 248 pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']);
265 249 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
266 250 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
267 pyroutes.register('pullrequest_draft_comments_submit', '/%(repo_name)s/pull-request/%(pull_request_id)s/draft_comments_submit', ['repo_name', 'pull_request_id']);
268 251 pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']);
269 252 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
270 253 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
@@ -277,18 +260,8 b' function registerRCRoutes() {'
277 260 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
278 261 pyroutes.register('register', '/_admin/register', []);
279 262 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
280 pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']);
281 pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']);
282 pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']);
283 pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']);
284 263 pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']);
285 pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']);
286 pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']);
287 pyroutes.register('repo_artifacts_stream_script', '/_file_store/stream-upload-script', []);
288 pyroutes.register('repo_artifacts_stream_store', '/_file_store/stream-upload', []);
289 pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']);
290 264 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
291 pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']);
292 265 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
293 266 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
294 267 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
@@ -366,9 +339,6 b' function registerRCRoutes() {'
366 339 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
367 340 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
368 341 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
369 pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']);
370 pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']);
371 pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']);
372 342 pyroutes.register('repo_settings_quick_actions', '/%(repo_name)s/settings/quick-action', ['repo_name']);
373 343 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
374 344 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
@@ -17,7 +17,7 b' examples = ['
17 17
18 18 (
19 19 'Tickets with #123 (Redmine etc)',
20 '(?<![a-zA-Z0-9_/]{1,10}-?)(#)(?P<issue_id>\d+)',
20 '(?<![a-zA-Z0-9_/]{1,10}-?)(#)(?P<issue_id>[0-9]+)',
21 21 'https://myissueserver.com/${repo}/issue/${issue_id}',
22 22 ''
23 23 ),
@@ -60,7 +60,7 b' examples = ['
60 60
61 61 (
62 62 'Pivotal Tracker',
63 '(?:pivot-)(?P<project_id>\d+)-(?P<story>\d+)',
63 '(?:pivot-)(?P<project_id>\d+)-(?P<story>[0-9]+)',
64 64 'https://www.pivotaltracker.com/s/projects/${project_id}/stories/${story}',
65 65 'PIV-',
66 66 ),
@@ -332,7 +332,6 b''
332 332 POST request to trigger the (re)generation of the mod_dav_svn config. */
333 333 $('#vcs_svn_generate_cfg').on('click', function(event) {
334 334 event.preventDefault();
335 alert('i cliked it !!')
336 335 var url = "${h.route_path('admin_settings_vcs_svn_generate_cfg')}";
337 336 var jqxhr = $.post(url, {'csrf_token': CSRF_TOKEN});
338 337 jqxhr.done(function(data) {
@@ -161,7 +161,7 b' def vcsserver_port(request):'
161 161
162 162
163 163 @pytest.fixture(scope='session')
164 def available_port_factory():
164 def available_port_factory() -> get_available_port:
165 165 """
166 166 Returns a callable which returns free port numbers.
167 167 """
@@ -304,7 +304,8 b' class TestPrepareHooksDaemon(object):'
304 304 'txn_id': 'txnid2',
305 305 'hooks_protocol': protocol.lower(),
306 306 'task_backend': '',
307 'task_queue': ''
307 'task_queue': '',
308 'repo_store': '/var/opt/rhodecode_repo_store'
308 309 }
309 310 callback, extras = hook_base.prepare_callback_daemon(
310 311 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
@@ -148,7 +148,7 b' class RcVCSServer(ServerBase):'
148 148 self._args = [
149 149 'gunicorn',
150 150 '--bind', self.bind_addr,
151 '--worker-class', 'gevent',
151 '--worker-class', 'gthread',
152 152 '--backlog', '16',
153 153 '--timeout', '300',
154 154 '--workers', workers,
@@ -185,7 +185,7 b' class RcWebServer(ServerBase):'
185 185 self._args = [
186 186 'gunicorn',
187 187 '--bind', self.bind_addr,
188 '--worker-class', 'gevent',
188 '--worker-class', 'gthread',
189 189 '--backlog', '16',
190 190 '--timeout', '300',
191 191 '--workers', workers,
@@ -219,3 +219,11 b' class RcWebServer(ServerBase):'
219 219 params.update(**kwargs)
220 220 _url = f"http://{params['user']}:{params['passwd']}@{params['host']}/{params['cloned_repo']}"
221 221 return _url
222
223 def repo_clone_credentials(self, **kwargs):
224 params = {
225 'user': TEST_USER_ADMIN_LOGIN,
226 'passwd': TEST_USER_ADMIN_PASS,
227 }
228 params.update(**kwargs)
229 return params['user'], params['passwd']
@@ -26,20 +26,21 b' Base for test suite for making push/pull'
26 26 to redirect things to stderr instead of stdout.
27 27 """
28 28
29 from os.path import join as jn
30 from subprocess import Popen, PIPE
29
31 30 import logging
32 31 import os
33 32 import tempfile
33 import subprocess
34 34
35 35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.tests import GIT_REPO, HG_REPO
36 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
37 37
38 38 DEBUG = True
39 39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 40 REPO_GROUP = 'a_repo_group'
41 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
42 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
41 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
42 GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}'
43 SVN_REPO_WITH_GROUP = f'{REPO_GROUP}/{SVN_REPO}'
43 44
44 45 log = logging.getLogger(__name__)
45 46
@@ -65,8 +66,9 b' class Command(object):'
65 66 if key.startswith('COV_CORE_'):
66 67 del env[key]
67 68
68 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
69 cwd=self.cwd, env=env)
69 self.process = subprocess.Popen(
70 command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
71 cwd=self.cwd, env=env)
70 72 stdout, stderr = self.process.communicate()
71 73
72 74 stdout = safe_str(stdout)
@@ -85,12 +87,14 b' def _add_files(vcs, dest, clone_url=None'
85 87     full_name = 'Marcin Kuźminski'
86 88 email = 'me@email.com'
87 89 git_ident = f"git config user.name {full_name} && git config user.email {email}"
88 cwd = path = jn(dest)
90 cwd = path = os.path.join(dest)
89 91
90 92 tags = tags or []
91 added_file = jn(path, '{}_setup.py'.format(next(tempfile._RandomNameSequence())))
92 Command(cwd).execute('touch %s' % added_file)
93 Command(cwd).execute('%s add %s' % (vcs, added_file))
93 name_sequence = next(tempfile._RandomNameSequence())
94 added_file = os.path.join(path, f'{name_sequence}_setup.py')
95
96 Command(cwd).execute(f'touch {added_file}')
97 Command(cwd).execute(f'{vcs} add {added_file}')
94 98     author_str = 'Marcin Kuźminski <me@email.com>'
95 99
96 100 for i in range(kwargs.get('files_no', 3)):
@@ -128,7 +132,7 b' def _add_files_and_push(vcs, dest, clone'
128 132 vcs is git or hg and defines what VCS we want to make those files for
129 133 """
130 134     git_ident = "git config user.name Marcin Kuźminski && git config user.email me@email.com"
131 cwd = jn(dest)
135 cwd = os.path.join(dest)
132 136
133 137 # commit some stuff into this repo
134 138 _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)
@@ -147,12 +151,15 b' def _add_files_and_push(vcs, dest, clone'
147 151 if new_branch:
148 152 maybe_new_branch = '--new-branch'
149 153 stdout, stderr = Command(cwd).execute(
150 'hg push --traceback --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
154 f'hg push --traceback --verbose {maybe_new_branch} -r {target_branch} {clone_url}'
151 155 )
152 156 elif vcs == 'git':
153 157 stdout, stderr = Command(cwd).execute(
154 """{} &&
155 git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
158 f'{git_ident} && git push --verbose --tags {clone_url} {target_branch}'
159 )
160 elif vcs == 'svn':
161 stdout, stderr = Command(cwd).execute(
162 f'svn ci -m "pushing to {target_branch}"'
156 163 )
157 164
158 165 return stdout, stderr
@@ -179,6 +186,13 b' def _check_proper_hg_push(stdout, stderr'
179 186 assert 'abort:' not in stderr
180 187
181 188
189 def _check_proper_svn_push(stdout, stderr):
190     assert 'Transmitting file data' in stdout
191     assert 'Committed revision' in stdout
192
193     assert 'abort:' not in stderr
194
195
182 196 def _check_proper_clone(stdout, stderr, vcs):
183 197 if vcs == 'hg':
184 198 assert 'requesting all changes' in stdout
@@ -193,3 +207,8 b' def _check_proper_clone(stdout, stderr, '
193 207 assert 'Cloning into' in stderr
194 208 assert 'abort:' not in stderr
195 209 assert 'fatal:' not in stderr
210
211 if vcs == 'svn':
212 assert 'Checked out revision' in stdout  # final line of a successful svn checkout
213
214
@@ -42,7 +42,7 b' from rhodecode.model.db import Repositor'
42 42 from rhodecode.model.meta import Session
43 43 from rhodecode.integrations.types.webhook import WebhookIntegrationType
44 44
45 from rhodecode.tests import GIT_REPO, HG_REPO
45 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
46 46 from rhodecode.tests.conftest import HTTPBIN_DOMAIN, HTTPBIN_POST
47 47 from rhodecode.tests.fixture import Fixture
48 48 from rhodecode.tests.server_utils import RcWebServer
@@ -51,13 +51,15 b' from rhodecode.tests.server_utils import'
51 51 REPO_GROUP = 'a_repo_group'
52 52 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
53 53 GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}'
54 SVN_REPO_WITH_GROUP = f'{REPO_GROUP}/{SVN_REPO}'
54 55
55 56 log = logging.getLogger(__name__)
56 57
57 58
58 59 def check_httpbin_connection():
60 log.debug('Checking if HTTPBIN_DOMAIN: %s is available', HTTPBIN_DOMAIN)
59 61 try:
60 response = requests.get(HTTPBIN_DOMAIN)
62 response = requests.get(HTTPBIN_DOMAIN, timeout=5)
61 63 return response.status_code == 200
62 64 except Exception as e:
63 65 print(e)
@@ -102,11 +104,15 b' def repos(request, db_connection):'
102 104 fixture.create_fork(GIT_REPO, GIT_REPO,
103 105 repo_name_full=GIT_REPO_WITH_GROUP,
104 106 repo_group=repo_group_id)
107 fixture.create_fork(SVN_REPO, SVN_REPO,
108 repo_name_full=SVN_REPO_WITH_GROUP,
109 repo_group=repo_group_id)
105 110
106 111 @request.addfinalizer
107 112 def cleanup():
108 113 fixture.destroy_repo(HG_REPO_WITH_GROUP)
109 114 fixture.destroy_repo(GIT_REPO_WITH_GROUP)
115 fixture.destroy_repo(SVN_REPO_WITH_GROUP)
110 116 fixture.destroy_repo_group(repo_group_id)
111 117
112 118
@@ -139,11 +145,11 b' def rc_web_server('
139 145 """
140 146 Run the web server as a subprocess, with its own instance of vcsserver
141 147 """
142 rcweb_port = available_port_factory()
143 log.info('Using rcweb ops test port {}'.format(rcweb_port))
148 rcweb_port: int = available_port_factory()
149 log.info('Using rcweb ops test port %s', rcweb_port)
144 150
145 vcsserver_port = available_port_factory()
146 log.info('Using vcsserver ops test port {}'.format(vcsserver_port))
151 vcsserver_port: int = available_port_factory()
152 log.info('Using vcsserver ops test port %s', vcsserver_port)
147 153
148 154 vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log')
149 155 vcsserver_factory(
@@ -303,5 +309,3 b' def branch_permission_setter(request):'
303 309 Session().commit()
304 310
305 311 return _branch_permissions_setter
306
307
@@ -32,7 +32,7 b' from rhodecode.lib.vcs.backends.git.repo'
32 32 from rhodecode.lib.vcs.nodes import FileNode
33 33 from rhodecode.tests import GIT_REPO
34 34 from rhodecode.tests.vcs_operations import Command
35 from .test_vcs_operations import _check_proper_clone, _check_proper_git_push
35 from .test_vcs_operations_git import _check_proper_clone, _check_proper_git_push
36 36
37 37
38 38 def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
@@ -28,47 +28,23 b' Test suite for making push/pull operatio'
28 28
29 29
30 30 import time
31 import logging
32
33 31 import pytest
34 32
35 from rhodecode.lib import rc_cache
36 from rhodecode.model.auth_token import AuthTokenModel
37 from rhodecode.model.db import Repository, UserIpMap, CacheKey
33 from rhodecode.model.db import Repository, UserIpMap
38 34 from rhodecode.model.meta import Session
39 35 from rhodecode.model.repo import RepoModel
40 36 from rhodecode.model.user import UserModel
41 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
42 from rhodecode.tests.utils import assert_message_in_log
37 from rhodecode.tests import (GIT_REPO, TEST_USER_ADMIN_LOGIN)
38
43 39
44 40 from rhodecode.tests.vcs_operations import (
45 41 Command, _check_proper_clone, _check_proper_git_push,
46 _add_files_and_push, HG_REPO_WITH_GROUP, GIT_REPO_WITH_GROUP)
42 _add_files_and_push, GIT_REPO_WITH_GROUP)
47 43
48 44
49 45 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
50 46 class TestVCSOperations(object):
51 47
52 def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
53 clone_url = rc_web_server.repo_clone_url(HG_REPO)
54 stdout, stderr = Command('/tmp').execute(
55 'hg clone', clone_url, tmpdir.strpath)
56 _check_proper_clone(stdout, stderr, 'hg')
57
58 def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
59 clone_url = rc_web_server.repo_clone_url(HG_REPO)
60 stdout, stderr = Command('/tmp').execute(
61 'hg clone --pull', clone_url, tmpdir.strpath)
62 _check_proper_clone(stdout, stderr, 'hg')
63
64 def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
65 clone_url = rc_web_server.repo_clone_url(HG_REPO)
66 stdout, stderr = Command('/tmp').execute(
67 'hg clone --pull --stream', clone_url, tmpdir.strpath)
68 assert 'files to transfer,' in stdout
69 assert 'transferred 1.' in stdout
70 assert '114 files updated,' in stdout
71
72 48 def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
73 49 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
74 50 cmd = Command('/tmp')
@@ -83,13 +59,6 b' class TestVCSOperations(object):'
83 59 _check_proper_clone(stdout, stderr, 'git')
84 60 cmd.assert_returncode_success()
85 61
86 def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
87 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
88 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
89 stdout, stderr = Command('/tmp').execute(
90 'hg clone', clone_url, tmpdir.strpath)
91 _check_proper_clone(stdout, stderr, 'hg')
92
93 62 def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
94 63 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
95 64 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
@@ -98,12 +67,6 b' class TestVCSOperations(object):'
98 67 _check_proper_clone(stdout, stderr, 'git')
99 68 cmd.assert_returncode_success()
100 69
101 def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
102 clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
103 stdout, stderr = Command('/tmp').execute(
104 'hg clone', clone_url, tmpdir.strpath)
105 _check_proper_clone(stdout, stderr, 'hg')
106
107 70 def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
108 71 clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
109 72 cmd = Command('/tmp')
@@ -121,11 +84,6 b' class TestVCSOperations(object):'
121 84 assert 'Cloning into' in stderr
122 85 cmd.assert_returncode_success()
123 86
124 def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
125 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
126 stdout, stderr = Command('/tmp').execute(
127 'hg clone', clone_url, tmpdir.strpath)
128 assert 'abort: authorization failed' in stderr
129 87
130 88 def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
131 89 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
@@ -139,12 +97,6 b' class TestVCSOperations(object):'
139 97 'hg clone', clone_url, tmpdir.strpath)
140 98 assert 'HTTP Error 404: Not Found' in stderr
141 99
142 def test_clone_hg_repo_as_git(self, rc_web_server, tmpdir):
143 clone_url = rc_web_server.repo_clone_url(HG_REPO)
144 stdout, stderr = Command('/tmp').execute(
145 'git clone', clone_url, tmpdir.strpath)
146 assert 'not found' in stderr
147
148 100 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
149 101 clone_url = rc_web_server.repo_clone_url('trololo')
150 102 stdout, stderr = Command('/tmp').execute(
@@ -156,25 +108,11 b' class TestVCSOperations(object):'
156 108 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
157 109 assert 'not found' in stderr
158 110
159 def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
160 clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
161 stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
162 assert 'HTTP Error 404: Not Found' in stderr
163
164 111 def test_clone_git_with_slashes(self, rc_web_server, tmpdir):
165 112 clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO)
166 113 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
167 114 assert 'not found' in stderr
168 115
169 def test_clone_existing_path_hg_not_in_database(
170 self, rc_web_server, tmpdir, fs_repo_only):
171
172 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
173 clone_url = rc_web_server.repo_clone_url(db_name)
174 stdout, stderr = Command('/tmp').execute(
175 'hg clone', clone_url, tmpdir.strpath)
176 assert 'HTTP Error 404: Not Found' in stderr
177
178 116 def test_clone_existing_path_git_not_in_database(
179 117 self, rc_web_server, tmpdir, fs_repo_only):
180 118 db_name = fs_repo_only('not-in-db-git', repo_type='git')
@@ -183,14 +121,6 b' class TestVCSOperations(object):'
183 121 'git clone', clone_url, tmpdir.strpath)
184 122 assert 'not found' in stderr
185 123
186 def test_clone_existing_path_hg_not_in_database_different_scm(
187 self, rc_web_server, tmpdir, fs_repo_only):
188 db_name = fs_repo_only('not-in-db-git', repo_type='git')
189 clone_url = rc_web_server.repo_clone_url(db_name)
190 stdout, stderr = Command('/tmp').execute(
191 'hg clone', clone_url, tmpdir.strpath)
192 assert 'HTTP Error 404: Not Found' in stderr
193
194 124 def test_clone_existing_path_git_not_in_database_different_scm(
195 125 self, rc_web_server, tmpdir, fs_repo_only):
196 126 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
@@ -199,17 +129,6 b' class TestVCSOperations(object):'
199 129 'git clone', clone_url, tmpdir.strpath)
200 130 assert 'not found' in stderr
201 131
202 def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
203 repo = user_util.create_repo()
204 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
205
206 # Damage repo by removing it's folder
207 RepoModel()._delete_filesystem_repo(repo)
208
209 stdout, stderr = Command('/tmp').execute(
210 'hg clone', clone_url, tmpdir.strpath)
211 assert 'HTTP Error 404: Not Found' in stderr
212
213 132 def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util):
214 133 repo = user_util.create_repo(repo_type='git')
215 134 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
@@ -221,17 +140,6 b' class TestVCSOperations(object):'
221 140 'git clone', clone_url, tmpdir.strpath)
222 141 assert 'not found' in stderr
223 142
224 def test_push_new_file_hg(self, rc_web_server, tmpdir):
225 clone_url = rc_web_server.repo_clone_url(HG_REPO)
226 stdout, stderr = Command('/tmp').execute(
227 'hg clone', clone_url, tmpdir.strpath)
228
229 stdout, stderr = _add_files_and_push(
230 'hg', tmpdir.strpath, clone_url=clone_url)
231
232 assert 'pushing to' in stdout
233 assert 'size summary' in stdout
234
235 143 def test_push_new_file_git(self, rc_web_server, tmpdir):
236 144 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
237 145 stdout, stderr = Command('/tmp').execute(
@@ -243,58 +151,6 b' class TestVCSOperations(object):'
243 151
244 152 _check_proper_git_push(stdout, stderr)
245 153
246 def test_push_invalidates_cache(self, rc_web_server, tmpdir):
247 hg_repo = Repository.get_by_repo_name(HG_REPO)
248
249 # init cache objects
250 CacheKey.delete_all_cache()
251
252 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id)
253
254 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
255
256 with inv_context_manager as invalidation_context:
257 # __enter__ will create and register cache objects
258 pass
259
260 cache_keys = hg_repo.cache_keys
261 assert cache_keys != []
262 old_ids = [x.cache_state_uid for x in cache_keys]
263
264 # clone to init cache
265 clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
266 stdout, stderr = Command('/tmp').execute(
267 'hg clone', clone_url, tmpdir.strpath)
268
269 cache_keys = hg_repo.cache_keys
270 assert cache_keys != []
271 for key in cache_keys:
272 assert key.cache_active is True
273
274 # PUSH that should trigger invalidation cache
275 stdout, stderr = _add_files_and_push(
276 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
277
278 # flush...
279 Session().commit()
280 hg_repo = Repository.get_by_repo_name(HG_REPO)
281 cache_keys = hg_repo.cache_keys
282 assert cache_keys != []
283 new_ids = [x.cache_state_uid for x in cache_keys]
284 assert new_ids != old_ids
285
286 def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
287 clone_url = rc_web_server.repo_clone_url(HG_REPO)
288 stdout, stderr = Command('/tmp').execute(
289 'hg clone', clone_url, tmpdir.strpath)
290
291 push_url = rc_web_server.repo_clone_url(
292 HG_REPO, user='bad', passwd='name')
293 stdout, stderr = _add_files_and_push(
294 'hg', tmpdir.strpath, clone_url=push_url)
295
296 assert 'abort: authorization failed' in stderr
297
298 154 def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
299 155 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
300 156 stdout, stderr = Command('/tmp').execute(
@@ -307,17 +163,6 b' class TestVCSOperations(object):'
307 163
308 164 assert 'fatal: Authentication failed' in stderr
309 165
310 def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
311 clone_url = rc_web_server.repo_clone_url(HG_REPO)
312 stdout, stderr = Command('/tmp').execute(
313 'hg clone', clone_url, tmpdir.strpath)
314
315 stdout, stderr = _add_files_and_push(
316 'hg', tmpdir.strpath,
317 clone_url=rc_web_server.repo_clone_url('not-existing'))
318
319 assert 'HTTP Error 404: Not Found' in stderr
320
321 166 def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
322 167 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
323 168 stdout, stderr = Command('/tmp').execute(
@@ -329,28 +174,6 b' class TestVCSOperations(object):'
329 174
330 175 assert 'not found' in stderr
331 176
332 def test_ip_restriction_hg(self, rc_web_server, tmpdir):
333 user_model = UserModel()
334 try:
335 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
336 Session().commit()
337 time.sleep(2)
338 clone_url = rc_web_server.repo_clone_url(HG_REPO)
339 stdout, stderr = Command('/tmp').execute(
340 'hg clone', clone_url, tmpdir.strpath)
341 assert 'abort: HTTP Error 403: Forbidden' in stderr
342 finally:
343 # release IP restrictions
344 for ip in UserIpMap.getAll():
345 UserIpMap.delete(ip.ip_id)
346 Session().commit()
347
348 time.sleep(2)
349
350 stdout, stderr = Command('/tmp').execute(
351 'hg clone', clone_url, tmpdir.strpath)
352 _check_proper_clone(stdout, stderr, 'hg')
353
354 177 def test_ip_restriction_git(self, rc_web_server, tmpdir):
355 178 user_model = UserModel()
356 179 try:
@@ -42,6 +42,7 b' connection_available = pytest.mark.skipi'
42 42 "enable_webhook_push_integration")
43 43 class TestVCSOperationsOnCustomIniConfig(object):
44 44
45 @connection_available
45 46 def test_push_tag_with_commit_hg(self, rc_web_server, tmpdir):
46 47 clone_url = rc_web_server.repo_clone_url(HG_REPO)
47 48 stdout, stderr = Command('/tmp').execute(
@@ -56,6 +57,7 b' class TestVCSOperationsOnCustomIniConfig'
56 57 assert 'ERROR' not in rc_log
57 58 assert "{'name': 'v1.0.0'," in rc_log
58 59
60 @connection_available
59 61 def test_push_tag_with_commit_git(
60 62 self, rc_web_server, tmpdir):
61 63 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
@@ -71,6 +73,7 b' class TestVCSOperationsOnCustomIniConfig'
71 73 assert 'ERROR' not in rc_log
72 74 assert "{'name': 'v1.0.0'," in rc_log
73 75
76 @connection_available
74 77 def test_push_tag_with_no_commit_git(
75 78 self, rc_web_server, tmpdir):
76 79 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
@@ -7,7 +7,7 b''
7 7 [server:main]
8 8 ; COMMON HOST/IP CONFIG
9 9 host = 127.0.0.1
10 port = 9900
10 port = 10010
11 11
12 12
13 13 ; ###########################
@@ -22,6 +22,17 b' use = egg:gunicorn#main'
22 22 [app:main]
23 23 ; The %(here)s variable will be replaced with the absolute path of parent directory
24 24 ; of this file
25 ; Each option in the app:main can be overridden by an environment variable
26 ;
27 ;To override an option:
28 ;
29 ;RC_<KeyName>
30 ;Everything should be uppercase, . and - should be replaced by _.
31 ;For example, if you have this configuration setting:
32 ;rc_cache.repo_object.backend = foo
33 ;can be overridden by
34 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
35
25 36 use = egg:rhodecode-vcsserver
26 37
27 38 ; Pyramid default locales, we need this to be set
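The override rule documented in the new comments above (uppercase the key, replace '.' and '-' with '_', prefix with RC_) can be illustrated with a small helper; this is only a sketch of the stated naming convention, not a function shipped by vcsserver:

    def override_env_var(key: str) -> str:
        # uppercase, then map '.' and '-' to '_' as described in the ini comments
        name = key.upper().replace('.', '_').replace('-', '_')
        # keys such as 'rc_cache.*' already carry the RC_ prefix once uppercased
        return name if name.startswith('RC_') else f'RC_{name}'

    # override_env_var('rc_cache.repo_object.backend') -> 'RC_CACHE_REPO_OBJECT_BACKEND'
    # override_env_var('locale') -> 'RC_LOCALE'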
@@ -30,11 +41,15 b' pyramid.default_locale_name = en'
30 41 ; default locale used by VCS systems
31 42 locale = en_US.UTF-8
32 43
33 ; path to binaries for vcsserver, it should be set by the installer
34 ; at installation time, e.g /home/user/vcsserver-1/profile/bin
35 ; it can also be a path to nix-build output in case of development
44 ; path to binaries (hg, git, svn) for vcsserver; it should be set by the installer
45 ; at installation time, e.g. /home/user/.rccontrol/vcsserver-1/profile/bin
46 ; or /usr/local/bin/rhodecode_bin/vcs_bin
36 47 core.binary_dir =
37 48
49 ; Redis connection settings for the svn integrations logic
50 ; This connection string needs to be the same on CE and vcsserver
51 vcs.svn.redis_conn = redis://redis:6379/0
52
38 53 ; Custom exception store path, defaults to TMPDIR
39 54 ; This is used to store exception from RhodeCode in shared directory
40 55 #exception_tracker.store_path =
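Because the new vcs.svn.redis_conn value has to match on both CE and vcsserver, a quick round trip against the configured URL is an easy way to confirm both sides point at the same Redis; an illustrative snippet only (assumes the redis-py package and the example URL from above):

    import redis

    url = 'redis://redis:6379/0'  # same value as vcs.svn.redis_conn
    client = redis.StrictRedis.from_url(url)
    client.ping()  # raises redis.exceptions.ConnectionError if Redis is unreachable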
@@ -52,14 +67,14 b' cache_dir = %(here)s/data'
52 67 ; ***************************************
53 68
54 69 ; `repo_object` cache settings for vcs methods for repositories
55 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
70 #rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
56 71
57 72 ; cache auto-expires after N seconds
58 73 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
59 rc_cache.repo_object.expiration_time = 2592000
74 #rc_cache.repo_object.expiration_time = 2592000
60 75
61 76 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
62 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db
77 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db
63 78
64 79 ; ***********************************************************
65 80 ; `repo_object` cache with redis backend
@@ -83,19 +98,32 b' rc_cache.repo_object.expiration_time = 2'
83 98 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
84 99 #rc_cache.repo_object.arguments.distributed_lock = true
85 100
86 # legacy cache regions, please don't change
87 beaker.cache.regions = repo_object
88 beaker.cache.repo_object.type = memorylru
89 beaker.cache.repo_object.max_items = 100
90 # cache auto-expires after N seconds
91 beaker.cache.repo_object.expire = 300
92 beaker.cache.repo_object.enabled = true
101 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
102 #rc_cache.repo_object.arguments.lock_auto_renewal = true
103
104 ; Statsd client config; this is used to send metrics to statsd
105 ; We recommend setting up statsd_exporter and scraping the metrics with Prometheus
106 #statsd.enabled = false
107 #statsd.statsd_host = 0.0.0.0
108 #statsd.statsd_port = 8125
109 #statsd.statsd_prefix =
110 #statsd.statsd_ipv6 = false
93 111
112 ; configure logging automatically at server startup; set to false
113 ; to use the custom logging config below.
114 ; RC_LOGGING_FORMATTER
115 ; RC_LOGGING_LEVEL
116 ; these env variables can control the logging settings when autoconfigure is enabled
94 117
118 #logging.autoconfigure = true
119
120 ; specify your own custom logging config file to configure logging
121 #logging.logging_conf_file = /path/to/custom_logging.ini
95 122
96 123 ; #####################
97 124 ; LOGGING CONFIGURATION
98 125 ; #####################
126
99 127 [loggers]
100 128 keys = root, vcsserver
101 129
@@ -103,7 +131,7 b' keys = root, vcsserver'
103 131 keys = console
104 132
105 133 [formatters]
106 keys = generic
134 keys = generic, json
107 135
108 136 ; #######
109 137 ; LOGGERS
@@ -113,12 +141,11 b' level = NOTSET'
113 141 handlers = console
114 142
115 143 [logger_vcsserver]
116 level = DEBUG
144 level = INFO
117 145 handlers =
118 146 qualname = vcsserver
119 147 propagate = 1
120 148
121
122 149 ; ########
123 150 ; HANDLERS
124 151 ; ########
@@ -127,6 +154,8 b' propagate = 1'
127 154 class = StreamHandler
128 155 args = (sys.stderr, )
129 156 level = DEBUG
157 ; To enable JSON-formatted logs, replace 'generic' with 'json'
158 ; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
130 159 formatter = generic
131 160
132 161 ; ##########
@@ -136,3 +165,7 b' formatter = generic'
136 165 [formatter_generic]
137 166 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
138 167 datefmt = %Y-%m-%d %H:%M:%S
168
169 [formatter_json]
170 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
171 class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
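The json formatter entry points at a vendored JsonFormatter in the python-json-logger style; assuming the public python-json-logger package behaves the same way (an assumption, since the vendored module is not shown here), JSON log output can be reproduced standalone like this:

    import logging
    from pythonjsonlogger import jsonlogger  # public counterpart of the vendored module (assumption)

    handler = logging.StreamHandler()
    # standard attributes only; the ini format above additionally references custom
    # fields such as %(timestamp)s and %(req_id)s, presumably populated by vcsserver's setup
    handler.setFormatter(jsonlogger.JsonFormatter('%(asctime)s %(levelname)s %(name)s %(message)s'))

    log = logging.getLogger('vcsserver')
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    log.info('hello')  # emits one JSON object per log line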