##// END OF EJS Templates
fix(svn): svn events fixes and change the way how we handle the events
super-admin -
r5459:7f730862 default
parent child Browse files
Show More
@@ -0,0 +1,132 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import logging
20 import redis
21
22 from ..lib import rc_cache
23 from ..lib.ext_json import json
24
25
26 log = logging.getLogger(__name__)
27
28 redis_client = None
29
30
class RedisTxnClient:
    """Minimal Redis client used to persist SVN transaction metadata.

    A single connection pool backs both attributes; ``reader_client`` is an
    alias of ``writer_client`` today, kept separate so a split read/write
    deployment can be introduced later without touching callers.
    """

    def __init__(self, url):
        self.url = url
        self._create_client(url)

    def _create_client(self, url):
        # One pool shared by reads and writes.
        connection_pool = redis.ConnectionPool.from_url(url)
        self.writer_client = redis.StrictRedis(
            connection_pool=connection_pool
        )
        self.reader_client = self.writer_client

    def set(self, key, value, expire=60 * 60 * 24):
        # Default TTL is 24 hours. Redis `ex=` takes SECONDS; the previous
        # default of 24 * 60000 (~16.6 days) looked like a mixed-up
        # milliseconds/seconds computation.
        self.writer_client.set(key, value, ex=expire)

    def get(self, key):
        # Returns raw bytes, or None when the key is absent/expired.
        return self.reader_client.get(key)

    def delete(self, key):
        self.writer_client.delete(key)
52
53
def get_redis_client(url=''):
    """Return the process-wide ``RedisTxnClient``, creating it lazily.

    When *url* is empty the connection string is read from the application
    config key ``vcs.svn.redis_conn``.
    """
    global redis_client

    if redis_client is None:
        if not url:
            from rhodecode import CONFIG
            url = CONFIG['vcs.svn.redis_conn']
        redis_client = RedisTxnClient(url)

    return redis_client
64
65
def extract_svn_txn_id(data: bytes):
    """
    Helper method for extraction of svn txn_id from submitted XML data during
    POST operations.

    Looks for a ``{DAV:}source/{DAV:}href`` element whose path contains
    ``/txn/<id>`` and returns that id, or None when the payload cannot be
    parsed or carries no txn href.
    """
    import re
    # stdlib parser is sufficient for these small DAV payloads and avoids
    # the lxml dependency; `fromstring` accepts bytes just like lxml's.
    from xml.etree import ElementTree as etree

    try:
        root = etree.fromstring(data)
        pat = re.compile(r'/txn/(?P<txn_id>.*)')
        for el in root:
            if el.tag == '{DAV:}source':
                for sub_el in el:
                    # Guard on `.text`: an empty <href/> would previously make
                    # re.search(None) raise TypeError (swallowed + logged below).
                    if sub_el.tag == '{DAV:}href' and sub_el.text:
                        match = pat.search(sub_el.text)
                        if match:
                            return match.groupdict()['txn_id']
    except Exception:
        log.exception('Failed to extract txn_id')
87
88
def get_txn_id_data_key(repo_path, svn_txn_id):
    """Return the store key under which metadata for *svn_txn_id* lives."""
    log.debug('svn-txn-id: %s, obtaining data path', svn_txn_id)

    # Hash the repo path into a stable cache key, then scope it per txn.
    repo_hash = rc_cache.utils.compute_key_from_params(repo_path)
    store_key = f'{repo_hash}.{svn_txn_id}.svn_txn_id'
    log.debug('computed final key: %s', store_key)
    return store_key
96
97
def store_txn_id_data(repo_path, svn_txn_id, data_dict):
    """Serialize *data_dict* as JSON and persist it under the txn key."""
    log.debug('svn-txn-id: %s, storing data', svn_txn_id)

    # Guard: an empty txn id would produce an unusable store key.
    if not svn_txn_id:
        log.warning('Cannot store txn_id because it is empty')
        return

    redis_conn = get_redis_client()
    redis_conn.set(
        get_txn_id_data_key(repo_path, svn_txn_id),
        json.dumps(data_dict))
110
111
def get_txn_id_from_store(repo_path, svn_txn_id, rm_on_read=False):
    """
    Reads txn_id from store and if present returns the data for callback manager.

    Returns the decoded dict, or ``{}`` when the key is missing or the stored
    payload cannot be decoded. When *rm_on_read* is true the entry is deleted
    after the read (regardless of decode success), making the read one-shot.
    """
    log.debug('svn-txn-id: %s, retrieving data', svn_txn_id)
    redis_conn = get_redis_client()

    store_key = get_txn_id_data_key(repo_path, svn_txn_id)
    data = {}
    # NOTE: a duplicated, unused `redis_conn.get(store_key)` used to sit here,
    # costing an extra Redis round trip per call — removed.
    try:
        raw_data = redis_conn.get(store_key)
        # json.loads(None) raises when the key is absent; handled below.
        data = json.loads(raw_data)
    except Exception:
        log.exception('Failed to get txn_id metadata')

    if rm_on_read:
        log.debug('Cleaning up txn_id at %s', store_key)
        redis_conn.delete(store_key)

    return data
@@ -0,0 +1,226 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
21 Test suite for making push/pull operations, on specially modified INI files
22
23 .. important::
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
27 """
28
29
30 import time
31
32 import pytest
33
34 from rhodecode.lib import rc_cache
35 from rhodecode.model.db import Repository, UserIpMap, CacheKey
36 from rhodecode.model.meta import Session
37 from rhodecode.model.repo import RepoModel
38 from rhodecode.model.user import UserModel
39 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
40
41 from rhodecode.tests.vcs_operations import (
42 Command, _check_proper_clone, _add_files_and_push, HG_REPO_WITH_GROUP)
43
44
@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
class TestVCSOperations(object):
    """Integration tests for Mercurial clone/push operations.

    Each test drives a real ``hg`` client process (via ``Command``) against a
    live test server provided by the ``rc_web_server`` fixture; ``tmpdir`` is
    the clone destination.
    """

    def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone --pull', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone --pull --stream', clone_url, tmpdir.strpath)
        # stream clones print transfer stats instead of the usual clone output,
        # so assert on those markers directly.
        # NOTE(review): the '114 files updated,' count is fixture-dependent —
        # confirm it tracks the test repo contents.
        assert 'files to transfer,' in stdout
        assert 'transferred 1.' in stdout
        assert '114 files updated,' in stdout

    def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
        # repos are also addressable by their numeric id via the `_<id>` alias
        repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
        clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'abort: authorization failed' in stderr

    def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
        # cloning a git repo with the hg client must 404, not expose the repo
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url('trololo')
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
        # leading double-slash in the repo path must not resolve
        clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_existing_path_hg_not_in_database(
            self, rc_web_server, tmpdir, fs_repo_only):

        # repo exists on the filesystem but has no DB record -> must 404
        db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
        clone_url = rc_web_server.repo_clone_url(db_name)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_existing_path_hg_not_in_database_different_scm(
            self, rc_web_server, tmpdir, fs_repo_only):
        # same as above, but the on-disk repo is of a different SCM type
        db_name = fs_repo_only('not-in-db-git', repo_type='git')
        clone_url = rc_web_server.repo_clone_url(db_name)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
        repo = user_util.create_repo()
        clone_url = rc_web_server.repo_clone_url(repo.repo_name)

        # Damage repo by removing its folder
        RepoModel()._delete_filesystem_repo(repo)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_push_new_file_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=clone_url)

        assert 'pushing to' in stdout
        assert 'size summary' in stdout

    def test_push_invalidates_cache(self, rc_web_server, tmpdir):
        """A push must rotate the repo's cache_state_uid values."""
        hg_repo = Repository.get_by_repo_name(HG_REPO)

        # init cache objects
        CacheKey.delete_all_cache()

        repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id)

        inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)

        with inv_context_manager as invalidation_context:
            # __enter__ will create and register cache objects
            pass

        cache_keys = hg_repo.cache_keys
        assert cache_keys != []
        old_ids = [x.cache_state_uid for x in cache_keys]

        # clone to init cache
        clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        cache_keys = hg_repo.cache_keys
        assert cache_keys != []
        for key in cache_keys:
            assert key.cache_active is True

        # PUSH that should trigger invalidation cache
        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)

        # flush...
        Session().commit()
        hg_repo = Repository.get_by_repo_name(HG_REPO)
        cache_keys = hg_repo.cache_keys
        assert cache_keys != []
        new_ids = [x.cache_state_uid for x in cache_keys]
        # push must have rotated the cache state uids
        assert new_ids != old_ids

    def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
        # clone with valid credentials, then push with bad ones
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(
            HG_REPO, user='bad', passwd='name')
        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=push_url)

        assert 'abort: authorization failed' in stderr

    def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath,
            clone_url=rc_web_server.repo_clone_url('not-existing'))

        assert 'HTTP Error 404: Not Found' in stderr

    def test_ip_restriction_hg(self, rc_web_server, tmpdir):
        user_model = UserModel()
        try:
            # restrict the admin account to an unreachable IP -> clone must 403
            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
            Session().commit()
            # sleeps let the permission-cache settle; TODO confirm the 2s value
            time.sleep(2)
            clone_url = rc_web_server.repo_clone_url(HG_REPO)
            stdout, stderr = Command('/tmp').execute(
                'hg clone', clone_url, tmpdir.strpath)
            assert 'abort: HTTP Error 403: Forbidden' in stderr
        finally:
            # release IP restrictions
            for ip in UserIpMap.getAll():
                UserIpMap.delete(ip.ip_id)
            Session().commit()

            time.sleep(2)

            # with restrictions lifted the clone must succeed again
            stdout, stderr = Command('/tmp').execute(
                'hg clone', clone_url, tmpdir.strpath)
            _check_proper_clone(stdout, stderr, 'hg')
@@ -0,0 +1,197 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 """
20 Test suite for making push/pull operations, on specially modified INI files
21
22 .. important::
23
24 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 to redirect things to stderr instead of stdout.
26 """
27
28
29 import time
30 import pytest
31
32 from rhodecode.model.db import Repository, UserIpMap
33 from rhodecode.model.meta import Session
34 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import (SVN_REPO, TEST_USER_ADMIN_LOGIN)
37
38
39 from rhodecode.tests.vcs_operations import (
40 Command, _check_proper_clone, _check_proper_svn_push,
41 _add_files_and_push, SVN_REPO_WITH_GROUP)
42
43
@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
class TestVCSOperations(object):
    """Integration tests for Subversion checkout/commit operations.

    Each test drives a real ``svn`` client process (via ``Command``) against a
    live test server from the ``rc_web_server`` fixture. svn is always invoked
    with ``--non-interactive`` plus explicit credentials so it never prompts.
    """

    def test_clone_svn_repo_by_admin(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)
        username, password = rc_web_server.repo_clone_credentials()

        cmd = Command('/tmp')

        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'svn')
        cmd.assert_returncode_success()

    def test_clone_svn_repo_by_id_by_admin(self, rc_web_server, tmpdir):
        # repos are also addressable by their numeric id via the `_<id>` alias
        repo_id = Repository.get_by_repo_name(SVN_REPO).repo_id
        username, password = rc_web_server.repo_clone_credentials()

        clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
        cmd = Command('/tmp')
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'svn')
        cmd.assert_returncode_success()

    def test_clone_svn_repo_with_group_by_admin(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(SVN_REPO_WITH_GROUP)
        username, password = rc_web_server.repo_clone_credentials()

        cmd = Command('/tmp')
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'svn')
        cmd.assert_returncode_success()

    def test_clone_wrong_credentials_svn(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)
        username, password = rc_web_server.repo_clone_credentials()
        password = 'bad-password'

        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)
        # NOTE(review): 'fatal:' is git-style wording; confirm the svn client
        # actually emits this string on auth failure in this setup.
        assert 'fatal: Authentication failed' in stderr

    def test_clone_svn_with_slashes(self, rc_web_server, tmpdir):
        # leading double-slash in the repo path must not resolve
        clone_url = rc_web_server.repo_clone_url('//' + SVN_REPO)
        stdout, stderr = Command('/tmp').execute('svn checkout', clone_url)
        assert 'not found' in stderr

    def test_clone_existing_path_svn_not_in_database(
            self, rc_web_server, tmpdir, fs_repo_only):
        # repo exists on the filesystem but has no DB record -> must not resolve
        db_name = fs_repo_only('not-in-db-git', repo_type='git')
        clone_url = rc_web_server.repo_clone_url(db_name)
        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'

        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)
        assert 'not found' in stderr

    def test_clone_existing_path_svn_not_in_database_different_scm(
            self, rc_web_server, tmpdir, fs_repo_only):
        # same as above, but the on-disk repo is of a different SCM type
        db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
        clone_url = rc_web_server.repo_clone_url(db_name)

        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)
        assert 'not found' in stderr

    def test_clone_non_existing_store_path_svn(self, rc_web_server, tmpdir, user_util):
        repo = user_util.create_repo(repo_type='git')
        clone_url = rc_web_server.repo_clone_url(repo.repo_name)

        # Damage repo by removing its folder
        RepoModel()._delete_filesystem_repo(repo)

        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)
        assert 'not found' in stderr

    def test_push_new_file_svn(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)
        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'

        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)

        # commit some stuff into this repo
        stdout, stderr = _add_files_and_push(
            'svn', tmpdir.strpath, clone_url=clone_url)

        _check_proper_svn_push(stdout, stderr)

    def test_push_wrong_credentials_svn(self, rc_web_server, tmpdir):
        # checkout with valid (anonymous) access, then push with bad credentials
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)

        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(
            SVN_REPO, user='bad', passwd='name')
        stdout, stderr = _add_files_and_push(
            'svn', tmpdir.strpath, clone_url=push_url)

        # NOTE(review): 'fatal:' is git-style wording; confirm svn emits it here.
        assert 'fatal: Authentication failed' in stderr

    def test_push_back_to_wrong_url_svn(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)
        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'
        Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'svn', tmpdir.strpath,
            clone_url=rc_web_server.repo_clone_url('not-existing'))

        assert 'not found' in stderr

    def test_ip_restriction_svn(self, rc_web_server, tmpdir):
        user_model = UserModel()
        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'

        try:
            # restrict the admin account to an unreachable IP -> checkout must 403
            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
            Session().commit()
            # sleeps let the permission-cache settle; TODO confirm the 2s value
            time.sleep(2)
            clone_url = rc_web_server.repo_clone_url(SVN_REPO)

            stdout, stderr = Command('/tmp').execute(
                f'svn checkout {auth}', clone_url, tmpdir.strpath)
            msg = "The requested URL returned error: 403"
            assert msg in stderr
        finally:
            # release IP restrictions
            for ip in UserIpMap.getAll():
                UserIpMap.delete(ip.ip_id)
            Session().commit()

            time.sleep(2)

            # with restrictions lifted the checkout must succeed again
            cmd = Command('/tmp')
            stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
            cmd.assert_returncode_success()
            _check_proper_clone(stdout, stderr, 'svn')
@@ -1,852 +1,856 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = true
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Prefix middleware for RhodeCode.
45 45 ; recommended when using proxy setup.
46 46 ; allows to set RhodeCode under a prefix in server.
47 47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 48 ; And set your prefix like: `prefix = /custom_prefix`
49 49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 50 ; to make your cookies only work on prefix url
51 51 [filter:proxy-prefix]
52 52 use = egg:PasteDeploy#prefix
53 53 prefix = /
54 54
55 55 [app:main]
56 56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 57 ; of this file
58 58 ; Each option in the app:main can be overridden by an environment variable
59 59 ;
60 60 ;To override an option:
61 61 ;
62 62 ;RC_<KeyName>
63 63 ;Everything should be uppercase, . and - should be replaced by _.
64 64 ;For example, if you have these configuration settings:
65 65 ;rc_cache.repo_object.backend = foo
66 66 ;can be overridden by
67 67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68 68
69 69 use = egg:rhodecode-enterprise-ce
70 70
71 71 ; enable proxy prefix middleware, defined above
72 72 #filter-with = proxy-prefix
73 73
74 74 ; #############
75 75 ; DEBUG OPTIONS
76 76 ; #############
77 77
78 78 pyramid.reload_templates = true
79 79
80 80 # During development we want to have the debug toolbar enabled
81 81 pyramid.includes =
82 82 pyramid_debugtoolbar
83 83
84 84 debugtoolbar.hosts = 0.0.0.0/0
85 85 debugtoolbar.exclude_prefixes =
86 86 /css
87 87 /fonts
88 88 /images
89 89 /js
90 90
91 91 ## RHODECODE PLUGINS ##
92 92 rhodecode.includes =
93 93 rhodecode.api
94 94
95 95
96 96 # api prefix url
97 97 rhodecode.api.url = /_admin/api
98 98
99 99 ; enable debug style page
100 100 debug_style = true
101 101
102 102 ; #################
103 103 ; END DEBUG OPTIONS
104 104 ; #################
105 105
106 106 ; encryption key used to encrypt social plugin tokens,
107 107 ; remote_urls with credentials etc, if not set it defaults to
108 108 ; `beaker.session.secret`
109 109 #rhodecode.encrypted_values.secret =
110 110
111 111 ; decryption strict mode (enabled by default). It controls if decryption raises
112 112 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 113 #rhodecode.encrypted_values.strict = false
114 114
115 115 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
116 116 ; fernet is safer, and we strongly recommend switching to it.
117 117 ; Due to backward compatibility aes is used as default.
118 118 #rhodecode.encrypted_values.algorithm = fernet
119 119
120 120 ; Return gzipped responses from RhodeCode (static files/application)
121 121 gzip_responses = false
122 122
123 123 ; Auto-generate javascript routes file on startup
124 124 generate_js_files = false
125 125
126 126 ; System global default language.
127 127 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 128 lang = en
129 129
130 130 ; Perform a full repository scan and import on each server start.
131 131 ; Setting this to true could lead to very long startup time.
132 132 startup.import_repos = false
133 133
134 134 ; URL at which the application is running. This is used for Bootstrapping
135 135 ; requests in context when no web request is available. Used in ishell, or
136 136 ; SSH calls. Set this for events to receive proper url for SSH calls.
137 137 app.base_url = http://rhodecode.local
138 138
139 139 ; Host at which the Service API is running.
140 140 app.service_api.host = http://rhodecode.local:10020
141 141
142 142 ; Secret for Service API authentication.
143 143 app.service_api.token =
144 144
145 145 ; Unique application ID. Should be a random unique string for security.
146 146 app_instance_uuid = rc-production
147 147
148 148 ; Cut off limit for large diffs (size in bytes). If overall diff size on
149 149 ; commit, or pull request exceeds this limit this diff will be displayed
150 150 ; partially. E.g 512000 == 512Kb
151 151 cut_off_limit_diff = 512000
152 152
153 153 ; Cut off limit for large files inside diffs (size in bytes). Each individual
154 154 ; file inside diff which exceeds this limit will be displayed partially.
155 155 ; E.g 128000 == 128Kb
156 156 cut_off_limit_file = 128000
157 157
158 158 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
159 159 vcs_full_cache = true
160 160
161 161 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
162 162 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
163 163 force_https = false
164 164
165 165 ; use Strict-Transport-Security headers
166 166 use_htsts = false
167 167
168 168 ; Set to true if your repos are exposed using the dumb protocol
169 169 git_update_server_info = false
170 170
171 171 ; RSS/ATOM feed options
172 172 rss_cut_off_limit = 256000
173 173 rss_items_per_page = 10
174 174 rss_include_diff = false
175 175
176 176 ; gist URL alias, used to create nicer urls for gist. This should be an
177 177 ; url that does rewrites to _admin/gists/{gistid}.
178 178 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
179 179 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
180 180 gist_alias_url =
181 181
182 182 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
183 183 ; used for access.
184 184 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
185 185 ; came from the logged in user who owns this authentication token.
186 186 ; Additionally @TOKEN syntax can be used to bound the view to specific
187 187 ; authentication token. Such view would be only accessible when used together
188 188 ; with this authentication token
189 189 ; list of all views can be found under `/_admin/permissions/auth_token_access`
190 190 ; The list should be "," separated and on a single line.
191 191 ; Most common views to enable:
192 192
193 193 # RepoCommitsView:repo_commit_download
194 194 # RepoCommitsView:repo_commit_patch
195 195 # RepoCommitsView:repo_commit_raw
196 196 # RepoCommitsView:repo_commit_raw@TOKEN
197 197 # RepoFilesView:repo_files_diff
198 198 # RepoFilesView:repo_archivefile
199 199 # RepoFilesView:repo_file_raw
200 200 # GistView:*
201 201 api_access_controllers_whitelist =
202 202
203 203 ; Default encoding used to convert from and to unicode
204 204 ; can be also a comma separated list of encoding in case of mixed encodings
205 205 default_encoding = UTF-8
206 206
207 207 ; instance-id prefix
208 208 ; a prefix key for this instance used for cache invalidation when running
209 209 ; multiple instances of RhodeCode, make sure it's globally unique for
210 210 ; all running RhodeCode instances. Leave empty if you don't use it
211 211 instance_id =
212 212
213 213 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
214 214 ; of an authentication plugin also if it is disabled by it's settings.
215 215 ; This could be useful if you are unable to log in to the system due to broken
216 216 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
217 217 ; module to log in again and fix the settings.
218 218 ; Available builtin plugin IDs (hash is part of the ID):
219 219 ; egg:rhodecode-enterprise-ce#rhodecode
220 220 ; egg:rhodecode-enterprise-ce#pam
221 221 ; egg:rhodecode-enterprise-ce#ldap
222 222 ; egg:rhodecode-enterprise-ce#jasig_cas
223 223 ; egg:rhodecode-enterprise-ce#headers
224 224 ; egg:rhodecode-enterprise-ce#crowd
225 225
226 226 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
227 227
228 228 ; Flag to control loading of legacy plugins in py:/path format
229 229 auth_plugin.import_legacy_plugins = true
230 230
231 231 ; alternative return HTTP header for failed authentication. Default HTTP
232 232 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
233 233 ; handling that causing a series of failed authentication calls.
234 234 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
235 235 ; This will be served instead of default 401 on bad authentication
236 236 auth_ret_code =
237 237
238 238 ; use special detection method when serving auth_ret_code, instead of serving
239 239 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
240 240 ; and then serve auth_ret_code to clients
241 241 auth_ret_code_detection = false
242 242
243 243 ; locking return code. When repository is locked return this HTTP code. 2XX
244 244 ; codes don't break the transactions while 4XX codes do
245 245 lock_ret_code = 423
246 246
247 247 ; Filesystem location were repositories should be stored
248 248 repo_store.path = /var/opt/rhodecode_repo_store
249 249
250 250 ; allows to setup custom hooks in settings page
251 251 allow_custom_hooks_settings = true
252 252
253 253 ; Generated license token required for EE edition license.
254 254 ; New generated token value can be found in Admin > settings > license page.
255 255 license_token =
256 256
257 257 ; This flag hides sensitive information on the license page such as token, and license data
258 258 license.hide_license_info = false
259 259
260 260 ; supervisor connection uri, for managing supervisor and logs.
261 261 supervisor.uri =
262 262
263 263 ; supervisord group name/id we only want this RC instance to handle
264 264 supervisor.group_id = dev
265 265
266 266 ; Display extended labs settings
267 267 labs_settings_active = true
268 268
269 269 ; Custom exception store path, defaults to TMPDIR
270 270 ; This is used to store exception from RhodeCode in shared directory
271 271 #exception_tracker.store_path =
272 272
273 273 ; Send email with exception details when it happens
274 274 #exception_tracker.send_email = false
275 275
276 276 ; Comma separated list of recipients for exception emails,
277 277 ; e.g admin@rhodecode.com,devops@rhodecode.com
278 278 ; Can be left empty, then emails will be sent to ALL super-admins
279 279 #exception_tracker.send_email_recipients =
280 280
281 281 ; optional prefix to Add to email Subject
282 282 #exception_tracker.email_prefix = [RHODECODE ERROR]
283 283
284 284 ; File store configuration. This is used to store and serve uploaded files
285 285 file_store.enabled = true
286 286
287 287 ; Storage backend, available options are: local
288 288 file_store.backend = local
289 289
290 290 ; path to store the uploaded binaries and artifacts
291 291 file_store.storage_path = /var/opt/rhodecode_data/file_store
292 292
293 293
294 294 ; Redis url to acquire/check generation of archives locks
295 295 archive_cache.locking.url = redis://redis:6379/1
296 296
297 297 ; Storage backend, only 'filesystem' and 'objectstore' are available now
298 298 archive_cache.backend.type = filesystem
299 299
300 300 ; url for s3 compatible storage that allows to upload artifacts
301 301 ; e.g http://minio:9000
302 302 archive_cache.objectstore.url = http://s3-minio:9000
303 303
304 304 ; key for s3 auth
305 305 archive_cache.objectstore.key = key
306 306
307 307 ; secret for s3 auth
308 308 archive_cache.objectstore.secret = secret
309 309
310 310 ;region for s3 storage
311 311 archive_cache.objectstore.region = eu-central-1
312 312
313 313 ; number of sharded buckets to create to distribute archives across
314 314 ; default is 8 shards
315 315 archive_cache.objectstore.bucket_shards = 8
316 316
317 317 ; a top-level bucket to put all other shards in
318 318 ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
319 319 archive_cache.objectstore.bucket = rhodecode-archive-cache
320 320
321 321 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
322 322 archive_cache.objectstore.retry = false
323 323
324 324 ; number of seconds to wait for next try using retry
325 325 archive_cache.objectstore.retry_backoff = 1
326 326
327 327 ; how many tries to do a retry fetch from this backend
328 328 archive_cache.objectstore.retry_attempts = 10
329 329
330 330 ; Default is $cache_dir/archive_cache if not set
331 331 ; Generated repo archives will be cached at this location
332 332 ; and served from the cache during subsequent requests for the same archive of
333 333 ; the repository. This path is important to be shared across filesystems and with
334 334 ; RhodeCode and vcsserver
335 335 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
336 336
337 337 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
338 338 archive_cache.filesystem.cache_size_gb = 1
339 339
340 340 ; Eviction policy used to clear out after cache_size_gb limit is reached
341 341 archive_cache.filesystem.eviction_policy = least-recently-stored
342 342
343 343 ; By default cache uses sharding technique, this specifies how many shards are there
344 344 ; default is 8 shards
345 345 archive_cache.filesystem.cache_shards = 8
346 346
347 347 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
348 348 archive_cache.filesystem.retry = false
349 349
350 350 ; number of seconds to wait for next try using retry
351 351 archive_cache.filesystem.retry_backoff = 1
352 352
353 353 ; how many tries to do a retry fetch from this backend
354 354 archive_cache.filesystem.retry_attempts = 10
355 355
356 356
357 357 ; #############
358 358 ; CELERY CONFIG
359 359 ; #############
360 360
361 361 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
362 362
363 363 use_celery = true
364 364
365 365 ; path to store schedule database
366 366 #celerybeat-schedule.path =
367 367
368 368 ; connection url to the message broker (default redis)
369 369 celery.broker_url = redis://redis:6379/8
370 370
371 371 ; results backend to get results for (default redis)
372 372 celery.result_backend = redis://redis:6379/8
373 373
374 374 ; rabbitmq example
375 375 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
376 376
377 377 ; maximum tasks to execute before worker restart
378 378 celery.max_tasks_per_child = 20
379 379
380 380 ; tasks will never be sent to the queue, but executed locally instead.
381 381 celery.task_always_eager = false
382 382
383 383 ; #############
384 384 ; DOGPILE CACHE
385 385 ; #############
386 386
387 387 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
388 388 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
389 389 cache_dir = /var/opt/rhodecode_data
390 390
391 391 ; *********************************************
392 392 ; `sql_cache_short` cache for heavy SQL queries
393 393 ; Only supported backend is `memory_lru`
394 394 ; *********************************************
395 395 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
396 396 rc_cache.sql_cache_short.expiration_time = 30
397 397
398 398
399 399 ; *****************************************************
400 400 ; `cache_repo_longterm` cache for repo object instances
401 401 ; Only supported backend is `memory_lru`
402 402 ; *****************************************************
403 403 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
404 404 ; by default we use 30 Days, cache is still invalidated on push
405 405 rc_cache.cache_repo_longterm.expiration_time = 2592000
406 406 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
407 407 rc_cache.cache_repo_longterm.max_size = 10000
408 408
409 409
410 410 ; *********************************************
411 411 ; `cache_general` cache for general purpose use
412 412 ; for simplicity use rc.file_namespace backend,
413 413 ; for performance and scale use rc.redis
414 414 ; *********************************************
415 415 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
416 416 rc_cache.cache_general.expiration_time = 43200
417 417 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
418 418 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
419 419
420 420 ; alternative `cache_general` redis backend with distributed lock
421 421 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
422 422 #rc_cache.cache_general.expiration_time = 300
423 423
424 424 ; redis_expiration_time needs to be greater than expiration_time
425 425 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
426 426
427 427 #rc_cache.cache_general.arguments.host = localhost
428 428 #rc_cache.cache_general.arguments.port = 6379
429 429 #rc_cache.cache_general.arguments.db = 0
430 430 #rc_cache.cache_general.arguments.socket_timeout = 30
431 431 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
432 432 #rc_cache.cache_general.arguments.distributed_lock = true
433 433
434 434 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
435 435 #rc_cache.cache_general.arguments.lock_auto_renewal = true
436 436
437 437 ; *************************************************
438 438 ; `cache_perms` cache for permission tree, auth TTL
439 439 ; for simplicity use rc.file_namespace backend,
440 440 ; for performance and scale use rc.redis
441 441 ; *************************************************
442 442 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
443 443 rc_cache.cache_perms.expiration_time = 3600
444 444 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
445 445 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
446 446
447 447 ; alternative `cache_perms` redis backend with distributed lock
448 448 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
449 449 #rc_cache.cache_perms.expiration_time = 300
450 450
451 451 ; redis_expiration_time needs to be greater than expiration_time
452 452 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
453 453
454 454 #rc_cache.cache_perms.arguments.host = localhost
455 455 #rc_cache.cache_perms.arguments.port = 6379
456 456 #rc_cache.cache_perms.arguments.db = 0
457 457 #rc_cache.cache_perms.arguments.socket_timeout = 30
458 458 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
459 459 #rc_cache.cache_perms.arguments.distributed_lock = true
460 460
461 461 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
462 462 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
463 463
464 464 ; ***************************************************
465 465 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
466 466 ; for simplicity use rc.file_namespace backend,
467 467 ; for performance and scale use rc.redis
468 468 ; ***************************************************
469 469 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
470 470 rc_cache.cache_repo.expiration_time = 2592000
471 471 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
472 472 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
473 473
474 474 ; alternative `cache_repo` redis backend with distributed lock
475 475 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
476 476 #rc_cache.cache_repo.expiration_time = 2592000
477 477
478 478 ; redis_expiration_time needs to be greater than expiration_time
479 479 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
480 480
481 481 #rc_cache.cache_repo.arguments.host = localhost
482 482 #rc_cache.cache_repo.arguments.port = 6379
483 483 #rc_cache.cache_repo.arguments.db = 1
484 484 #rc_cache.cache_repo.arguments.socket_timeout = 30
485 485 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
486 486 #rc_cache.cache_repo.arguments.distributed_lock = true
487 487
488 488 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
489 489 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
490 490
491 491 ; ##############
492 492 ; BEAKER SESSION
493 493 ; ##############
494 494
495 495 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
496 496 ; types are file, ext:redis, ext:database, ext:memcached
497 497 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
498 498 #beaker.session.type = file
499 499 #beaker.session.data_dir = %(here)s/data/sessions
500 500
501 501 ; Redis based sessions
502 502 beaker.session.type = ext:redis
503 503 beaker.session.url = redis://redis:6379/2
504 504
505 505 ; DB based session, fast, and allows easy management over logged in users
506 506 #beaker.session.type = ext:database
507 507 #beaker.session.table_name = db_session
508 508 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
509 509 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
510 510 #beaker.session.sa.pool_recycle = 3600
511 511 #beaker.session.sa.echo = false
512 512
513 513 beaker.session.key = rhodecode
514 514 beaker.session.secret = develop-rc-uytcxaz
515 515 beaker.session.lock_dir = /data_ramdisk/lock
516 516
517 517 ; Secure encrypted cookie. Requires AES and AES python libraries
518 518 ; you must disable beaker.session.secret to use this
519 519 #beaker.session.encrypt_key = key_for_encryption
520 520 #beaker.session.validate_key = validation_key
521 521
522 522 ; Sets session as invalid (also logging out user) if it has not been
523 523 ; accessed for given amount of time in seconds
524 524 beaker.session.timeout = 2592000
525 525 beaker.session.httponly = true
526 526
527 527 ; Path to use for the cookie. Set to prefix if you use prefix middleware
528 528 #beaker.session.cookie_path = /custom_prefix
529 529
530 530 ; Set https secure cookie
531 531 beaker.session.secure = false
532 532
533 533 ; default cookie expiration time in seconds, set to `true` to set expire
534 534 ; at browser close
535 535 #beaker.session.cookie_expires = 3600
536 536
537 537 ; #############################
538 538 ; SEARCH INDEXING CONFIGURATION
539 539 ; #############################
540 540
541 541 ; Full text search indexer is available in rhodecode-tools under
542 542 ; `rhodecode-tools index` command
543 543
544 544 ; WHOOSH Backend, doesn't require additional services to run
545 545 ; it works well with a few dozen repos
546 546 search.module = rhodecode.lib.index.whoosh
547 547 search.location = %(here)s/data/index
548 548
549 549 ; ####################
550 550 ; CHANNELSTREAM CONFIG
551 551 ; ####################
552 552
553 553 ; channelstream enables persistent connections and live notification
554 554 ; in the system. It's also used by the chat system
555 555
556 556 channelstream.enabled = true
557 557
558 558 ; server address for channelstream server on the backend
559 559 channelstream.server = channelstream:9800
560 560
561 561 ; location of the channelstream server from outside world
562 562 ; use ws:// for http or wss:// for https. This address needs to be handled
563 563 ; by external HTTP server such as Nginx or Apache
564 564 ; see Nginx/Apache configuration examples in our docs
565 565 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
566 566 channelstream.secret = ENV_GENERATED
567 567 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
568 568
569 569 ; Internal application path that Javascript uses to connect into.
570 570 ; If you use proxy-prefix the prefix should be added before /_channelstream
571 571 channelstream.proxy_path = /_channelstream
572 572
573 573
574 574 ; ##############################
575 575 ; MAIN RHODECODE DATABASE CONFIG
576 576 ; ##############################
577 577
578 578 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
579 579 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
580 580 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
581 581 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
582 582 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
583 583
584 584 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
585 585
586 586 ; see sqlalchemy docs for other advanced settings
587 587 ; print the sql statements to output
588 588 sqlalchemy.db1.echo = false
589 589
590 590 ; recycle the connections after this amount of seconds
591 591 sqlalchemy.db1.pool_recycle = 3600
592 592
593 593 ; the number of connections to keep open inside the connection pool.
594 594 ; 0 indicates no limit
595 595 ; the general calculus with gevent is:
596 596 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
597 597 ; then increase pool size + max overflow so that they add up to 500.
598 598 #sqlalchemy.db1.pool_size = 5
599 599
600 600 ; The number of connections to allow in connection pool "overflow", that is
601 601 ; connections that can be opened above and beyond the pool_size setting,
602 602 ; which defaults to five.
603 603 #sqlalchemy.db1.max_overflow = 10
604 604
605 605 ; Connection check ping, used to detect broken database connections
606 606 ; could be enabled to better handle cases if MySQL has gone away errors
607 607 #sqlalchemy.db1.ping_connection = true
608 608
609 609 ; ##########
610 610 ; VCS CONFIG
611 611 ; ##########
612 612 vcs.server.enable = true
613 613 vcs.server = vcsserver:10010
614 614
615 615 ; Web server connectivity protocol, responsible for web based VCS operations
616 616 ; Available protocols are:
617 617 ; `http` - use http-rpc backend (default)
618 618 vcs.server.protocol = http
619 619
620 620 ; Push/Pull operations protocol, available options are:
621 621 ; `http` - use http-rpc backend (default)
622 622 vcs.scm_app_implementation = http
623 623
624 624 ; Push/Pull operations hooks protocol, available options are:
625 625 ; `http` - use http-rpc backend (default)
626 626 ; `celery` - use celery based hooks
627 627 vcs.hooks.protocol = http
628 628
629 629 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
630 630 ; accessible via network.
631 631 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
632 632 vcs.hooks.host = *
633 633
634 634 ; Start VCSServer with this instance as a subprocess, useful for development
635 635 vcs.start_server = false
636 636
637 637 ; List of enabled VCS backends, available options are:
638 638 ; `hg` - mercurial
639 639 ; `git` - git
640 640 ; `svn` - subversion
641 641 vcs.backends = hg, git, svn
642 642
643 643 ; Wait this number of seconds before killing connection to the vcsserver
644 644 vcs.connection_timeout = 3600
645 645
646 646 ; Cache flag to cache vcsserver remote calls locally
647 647 ; It uses cache_region `cache_repo`
648 648 vcs.methods.cache = true
649 649
650 650 ; ####################################################
651 651 ; Subversion proxy support (mod_dav_svn)
652 652 ; Maps RhodeCode repo groups into SVN paths for Apache
653 653 ; ####################################################
654 654
655 655 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
656 656 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
657 657 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
658 658 #vcs.svn.compatible_version = 1.8
659 659
660 ; Redis connection settings for svn integrations logic
661 ; This connection string needs to be the same on ce and vcsserver
662 vcs.svn.redis_conn = redis://redis:6379/0
663
660 664 ; Enable SVN proxy of requests over HTTP
661 665 vcs.svn.proxy.enabled = true
662 666
663 667 ; host to connect to running SVN subsystem
664 668 vcs.svn.proxy.host = http://svn:8090
665 669
666 670 ; Enable or disable the config file generation.
667 671 svn.proxy.generate_config = true
668 672
669 673 ; Generate config file with `SVNListParentPath` set to `On`.
670 674 svn.proxy.list_parent_path = true
671 675
672 676 ; Set location and file name of generated config file.
673 677 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
674 678
675 679 ; alternative mod_dav config template. This needs to be a valid mako template
676 680 ; Example template can be found in the source code:
677 681 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
678 682 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
679 683
680 684 ; Used as a prefix to the `Location` block in the generated config file.
681 685 ; In most cases it should be set to `/`.
682 686 svn.proxy.location_root = /
683 687
684 688 ; Command to reload the mod dav svn configuration on change.
685 689 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
686 690 ; Make sure user who runs RhodeCode process is allowed to reload Apache
687 691 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
688 692
689 693 ; If the timeout expires before the reload command finishes, the command will
690 694 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
691 695 #svn.proxy.reload_timeout = 10
692 696
693 697 ; ####################
694 698 ; SSH Support Settings
695 699 ; ####################
696 700
697 701 ; Defines if a custom authorized_keys file should be created and written on
698 702 ; any change user ssh keys. Setting this to false also disables possibility
699 703 ; of adding SSH keys by users from web interface. Super admins can still
700 704 ; manage SSH Keys.
701 705 ssh.generate_authorized_keyfile = true
702 706
703 707 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
704 708 # ssh.authorized_keys_ssh_opts =
705 709
706 710 ; Path to the authorized_keys file where the generate entries are placed.
707 711 ; It is possible to have multiple key files specified in `sshd_config` e.g.
708 712 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
709 713 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
710 714
711 715 ; Command to execute the SSH wrapper. The binary is available in the
712 716 ; RhodeCode installation directory.
713 717 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
714 718 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
715 719 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
716 720
717 721 ; Allow shell when executing the ssh-wrapper command
718 722 ssh.wrapper_cmd_allow_shell = false
719 723
720 724 ; Enables logging, and detailed output send back to the client during SSH
721 725 ; operations. Useful for debugging, shouldn't be used in production.
722 726 ssh.enable_debug_logging = true
723 727
724 728 ; Paths to binary executable, by default they are the names, but we can
725 729 ; override them if we want to use a custom one
726 730 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
727 731 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
728 732 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
729 733
730 734 ; Enables SSH key generator web interface. Disabling this still allows users
731 735 ; to add their own keys.
732 736 ssh.enable_ui_key_generator = true
733 737
734 738 ; Statsd client config, this is used to send metrics to statsd
735 739 ; We recommend setting statsd_exported and scrape them using Prometheus
736 740 #statsd.enabled = false
737 741 #statsd.statsd_host = 0.0.0.0
738 742 #statsd.statsd_port = 8125
739 743 #statsd.statsd_prefix =
740 744 #statsd.statsd_ipv6 = false
741 745
742 746 ; configure logging automatically at server startup set to false
743 747 ; to use the below custom logging config.
744 748 ; RC_LOGGING_FORMATTER
745 749 ; RC_LOGGING_LEVEL
746 750 ; env variables can control the settings for logging in case of autoconfigure
747 751
748 752 #logging.autoconfigure = true
749 753
750 754 ; specify your own custom logging config file to configure logging
751 755 #logging.logging_conf_file = /path/to/custom_logging.ini
752 756
753 757 ; Dummy marker to add new entries after.
754 758 ; Add any custom entries below. Please don't remove this marker.
755 759 custom.conf = 1
756 760
757 761
758 762 ; #####################
759 763 ; LOGGING CONFIGURATION
760 764 ; #####################
761 765
762 766 [loggers]
763 767 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
764 768
765 769 [handlers]
766 770 keys = console, console_sql
767 771
768 772 [formatters]
769 773 keys = generic, json, color_formatter, color_formatter_sql
770 774
771 775 ; #######
772 776 ; LOGGERS
773 777 ; #######
774 778 [logger_root]
775 779 level = NOTSET
776 780 handlers = console
777 781
778 782 [logger_sqlalchemy]
779 783 level = INFO
780 784 handlers = console_sql
781 785 qualname = sqlalchemy.engine
782 786 propagate = 0
783 787
784 788 [logger_beaker]
785 789 level = DEBUG
786 790 handlers =
787 791 qualname = beaker.container
788 792 propagate = 1
789 793
790 794 [logger_rhodecode]
791 795 level = DEBUG
792 796 handlers =
793 797 qualname = rhodecode
794 798 propagate = 1
795 799
796 800 [logger_ssh_wrapper]
797 801 level = DEBUG
798 802 handlers =
799 803 qualname = ssh_wrapper
800 804 propagate = 1
801 805
802 806 [logger_celery]
803 807 level = DEBUG
804 808 handlers =
805 809 qualname = celery
806 810
807 811
808 812 ; ########
809 813 ; HANDLERS
810 814 ; ########
811 815
812 816 [handler_console]
813 817 class = StreamHandler
814 818 args = (sys.stderr, )
815 819 level = DEBUG
816 820 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
817 821 ; This allows sending properly formatted logs to grafana loki or elasticsearch
818 822 formatter = color_formatter
819 823
820 824 [handler_console_sql]
821 825 ; "level = DEBUG" logs SQL queries and results.
822 826 ; "level = INFO" logs SQL queries.
823 827 ; "level = WARN" logs neither. (Recommended for production systems.)
824 828 class = StreamHandler
825 829 args = (sys.stderr, )
826 830 level = WARN
827 831 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
828 832 ; This allows sending properly formatted logs to grafana loki or elasticsearch
829 833 formatter = color_formatter_sql
830 834
831 835 ; ##########
832 836 ; FORMATTERS
833 837 ; ##########
834 838
835 839 [formatter_generic]
836 840 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
837 841 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
838 842 datefmt = %Y-%m-%d %H:%M:%S
839 843
840 844 [formatter_color_formatter]
841 845 class = rhodecode.lib.logging_formatter.ColorFormatter
842 846 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
843 847 datefmt = %Y-%m-%d %H:%M:%S
844 848
845 849 [formatter_color_formatter_sql]
846 850 class = rhodecode.lib.logging_formatter.ColorFormatterSql
847 851 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
848 852 datefmt = %Y-%m-%d %H:%M:%S
849 853
850 854 [formatter_json]
851 855 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
852 856 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,820 +1,824 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = false
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Prefix middleware for RhodeCode.
45 45 ; recommended when using proxy setup.
46 46 ; allows to set RhodeCode under a prefix in server.
47 47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 48 ; And set your prefix like: `prefix = /custom_prefix`
49 49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 50 ; to make your cookies only work on prefix url
51 51 [filter:proxy-prefix]
52 52 use = egg:PasteDeploy#prefix
53 53 prefix = /
54 54
55 55 [app:main]
56 56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 57 ; of this file
58 58 ; Each option in the app:main can be override by an environmental variable
59 59 ;
60 60 ;To override an option:
61 61 ;
62 62 ;RC_<KeyName>
63 63 ;Everything should be uppercase, . and - should be replaced by _.
64 64 ;For example, if you have these configuration settings:
65 65 ;rc_cache.repo_object.backend = foo
66 66 ;can be overridden by
67 67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68 68
69 69 use = egg:rhodecode-enterprise-ce
70 70
71 71 ; enable proxy prefix middleware, defined above
72 72 #filter-with = proxy-prefix
73 73
74 74 ; encryption key used to encrypt social plugin tokens,
75 75 ; remote_urls with credentials etc, if not set it defaults to
76 76 ; `beaker.session.secret`
77 77 #rhodecode.encrypted_values.secret =
78 78
79 79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 81 #rhodecode.encrypted_values.strict = false
82 82
83 83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 84 ; fernet is safer, and we strongly recommend switching to it.
85 85 ; Due to backward compatibility aes is used as default.
86 86 #rhodecode.encrypted_values.algorithm = fernet
87 87
88 88 ; Return gzipped responses from RhodeCode (static files/application)
89 89 gzip_responses = false
90 90
91 91 ; Auto-generate javascript routes file on startup
92 92 generate_js_files = false
93 93
94 94 ; System global default language.
95 95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 96 lang = en
97 97
98 98 ; Perform a full repository scan and import on each server start.
99 99 ; Setting this to true could lead to very long startup time.
100 100 startup.import_repos = false
101 101
102 102 ; URL at which the application is running. This is used for Bootstrapping
103 103 ; requests in context when no web request is available. Used in ishell, or
104 104 ; SSH calls. Set this for events to receive proper url for SSH calls.
105 105 app.base_url = http://rhodecode.local
106 106
107 107 ; Host at which the Service API is running.
108 108 app.service_api.host = http://rhodecode.local:10020
109 109
110 110 ; Secret for Service API authentication.
111 111 app.service_api.token =
112 112
113 113 ; Unique application ID. Should be a random unique string for security.
114 114 app_instance_uuid = rc-production
115 115
116 116 ; Cut off limit for large diffs (size in bytes). If overall diff size on
117 117 ; commit, or pull request exceeds this limit this diff will be displayed
118 118 ; partially. E.g 512000 == 512Kb
119 119 cut_off_limit_diff = 512000
120 120
121 121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
122 122 ; file inside diff which exceeds this limit will be displayed partially.
123 123 ; E.g 128000 == 128Kb
124 124 cut_off_limit_file = 128000
125 125
126 126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
127 127 vcs_full_cache = true
128 128
129 129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
130 130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
131 131 force_https = false
132 132
133 133 ; use Strict-Transport-Security headers
134 134 use_htsts = false
135 135
136 136 ; Set to true if your repos are exposed using the dumb protocol
137 137 git_update_server_info = false
138 138
139 139 ; RSS/ATOM feed options
140 140 rss_cut_off_limit = 256000
141 141 rss_items_per_page = 10
142 142 rss_include_diff = false
143 143
144 144 ; gist URL alias, used to create nicer urls for gist. This should be an
145 145 ; url that does rewrites to _admin/gists/{gistid}.
146 146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
147 147 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
148 148 gist_alias_url =
149 149
150 150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
151 151 ; used for access.
152 152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
153 153 ; came from the logged-in user who owns this authentication token.
154 154 ; Additionally @TOKEN syntax can be used to bound the view to specific
155 155 ; authentication token. Such view would be only accessible when used together
156 156 ; with this authentication token
157 157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
158 158 ; The list should be "," separated and on a single line.
159 159 ; Most common views to enable:
160 160
161 161 # RepoCommitsView:repo_commit_download
162 162 # RepoCommitsView:repo_commit_patch
163 163 # RepoCommitsView:repo_commit_raw
164 164 # RepoCommitsView:repo_commit_raw@TOKEN
165 165 # RepoFilesView:repo_files_diff
166 166 # RepoFilesView:repo_archivefile
167 167 # RepoFilesView:repo_file_raw
168 168 # GistView:*
169 169 api_access_controllers_whitelist =
170 170
171 171 ; Default encoding used to convert from and to unicode
172 172 ; can be also a comma separated list of encoding in case of mixed encodings
173 173 default_encoding = UTF-8
174 174
175 175 ; instance-id prefix
176 176 ; a prefix key for this instance used for cache invalidation when running
177 177 ; multiple instances of RhodeCode, make sure it's globally unique for
178 178 ; all running RhodeCode instances. Leave empty if you don't use it
179 179 instance_id =
180 180
181 181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
182 182 ; of an authentication plugin also if it is disabled by it's settings.
183 183 ; This could be useful if you are unable to log in to the system due to broken
184 184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
185 185 ; module to log in again and fix the settings.
186 186 ; Available builtin plugin IDs (hash is part of the ID):
187 187 ; egg:rhodecode-enterprise-ce#rhodecode
188 188 ; egg:rhodecode-enterprise-ce#pam
189 189 ; egg:rhodecode-enterprise-ce#ldap
190 190 ; egg:rhodecode-enterprise-ce#jasig_cas
191 191 ; egg:rhodecode-enterprise-ce#headers
192 192 ; egg:rhodecode-enterprise-ce#crowd
193 193
194 194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
195 195
196 196 ; Flag to control loading of legacy plugins in py:/path format
197 197 auth_plugin.import_legacy_plugins = true
198 198
199 199 ; alternative return HTTP header for failed authentication. Default HTTP
200 200 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
201 201 ; handling that causing a series of failed authentication calls.
202 202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
203 203 ; This will be served instead of default 401 on bad authentication
204 204 auth_ret_code =
205 205
206 206 ; use special detection method when serving auth_ret_code, instead of serving
207 207 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
208 208 ; and then serve auth_ret_code to clients
209 209 auth_ret_code_detection = false
210 210
211 211 ; locking return code. When repository is locked return this HTTP code. 2XX
212 212 ; codes don't break the transactions while 4XX codes do
213 213 lock_ret_code = 423
214 214
215 215 ; Filesystem location where repositories should be stored
216 216 repo_store.path = /var/opt/rhodecode_repo_store
217 217
218 218 ; allows to setup custom hooks in settings page
219 219 allow_custom_hooks_settings = true
220 220
221 221 ; Generated license token required for EE edition license.
222 222 ; New generated token value can be found in Admin > settings > license page.
223 223 license_token =
224 224
225 225 ; This flag hides sensitive information on the license page such as token, and license data
226 226 license.hide_license_info = false
227 227
228 228 ; supervisor connection uri, for managing supervisor and logs.
229 229 supervisor.uri =
230 230
231 231 ; supervisord group name/id we only want this RC instance to handle
232 232 supervisor.group_id = prod
233 233
234 234 ; Display extended labs settings
235 235 labs_settings_active = true
236 236
237 237 ; Custom exception store path, defaults to TMPDIR
238 238 ; This is used to store exception from RhodeCode in shared directory
239 239 #exception_tracker.store_path =
240 240
241 241 ; Send email with exception details when it happens
242 242 #exception_tracker.send_email = false
243 243
244 244 ; Comma separated list of recipients for exception emails,
245 245 ; e.g admin@rhodecode.com,devops@rhodecode.com
246 246 ; Can be left empty, then emails will be sent to ALL super-admins
247 247 #exception_tracker.send_email_recipients =
248 248
249 249 ; optional prefix to Add to email Subject
250 250 #exception_tracker.email_prefix = [RHODECODE ERROR]
251 251
252 252 ; File store configuration. This is used to store and serve uploaded files
253 253 file_store.enabled = true
254 254
255 255 ; Storage backend, available options are: local
256 256 file_store.backend = local
257 257
258 258 ; path to store the uploaded binaries and artifacts
259 259 file_store.storage_path = /var/opt/rhodecode_data/file_store
260 260
261 261
262 262 ; Redis url to acquire/check generation of archives locks
263 263 archive_cache.locking.url = redis://redis:6379/1
264 264
265 265 ; Storage backend, only 'filesystem' and 'objectstore' are available now
266 266 archive_cache.backend.type = filesystem
267 267
268 268 ; url for s3 compatible storage that allows to upload artifacts
269 269 ; e.g http://minio:9000
270 270 archive_cache.objectstore.url = http://s3-minio:9000
271 271
272 272 ; key for s3 auth
273 273 archive_cache.objectstore.key = key
274 274
275 275 ; secret for s3 auth
276 276 archive_cache.objectstore.secret = secret
277 277
278 278 ; region for s3 storage
279 279 archive_cache.objectstore.region = eu-central-1
280 280
281 281 ; number of sharded buckets to create to distribute archives across
282 282 ; default is 8 shards
283 283 archive_cache.objectstore.bucket_shards = 8
284 284
285 285 ; a top-level bucket to put all other shards in
286 286 ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
287 287 archive_cache.objectstore.bucket = rhodecode-archive-cache
288 288
289 289 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
290 290 archive_cache.objectstore.retry = false
291 291
292 292 ; number of seconds to wait for next try using retry
293 293 archive_cache.objectstore.retry_backoff = 1
294 294
295 295 ; how many tries to do a retry fetch from this backend
296 296 archive_cache.objectstore.retry_attempts = 10
297 297
298 298 ; Default is $cache_dir/archive_cache if not set
299 299 ; Generated repo archives will be cached at this location
300 300 ; and served from the cache during subsequent requests for the same archive of
301 301 ; the repository. This path is important to be shared across filesystems and with
302 302 ; RhodeCode and vcsserver
303 303 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
304 304
305 305 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
306 306 archive_cache.filesystem.cache_size_gb = 40
307 307
308 308 ; Eviction policy used to clear out after cache_size_gb limit is reached
309 309 archive_cache.filesystem.eviction_policy = least-recently-stored
310 310
311 311 ; By default cache uses sharding technique, this specifies how many shards are there
312 312 ; default is 8 shards
313 313 archive_cache.filesystem.cache_shards = 8
314 314
315 315 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
316 316 archive_cache.filesystem.retry = false
317 317
318 318 ; number of seconds to wait for next try using retry
319 319 archive_cache.filesystem.retry_backoff = 1
320 320
321 321 ; how many tries do do a retry fetch from this backend
322 322 archive_cache.filesystem.retry_attempts = 10
323 323
324 324
325 325 ; #############
326 326 ; CELERY CONFIG
327 327 ; #############
328 328
329 329 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
330 330
331 331 use_celery = true
332 332
333 333 ; path to store schedule database
334 334 #celerybeat-schedule.path =
335 335
336 336 ; connection url to the message broker (default redis)
337 337 celery.broker_url = redis://redis:6379/8
338 338
339 339 ; results backend to get results for (default redis)
340 340 celery.result_backend = redis://redis:6379/8
341 341
342 342 ; rabbitmq example
343 343 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
344 344
345 345 ; maximum tasks to execute before worker restart
346 346 celery.max_tasks_per_child = 20
347 347
348 348 ; tasks will never be sent to the queue, but executed locally instead.
349 349 celery.task_always_eager = false
350 350
351 351 ; #############
352 352 ; DOGPILE CACHE
353 353 ; #############
354 354
355 355 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
356 356 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
357 357 cache_dir = /var/opt/rhodecode_data
358 358
359 359 ; *********************************************
360 360 ; `sql_cache_short` cache for heavy SQL queries
361 361 ; Only supported backend is `memory_lru`
362 362 ; *********************************************
363 363 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
364 364 rc_cache.sql_cache_short.expiration_time = 30
365 365
366 366
367 367 ; *****************************************************
368 368 ; `cache_repo_longterm` cache for repo object instances
369 369 ; Only supported backend is `memory_lru`
370 370 ; *****************************************************
371 371 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
372 372 ; by default we use 30 Days, cache is still invalidated on push
373 373 rc_cache.cache_repo_longterm.expiration_time = 2592000
374 374 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
375 375 rc_cache.cache_repo_longterm.max_size = 10000
376 376
377 377
378 378 ; *********************************************
379 379 ; `cache_general` cache for general purpose use
380 380 ; for simplicity use rc.file_namespace backend,
381 381 ; for performance and scale use rc.redis
382 382 ; *********************************************
383 383 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
384 384 rc_cache.cache_general.expiration_time = 43200
385 385 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
386 386 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
387 387
388 388 ; alternative `cache_general` redis backend with distributed lock
389 389 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
390 390 #rc_cache.cache_general.expiration_time = 300
391 391
392 392 ; redis_expiration_time needs to be greater than expiration_time
393 393 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
394 394
395 395 #rc_cache.cache_general.arguments.host = localhost
396 396 #rc_cache.cache_general.arguments.port = 6379
397 397 #rc_cache.cache_general.arguments.db = 0
398 398 #rc_cache.cache_general.arguments.socket_timeout = 30
399 399 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
400 400 #rc_cache.cache_general.arguments.distributed_lock = true
401 401
402 402 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
403 403 #rc_cache.cache_general.arguments.lock_auto_renewal = true
404 404
405 405 ; *************************************************
406 406 ; `cache_perms` cache for permission tree, auth TTL
407 407 ; for simplicity use rc.file_namespace backend,
408 408 ; for performance and scale use rc.redis
409 409 ; *************************************************
410 410 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
411 411 rc_cache.cache_perms.expiration_time = 3600
412 412 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
413 413 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
414 414
415 415 ; alternative `cache_perms` redis backend with distributed lock
416 416 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
417 417 #rc_cache.cache_perms.expiration_time = 300
418 418
419 419 ; redis_expiration_time needs to be greater than expiration_time
420 420 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
421 421
422 422 #rc_cache.cache_perms.arguments.host = localhost
423 423 #rc_cache.cache_perms.arguments.port = 6379
424 424 #rc_cache.cache_perms.arguments.db = 0
425 425 #rc_cache.cache_perms.arguments.socket_timeout = 30
426 426 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
427 427 #rc_cache.cache_perms.arguments.distributed_lock = true
428 428
429 429 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
430 430 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
431 431
432 432 ; ***************************************************
433 433 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
434 434 ; for simplicity use rc.file_namespace backend,
435 435 ; for performance and scale use rc.redis
436 436 ; ***************************************************
437 437 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
438 438 rc_cache.cache_repo.expiration_time = 2592000
439 439 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
440 440 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
441 441
442 442 ; alternative `cache_repo` redis backend with distributed lock
443 443 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
444 444 #rc_cache.cache_repo.expiration_time = 2592000
445 445
446 446 ; redis_expiration_time needs to be greater than expiration_time
447 447 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
448 448
449 449 #rc_cache.cache_repo.arguments.host = localhost
450 450 #rc_cache.cache_repo.arguments.port = 6379
451 451 #rc_cache.cache_repo.arguments.db = 1
452 452 #rc_cache.cache_repo.arguments.socket_timeout = 30
453 453 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
454 454 #rc_cache.cache_repo.arguments.distributed_lock = true
455 455
456 456 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
457 457 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
458 458
459 459 ; ##############
460 460 ; BEAKER SESSION
461 461 ; ##############
462 462
463 463 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
464 464 ; types are file, ext:redis, ext:database, ext:memcached
465 465 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
466 466 #beaker.session.type = file
467 467 #beaker.session.data_dir = %(here)s/data/sessions
468 468
469 469 ; Redis based sessions
470 470 beaker.session.type = ext:redis
471 471 beaker.session.url = redis://redis:6379/2
472 472
473 473 ; DB based session, fast, and allows easy management over logged in users
474 474 #beaker.session.type = ext:database
475 475 #beaker.session.table_name = db_session
476 476 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
477 477 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
478 478 #beaker.session.sa.pool_recycle = 3600
479 479 #beaker.session.sa.echo = false
480 480
481 481 beaker.session.key = rhodecode
482 482 beaker.session.secret = production-rc-uytcxaz
483 483 beaker.session.lock_dir = /data_ramdisk/lock
484 484
485 485 ; Secure encrypted cookie. Requires AES and AES python libraries
486 486 ; you must disable beaker.session.secret to use this
487 487 #beaker.session.encrypt_key = key_for_encryption
488 488 #beaker.session.validate_key = validation_key
489 489
490 490 ; Sets session as invalid (also logging out user) if it has not been
491 491 ; accessed for given amount of time in seconds
492 492 beaker.session.timeout = 2592000
493 493 beaker.session.httponly = true
494 494
495 495 ; Path to use for the cookie. Set to prefix if you use prefix middleware
496 496 #beaker.session.cookie_path = /custom_prefix
497 497
498 498 ; Set https secure cookie
499 499 beaker.session.secure = false
500 500
501 501 ; default cookie expiration time in seconds, set to `true` to set expire
502 502 ; at browser close
503 503 #beaker.session.cookie_expires = 3600
504 504
505 505 ; #############################
506 506 ; SEARCH INDEXING CONFIGURATION
507 507 ; #############################
508 508
509 509 ; Full text search indexer is available in rhodecode-tools under
510 510 ; `rhodecode-tools index` command
511 511
512 512 ; WHOOSH Backend, doesn't require additional services to run
513 513 ; it works well with a few dozen repos
514 514 search.module = rhodecode.lib.index.whoosh
515 515 search.location = %(here)s/data/index
516 516
517 517 ; ####################
518 518 ; CHANNELSTREAM CONFIG
519 519 ; ####################
520 520
521 521 ; channelstream enables persistent connections and live notification
522 522 ; in the system. It's also used by the chat system
523 523
524 524 channelstream.enabled = true
525 525
526 526 ; server address for channelstream server on the backend
527 527 channelstream.server = channelstream:9800
528 528
529 529 ; location of the channelstream server from outside world
530 530 ; use ws:// for http or wss:// for https. This address needs to be handled
531 531 ; by external HTTP server such as Nginx or Apache
532 532 ; see Nginx/Apache configuration examples in our docs
533 533 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
534 534 channelstream.secret = ENV_GENERATED
535 535 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
536 536
537 537 ; Internal application path that Javascript uses to connect into.
538 538 ; If you use proxy-prefix the prefix should be added before /_channelstream
539 539 channelstream.proxy_path = /_channelstream
540 540
541 541
542 542 ; ##############################
543 543 ; MAIN RHODECODE DATABASE CONFIG
544 544 ; ##############################
545 545
546 546 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
547 547 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
548 548 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
549 549 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
550 550 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
551 551
552 552 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
553 553
554 554 ; see sqlalchemy docs for other advanced settings
555 555 ; print the sql statements to output
556 556 sqlalchemy.db1.echo = false
557 557
558 558 ; recycle the connections after this amount of seconds
559 559 sqlalchemy.db1.pool_recycle = 3600
560 560
561 561 ; the number of connections to keep open inside the connection pool.
562 562 ; 0 indicates no limit
563 563 ; the general calculus with gevent is:
564 564 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
565 565 ; then increase pool size + max overflow so that they add up to 500.
566 566 #sqlalchemy.db1.pool_size = 5
567 567
568 568 ; The number of connections to allow in connection pool "overflow", that is
569 569 ; connections that can be opened above and beyond the pool_size setting,
570 570 ; which defaults to five.
571 571 #sqlalchemy.db1.max_overflow = 10
572 572
573 573 ; Connection check ping, used to detect broken database connections
574 574 ; could be enabled to better handle cases of MySQL "has gone away" errors
575 575 #sqlalchemy.db1.ping_connection = true
576 576
577 577 ; ##########
578 578 ; VCS CONFIG
579 579 ; ##########
580 580 vcs.server.enable = true
581 581 vcs.server = vcsserver:10010
582 582
583 583 ; Web server connectivity protocol, responsible for web based VCS operations
584 584 ; Available protocols are:
585 585 ; `http` - use http-rpc backend (default)
586 586 vcs.server.protocol = http
587 587
588 588 ; Push/Pull operations protocol, available options are:
589 589 ; `http` - use http-rpc backend (default)
590 590 vcs.scm_app_implementation = http
591 591
592 592 ; Push/Pull operations hooks protocol, available options are:
593 593 ; `http` - use http-rpc backend (default)
594 594 ; `celery` - use celery based hooks
595 595 vcs.hooks.protocol = http
596 596
597 597 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
598 598 ; accessible via network.
599 599 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
600 600 vcs.hooks.host = *
601 601
602 602 ; Start VCSServer with this instance as a subprocess, useful for development
603 603 vcs.start_server = false
604 604
605 605 ; List of enabled VCS backends, available options are:
606 606 ; `hg` - mercurial
607 607 ; `git` - git
608 608 ; `svn` - subversion
609 609 vcs.backends = hg, git, svn
610 610
611 611 ; Wait this number of seconds before killing connection to the vcsserver
612 612 vcs.connection_timeout = 3600
613 613
614 614 ; Cache flag to cache vcsserver remote calls locally
615 615 ; It uses cache_region `cache_repo`
616 616 vcs.methods.cache = true
617 617
618 618 ; ####################################################
619 619 ; Subversion proxy support (mod_dav_svn)
620 620 ; Maps RhodeCode repo groups into SVN paths for Apache
621 621 ; ####################################################
622 622
623 623 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
624 624 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
625 625 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
626 626 #vcs.svn.compatible_version = 1.8
627 627
628 ; Redis connection settings for svn integrations logic
629 ; This connection string needs to be the same on ce and vcsserver
630 vcs.svn.redis_conn = redis://redis:6379/0
631
628 632 ; Enable SVN proxy of requests over HTTP
629 633 vcs.svn.proxy.enabled = true
630 634
631 635 ; host to connect to running SVN subsystem
632 636 vcs.svn.proxy.host = http://svn:8090
633 637
634 638 ; Enable or disable the config file generation.
635 639 svn.proxy.generate_config = true
636 640
637 641 ; Generate config file with `SVNListParentPath` set to `On`.
638 642 svn.proxy.list_parent_path = true
639 643
640 644 ; Set location and file name of generated config file.
641 645 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
642 646
643 647 ; alternative mod_dav config template. This needs to be a valid mako template
644 648 ; Example template can be found in the source code:
645 649 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
646 650 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
647 651
648 652 ; Used as a prefix to the `Location` block in the generated config file.
649 653 ; In most cases it should be set to `/`.
650 654 svn.proxy.location_root = /
651 655
652 656 ; Command to reload the mod dav svn configuration on change.
653 657 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
654 658 ; Make sure user who runs RhodeCode process is allowed to reload Apache
655 659 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
656 660
657 661 ; If the timeout expires before the reload command finishes, the command will
658 662 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
659 663 #svn.proxy.reload_timeout = 10
660 664
661 665 ; ####################
662 666 ; SSH Support Settings
663 667 ; ####################
664 668
665 669 ; Defines if a custom authorized_keys file should be created and written on
666 670 ; any change user ssh keys. Setting this to false also disables possibility
667 671 ; of adding SSH keys by users from web interface. Super admins can still
668 672 ; manage SSH Keys.
669 673 ssh.generate_authorized_keyfile = true
670 674
671 675 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
672 676 # ssh.authorized_keys_ssh_opts =
673 677
674 678 ; Path to the authorized_keys file where the generate entries are placed.
675 679 ; It is possible to have multiple key files specified in `sshd_config` e.g.
676 680 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
677 681 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
678 682
679 683 ; Command to execute the SSH wrapper. The binary is available in the
680 684 ; RhodeCode installation directory.
681 685 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
682 686 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
683 687 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
684 688
685 689 ; Allow shell when executing the ssh-wrapper command
686 690 ssh.wrapper_cmd_allow_shell = false
687 691
688 692 ; Enables logging, and detailed output send back to the client during SSH
689 693 ; operations. Useful for debugging, shouldn't be used in production.
690 694 ssh.enable_debug_logging = false
691 695
692 696 ; Paths to binary executable, by default they are the names, but we can
693 697 ; override them if we want to use a custom one
694 698 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
695 699 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
696 700 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
697 701
698 702 ; Enables SSH key generator web interface. Disabling this still allows users
699 703 ; to add their own keys.
700 704 ssh.enable_ui_key_generator = true
701 705
702 706 ; Statsd client config, this is used to send metrics to statsd
703 707 ; We recommend setting statsd_exported and scrape them using Prometheus
704 708 #statsd.enabled = false
705 709 #statsd.statsd_host = 0.0.0.0
706 710 #statsd.statsd_port = 8125
707 711 #statsd.statsd_prefix =
708 712 #statsd.statsd_ipv6 = false
709 713
710 714 ; configure logging automatically at server startup set to false
711 715 ; to use the below custom logging config.
712 716 ; RC_LOGGING_FORMATTER
713 717 ; RC_LOGGING_LEVEL
714 718 ; env variables can control the settings for logging in case of autoconfigure
715 719
716 720 #logging.autoconfigure = true
717 721
718 722 ; specify your own custom logging config file to configure logging
719 723 #logging.logging_conf_file = /path/to/custom_logging.ini
720 724
721 725 ; Dummy marker to add new entries after.
722 726 ; Add any custom entries below. Please don't remove this marker.
723 727 custom.conf = 1
724 728
725 729
726 730 ; #####################
727 731 ; LOGGING CONFIGURATION
728 732 ; #####################
729 733
730 734 [loggers]
731 735 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
732 736
733 737 [handlers]
734 738 keys = console, console_sql
735 739
736 740 [formatters]
737 741 keys = generic, json, color_formatter, color_formatter_sql
738 742
739 743 ; #######
740 744 ; LOGGERS
741 745 ; #######
742 746 [logger_root]
743 747 level = NOTSET
744 748 handlers = console
745 749
746 750 [logger_sqlalchemy]
747 751 level = INFO
748 752 handlers = console_sql
749 753 qualname = sqlalchemy.engine
750 754 propagate = 0
751 755
752 756 [logger_beaker]
753 757 level = DEBUG
754 758 handlers =
755 759 qualname = beaker.container
756 760 propagate = 1
757 761
758 762 [logger_rhodecode]
759 763 level = DEBUG
760 764 handlers =
761 765 qualname = rhodecode
762 766 propagate = 1
763 767
764 768 [logger_ssh_wrapper]
765 769 level = DEBUG
766 770 handlers =
767 771 qualname = ssh_wrapper
768 772 propagate = 1
769 773
770 774 [logger_celery]
771 775 level = DEBUG
772 776 handlers =
773 777 qualname = celery
774 778
775 779
776 780 ; ########
777 781 ; HANDLERS
778 782 ; ########
779 783
780 784 [handler_console]
781 785 class = StreamHandler
782 786 args = (sys.stderr, )
783 787 level = INFO
784 788 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
785 789 ; This allows sending properly formatted logs to grafana loki or elasticsearch
786 790 formatter = generic
787 791
788 792 [handler_console_sql]
789 793 ; "level = DEBUG" logs SQL queries and results.
790 794 ; "level = INFO" logs SQL queries.
791 795 ; "level = WARN" logs neither. (Recommended for production systems.)
792 796 class = StreamHandler
793 797 args = (sys.stderr, )
794 798 level = WARN
795 799 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
796 800 ; This allows sending properly formatted logs to grafana loki or elasticsearch
797 801 formatter = generic
798 802
799 803 ; ##########
800 804 ; FORMATTERS
801 805 ; ##########
802 806
803 807 [formatter_generic]
804 808 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
805 809 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
806 810 datefmt = %Y-%m-%d %H:%M:%S
807 811
808 812 [formatter_color_formatter]
809 813 class = rhodecode.lib.logging_formatter.ColorFormatter
810 814 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
811 815 datefmt = %Y-%m-%d %H:%M:%S
812 816
813 817 [formatter_color_formatter_sql]
814 818 class = rhodecode.lib.logging_formatter.ColorFormatterSql
815 819 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
816 820 datefmt = %Y-%m-%d %H:%M:%S
817 821
818 822 [formatter_json]
819 823 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
820 824 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,223 +1,224 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import tempfile
21 21 import logging
22 22
23 23 from pyramid.settings import asbool
24 24
25 25 from rhodecode.config.settings_maker import SettingsMaker
26 26 from rhodecode.config import utils as config_utils
27 27
28 28 log = logging.getLogger(__name__)
29 29
30 30
31 31 def sanitize_settings_and_apply_defaults(global_config, settings):
32 32 """
33 33 Applies settings defaults and does all type conversion.
34 34
35 35 We would move all settings parsing and preparation into this place, so that
36 36 we have only one place left which deals with this part. The remaining parts
37 37 of the application would start to rely fully on well-prepared settings.
38 38
39 39 This piece would later be split up per topic to avoid a big fat monster
40 40 function.
41 41 """
42 42 jn = os.path.join
43 43
44 44 global_settings_maker = SettingsMaker(global_config)
45 45 global_settings_maker.make_setting('debug', default=False, parser='bool')
46 46 debug_enabled = asbool(global_config.get('debug'))
47 47
48 48 settings_maker = SettingsMaker(settings)
49 49
50 50 settings_maker.make_setting(
51 51 'logging.autoconfigure',
52 52 default=False,
53 53 parser='bool')
54 54
55 55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
56 56 settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
57 57
58 58 # Default includes, possible to change as a user
59 59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
60 60 log.debug(
61 61 "Using the following pyramid.includes: %s",
62 62 pyramid_includes)
63 63
64 64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
65 65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
66 66
67 67 if 'mako.default_filters' not in settings:
68 68 # set custom default filters if we don't have it defined
69 69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
70 70 settings['mako.default_filters'] = 'h_filter'
71 71
72 72 if 'mako.directories' not in settings:
73 73 mako_directories = settings.setdefault('mako.directories', [
74 74 # Base templates of the original application
75 75 'rhodecode:templates',
76 76 ])
77 77 log.debug(
78 78 "Using the following Mako template directories: %s",
79 79 mako_directories)
80 80
81 81 # NOTE(marcink): fix redis requirement for schema of connection since 3.X
82 82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
83 83 raw_url = settings['beaker.session.url']
84 84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
85 85 settings['beaker.session.url'] = 'redis://' + raw_url
86 86
87 87 settings_maker.make_setting('__file__', global_config.get('__file__'))
88 88
89 89 # TODO: johbo: Re-think this, usually the call to config.include
90 90 # should allow to pass in a prefix.
91 91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
92 92
93 93 # Sanitize generic settings.
94 94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
95 95 settings_maker.make_setting('gzip_responses', False, parser='bool')
96 96 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
97 97
98 98 # statsd
99 99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
100 100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
101 101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
102 102 settings_maker.make_setting('statsd.statsd_prefix', '')
103 103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
104 104
105 105 settings_maker.make_setting('vcs.svn.compatible_version', '')
106 settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0')
106 107 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
107 108 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
108 109 settings_maker.make_setting('vcs.hooks.protocol', 'http')
109 110 settings_maker.make_setting('vcs.hooks.host', '*')
110 111 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
111 112 settings_maker.make_setting('vcs.server', '')
112 113 settings_maker.make_setting('vcs.server.protocol', 'http')
113 114 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
114 115 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
115 116 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
116 117 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
117 118 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
118 119
119 120 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
120 121
121 122 # repo_store path
122 123 settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
123 124 # Support legacy values of vcs.scm_app_implementation. Legacy
124 125 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
125 126 # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'.
126 127 scm_app_impl = settings['vcs.scm_app_implementation']
127 128 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
128 129 settings['vcs.scm_app_implementation'] = 'http'
129 130
130 131 settings_maker.make_setting('appenlight', False, parser='bool')
131 132
132 133 temp_store = tempfile.gettempdir()
133 134 tmp_cache_dir = jn(temp_store, 'rc_cache')
134 135
135 136 # save default, cache dir, and use it for all backends later.
136 137 default_cache_dir = settings_maker.make_setting(
137 138 'cache_dir',
138 139 default=tmp_cache_dir, default_when_empty=True,
139 140 parser='dir:ensured')
140 141
141 142 # exception store cache
142 143 settings_maker.make_setting(
143 144 'exception_tracker.store_path',
144 145 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
145 146 parser='dir:ensured'
146 147 )
147 148
148 149 settings_maker.make_setting(
149 150 'celerybeat-schedule.path',
150 151 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
151 152 parser='file:ensured'
152 153 )
153 154
154 155 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
155 156 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
156 157
157 158 # sessions, ensure file since no-value is memory
158 159 settings_maker.make_setting('beaker.session.type', 'file')
159 160 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
160 161
161 162 # cache_general
162 163 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
163 164 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
164 165 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
165 166
166 167 # cache_perms
167 168 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
168 169 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
169 170 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
170 171
171 172 # cache_repo
172 173 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
173 174 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
174 175 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
175 176
176 177 # cache_license
177 178 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
178 179 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
179 180 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
180 181
181 182 # cache_repo_longterm memory, 96H
182 183 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
183 184 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
184 185 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
185 186
186 187 # sql_cache_short
187 188 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
188 189 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
189 190 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
190 191
191 192 # archive_cache
192 193 settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1')
193 194 settings_maker.make_setting('archive_cache.backend.type', 'filesystem')
194 195
195 196 settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
196 197 settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int')
197 198 settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
198 199 settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored')
199 200
200 201 settings_maker.make_setting('archive_cache.filesystem.retry', False, parser='bool')
201 202 settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int')
202 203 settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int')
203 204
204 205 settings_maker.make_setting('archive_cache.objectstore.url', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
205 206 settings_maker.make_setting('archive_cache.objectstore.key', '')
206 207 settings_maker.make_setting('archive_cache.objectstore.secret', '')
207 208 settings_maker.make_setting('archive_cache.objectstore.region', 'eu-central-1')
208 209 settings_maker.make_setting('archive_cache.objectstore.bucket', 'rhodecode-archive-cache', default_when_empty=True,)
209 210 settings_maker.make_setting('archive_cache.objectstore.bucket_shards', 8, parser='int')
210 211
211 212 settings_maker.make_setting('archive_cache.objectstore.cache_size_gb', 10, parser='float')
212 213 settings_maker.make_setting('archive_cache.objectstore.eviction_policy', 'least-recently-stored')
213 214
214 215 settings_maker.make_setting('archive_cache.objectstore.retry', False, parser='bool')
215 216 settings_maker.make_setting('archive_cache.objectstore.retry_backoff', 1, parser='int')
216 217 settings_maker.make_setting('archive_cache.objectstore.retry_attempts', 10, parser='int')
217 218
218 219 settings_maker.env_expand()
219 220
220 221 # configure instance id
221 222 config_utils.set_instance_id(settings)
222 223
223 224 return settings
@@ -1,355 +1,354 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20
21 21 from webhelpers2.html.builder import literal
22 22 from webhelpers2.html.tags import link_to
23 23
24 24 from rhodecode.lib.utils2 import AttributeDict
25 25 from rhodecode.lib.vcs.backends.base import BaseCommit
26 26 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
27 27
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31
32 32 def action_parser(request, user_log, feed=False, parse_cs=False):
33 33 """
34 34 This helper will action_map the specified string action into translated
35 35 fancy names with icons and links
36 36
37 37 :param user_log: user log instance
38 38 :param feed: use output for feeds (no html and fancy icons)
39 39 :param parse_cs: parse Changesets into VCS instances
40 40 """
41 41 if user_log.version == 'v2':
42 42 ap = AuditLogParser(request, user_log)
43 43 return ap.callbacks()
44 44 else:
45 45 # old style
46 46 ap = ActionParser(request, user_log, feed=False, parse_commits=False)
47 47 return ap.callbacks()
48 48
49 49
50 50 class ActionParser(object):
51 51
52 52 commits_limit = 3 # display this amount always
53 53 commits_top_limit = 50 # show up to this amount of commits hidden
54 54
55 55 def __init__(self, request, user_log, feed=False, parse_commits=False):
56 56 self.user_log = user_log
57 57 self.feed = feed
58 58 self.parse_commits = parse_commits
59 59 self.request = request
60 60
61 61 self.action = user_log.action
62 62 self.action_params = ' '
63 63 x = self.action.split(':', 1)
64 64 if len(x) > 1:
65 65 self.action, self.action_params = x
66 66
67 67 def callbacks(self):
68 68 action_str = self.action_map.get(self.action, self.action)
69 69 if self.feed:
70 70 action = action_str[0].replace('[', '').replace(']', '')
71 71 else:
72 72 action = action_str[0]\
73 73 .replace('[', '<span class="journal_highlight">')\
74 74 .replace(']', '</span>')
75 75
76 76 action_params_func = _no_params_func
77 77 if callable(action_str[1]):
78 78 action_params_func = action_str[1]
79 79
80 80 # returned callbacks we need to call to get
81 81 return [
82 82 lambda: literal(action), action_params_func,
83 83 self.action_parser_icon]
84 84
85 85 @property
86 86 def action_map(self):
87 87 _ = self.request.translate
88 88 # action : translated str, callback(extractor), icon
89 89 action_map = {
90 90 'user_deleted_repo': (
91 91 _('[deleted] repository'),
92 92 None, 'icon-trash'),
93 93 'user_created_repo': (
94 94 _('[created] repository'),
95 95 None, 'icon-plus icon-plus-colored'),
96 96 'user_created_fork': (
97 97 _('[created] repository as fork'),
98 98 None, 'icon-code-fork'),
99 99 'user_forked_repo': (
100 100 _('[forked] repository'),
101 101 self.get_fork_name, 'icon-code-fork'),
102 102 'user_updated_repo': (
103 103 _('[updated] repository'),
104 104 None, 'icon-pencil icon-pencil-colored'),
105 105 'user_downloaded_archive': (
106 106 _('[downloaded] archive from repository'),
107 107 self.get_archive_name, 'icon-download-alt'),
108 108 'admin_deleted_repo': (
109 109 _('[delete] repository'),
110 110 None, 'icon-trash'),
111 111 'admin_created_repo': (
112 112 _('[created] repository'),
113 113 None, 'icon-plus icon-plus-colored'),
114 114 'admin_forked_repo': (
115 115 _('[forked] repository'),
116 116 None, 'icon-code-fork icon-fork-colored'),
117 117 'admin_updated_repo': (
118 118 _('[updated] repository'),
119 119 None, 'icon-pencil icon-pencil-colored'),
120 120 'admin_created_user': (
121 121 _('[created] user'),
122 122 self.get_user_name, 'icon-user icon-user-colored'),
123 123 'admin_updated_user': (
124 124 _('[updated] user'),
125 125 self.get_user_name, 'icon-user icon-user-colored'),
126 126 'admin_created_users_group': (
127 127 _('[created] user group'),
128 128 self.get_users_group, 'icon-pencil icon-pencil-colored'),
129 129 'admin_updated_users_group': (
130 130 _('[updated] user group'),
131 131 self.get_users_group, 'icon-pencil icon-pencil-colored'),
132 132 'user_commented_revision': (
133 133 _('[commented] on commit in repository'),
134 134 self.get_cs_links, 'icon-comment icon-comment-colored'),
135 135 'user_commented_pull_request': (
136 136 _('[commented] on pull request for'),
137 137 self.get_pull_request, 'icon-comment icon-comment-colored'),
138 138 'user_closed_pull_request': (
139 139 _('[closed] pull request for'),
140 140 self.get_pull_request, 'icon-check'),
141 141 'user_merged_pull_request': (
142 142 _('[merged] pull request for'),
143 143 self.get_pull_request, 'icon-check'),
144 144 'push': (
145 145 _('[pushed] into'),
146 146 self.get_cs_links, 'icon-arrow-up'),
147 147 'push_local': (
148 148 _('[committed via RhodeCode] into repository'),
149 149 self.get_cs_links, 'icon-pencil icon-pencil-colored'),
150 150 'push_remote': (
151 151 _('[pulled from remote] into repository'),
152 152 self.get_cs_links, 'icon-arrow-up'),
153 153 'pull': (
154 154 _('[pulled] from'),
155 155 None, 'icon-arrow-down'),
156 156 'started_following_repo': (
157 157 _('[started following] repository'),
158 158 None, 'icon-heart icon-heart-colored'),
159 159 'stopped_following_repo': (
160 160 _('[stopped following] repository'),
161 161 None, 'icon-heart-empty icon-heart-colored'),
162 162 }
163 163 return action_map
164 164
165 165 def get_fork_name(self):
166 166 from rhodecode.lib import helpers as h
167 167 _ = self.request.translate
168 168 repo_name = self.action_params
169 169 _url = h.route_path('repo_summary', repo_name=repo_name)
170 170 return _('fork name %s') % link_to(self.action_params, _url)
171 171
172 172 def get_user_name(self):
173 173 user_name = self.action_params
174 174 return user_name
175 175
176 176 def get_users_group(self):
177 177 group_name = self.action_params
178 178 return group_name
179 179
180 180 def get_pull_request(self):
181 181 from rhodecode.lib import helpers as h
182 182 _ = self.request.translate
183 183 pull_request_id = self.action_params
184 184 if self.is_deleted():
185 185 repo_name = self.user_log.repository_name
186 186 else:
187 187 repo_name = self.user_log.repository.repo_name
188 188 return link_to(
189 189 _('Pull request #%s') % pull_request_id,
190 190 h.route_path('pullrequest_show', repo_name=repo_name,
191 191 pull_request_id=pull_request_id))
192 192
193 193 def get_archive_name(self):
194 194 archive_name = self.action_params
195 195 return archive_name
196 196
197 197 def action_parser_icon(self):
198 198 tmpl = """<i class="%s" alt="%s"></i>"""
199 199 ico = self.action_map.get(self.action, ['', '', ''])[2]
200 200 return literal(tmpl % (ico, self.action))
201 201
202 202 def get_cs_links(self):
203 203 from rhodecode.lib import helpers as h
204 204 _ = self.request.translate
205 205 if self.is_deleted():
206 206 return self.action_params
207 207
208 208 repo_name = self.user_log.repository.repo_name
209 209 commit_ids = self.action_params.split(',')
210 210 commits = self.get_commits(commit_ids)
211 211
212 212 link_generator = (
213 213 self.lnk(commit, repo_name)
214 214 for commit in commits[:self.commits_limit])
215 215 commit_links = [" " + ', '.join(link_generator)]
216 216 _op1, _name1 = _get_op(commit_ids[0])
217 217 _op2, _name2 = _get_op(commit_ids[-1])
218 218
219 219 commit_id_range = '%s...%s' % (_name1, _name2)
220 220
221 221 compare_view = (
222 222 ' <div class="compare_view tooltip" title="%s">'
223 223 '<a href="%s">%s</a> </div>' % (
224 224 _('Show all combined commits %s->%s') % (
225 225 commit_ids[0][:12], commit_ids[-1][:12]
226 226 ),
227 227 h.route_path(
228 228 'repo_commit', repo_name=repo_name,
229 229 commit_id=commit_id_range), _('compare view')
230 230 )
231 231 )
232 232
233 233 if len(commit_ids) > self.commits_limit:
234 234 more_count = len(commit_ids) - self.commits_limit
235 235 commit_links.append(
236 236 _(' and %(num)s more commits') % {'num': more_count}
237 237 )
238 238
239 239 if len(commits) > 1:
240 240 commit_links.append(compare_view)
241 241 return ''.join(commit_links)
242 242
243 243 def get_commits(self, commit_ids):
244 244 commits = []
245 245 if not [v for v in commit_ids if v != '']:
246 246 return commits
247 247
248 248 repo = None
249 249 if self.parse_commits:
250 250 repo = self.user_log.repository.scm_instance()
251 251
252 252 for commit_id in commit_ids[:self.commits_top_limit]:
253 253 _op, _name = _get_op(commit_id)
254 254
255 255 # we want parsed commits, or new log store format is bad
256 256 if self.parse_commits:
257 257 try:
258 258 commit = repo.get_commit(commit_id=commit_id)
259 259 commits.append(commit)
260 260 except CommitDoesNotExistError:
261 log.error(
262 'cannot find commit id %s in this repository',
261 log.error('cannot find commit id %s in this repository',
263 262 commit_id)
264 263 commits.append(commit_id)
265 264 continue
266 265 else:
267 266 fake_commit = AttributeDict({
268 267 'short_id': commit_id[:12],
269 268 'raw_id': commit_id,
270 269 'message': '',
271 270 'op': _op,
272 271 'ref_name': _name
273 272 })
274 273 commits.append(fake_commit)
275 274
276 275 return commits
277 276
278 277 def lnk(self, commit_or_id, repo_name):
279 278 from rhodecode.lib.helpers import tooltip
280 279 from rhodecode.lib import helpers as h
281 280 _ = self.request.translate
282 281 title = ''
283 282 lazy_cs = True
284 283 if isinstance(commit_or_id, (BaseCommit, AttributeDict)):
285 284 lazy_cs = True
286 285 if (getattr(commit_or_id, 'op', None) and
287 286 getattr(commit_or_id, 'ref_name', None)):
288 287 lazy_cs = False
289 288 lbl = '?'
290 289 if commit_or_id.op == 'delete_branch':
291 290 lbl = '%s' % _('Deleted branch: %s') % commit_or_id.ref_name
292 291 title = ''
293 292 elif commit_or_id.op == 'tag':
294 293 lbl = '%s' % _('Created tag: %s') % commit_or_id.ref_name
295 294 title = ''
296 295 _url = '#'
297 296
298 297 else:
299 298 lbl = '%s' % (commit_or_id.short_id[:8])
300 299 _url = h.route_path('repo_commit', repo_name=repo_name,
301 300 commit_id=commit_or_id.raw_id)
302 301 title = tooltip(commit_or_id.message)
303 302 else:
304 303 # commit cannot be found/striped/removed etc.
305 304 lbl = ('%s' % commit_or_id)[:12]
306 305 _url = '#'
307 306 title = _('Commit not found')
308 307 if self.parse_commits:
309 308 return link_to(lbl, _url, title=title, class_='tooltip')
310 309 return link_to(lbl, _url, raw_id=commit_or_id.raw_id, repo_name=repo_name,
311 310 class_='lazy-cs' if lazy_cs else '')
312 311
313 312 def is_deleted(self):
314 313 return self.user_log.repository is None
315 314
316 315
317 316 class AuditLogParser(object):
318 317 def __init__(self, request, audit_log_entry):
319 318 self.audit_log_entry = audit_log_entry
320 319 self.request = request
321 320
322 321 def get_icon(self, action):
323 322 return 'icon-rhodecode'
324 323
325 324 def callbacks(self):
326 325 action_str = self.audit_log_entry.action
327 326
328 327 def callback():
329 328 # returned callbacks we need to call to get
330 329 action = action_str \
331 330 .replace('[', '<span class="journal_highlight">')\
332 331 .replace(']', '</span>')
333 332 return literal(action)
334 333
335 334 def icon():
336 335 tmpl = """<i class="%s" alt="%s"></i>"""
337 336 ico = self.get_icon(action_str)
338 337 return literal(tmpl % (ico, action_str))
339 338
340 339 action_params_func = _no_params_func
341 340
342 341 return [
343 342 callback, action_params_func, icon]
344 343
345 344
346 345 def _no_params_func():
347 346 return ""
348 347
349 348
350 349 def _get_op(commit_id):
351 350 _op = None
352 351 _name = commit_id
353 352 if len(commit_id.split('=>')) == 2:
354 353 _op, _name = commit_id.split('=>')
355 354 return _op, _name
@@ -1,115 +1,89 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18 19 import os
19 20 import time
20 21 import logging
21 import tempfile
22 22
23 23 from rhodecode.lib.config_utils import get_config
24 from rhodecode.lib.ext_json import json
24
25 from rhodecode.lib.svn_txn_utils import get_txn_id_from_store
25 26
26 27 log = logging.getLogger(__name__)
27 28
28 29
29 30 class BaseHooksCallbackDaemon:
30 31 """
31 32     Basic context manager for actions that don't require any extra setup.
32 33 """
33 34 def __init__(self):
34 35 pass
35 36
36 37 def __enter__(self):
37 38 log.debug('Running `%s` callback daemon', self.__class__.__name__)
38 39 return self
39 40
40 41 def __exit__(self, exc_type, exc_val, exc_tb):
41 42 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
42 43
43 44
44 45 class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
45 46
46 47 def __init__(self, module):
47 48 super().__init__()
48 49 self.hooks_module = module
49 50
50
51 def get_txn_id_data_path(txn_id):
52 import rhodecode
53
54 root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
55 final_dir = os.path.join(root, 'svn_txn_id')
56
57 if not os.path.isdir(final_dir):
58 os.makedirs(final_dir)
59 return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
60
61
62 def store_txn_id_data(txn_id, data_dict):
63 if not txn_id:
64 log.warning('Cannot store txn_id because it is empty')
65 return
66
67 path = get_txn_id_data_path(txn_id)
68 try:
69 with open(path, 'wb') as f:
70 f.write(json.dumps(data_dict))
71 except Exception:
72 log.exception('Failed to write txn_id metadata')
73
74
75 def get_txn_id_from_store(txn_id):
76 """
77 Reads txn_id from store and if present returns the data for callback manager
78 """
79 path = get_txn_id_data_path(txn_id)
80 try:
81 with open(path, 'rb') as f:
82 return json.loads(f.read())
83 except Exception:
84 return {}
51 def __repr__(self):
52 return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
85 53
86 54
87 55 def prepare_callback_daemon(extras, protocol, host, txn_id=None):
88 txn_details = get_txn_id_from_store(txn_id)
89 port = txn_details.get('port', 0)
56
90 57 match protocol:
91 58 case 'http':
92 59 from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
60 port = 0
61 if txn_id:
62 # read txn-id to re-use the PORT for callback daemon
63 repo_path = os.path.join(extras['repo_store'], extras['repository'])
64 txn_details = get_txn_id_from_store(repo_path, txn_id)
65 port = txn_details.get('port', 0)
66
93 67 callback_daemon = HttpHooksCallbackDaemon(
94 68 txn_id=txn_id, host=host, port=port)
95 69 case 'celery':
96 70 from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
97 71 callback_daemon = CeleryHooksCallbackDaemon(get_config(extras['config']))
98 72 case 'local':
99 73 from rhodecode.lib.hook_daemon.hook_module import Hooks
100 74 callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
101 75 case _:
102 76 log.error('Unsupported callback daemon protocol "%s"', protocol)
103 77 raise Exception('Unsupported callback daemon protocol.')
104 78
105 79 extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
106 80 extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
107 81 extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
108 82 extras['hooks_protocol'] = protocol
109 83 extras['time'] = time.time()
110 84
111 85 # register txn_id
112 86 extras['txn_id'] = txn_id
113 87 log.debug('Prepared a callback daemon: %s',
114 88 callback_daemon.__class__.__name__)
115 89 return callback_daemon, extras
@@ -1,30 +1,33 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon
20 20
21 21
22 22 class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
23 23 """
24 24 Context manger for achieving a compatibility with celery backend
25 25 """
26 26
27 27 def __init__(self, config):
28 28 # TODO: replace this with settings bootstrapped...
29 29 self.task_queue = config.get('app:main', 'celery.broker_url')
30 30 self.task_backend = config.get('app:main', 'celery.result_backend')
31
32 def __repr__(self):
33 return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
@@ -1,280 +1,287 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import logging
21 21 import traceback
22 22 import threading
23 23 import socket
24 24 import msgpack
25 25 import gevent
26 26
27 27 from http.server import BaseHTTPRequestHandler
28 28 from socketserver import TCPServer
29 29
30 30 from rhodecode.model import meta
31 31 from rhodecode.lib.ext_json import json
32 32 from rhodecode.lib import rc_cache
33 from rhodecode.lib.hook_daemon.base import get_txn_id_data_path
33 from rhodecode.lib.svn_txn_utils import get_txn_id_data_key
34 34 from rhodecode.lib.hook_daemon.hook_module import Hooks
35 35
36 36 log = logging.getLogger(__name__)
37 37
38 38
39 39 class HooksHttpHandler(BaseHTTPRequestHandler):
40 40
41 41 JSON_HOOKS_PROTO = 'json.v1'
42 42 MSGPACK_HOOKS_PROTO = 'msgpack.v1'
43 43 # starting with RhodeCode 5.0.0 MsgPack is the default, prior it used json
44 44 DEFAULT_HOOKS_PROTO = MSGPACK_HOOKS_PROTO
45 45
46 46 @classmethod
47 47 def serialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
48 48 if proto == cls.MSGPACK_HOOKS_PROTO:
49 49 return msgpack.packb(data)
50 50 return json.dumps(data)
51 51
52 52 @classmethod
53 53 def deserialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
54 54 if proto == cls.MSGPACK_HOOKS_PROTO:
55 55 return msgpack.unpackb(data)
56 56 return json.loads(data)
57 57
58 58 def do_POST(self):
59 59 hooks_proto, method, extras = self._read_request()
60 60 log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
61 61
62 62 txn_id = getattr(self.server, 'txn_id', None)
63 63 if txn_id:
64 64 log.debug('Computing TXN_ID based on `%s`:`%s`',
65 65 extras['repository'], extras['txn_id'])
66 66 computed_txn_id = rc_cache.utils.compute_key_from_params(
67 67 extras['repository'], extras['txn_id'])
68 68 if txn_id != computed_txn_id:
69 69 raise Exception(
70 70 'TXN ID fail: expected {} got {} instead'.format(
71 71 txn_id, computed_txn_id))
72 72
73 73 request = getattr(self.server, 'request', None)
74 74 try:
75 75 hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
76 76 result = self._call_hook_method(hooks, method, extras)
77 77
78 78 except Exception as e:
79 79 exc_tb = traceback.format_exc()
80 80 result = {
81 81 'exception': e.__class__.__name__,
82 82 'exception_traceback': exc_tb,
83 83 'exception_args': e.args
84 84 }
85 85 self._write_response(hooks_proto, result)
86 86
87 87 def _read_request(self):
88 88 length = int(self.headers['Content-Length'])
 89 89 # respect sent headers, fallback to OLD proto for compatibility
90 90 hooks_proto = self.headers.get('rc-hooks-protocol') or self.JSON_HOOKS_PROTO
91 91 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
92 92 # support for new vcsserver msgpack based protocol hooks
93 93 body = self.rfile.read(length)
94 94 data = self.deserialize_data(body)
95 95 else:
96 96 body = self.rfile.read(length)
97 97 data = self.deserialize_data(body)
98 98
99 99 return hooks_proto, data['method'], data['extras']
100 100
101 101 def _write_response(self, hooks_proto, result):
102 102 self.send_response(200)
103 103 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
104 104 self.send_header("Content-type", "application/msgpack")
105 105 self.end_headers()
106 106 data = self.serialize_data(result)
107 107 self.wfile.write(data)
108 108 else:
109 109 self.send_header("Content-type", "text/json")
110 110 self.end_headers()
111 111 data = self.serialize_data(result)
112 112 self.wfile.write(data)
113 113
114 114 def _call_hook_method(self, hooks, method, extras):
115 115 try:
116 116 result = getattr(hooks, method)(extras)
117 117 finally:
118 118 meta.Session.remove()
119 119 return result
120 120
121 121 def log_message(self, format, *args):
122 122 """
123 123 This is an overridden method of BaseHTTPRequestHandler which logs using
124 124 a logging library instead of writing directly to stderr.
125 125 """
126 126
127 127 message = format % args
128 128
129 129 log.debug(
130 130 "HOOKS: client=%s - - [%s] %s", self.client_address,
131 131 self.log_date_time_string(), message)
132 132
133 133
134 134 class ThreadedHookCallbackDaemon(object):
135 135
136 136 _callback_thread = None
137 137 _daemon = None
138 138 _done = False
139 139 use_gevent = False
140 140
141 141 def __init__(self, txn_id=None, host=None, port=None):
142 142 self._prepare(txn_id=txn_id, host=host, port=port)
143 143 if self.use_gevent:
144 144 self._run_func = self._run_gevent
145 145 self._stop_func = self._stop_gevent
146 146 else:
147 147 self._run_func = self._run
148 148 self._stop_func = self._stop
149 149
150 150 def __enter__(self):
151 151 log.debug('Running `%s` callback daemon', self.__class__.__name__)
152 152 self._run_func()
153 153 return self
154 154
155 155 def __exit__(self, exc_type, exc_val, exc_tb):
156 156 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
157 157 self._stop_func()
158 158
159 159 def _prepare(self, txn_id=None, host=None, port=None):
160 160 raise NotImplementedError()
161 161
162 162 def _run(self):
163 163 raise NotImplementedError()
164 164
165 165 def _stop(self):
166 166 raise NotImplementedError()
167 167
168 168 def _run_gevent(self):
169 169 raise NotImplementedError()
170 170
171 171 def _stop_gevent(self):
172 172 raise NotImplementedError()
173 173
174 174
175 175 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
176 176 """
177 177 Context manager which will run a callback daemon in a background thread.
178 178 """
179 179
180 180 hooks_uri = None
181 181
182 182 # From Python docs: Polling reduces our responsiveness to a shutdown
183 183 # request and wastes cpu at all other times.
184 184 POLL_INTERVAL = 0.01
185 185
186 186 use_gevent = False
187 187
188 def __repr__(self):
189 return f'HttpHooksCallbackDaemon(hooks_uri={self.hooks_uri})'
190
188 191 @property
189 192 def _hook_prefix(self):
190 return 'HOOKS: {} '.format(self.hooks_uri)
193 return f'HOOKS: {self.hooks_uri} '
191 194
192 195 def get_hostname(self):
193 196 return socket.gethostname() or '127.0.0.1'
194 197
195 198 def get_available_port(self, min_port=20000, max_port=65535):
196 199 from rhodecode.lib.utils2 import get_available_port as _get_port
197 200 return _get_port(min_port, max_port)
198 201
199 202 def _prepare(self, txn_id=None, host=None, port=None):
200 203 from pyramid.threadlocal import get_current_request
201 204
202 205 if not host or host == "*":
203 206 host = self.get_hostname()
204 207 if not port:
205 208 port = self.get_available_port()
206 209
207 210 server_address = (host, port)
208 self.hooks_uri = '{}:{}'.format(host, port)
211 self.hooks_uri = f'{host}:{port}'
209 212 self.txn_id = txn_id
210 213 self._done = False
211 214
212 215 log.debug(
213 216 "%s Preparing HTTP callback daemon registering hook object: %s",
214 217 self._hook_prefix, HooksHttpHandler)
215 218
216 219 self._daemon = TCPServer(server_address, HooksHttpHandler)
217 220 # inject transaction_id for later verification
218 221 self._daemon.txn_id = self.txn_id
219 222
220 223 # pass the WEB app request into daemon
221 224 self._daemon.request = get_current_request()
222 225
223 226 def _run(self):
224 227 log.debug("Running thread-based loop of callback daemon in background")
225 228 callback_thread = threading.Thread(
226 229 target=self._daemon.serve_forever,
227 230 kwargs={'poll_interval': self.POLL_INTERVAL})
228 231 callback_thread.daemon = True
229 232 callback_thread.start()
230 233 self._callback_thread = callback_thread
231 234
232 235 def _run_gevent(self):
233 236 log.debug("Running gevent-based loop of callback daemon in background")
234 237 # create a new greenlet for the daemon's serve_forever method
235 238 callback_greenlet = gevent.spawn(
236 239 self._daemon.serve_forever,
237 240 poll_interval=self.POLL_INTERVAL)
238 241
239 242 # store reference to greenlet
240 243 self._callback_greenlet = callback_greenlet
241 244
242 245 # switch to this greenlet
243 246 gevent.sleep(0.01)
244 247
245 248 def _stop(self):
246 249 log.debug("Waiting for background thread to finish.")
247 250 self._daemon.shutdown()
248 251 self._callback_thread.join()
249 252 self._daemon = None
250 253 self._callback_thread = None
251 254 if self.txn_id:
252 txn_id_file = get_txn_id_data_path(self.txn_id)
255 #TODO: figure out the repo_path...
256 repo_path = ''
257 txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
253 258 log.debug('Cleaning up TXN ID %s', txn_id_file)
254 259 if os.path.isfile(txn_id_file):
255 260 os.remove(txn_id_file)
256 261
257 262 log.debug("Background thread done.")
258 263
259 264 def _stop_gevent(self):
260 265 log.debug("Waiting for background greenlet to finish.")
261 266
262 267 # if greenlet exists and is running
263 268 if self._callback_greenlet and not self._callback_greenlet.dead:
264 269 # shutdown daemon if it exists
265 270 if self._daemon:
266 271 self._daemon.shutdown()
267 272
268 273 # kill the greenlet
269 274 self._callback_greenlet.kill()
270 275
271 276 self._daemon = None
272 277 self._callback_greenlet = None
273 278
274 279 if self.txn_id:
275 txn_id_file = get_txn_id_data_path(self.txn_id)
280 #TODO: figure out the repo_path...
281 repo_path = ''
282 txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
276 283 log.debug('Cleaning up TXN ID %s', txn_id_file)
277 284 if os.path.isfile(txn_id_file):
278 285 os.remove(txn_id_file)
279 286
280 287 log.debug("Background greenlet done.")
@@ -1,242 +1,258 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 import base64
20 import re
21 import os
21 22 import logging
22 23 import urllib.request
23 24 import urllib.parse
24 25 import urllib.error
25 26 import urllib.parse
26 27
27 28 import requests
28 29 from pyramid.httpexceptions import HTTPNotAcceptable
29 30
30 31 from rhodecode import ConfigGet
31 from rhodecode.lib import rc_cache
32 32 from rhodecode.lib.middleware import simplevcs
33 33 from rhodecode.lib.middleware.utils import get_path_info
34 34 from rhodecode.lib.utils import is_valid_repo
35 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
36 from rhodecode.lib.type_utils import str2bool
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.hook_daemon.base import store_txn_id_data
35 from rhodecode.lib.str_utils import safe_str
39 36
40 37 log = logging.getLogger(__name__)
41 38
42 39
43 40 class SimpleSvnApp(object):
44 41 IGNORED_HEADERS = [
45 42 'connection', 'keep-alive', 'content-encoding',
46 43 'transfer-encoding', 'content-length']
47 44 rc_extras = {}
48 45
49 46 def __init__(self, config):
50 47 self.config = config
51 48 self.session = requests.Session()
52 49
53 50 def __call__(self, environ, start_response):
54 51 request_headers = self._get_request_headers(environ)
55 52 data_io = environ['wsgi.input']
56 53 req_method: str = environ['REQUEST_METHOD']
57 54 has_content_length: bool = 'CONTENT_LENGTH' in environ
58 55
59 56 path_info = self._get_url(
60 57 self.config.get('subversion_http_server_url', ''), get_path_info(environ))
61 58 transfer_encoding = environ.get('HTTP_TRANSFER_ENCODING', '')
62 59 log.debug('Handling: %s method via `%s` has_content_length:%s', req_method, path_info, has_content_length)
63 60
64 61 # stream control flag, based on request and content type...
65 62 stream = False
66
67 63 if req_method in ['MKCOL'] or has_content_length:
68 data_processed = False
69 # read chunk to check if we have txn-with-props
70 initial_data: bytes = data_io.read(1024)
71 if initial_data.startswith(b'(create-txn-with-props'):
72 data_io = initial_data + data_io.read()
73 # store on-the-fly our rc_extra using svn revision properties
74 # those can be read later on in hooks executed so we have a way
75 # to pass in the data into svn hooks
76 rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras))
77 rc_data_len = str(len(rc_data))
78 # header defines data length, and serialized data
79 skel = b' rc-scm-extras %b %b' % (safe_bytes(rc_data_len), safe_bytes(rc_data))
80 data_io = data_io[:-2] + skel + b'))'
81 data_processed = True
82
83 if not data_processed:
84 # NOTE(johbo): Avoid that we end up with sending the request in chunked
85 # transfer encoding (mainly on Gunicorn). If we know the content
86 # length, then we should transfer the payload in one request.
87 data_io = initial_data + data_io.read()
64 # NOTE(johbo): Avoid that we end up with sending the request in chunked
65 # transfer encoding (mainly on Gunicorn). If we know the content
66 # length, then we should transfer the payload in one request.
67 data_io = data_io.read()
88 68
89 69 if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked':
90 70 # NOTE(marcink): when getting/uploading files, we want to STREAM content
91 71 # back to the client/proxy instead of buffering it here...
92 72 stream = True
93 73
94 74 stream = stream
95 75 log.debug('Calling SVN PROXY at `%s`, using method:%s. Stream: %s',
96 76 path_info, req_method, stream)
97 77
98 78 call_kwargs = dict(
99 79 data=data_io,
100 80 headers=request_headers,
101 81 stream=stream
102 82 )
103 83 if req_method in ['HEAD', 'DELETE']:
84 # NOTE(marcink): HEAD might be deprecated for SVN 1.14+ protocol
104 85 del call_kwargs['data']
105 86
106 87 try:
107 88 response = self.session.request(
108 89 req_method, path_info, **call_kwargs)
109 90 except requests.ConnectionError:
110 91 log.exception('ConnectionError occurred for endpoint %s', path_info)
111 92 raise
112 93
113 94 if response.status_code not in [200, 401]:
114 95 text = '\n{}'.format(safe_str(response.text)) if response.text else ''
115 96 if response.status_code >= 500:
116 97 log.error('Got SVN response:%s with text:`%s`', response, text)
117 98 else:
118 99 log.debug('Got SVN response:%s with text:`%s`', response, text)
119 100 else:
120 101 log.debug('got response code: %s', response.status_code)
121 102
122 103 response_headers = self._get_response_headers(response.headers)
123
124 if response.headers.get('SVN-Txn-name'):
125 svn_tx_id = response.headers.get('SVN-Txn-name')
126 txn_id = rc_cache.utils.compute_key_from_params(
127 self.config['repository'], svn_tx_id)
128 port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1])
129 store_txn_id_data(txn_id, {'port': port})
130
131 104 start_response(f'{response.status_code} {response.reason}', response_headers)
132 105 return response.iter_content(chunk_size=1024)
133 106
134 107 def _get_url(self, svn_http_server, path):
135 108 svn_http_server_url = (svn_http_server or '').rstrip('/')
136 109 url_path = urllib.parse.urljoin(svn_http_server_url + '/', (path or '').lstrip('/'))
137 110 url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'")
138 111 return url_path
139 112
113 def _get_txn_id(self, environ):
114 url = environ['RAW_URI']
115
116 # Define the regex pattern
117 pattern = r'/txr/([^/]+)/'
118
119 # Search for the pattern in the URL
120 match = re.search(pattern, url)
121
122 # Check if a match is found and extract the captured group
123 if match:
124 txn_id = match.group(1)
125 return txn_id
126
140 127 def _get_request_headers(self, environ):
141 128 headers = {}
142 129 whitelist = {
143 130 'Authorization': {}
144 131 }
145 132 for key in environ:
146 133 if key in whitelist:
147 134 headers[key] = environ[key]
148 135 elif not key.startswith('HTTP_'):
149 136 continue
150 137 else:
151 138 new_key = key.split('_')
152 139 new_key = [k.capitalize() for k in new_key[1:]]
153 140 new_key = '-'.join(new_key)
154 141 headers[new_key] = environ[key]
155 142
156 143 if 'CONTENT_TYPE' in environ:
157 144 headers['Content-Type'] = environ['CONTENT_TYPE']
158 145
159 146 if 'CONTENT_LENGTH' in environ:
160 147 headers['Content-Length'] = environ['CONTENT_LENGTH']
161 148
162 149 return headers
163 150
164 151 def _get_response_headers(self, headers):
165 152 headers = [
166 153 (h, headers[h])
167 154 for h in headers
168 155 if h.lower() not in self.IGNORED_HEADERS
169 156 ]
170 157
171 158 return headers
172 159
173 160
174 161 class DisabledSimpleSvnApp(object):
175 162 def __init__(self, config):
176 163 self.config = config
177 164
178 165 def __call__(self, environ, start_response):
179 166 reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled'
180 167 log.warning(reason)
181 168 return HTTPNotAcceptable(reason)(environ, start_response)
182 169
183 170
184 171 class SimpleSvn(simplevcs.SimpleVCS):
172 """
173 details: https://svn.apache.org/repos/asf/subversion/trunk/notes/http-and-webdav/webdav-protocol
174
175 Read Commands : (OPTIONS, PROPFIND, GET, REPORT)
176
177 GET: fetch info about resources
178 PROPFIND: Used to retrieve properties of resources.
179 REPORT: Used for specialized queries to the repository. E.g History etc...
180 OPTIONS: request is sent to an SVN server, the server responds with information about the available HTTP
181 methods and other server capabilities.
182
183 Write Commands : (MKACTIVITY, PROPPATCH, PUT, CHECKOUT, MKCOL, MOVE,
184 -------------- COPY, DELETE, LOCK, UNLOCK, MERGE)
185
186 With the exception of LOCK/UNLOCK, every write command performs some
187 sort of DeltaV commit operation. In DeltaV, a commit always starts
188 by creating a transaction (MKACTIVITY), applies a log message
189 (PROPPATCH), does some other write methods, and then ends by
190 committing the transaction (MERGE). If the MERGE fails, the client
191 may try to remove the transaction with a DELETE.
192
193 PROPPATCH: Used to set and/or remove properties on resources.
194 MKCOL: Creates a new collection (directory).
195 DELETE: Removes a resource.
196 COPY and MOVE: Used for copying and moving resources.
197 MERGE: Used to merge changes from different branches.
198 CHECKOUT, CHECKIN, UNCHECKOUT: DeltaV methods for managing working resources and versions.
199 """
185 200
186 201 SCM = 'svn'
187 202 READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')
188 DEFAULT_HTTP_SERVER = 'http://localhost:8090'
203 WRITE_COMMANDS = ('MERGE', 'POST', 'PUT', 'COPY', 'MOVE', 'DELETE', 'MKCOL')
204 DEFAULT_HTTP_SERVER = 'http://svn:8090'
189 205
190 206 def _get_repository_name(self, environ):
191 207 """
192 208 Gets repository name out of PATH_INFO header
193 209
194 210 :param environ: environ where PATH_INFO is stored
195 211 """
196 212 path = get_path_info(environ).split('!')
197 213 repo_name = path[0].strip('/')
198 214
 199 215 # SVN includes the whole path in its requests, including
200 216 # subdirectories inside the repo. Therefore we have to search for
201 217 # the repo root directory.
202 218 if not is_valid_repo(
203 219 repo_name, self.base_path, explicit_scm=self.SCM):
204 220 current_path = ''
205 221 for component in repo_name.split('/'):
206 222 current_path += component
207 223 if is_valid_repo(
208 224 current_path, self.base_path, explicit_scm=self.SCM):
209 225 return current_path
210 226 current_path += '/'
211 227
212 228 return repo_name
213 229
214 230 def _get_action(self, environ):
215 231 return (
216 232 'pull'
217 233 if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS
218 234 else 'push')
219 235
220 236 def _should_use_callback_daemon(self, extras, environ, action):
221 # only MERGE command triggers hooks, so we don't want to start
237 # only PUT & MERGE command triggers hooks, so we don't want to start
222 238 # hooks server too many times. POST however starts the svn transaction
223 239 # so we also need to run the init of callback daemon of POST
224 if environ['REQUEST_METHOD'] in ['MERGE', 'POST']:
240 if environ['REQUEST_METHOD'] not in self.READ_ONLY_COMMANDS:
225 241 return True
226 242 return False
227 243
228 244 def _create_wsgi_app(self, repo_path, repo_name, config):
229 245 if self._is_svn_enabled():
230 246 return SimpleSvnApp(config)
231 247 # we don't have http proxy enabled return dummy request handler
232 248 return DisabledSimpleSvnApp(config)
233 249
234 250 def _is_svn_enabled(self):
235 251 return ConfigGet().get_bool('vcs.svn.proxy.enabled')
236 252
237 253 def _create_config(self, extras, repo_name, scheme='http'):
238 254 server_url = ConfigGet().get_str('vcs.svn.proxy.host')
239 255 server_url = server_url or self.DEFAULT_HTTP_SERVER
240 256
241 257 extras['subversion_http_server_url'] = server_url
242 258 return extras
@@ -1,694 +1,683 b''
1 1
2 2
3 3 # Copyright (C) 2014-2023 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 23 It's implemented with basic auth function
24 24 """
25 25
26 26 import os
27 27 import re
28 import io
29 28 import logging
30 29 import importlib
31 30 from functools import wraps
32 from lxml import etree
33 31
34 32 import time
35 33 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36 34
37 35 from pyramid.httpexceptions import (
38 36 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 37 from zope.cachedescriptors.property import Lazy as LazyProperty
40 38
41 39 import rhodecode
42 40 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
43 41 from rhodecode.lib import rc_cache
42 from rhodecode.lib.svn_txn_utils import store_txn_id_data
44 43 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 44 from rhodecode.lib.base import (
46 45 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 46 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 47 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
49 48 from rhodecode.lib.middleware import appenlight
50 49 from rhodecode.lib.middleware.utils import scm_app_http
51 from rhodecode.lib.str_utils import safe_bytes
50 from rhodecode.lib.str_utils import safe_bytes, safe_int
52 51 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 52 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
54 53 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 54 from rhodecode.lib.vcs.backends import base
56 55
57 56 from rhodecode.model import meta
58 57 from rhodecode.model.db import User, Repository, PullRequest
59 58 from rhodecode.model.scm import ScmModel
60 59 from rhodecode.model.pull_request import PullRequestModel
61 60 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62 61
63 62 log = logging.getLogger(__name__)
64 63
65 64
66 def extract_svn_txn_id(acl_repo_name, data: bytes):
67 """
68 Helper method for extraction of svn txn_id from submitted XML data during
69 POST operations
70 """
71
72 try:
73 root = etree.fromstring(data)
74 pat = re.compile(r'/txn/(?P<txn_id>.*)')
75 for el in root:
76 if el.tag == '{DAV:}source':
77 for sub_el in el:
78 if sub_el.tag == '{DAV:}href':
79 match = pat.search(sub_el.text)
80 if match:
81 svn_tx_id = match.groupdict()['txn_id']
82 txn_id = rc_cache.utils.compute_key_from_params(
83 acl_repo_name, svn_tx_id)
84 return txn_id
85 except Exception:
86 log.exception('Failed to extract txn_id')
87
88
89 65 def initialize_generator(factory):
90 66 """
91 67 Initializes the returned generator by draining its first element.
92 68
93 69 This can be used to give a generator an initializer, which is the code
94 70 up to the first yield statement. This decorator enforces that the first
95 71 produced element has the value ``"__init__"`` to make its special
96 72 purpose very explicit in the using code.
97 73 """
98 74
99 75 @wraps(factory)
100 76 def wrapper(*args, **kwargs):
101 77 gen = factory(*args, **kwargs)
102 78 try:
103 79 init = next(gen)
104 80 except StopIteration:
105 81 raise ValueError('Generator must yield at least one element.')
106 82 if init != "__init__":
107 83 raise ValueError('First yielded element must be "__init__".')
108 84 return gen
109 85 return wrapper
110 86
111 87
112 88 class SimpleVCS(object):
113 89 """Common functionality for SCM HTTP handlers."""
114 90
115 91 SCM = 'unknown'
116 92
117 93 acl_repo_name = None
118 94 url_repo_name = None
119 95 vcs_repo_name = None
120 96 rc_extras = {}
121 97
122 98 # We have to handle requests to shadow repositories different than requests
123 99 # to normal repositories. Therefore we have to distinguish them. To do this
124 100 # we use this regex which will match only on URLs pointing to shadow
125 101 # repositories.
126 102 shadow_repo_re = re.compile(
127 103 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
128 104 '(?P<target>{slug_pat})/' # target repo
129 105 'pull-request/(?P<pr_id>\\d+)/' # pull request
130 106 'repository$' # shadow repo
131 107 .format(slug_pat=SLUG_RE.pattern))
132 108
133 109 def __init__(self, config, registry):
134 110 self.registry = registry
135 111 self.config = config
136 112 # re-populated by specialized middleware
137 113 self.repo_vcs_config = base.Config()
138 114
139 115 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
140 116 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
141 117
142 118 # authenticate this VCS request using authfunc
143 119 auth_ret_code_detection = \
144 120 str2bool(self.config.get('auth_ret_code_detection', False))
145 121 self.authenticate = BasicAuth(
146 122 '', authenticate, registry, config.get('auth_ret_code'),
147 123 auth_ret_code_detection, rc_realm=realm)
148 124 self.ip_addr = '0.0.0.0'
149 125
150 126 @LazyProperty
151 127 def global_vcs_config(self):
152 128 try:
153 129 return VcsSettingsModel().get_ui_settings_as_config_obj()
154 130 except Exception:
155 131 return base.Config()
156 132
157 133 @property
158 134 def base_path(self):
159 135 settings_path = self.config.get('repo_store.path')
160 136
161 137 if not settings_path:
162 138 raise ValueError('FATAL: repo_store.path is empty')
163 139 return settings_path
164 140
165 141 def set_repo_names(self, environ):
166 142 """
167 143 This will populate the attributes acl_repo_name, url_repo_name,
168 144 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
169 145 shadow) repositories all names are equal. In case of requests to a
170 146 shadow repository the acl-name points to the target repo of the pull
171 147 request and the vcs-name points to the shadow repo file system path.
172 148 The url-name is always the URL used by the vcs client program.
173 149
174 150 Example in case of a shadow repo:
175 151 acl_repo_name = RepoGroup/MyRepo
176 152 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
177 153 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
178 154 """
179 155 # First we set the repo name from URL for all attributes. This is the
180 156 # default if handling normal (non shadow) repo requests.
181 157 self.url_repo_name = self._get_repository_name(environ)
182 158 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
183 159 self.is_shadow_repo = False
184 160
185 161 # Check if this is a request to a shadow repository.
186 162 match = self.shadow_repo_re.match(self.url_repo_name)
187 163 if match:
188 164 match_dict = match.groupdict()
189 165
190 166 # Build acl repo name from regex match.
191 167 acl_repo_name = safe_str('{groups}{target}'.format(
192 168 groups=match_dict['groups'] or '',
193 169 target=match_dict['target']))
194 170
195 171 # Retrieve pull request instance by ID from regex match.
196 172 pull_request = PullRequest.get(match_dict['pr_id'])
197 173
198 174 # Only proceed if we got a pull request and if acl repo name from
199 175 # URL equals the target repo name of the pull request.
200 176 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
201 177
202 178 # Get file system path to shadow repository.
203 179 workspace_id = PullRequestModel()._workspace_id(pull_request)
204 180 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
205 181
206 182 # Store names for later usage.
207 183 self.vcs_repo_name = vcs_repo_name
208 184 self.acl_repo_name = acl_repo_name
209 185 self.is_shadow_repo = True
210 186
211 187 log.debug('Setting all VCS repository names: %s', {
212 188 'acl_repo_name': self.acl_repo_name,
213 189 'url_repo_name': self.url_repo_name,
214 190 'vcs_repo_name': self.vcs_repo_name,
215 191 })
216 192
217 193 @property
218 194 def scm_app(self):
219 195 custom_implementation = self.config['vcs.scm_app_implementation']
220 196 if custom_implementation == 'http':
221 197 log.debug('Using HTTP implementation of scm app.')
222 198 scm_app_impl = scm_app_http
223 199 else:
224 200 log.debug('Using custom implementation of scm_app: "{}"'.format(
225 201 custom_implementation))
226 202 scm_app_impl = importlib.import_module(custom_implementation)
227 203 return scm_app_impl
228 204
229 205 def _get_by_id(self, repo_name):
230 206 """
231 207 Gets a special pattern _<ID> from clone url and tries to replace it
232 208 with a repository_name for support of _<ID> non changeable urls
233 209 """
234 210
235 211 data = repo_name.split('/')
236 212 if len(data) >= 2:
237 213 from rhodecode.model.repo import RepoModel
238 214 by_id_match = RepoModel().get_repo_by_id(repo_name)
239 215 if by_id_match:
240 216 data[1] = by_id_match.repo_name
241 217
242 218 # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO
243 219 # and we use this data
244 220 maybe_new_path = '/'.join(data)
245 221 return safe_bytes(maybe_new_path).decode('latin1')
246 222
247 223 def _invalidate_cache(self, repo_name):
248 224 """
249 225 Set's cache for this repository for invalidation on next access
250 226
251 227 :param repo_name: full repo name, also a cache key
252 228 """
253 229 ScmModel().mark_for_invalidation(repo_name)
254 230
255 231 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
256 232 db_repo = Repository.get_by_repo_name(repo_name)
257 233 if not db_repo:
258 234 log.debug('Repository `%s` not found inside the database.',
259 235 repo_name)
260 236 return False
261 237
262 238 if db_repo.repo_type != scm_type:
263 239 log.warning(
264 240 'Repository `%s` have incorrect scm_type, expected %s got %s',
265 241 repo_name, db_repo.repo_type, scm_type)
266 242 return False
267 243
268 244 config = db_repo._config
269 245 config.set('extensions', 'largefiles', '')
270 246 return is_valid_repo(
271 247 repo_name, base_path,
272 248 explicit_scm=scm_type, expect_scm=scm_type, config=config)
273 249
274 250 def valid_and_active_user(self, user):
275 251 """
276 252 Checks if that user is not empty, and if it's actually object it checks
277 253 if he's active.
278 254
279 255 :param user: user object or None
280 256 :return: boolean
281 257 """
282 258 if user is None:
283 259 return False
284 260
285 261 elif user.active:
286 262 return True
287 263
288 264 return False
289 265
290 266 @property
291 267 def is_shadow_repo_dir(self):
292 268 return os.path.isdir(self.vcs_repo_name)
293 269
294 270 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
295 271 plugin_id='', plugin_cache_active=False, cache_ttl=0):
296 272 """
297 273 Checks permissions using action (push/pull) user and repository
298 274 name. If plugin_cache and ttl is set it will use the plugin which
299 275 authenticated the user to store the cached permissions result for N
300 276 amount of seconds as in cache_ttl
301 277
302 278 :param action: push or pull action
303 279 :param user: user instance
304 280 :param repo_name: repository name
305 281 """
306 282
307 283 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
308 284 plugin_id, plugin_cache_active, cache_ttl)
309 285
310 286 user_id = user.user_id
311 287 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
312 288 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
313 289
314 290 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
315 291 expiration_time=cache_ttl,
316 292 condition=plugin_cache_active)
317 293 def compute_perm_vcs(
318 294 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
319 295
320 296 log.debug('auth: calculating permission access now...')
321 297 # check IP
322 298 inherit = user.inherit_default_permissions
323 299 ip_allowed = AuthUser.check_ip_allowed(
324 300 user_id, ip_addr, inherit_from_default=inherit)
325 301 if ip_allowed:
326 302 log.info('Access for IP:%s allowed', ip_addr)
327 303 else:
328 304 return False
329 305
330 306 if action == 'push':
331 307 perms = ('repository.write', 'repository.admin')
332 308 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
333 309 return False
334 310
335 311 else:
336 312 # any other action need at least read permission
337 313 perms = (
338 314 'repository.read', 'repository.write', 'repository.admin')
339 315 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
340 316 return False
341 317
342 318 return True
343 319
344 320 start = time.time()
345 321 log.debug('Running plugin `%s` permissions check', plugin_id)
346 322
347 323 # for environ based auth, password can be empty, but then the validation is
348 324 # on the server that fills in the env data needed for authentication
349 325 perm_result = compute_perm_vcs(
350 326 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
351 327
352 328 auth_time = time.time() - start
353 329 log.debug('Permissions for plugin `%s` completed in %.4fs, '
354 330 'expiration time of fetched cache %.1fs.',
355 331 plugin_id, auth_time, cache_ttl)
356 332
357 333 return perm_result
358 334
359 335 def _get_http_scheme(self, environ):
360 336 try:
361 337 return environ['wsgi.url_scheme']
362 338 except Exception:
363 339 log.exception('Failed to read http scheme')
364 340 return 'http'
365 341
366 342 def _check_ssl(self, environ, start_response):
367 343 """
368 344 Checks the SSL check flag and returns False if SSL is not present
369 345 and required True otherwise
370 346 """
371 347 org_proto = environ['wsgi._org_proto']
372 348 # check if we have SSL required ! if not it's a bad request !
373 349 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
374 350 if require_ssl and org_proto == 'http':
375 351 log.debug(
376 352 'Bad request: detected protocol is `%s` and '
377 353 'SSL/HTTPS is required.', org_proto)
378 354 return False
379 355 return True
380 356
381 357 def _get_default_cache_ttl(self):
382 358 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
383 359 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
384 360 plugin_settings = plugin.get_settings()
385 361 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
386 362 plugin_settings) or (False, 0)
387 363 return plugin_cache_active, cache_ttl
388 364
389 365 def __call__(self, environ, start_response):
390 366 try:
391 367 return self._handle_request(environ, start_response)
392 368 except Exception:
393 369 log.exception("Exception while handling request")
394 370 appenlight.track_exception(environ)
395 371 return HTTPInternalServerError()(environ, start_response)
396 372 finally:
397 373 meta.Session.remove()
398 374
399 375 def _handle_request(self, environ, start_response):
400 376 if not self._check_ssl(environ, start_response):
401 377 reason = ('SSL required, while RhodeCode was unable '
402 378 'to detect this as SSL request')
403 379 log.debug('User not allowed to proceed, %s', reason)
404 380 return HTTPNotAcceptable(reason)(environ, start_response)
405 381
406 382 if not self.url_repo_name:
407 383 log.warning('Repository name is empty: %s', self.url_repo_name)
408 384 # failed to get repo name, we fail now
409 385 return HTTPNotFound()(environ, start_response)
410 386 log.debug('Extracted repo name is %s', self.url_repo_name)
411 387
412 388 ip_addr = get_ip_addr(environ)
413 389 user_agent = get_user_agent(environ)
414 390 username = None
415 391
416 392 # skip passing error to error controller
417 393 environ['pylons.status_code_redirect'] = True
418 394
419 395 # ======================================================================
420 396 # GET ACTION PULL or PUSH
421 397 # ======================================================================
422 398 action = self._get_action(environ)
423 399
424 400 # ======================================================================
425 401 # Check if this is a request to a shadow repository of a pull request.
426 402 # In this case only pull action is allowed.
427 403 # ======================================================================
428 404 if self.is_shadow_repo and action != 'pull':
429 405 reason = 'Only pull action is allowed for shadow repositories.'
430 406 log.debug('User not allowed to proceed, %s', reason)
431 407 return HTTPNotAcceptable(reason)(environ, start_response)
432 408
433 409 # Check if the shadow repo actually exists, in case someone refers
434 410 # to it, and it has been deleted because of successful merge.
435 411 if self.is_shadow_repo and not self.is_shadow_repo_dir:
436 412 log.debug(
437 413 'Shadow repo detected, and shadow repo dir `%s` is missing',
438 414 self.is_shadow_repo_dir)
439 415 return HTTPNotFound()(environ, start_response)
440 416
441 417 # ======================================================================
442 418 # CHECK ANONYMOUS PERMISSION
443 419 # ======================================================================
444 420 detect_force_push = False
445 421 check_branch_perms = False
446 422 if action in ['pull', 'push']:
447 423 user_obj = anonymous_user = User.get_default_user()
448 424 auth_user = user_obj.AuthUser()
449 425 username = anonymous_user.username
450 426 if anonymous_user.active:
451 427 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
452 428 # ONLY check permissions if the user is activated
453 429 anonymous_perm = self._check_permission(
454 430 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
455 431 plugin_id='anonymous_access',
456 432 plugin_cache_active=plugin_cache_active,
457 433 cache_ttl=cache_ttl,
458 434 )
459 435 else:
460 436 anonymous_perm = False
461 437
462 438 if not anonymous_user.active or not anonymous_perm:
463 439 if not anonymous_user.active:
464 440 log.debug('Anonymous access is disabled, running '
465 441 'authentication')
466 442
467 443 if not anonymous_perm:
468 444 log.debug('Not enough credentials to access repo: `%s` '
469 445 'repository as anonymous user', self.acl_repo_name)
470 446
471
472 447 username = None
473 448 # ==============================================================
474 449 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
475 450 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
476 451 # ==============================================================
477 452
478 453 # try to auth based on environ, container auth methods
479 454 log.debug('Running PRE-AUTH for container|headers based authentication')
480 455
481 456 # headers auth, by just reading special headers and bypass the auth with user/passwd
482 457 pre_auth = authenticate(
483 458 '', '', environ, VCS_TYPE, registry=self.registry,
484 459 acl_repo_name=self.acl_repo_name)
485 460
486 461 if pre_auth and pre_auth.get('username'):
487 462 username = pre_auth['username']
488 463 log.debug('PRE-AUTH got `%s` as username', username)
489 464 if pre_auth:
490 465 log.debug('PRE-AUTH successful from %s',
491 466 pre_auth.get('auth_data', {}).get('_plugin'))
492 467
493 468 # If not authenticated by the container, running basic auth
494 469 # before inject the calling repo_name for special scope checks
495 470 self.authenticate.acl_repo_name = self.acl_repo_name
496 471
497 472 plugin_cache_active, cache_ttl = False, 0
498 473 plugin = None
499 474
500 475 # regular auth chain
501 476 if not username:
502 477 self.authenticate.realm = self.authenticate.get_rc_realm()
503 478
504 479 try:
505 480 auth_result = self.authenticate(environ)
506 481 except (UserCreationError, NotAllowedToCreateUserError) as e:
507 482 log.error(e)
508 483 reason = safe_str(e)
509 484 return HTTPNotAcceptable(reason)(environ, start_response)
510 485
511 486 if isinstance(auth_result, dict):
512 487 AUTH_TYPE.update(environ, 'basic')
513 488 REMOTE_USER.update(environ, auth_result['username'])
514 489 username = auth_result['username']
515 490 plugin = auth_result.get('auth_data', {}).get('_plugin')
516 491 log.info(
517 492 'MAIN-AUTH successful for user `%s` from %s plugin',
518 493 username, plugin)
519 494
520 495 plugin_cache_active, cache_ttl = auth_result.get(
521 496 'auth_data', {}).get('_ttl_cache') or (False, 0)
522 497 else:
523 498 return auth_result.wsgi_application(environ, start_response)
524 499
525 500 # ==============================================================
526 501 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
527 502 # ==============================================================
528 503 user = User.get_by_username(username)
529 504 if not self.valid_and_active_user(user):
530 505 return HTTPForbidden()(environ, start_response)
531 506 username = user.username
532 507 user_id = user.user_id
533 508
534 509 # check user attributes for password change flag
535 510 user_obj = user
536 511 auth_user = user_obj.AuthUser()
537 512 if user_obj and user_obj.username != User.DEFAULT_USER and \
538 513 user_obj.user_data.get('force_password_change'):
539 514 reason = 'password change required'
540 515 log.debug('User not allowed to authenticate, %s', reason)
541 516 return HTTPNotAcceptable(reason)(environ, start_response)
542 517
543 518 # check permissions for this repository
544 519 perm = self._check_permission(
545 520 action, user, auth_user, self.acl_repo_name, ip_addr,
546 521 plugin, plugin_cache_active, cache_ttl)
547 522 if not perm:
548 523 return HTTPForbidden()(environ, start_response)
549 524 environ['rc_auth_user_id'] = str(user_id)
550 525
551 526 if action == 'push':
552 527 perms = auth_user.get_branch_permissions(self.acl_repo_name)
553 528 if perms:
554 529 check_branch_perms = True
555 530 detect_force_push = True
556 531
557 532 # extras are injected into UI object and later available
558 533 # in hooks executed by RhodeCode
559 534 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
560 535
561 536 extras = vcs_operation_context(
562 537 environ, repo_name=self.acl_repo_name, username=username,
563 538 action=action, scm=self.SCM, check_locking=check_locking,
564 539 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
565 540 detect_force_push=detect_force_push
566 541 )
567 542
568 543 # ======================================================================
569 544 # REQUEST HANDLING
570 545 # ======================================================================
571 546 repo_path = os.path.join(
572 547 safe_str(self.base_path), safe_str(self.vcs_repo_name))
573 548 log.debug('Repository path is %s', repo_path)
574 549
575 550 fix_PATH()
576 551
577 552 log.info(
578 553 '%s action on %s repo "%s" by "%s" from %s %s',
579 554 action, self.SCM, safe_str(self.url_repo_name),
580 555 safe_str(username), ip_addr, user_agent)
581 556
582 557 return self._generate_vcs_response(
583 558 environ, start_response, repo_path, extras, action)
584 559
560 def _get_txn_id(self, environ):
561
562 for k in ['RAW_URI', 'HTTP_DESTINATION']:
563 url = environ.get(k)
564 if not url:
565 continue
566
567 # regex to search for svn-txn-id
568 pattern = r'/!svn/txr/([^/]+)/'
569
570 # Search for the pattern in the URL
571 match = re.search(pattern, url)
572
573 # Check if a match is found and extract the captured group
574 if match:
575 txn_id = match.group(1)
576 return txn_id
577
585 578 @initialize_generator
586 579 def _generate_vcs_response(
587 580 self, environ, start_response, repo_path, extras, action):
588 581 """
589 582 Returns a generator for the response content.
590 583
591 584 This method is implemented as a generator, so that it can trigger
592 585 the cache validation after all content sent back to the client. It
593 586 also handles the locking exceptions which will be triggered when
594 587 the first chunk is produced by the underlying WSGI application.
595 588 """
596
597 txn_id = ''
598 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
599 # case for SVN, we want to re-use the callback daemon port
600 # so we use the txn_id, for this we peek the body, and still save
601 # it as wsgi.input
602
603 stream = environ['wsgi.input']
604
605 if isinstance(stream, io.BytesIO):
606 data: bytes = stream.getvalue()
607 elif hasattr(stream, 'buf'): # most likely gunicorn.http.body.Body
608 data: bytes = stream.buf.getvalue()
609 else:
610 # fallback to the crudest way, copy the iterator
611 data = safe_bytes(stream.read())
612 environ['wsgi.input'] = io.BytesIO(data)
613
614 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
589 svn_txn_id = ''
590 if action == 'push':
591 svn_txn_id = self._get_txn_id(environ)
615 592
616 593 callback_daemon, extras = self._prepare_callback_daemon(
617 extras, environ, action, txn_id=txn_id)
594 extras, environ, action, txn_id=svn_txn_id)
595
596 if svn_txn_id:
597
598 port = safe_int(extras['hooks_uri'].split(':')[-1])
599 txn_id_data = extras.copy()
600 txn_id_data.update({'port': port})
601 txn_id_data.update({'req_method': environ['REQUEST_METHOD']})
602
603 full_repo_path = repo_path
604 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
605
618 606 log.debug('HOOKS extras is %s', extras)
619 607
620 608 http_scheme = self._get_http_scheme(environ)
621 609
622 610 config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
623 611 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
624 612 with callback_daemon:
625 613 app.rc_extras = extras
626 614
627 615 try:
628 616 response = app(environ, start_response)
629 617 finally:
630 618 # This statement works together with the decorator
631 619 # "initialize_generator" above. The decorator ensures that
632 620 # we hit the first yield statement before the generator is
633 621 # returned back to the WSGI server. This is needed to
634 622 # ensure that the call to "app" above triggers the
635 623 # needed callback to "start_response" before the
636 624 # generator is actually used.
637 625 yield "__init__"
638 626
639 627 # iter content
640 628 for chunk in response:
641 629 yield chunk
642 630
643 631 try:
644 632 # invalidate cache on push
645 633 if action == 'push':
646 634 self._invalidate_cache(self.url_repo_name)
647 635 finally:
648 636 meta.Session.remove()
649 637
650 638 def _get_repository_name(self, environ):
651 639 """Get repository name out of the environmnent
652 640
653 641 :param environ: WSGI environment
654 642 """
655 643 raise NotImplementedError()
656 644
657 645 def _get_action(self, environ):
658 646 """Map request commands into a pull or push command.
659 647
660 648 :param environ: WSGI environment
661 649 """
662 650 raise NotImplementedError()
663 651
664 652 def _create_wsgi_app(self, repo_path, repo_name, config):
665 653 """Return the WSGI app that will finally handle the request."""
666 654 raise NotImplementedError()
667 655
668 656 def _create_config(self, extras, repo_name, scheme='http'):
669 657 """Create a safe config representation."""
670 658 raise NotImplementedError()
671 659
672 660 def _should_use_callback_daemon(self, extras, environ, action):
673 661 if extras.get('is_shadow_repo'):
674 662 # we don't want to execute hooks, and callback daemon for shadow repos
675 663 return False
676 664 return True
677 665
678 666 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
679 667 protocol = vcs_settings.HOOKS_PROTOCOL
668
680 669 if not self._should_use_callback_daemon(extras, environ, action):
681 670 # disable callback daemon for actions that don't require it
682 671 protocol = 'local'
683 672
684 673 return prepare_callback_daemon(
685 674 extras, protocol=protocol,
686 675 host=vcs_settings.HOOKS_HOST, txn_id=txn_id)
687 676
688 677
689 678 def _should_check_locking(query_string):
690 679 # this is kind of hacky, but due to how mercurial handles client-server
691 680 # server see all operation on commit; bookmarks, phases and
692 681 # obsolescence marker in different transaction, we don't want to check
693 682 # locking on those
694 683 return query_string not in ['cmd=listkeys']
@@ -1,421 +1,391 b''
1 1
2 2 /******************************************************************************
3 3 * *
4 4 * DO NOT CHANGE THIS FILE MANUALLY *
5 5 * *
6 6 * *
7 7 * This file is automatically generated when the app starts up with *
8 8 * generate_js_files = true *
9 9 * *
10 10 * To add a route here pass jsroute=True to the route definition in the app *
11 11 * *
12 12 ******************************************************************************/
13 13 function registerRCRoutes() {
14 14 // routes registration
15 pyroutes.register('admin_artifacts', '/_admin/artifacts', []);
16 pyroutes.register('admin_artifacts_data', '/_admin/artifacts-data', []);
17 pyroutes.register('admin_artifacts_delete', '/_admin/artifacts/%(uid)s/delete', ['uid']);
18 pyroutes.register('admin_artifacts_show_all', '/_admin/artifacts', []);
19 pyroutes.register('admin_artifacts_show_info', '/_admin/artifacts/%(uid)s', ['uid']);
20 pyroutes.register('admin_artifacts_update', '/_admin/artifacts/%(uid)s/update', ['uid']);
15 pyroutes.register('admin_artifacts', '/_admin/_admin/artifacts', []);
16 pyroutes.register('admin_artifacts_delete', '/_admin/_admin/artifacts/%(uid)s/delete', ['uid']);
17 pyroutes.register('admin_artifacts_show_all', '/_admin/_admin/artifacts', []);
18 pyroutes.register('admin_artifacts_show_info', '/_admin/_admin/artifacts/%(uid)s', ['uid']);
19 pyroutes.register('admin_artifacts_update', '/_admin/_admin/artifacts/%(uid)s/update', ['uid']);
21 20 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
22 21 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
23 pyroutes.register('admin_automation', '/_admin/automation', []);
24 pyroutes.register('admin_automation_update', '/_admin/automation/%(entry_id)s/update', ['entry_id']);
22 pyroutes.register('admin_automation', '/_admin/_admin/automation', []);
25 23 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
26 24 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
27 25 pyroutes.register('admin_home', '/_admin', []);
28 26 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
29 27 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
30 28 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
31 29 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
32 pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []);
33 30 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
34 31 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
35 32 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
36 33 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
37 34 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
38 35 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
39 36 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
40 37 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
41 38 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
42 pyroutes.register('admin_scheduler', '/_admin/scheduler', []);
43 pyroutes.register('admin_scheduler_show_tasks', '/_admin/scheduler/_tasks', []);
39 pyroutes.register('admin_scheduler', '/_admin/_admin/scheduler', []);
44 40 pyroutes.register('admin_settings', '/_admin/settings', []);
45 41 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
46 42 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
47 43 pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []);
48 44 pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']);
49 45 pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions_delete_all', []);
50 46 pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']);
51 47 pyroutes.register('admin_settings_global', '/_admin/settings/global', []);
52 48 pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []);
53 49 pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
54 50 pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
55 51 pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
56 52 pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
57 53 pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
58 54 pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
59 55 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
60 56 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
61 57 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
62 pyroutes.register('admin_settings_license', '/_admin/settings/license', []);
63 pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []);
64 58 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
65 59 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
66 60 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
67 61 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
68 62 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
69 63 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
70 64 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
71 pyroutes.register('admin_settings_scheduler_create', '/_admin/scheduler/create', []);
72 pyroutes.register('admin_settings_scheduler_delete', '/_admin/scheduler/%(schedule_id)s/delete', ['schedule_id']);
73 pyroutes.register('admin_settings_scheduler_edit', '/_admin/scheduler/%(schedule_id)s', ['schedule_id']);
74 pyroutes.register('admin_settings_scheduler_execute', '/_admin/scheduler/%(schedule_id)s/execute', ['schedule_id']);
75 pyroutes.register('admin_settings_scheduler_new', '/_admin/scheduler/new', []);
76 pyroutes.register('admin_settings_scheduler_update', '/_admin/scheduler/%(schedule_id)s/update', ['schedule_id']);
77 65 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
78 66 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
79 67 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
80 68 pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
81 69 pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
82 70 pyroutes.register('admin_settings_update', '/_admin/settings/update', []);
83 71 pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []);
84 72 pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
85 73 pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []);
86 74 pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []);
87 75 pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
88 76 pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
89 77 pyroutes.register('apiv2', '/_admin/api', []);
90 78 pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']);
91 79 pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']);
92 80 pyroutes.register('auth_home', '/_admin/auth*traverse', []);
93 81 pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
94 82 pyroutes.register('branch_remove', '/%(repo_name)s/branches/%(branch_name)s/remove', ['repo_name', 'branch_name']);
95 83 pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
96 84 pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
97 85 pyroutes.register('channelstream_proxy', '/_channelstream', []);
98 86 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
99 87 pyroutes.register('check_2fa', '/_admin/check_2fa', []);
100 pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']);
101 88 pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']);
102 89 pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']);
103 90 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
104 91 pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
105 92 pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']);
106 93 pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']);
107 94 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
108 95 pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
109 96 pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']);
110 97 pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
111 98 pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
112 99 pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']);
113 100 pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
114 101 pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
115 102 pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']);
116 103 pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
117 104 pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']);
118 105 pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']);
119 106 pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']);
120 107 pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']);
121 108 pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']);
122 109 pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']);
123 110 pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']);
124 111 pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']);
125 112 pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']);
126 113 pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']);
127 114 pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']);
128 115 pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']);
129 116 pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
130 117 pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
131 118 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
132 119 pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']);
133 120 pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']);
134 121 pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']);
135 122 pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']);
136 123 pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']);
137 124 pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']);
138 125 pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']);
139 126 pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']);
140 127 pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']);
141 128 pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']);
142 129 pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']);
143 130 pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']);
144 131 pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
145 132 pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']);
146 133 pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
147 134 pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
148 135 pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
149 136 pyroutes.register('edit_user_auth_tokens_view', '/_admin/users/%(user_id)s/edit/auth_tokens/view', ['user_id']);
150 137 pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']);
151 138 pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']);
152 139 pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
153 140 pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
154 141 pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
155 142 pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']);
156 143 pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']);
157 144 pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']);
158 145 pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']);
159 146 pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']);
160 147 pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']);
161 148 pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']);
162 149 pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']);
163 150 pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']);
164 151 pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
165 152 pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
166 153 pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
167 154 pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
168 155 pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
169 156 pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']);
170 157 pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']);
171 158 pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']);
172 159 pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']);
173 160 pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']);
174 161 pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']);
175 162 pyroutes.register('favicon', '/favicon.ico', []);
176 163 pyroutes.register('file_preview', '/_file_preview', []);
177 164 pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
178 165 pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
179 166 pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
180 167 pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
181 168 pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
182 169 pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
183 170 pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/rev/%(revision)s', ['gist_id', 'revision']);
184 171 pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
185 172 pyroutes.register('gists_create', '/_admin/gists/create', []);
186 173 pyroutes.register('gists_new', '/_admin/gists/new', []);
187 174 pyroutes.register('gists_show', '/_admin/gists', []);
188 175 pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
189 176 pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
190 177 pyroutes.register('global_integrations_home', '/_admin/integrations', []);
191 178 pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
192 179 pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
193 180 pyroutes.register('goto_switcher_data', '/_goto_data', []);
194 181 pyroutes.register('home', '/', []);
195 182 pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']);
196 183 pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']);
197 184 pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']);
198 185 pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']);
199 186 pyroutes.register('hovercard_username', '/_hovercard/username/%(username)s', ['username']);
200 187 pyroutes.register('journal', '/_admin/journal', []);
201 188 pyroutes.register('journal_atom', '/_admin/journal/atom', []);
202 189 pyroutes.register('journal_public', '/_admin/public_journal', []);
203 190 pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
204 191 pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
205 192 pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
206 193 pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
207 194 pyroutes.register('journal_rss', '/_admin/journal/rss', []);
208 195 pyroutes.register('login', '/_admin/login', []);
209 196 pyroutes.register('logout', '/_admin/logout', []);
210 197 pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []);
211 198 pyroutes.register('main_page_repos_data', '/_home_repos', []);
212 199 pyroutes.register('markup_preview', '/_markup_preview', []);
213 200 pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
214 201 pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
215 202 pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
216 203 pyroutes.register('my_account_auth_tokens_view', '/_admin/my_account/auth_tokens/view', []);
217 204 pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []);
218 205 pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []);
219 206 pyroutes.register('my_account_configure_2fa', '/_admin/my_account/configure_2fa', []);
220 207 pyroutes.register('my_account_configure_2fa_update', '/_admin/my_account/configure_2fa_update', []);
221 208 pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
222 209 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
223 210 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
224 211 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
225 pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []);
226 pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []);
227 212 pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']);
228 213 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
229 214 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
230 215 pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
231 216 pyroutes.register('my_account_password', '/_admin/my_account/password', []);
232 217 pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
233 218 pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
234 219 pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
235 220 pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
236 221 pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
237 222 pyroutes.register('my_account_regenerate_2fa_recovery_codes', '/_admin/my_account/regenerate_recovery_codes', []);
238 223 pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
239 224 pyroutes.register('my_account_show_2fa_recovery_codes', '/_admin/my_account/recovery_codes', []);
240 225 pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
241 226 pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
242 227 pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
243 228 pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []);
244 229 pyroutes.register('my_account_update', '/_admin/my_account/update', []);
245 230 pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []);
246 231 pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
247 232 pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
248 233 pyroutes.register('notifications_mark_all_read', '/_admin/notifications_mark_all_read', []);
249 234 pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
250 235 pyroutes.register('notifications_show_all', '/_admin/notifications', []);
251 236 pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
252 237 pyroutes.register('ops_celery_error_test', '/_admin/ops/error-celery', []);
253 238 pyroutes.register('ops_error_test', '/_admin/ops/error', []);
254 239 pyroutes.register('ops_healthcheck', '/_admin/ops/status', []);
255 240 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
256 241 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
257 pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']);
258 242 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
259 243 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
260 244 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
261 245 pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']);
262 246 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']);
263 247 pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']);
264 248 pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']);
265 249 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
266 250 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
267 pyroutes.register('pullrequest_draft_comments_submit', '/%(repo_name)s/pull-request/%(pull_request_id)s/draft_comments_submit', ['repo_name', 'pull_request_id']);
268 251 pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']);
269 252 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
270 253 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
271 254 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
272 255 pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']);
273 256 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
274 257 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
275 258 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
276 259 pyroutes.register('pullrequest_todos', '/%(repo_name)s/pull-request/%(pull_request_id)s/todos', ['repo_name', 'pull_request_id']);
277 260 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
278 261 pyroutes.register('register', '/_admin/register', []);
279 262 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
280 pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']);
281 pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']);
282 pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']);
283 pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']);
284 263 pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']);
285 pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']);
286 pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']);
287 pyroutes.register('repo_artifacts_stream_script', '/_file_store/stream-upload-script', []);
288 pyroutes.register('repo_artifacts_stream_store', '/_file_store/stream-upload', []);
289 pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']);
290 264 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
291 pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']);
292 265 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
293 266 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
294 267 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
295 268 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
296 269 pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']);
297 270 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
298 271 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
299 272 pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']);
300 273 pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/history_view/%(comment_history_id)s', ['repo_name', 'commit_id', 'comment_id', 'comment_history_id']);
301 274 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
302 275 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
303 276 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
304 277 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
305 278 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
306 279 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
307 280 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
308 281 pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']);
309 282 pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']);
310 283 pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
311 284 pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
312 285 pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
313 286 pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
314 287 pyroutes.register('repo_create', '/_admin/repos/create', []);
315 288 pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
316 289 pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
317 290 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
318 291 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
319 292 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
320 293 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
321 294 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
322 295 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
323 296 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
324 297 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
325 298 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
326 299 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
327 300 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
328 301 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
329 302 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
330 303 pyroutes.register('repo_files_check_head', '/%(repo_name)s/check_head/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
331 304 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
332 305 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
333 306 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
334 307 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
335 308 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
336 309 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
337 310 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
338 311 pyroutes.register('repo_files_replace_binary', '/%(repo_name)s/replace_binary/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
339 312 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
340 313 pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
341 314 pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
342 315 pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
343 316 pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
344 317 pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
345 318 pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
346 319 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
347 320 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
348 321 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
349 322 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
350 323 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']);
351 324 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
352 325 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']);
353 326 pyroutes.register('repo_group_list_data', '/_repo_groups', []);
354 327 pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
355 328 pyroutes.register('repo_groups', '/_admin/repo_groups', []);
356 329 pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []);
357 330 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
358 331 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
359 332 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
360 333 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
361 334 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
362 335 pyroutes.register('repo_list_data', '/_repos', []);
363 336 pyroutes.register('repo_new', '/_admin/repos/new', []);
364 337 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
365 338 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
366 339 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
367 340 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
368 341 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
369 pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']);
370 pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']);
371 pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']);
372 342 pyroutes.register('repo_settings_quick_actions', '/%(repo_name)s/settings/quick-action', ['repo_name']);
373 343 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
374 344 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
375 345 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
376 346 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
377 347 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
378 348 pyroutes.register('repos', '/_admin/repos', []);
379 349 pyroutes.register('repos_data', '/_admin/repos_data', []);
380 350 pyroutes.register('reset_password', '/_admin/password_reset', []);
381 351 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
382 352 pyroutes.register('robots', '/robots.txt', []);
383 353 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']);
384 354 pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']);
385 355 pyroutes.register('search', '/_admin/search', []);
386 356 pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']);
387 357 pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']);
388 358 pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']);
389 359 pyroutes.register('setup_2fa', '/_admin/setup_2fa', []);
390 360 pyroutes.register('store_user_session_value', '/_store_session_attr', []);
391 361 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
392 362 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
393 363 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
394 364 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
395 365 pyroutes.register('upload_file', '/_file_store/upload', []);
396 366 pyroutes.register('user_autocomplete_data', '/_users', []);
397 367 pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
398 368 pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
399 369 pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']);
400 370 pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
401 371 pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
402 372 pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
403 373 pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
404 374 pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']);
405 375 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
406 376 pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
407 377 pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
408 378 pyroutes.register('user_groups', '/_admin/user_groups', []);
409 379 pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
410 380 pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
411 381 pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
412 382 pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
413 383 pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
414 384 pyroutes.register('user_notice_dismiss', '/_admin/users/%(user_id)s/notice_dismiss', ['user_id']);
415 385 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
416 386 pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
417 387 pyroutes.register('users', '/_admin/users', []);
418 388 pyroutes.register('users_create', '/_admin/users/create', []);
419 389 pyroutes.register('users_data', '/_admin/users_data', []);
420 390 pyroutes.register('users_new', '/_admin/users/new', []);
421 391 }
@@ -1,328 +1,328 b''
1 1 ## snippet for displaying issue tracker settings
2 2 ## usage:
3 3 ## <%namespace name="its" file="/base/issue_tracker_settings.mako"/>
4 4 ## ${its.issue_tracker_settings_table(patterns, form_url, delete_url)}
5 5 ## ${its.issue_tracker_settings_test(test_url)}
6 6
7 7 <%def name="issue_tracker_settings_table(patterns, form_url, delete_url)">
8 8 <%
9 9 # Name/desc, pattern, issue prefix
10 10 examples = [
11 11 (
12 12 ' ',
13 13 ' ',
14 14 ' ',
15 15 ' '
16 16 ),
17 17
18 18 (
19 19 'Tickets with #123 (Redmine etc)',
20 '(?<![a-zA-Z0-9_/]{1,10}-?)(#)(?P<issue_id>\d+)',
20 '(?<![a-zA-Z0-9_/]{1,10}-?)(#)(?P<issue_id>[0-9]+)',
21 21 'https://myissueserver.com/${repo}/issue/${issue_id}',
22 22 ''
23 23 ),
24 24
25 25 (
26 26 'Redmine - Alternative',
27 27 '(?:issue-)(\d+)',
28 28 'https://myissueserver.com/redmine/issue/${id}',
29 29 ''
30 30 ),
31 31
32 32 (
33 33 'Redmine - Wiki',
34 34 '(?:wiki-)([a-zA-Z0-9]+)',
35 35 'http://example.org/projects/${repo_name}/wiki/${id}',
36 36 'wiki-'
37 37 ),
38 38
39 39 (
40 40 'JIRA - All tickets',
41 41 # official JIRA ticket pattern
42 42 '(?<![a-zA-Z0-9_/#]-?)(?P<issue_id>[A-Z]{1,6}-(?:[1-9][0-9]{0,7}))',
43 43 'https://myjira.com/browse/${issue_id}',
44 44 ''
45 45 ),
46 46
47 47 (
48 48 'JIRA - Single project (JRA-XXXXXXXX)',
49 49 '(?<![a-zA-Z0-9_/#]-?)(?P<issue_id>JRA-(?:[1-9][0-9]{0,7}))',
50 50 'https://myjira.com/${issue_id}',
51 51 ''
52 52 ),
53 53
54 54 (
55 55 'Confluence WIKI',
56 56 '(?:conf-)([A-Z0-9]+)',
57 57 'https://example.atlassian.net/display/wiki/${id}/${repo_name}',
58 58 'CONF-',
59 59 ),
60 60
61 61 (
62 62 'Pivotal Tracker',
63 '(?:pivot-)(?P<project_id>\d+)-(?P<story>\d+)',
63 '(?:pivot-)(?P<project_id>\d+)-(?P<story>[0-9]+)',
64 64 'https://www.pivotaltracker.com/s/projects/${project_id}/stories/${story}',
65 65 'PIV-',
66 66 ),
67 67
68 68 (
69 69 'Trello',
70 70 '(?:trello-)(?P<card_id>[a-zA-Z0-9]+)',
71 71 'https://trello.com/example.com/${card_id}',
72 72 'TRELLO-',
73 73 ),
74 74 ]
75 75 %>
76 76
77 77 <table class="rctable issuetracker">
78 78 <tr>
79 79 <th>${_('Description')}</th>
80 80 <th>${_('Pattern')}</th>
81 81 <th>${_('Url')}</th>
82 82 <th>${_('Extra Prefix')}</th>
83 83 <th ></th>
84 84 </tr>
85 85 % for name, pat, url, pref in examples:
86 86 <tr class="it-examples" style="${'' if loop.index == 0 else 'display:none'}">
87 87 <td class="td-issue-tracker-name issue-tracker-example">${name}</td>
88 88 <td class="td-regex issue-tracker-example">${pat}</td>
89 89 <td class="td-url issue-tracker-example">${url}</td>
90 90 <td class="td-prefix issue-tracker-example">${pref}</td>
91 91 <td>
92 92 % if loop.index == 0:
93 93 <a href="#showMore" onclick="$('.it-examples').toggle(); return false">${_('show examples')}</a>
94 94 % else:
95 95 <a href="#copyToInput" onclick="copyToInput(this, '${h.str_json(name)}', '${h.str_json(pat)}', '${h.str_json(url)}', '${h.str_json(pref)}'); return false">copy to input</a>
96 96 % endif
97 97 </td>
98 98 </tr>
99 99 % endfor
100 100
101 101 %for uid, entry in patterns:
102 102 <tr id="entry_${uid}">
103 103 <td class="td-issue-tracker-name issuetracker_desc">
104 104 <span class="entry">
105 105 ${entry.desc}
106 106 </span>
107 107 <span class="edit">
108 108 ${h.text('new_pattern_description_'+uid, class_='medium-inline', value=entry.desc or '')}
109 109 </span>
110 110 </td>
111 111 <td class="td-issue-tracker-regex issuetracker_pat">
112 112 <span class="entry">
113 113 ${entry.pat}
114 114 </span>
115 115 <span class="edit">
116 116 ${h.text('new_pattern_pattern_'+uid, class_='medium-inline', value=entry.pat or '')}
117 117 </span>
118 118 </td>
119 119 <td class="td-url issuetracker_url">
120 120 <span class="entry">
121 121 ${entry.url}
122 122 </span>
123 123 <span class="edit">
124 124 ${h.text('new_pattern_url_'+uid, class_='medium-inline', value=entry.url or '')}
125 125 </span>
126 126 </td>
127 127 <td class="td-prefix issuetracker_pref">
128 128 <span class="entry">
129 129 ${entry.pref}
130 130 </span>
131 131 <span class="edit">
132 132 ${h.text('new_pattern_prefix_'+uid, class_='medium-inline', value=entry.pref or '')}
133 133 </span>
134 134 </td>
135 135 <td class="td-action">
136 136 <div class="grid_edit">
137 137 <span class="entry">
138 138 <a class="edit_issuetracker_entry" href="">${_('Edit')}</a>
139 139 </span>
140 140 <span class="edit">
141 141 <input id="uid_${uid}" name="uid" type="hidden" value="${uid}">
142 142 </span>
143 143 </div>
144 144 <div class="grid_delete">
145 145 <span class="entry">
146 146 <a class="btn btn-link btn-danger delete_issuetracker_entry" data-desc="${entry.desc}" data-uid="${uid}">
147 147 ${_('Delete')}
148 148 </a>
149 149 </span>
150 150 <span class="edit">
151 151 <a class="btn btn-link btn-danger edit_issuetracker_cancel" data-uid="${uid}">${_('Cancel')}</a>
152 152 </span>
153 153 </div>
154 154 </td>
155 155 </tr>
156 156 %endfor
157 157 <tr id="last-row"></tr>
158 158 </table>
159 159 <p>
160 160 <a id="add_pattern" class="link">
161 161 ${_('Add new')}
162 162 </a>
163 163 </p>
164 164
165 165 <script type="text/javascript">
166 166 var newEntryLabel = $('label[for="new_entry"]');
167 167
168 168 var resetEntry = function() {
169 169 newEntryLabel.text("${_('New Entry')}:");
170 170 };
171 171
172 172 var delete_pattern = function(entry) {
173 173 if (confirm("${_('Confirm to remove this pattern:')} "+$(entry).data('desc'))) {
174 174 $.ajax({
175 175 type: "POST",
176 176 url: "${delete_url}",
177 177 data: {
178 178 'csrf_token': CSRF_TOKEN,
179 179 'uid':$(entry).data('uid')
180 180 },
181 181 success: function(){
182 182 window.location.reload();
183 183 },
184 184 error: function(data, textStatus, errorThrown){
185 185 alert("Error while deleting entry.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(entry)[0].url));
186 186 }
187 187 });
188 188 }
189 189 };
190 190
191 191 $('.delete_issuetracker_entry').on('click', function(e){
192 192 e.preventDefault();
193 193 delete_pattern(this);
194 194 });
195 195
196 196 $('.edit_issuetracker_entry').on('click', function(e){
197 197 e.preventDefault();
198 198 $(this).parents('tr').addClass('editopen');
199 199 });
200 200
201 201 $('.edit_issuetracker_cancel').on('click', function(e){
202 202 e.preventDefault();
203 203 $(this).parents('tr').removeClass('editopen');
204 204 // Reset to original value
205 205 var uid = $(this).data('uid');
206 206 $('#'+uid+' input').each(function(e) {
207 207 this.value = this.defaultValue;
208 208 });
209 209 });
210 210
211 211 $('input#reset').on('click', function(e) {
212 212 resetEntry();
213 213 });
214 214
215 215 $('#add_pattern').on('click', function(e) {
216 216 addNewPatternInput();
217 217 });
218 218
219 219 var copied = false;
220 220 copyToInput = function (elem, name, pat, url, pref) {
221 221 if (copied === false) {
222 222 addNewPatternInput();
223 223 copied = true;
224 224 }
225 225 $(elem).hide();
226 226 var load = function(text){
227 227 return text.replace(/["]/g, "")
228 228 };
229 229 $('#description_1').val(load(name));
230 230 $('#pattern_1').val(load(pat));
231 231 $('#url_1').val(load(url));
232 232 $('#prefix_1').val(load(pref));
233 233
234 234 }
235 235
236 236 </script>
237 237 </%def>
238 238
239 239 <%def name="issue_tracker_new_row()">
240 240 <table id="add-row-tmpl" style="display: none;">
241 241 <tbody>
242 242 <tr class="new_pattern">
243 243 <td class="td-issue-tracker-name issuetracker_desc">
244 244 <span class="entry">
245 245 <input class="medium-inline" id="description_##UUID##" name="new_pattern_description_##UUID##" value="##DESCRIPTION##" type="text">
246 246 </span>
247 247 </td>
248 248 <td class="td-issue-tracker-regex issuetracker_pat">
249 249 <span class="entry">
250 250 <input class="medium-inline" id="pattern_##UUID##" name="new_pattern_pattern_##UUID##" placeholder="Pattern"
251 251 value="##PATTERN##" type="text">
252 252 </span>
253 253 </td>
254 254 <td class="td-url issuetracker_url">
255 255 <span class="entry">
256 256 <input class="medium-inline" id="url_##UUID##" name="new_pattern_url_##UUID##" placeholder="Url" value="##URL##" type="text">
257 257 </span>
258 258 </td>
259 259 <td class="td-prefix issuetracker_pref">
260 260 <span class="entry">
261 261 <input class="medium-inline" id="prefix_##UUID##" name="new_pattern_prefix_##UUID##" placeholder="Prefix" value="##PREFIX##" type="text">
262 262 </span>
263 263 </td>
264 264 <td class="td-action">
265 265 </td>
266 266 <input id="uid_##UUID##" name="uid_##UUID##" type="hidden" value="">
267 267 </tr>
268 268 </tbody>
269 269 </table>
270 270 </%def>
271 271
272 272 <%def name="issue_tracker_settings_test(test_url)">
273 273 <div class="form-vertical">
274 274 <div class="fields">
275 275 <div class="field">
276 276 <div class='textarea-full'>
277 277 <textarea id="test_pattern_data" rows="12">
278 278 This is an example text for testing issue tracker patterns.
279 279 This commit fixes ticket #451 and ticket #910, reference for JRA-401.
280 280 The following tickets will get mentioned:
281 281 #123
282 282 #456 and PROJ-101
283 283 JRA-123 and #123
284 284 PROJ-456
285 285
286 286 [my artifact](http://something.com/JRA-1234-build.zip)
287 287
288 288 - #1001
289 289 - JRA-998
290 290
291 291 Open a pull request !101 to contribute!
292 292 Added tag v1.3.0 for commit 0f3b629be725
293 293
294 294 Add a test pattern here and hit preview to see the link.
295 295 </textarea>
296 296 </div>
297 297 </div>
298 298 </div>
299 299 <div class="test_pattern_preview">
300 300 <div id="test_pattern" class="btn btn-small" >${_('Preview')}</div>
301 301 <p>${_('Test Pattern Preview')}</p>
302 302 <div id="test_pattern_result" style="white-space: pre-wrap"></div>
303 303 </div>
304 304 </div>
305 305
306 306 <script type="text/javascript">
307 307 $('#test_pattern').on('click', function(e) {
308 308 $.ajax({
309 309 type: "POST",
310 310 url: "${test_url}",
311 311 data: {
312 312 'test_text': $('#test_pattern_data').val(),
313 313 'csrf_token': CSRF_TOKEN
314 314 },
315 315 success: function(data){
316 316 $('#test_pattern_result').html(data);
317 317 tooltipActivate();
318 318 },
319 319 error: function(jqXHR, textStatus, errorThrown){
320 320 $('#test_pattern_result').html('Error: ' + errorThrown);
321 321 }
322 322 });
323 323 $('#test_pattern_result').show();
324 324 });
325 325 </script>
326 326 </%def>
327 327
328 328
@@ -1,346 +1,345 b''
1 1 ## snippet for displaying vcs settings
2 2 ## usage:
3 3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
4 4 ## ${vcss.vcs_settings_fields()}
5 5
6 6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, **kwargs)">
7 7 % if display_globals:
8 8 <div class="panel panel-default">
9 9 <div class="panel-heading" id="general">
10 10 <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ΒΆ</a></h3>
11 11 </div>
12 12 <div class="panel-body">
13 13 <div class="field">
14 14 <div class="checkbox">
15 15 ${h.checkbox('web_push_ssl' + suffix, 'True')}
16 16 <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
17 17 </div>
18 18 <div class="label">
19 19 <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If SSL certificate is missing it will return a HTTP Error 406: Not Acceptable.')}</span>
20 20 </div>
21 21 </div>
22 22 </div>
23 23 </div>
24 24 % endif
25 25
26 26 % if display_globals or repo_type in ['git', 'hg']:
27 27 <div class="panel panel-default">
28 28 <div class="panel-heading" id="vcs-hooks-options">
29 29 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ΒΆ</a></h3>
30 30 </div>
31 31 <div class="panel-body">
32 32 <div class="field">
33 33 <div class="checkbox">
34 34 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
35 35 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
36 36 </div>
37 37
38 38 <div class="label">
39 39 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
40 40 </div>
41 41 <div class="checkbox">
42 42 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
43 43 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
44 44 </div>
45 45 <div class="label">
46 46 <span class="help-block">${_('Execute Built in pre/post push hooks. This also executes rcextensions hooks.')}</span>
47 47 </div>
48 48 <div class="checkbox">
49 49 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
50 50 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
51 51 </div>
52 52 <div class="label">
53 53 <span class="help-block">${_('Execute Built in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
54 54 </div>
55 55 </div>
56 56 </div>
57 57 </div>
58 58 % endif
59 59
60 60 % if display_globals or repo_type in ['hg']:
61 61 <div class="panel panel-default">
62 62 <div class="panel-heading" id="vcs-hg-options">
63 63 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ΒΆ</a></h3>
64 64 </div>
65 65 <div class="panel-body">
66 66 <div class="checkbox">
67 67 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
68 68 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
69 69 </div>
70 70 <div class="label">
71 71 % if display_globals:
72 72 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
73 73 % else:
74 74 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
75 75 % endif
76 76 </div>
77 77
78 78 % if display_globals:
79 79 <div class="field">
80 80 <div class="input">
81 81 ${h.text('largefiles_usercache' + suffix, size=59)}
82 82 </div>
83 83 </div>
84 84 <div class="label">
85 85 <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
86 86 </div>
87 87 % endif
88 88
89 89 <div class="checkbox">
90 90 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
91 91 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
92 92 </div>
93 93 <div class="label">
94 94 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
95 95 </div>
96 96
97 97 <div class="checkbox">
98 98 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
99 99 <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label>
100 100 </div>
101 101 <div class="label">
102 102 % if display_globals:
103 103 <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span>
104 104 % else:
105 105 <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span>
106 106 % endif
107 107 </div>
108 108
109 109 </div>
110 110 </div>
111 111 % endif
112 112
113 113 % if display_globals or repo_type in ['git']:
114 114 <div class="panel panel-default">
115 115 <div class="panel-heading" id="vcs-git-options">
116 116 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ΒΆ</a></h3>
117 117 </div>
118 118 <div class="panel-body">
119 119 <div class="checkbox">
120 120 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
121 121 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
122 122 </div>
123 123 <div class="label">
124 124 % if display_globals:
125 125 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
126 126 % else:
127 127 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
128 128 % endif
129 129 </div>
130 130
131 131 % if display_globals:
132 132 <div class="field">
133 133 <div class="input">
134 134 ${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
135 135 </div>
136 136 </div>
137 137 <div class="label">
138 138 <span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
139 139 </div>
140 140 % endif
141 141 </div>
142 142 </div>
143 143 % endif
144 144
145 145 % if display_globals or repo_type in ['svn']:
146 146 <div class="panel panel-default">
147 147 <div class="panel-heading" id="vcs-svn-options">
148 148 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ΒΆ</a></h3>
149 149 </div>
150 150 <div class="panel-body">
151 151 % if display_globals:
152 152 <div class="field">
153 153 <div class="content" >
154 154 <label>${_('mod_dav config')}</label><br/>
155 155 <code>path: ${c.svn_config_path}</code>
156 156 </div>
157 157 <br/>
158 158
159 159 <div>
160 160
161 161 % if c.svn_generate_config:
162 162 <span class="buttons">
163 163 <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Re-generate Apache Config')}</button>
164 164 </span>
165 165 % endif
166 166 </div>
167 167 </div>
168 168 % endif
169 169
170 170 <div class="field">
171 171 <div class="content" >
172 172 <label>${_('Repository patterns')}</label><br/>
173 173 </div>
174 174 </div>
175 175 <div class="label">
176 176 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". Eg.: "/branches/*"')}</span>
177 177 </div>
178 178
179 179 <div class="field branch_patterns">
180 180 <div class="input" >
181 181 <label>${_('Branches')}:</label><br/>
182 182 </div>
183 183 % if svn_branch_patterns:
184 184 % for branch in svn_branch_patterns:
185 185 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
186 186 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
187 187 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
188 188 % if kwargs.get('disabled') != 'disabled':
189 189 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
190 190 ${_('Delete')}
191 191 </span>
192 192 % endif
193 193 </div>
194 194 % endfor
195 195 %endif
196 196 </div>
197 197 % if kwargs.get('disabled') != 'disabled':
198 198 <div class="field branch_patterns">
199 199 <div class="input" >
200 200 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
201 201 </div>
202 202 </div>
203 203 % endif
204 204 <div class="field tag_patterns">
205 205 <div class="input" >
206 206 <label>${_('Tags')}:</label><br/>
207 207 </div>
208 208 % if svn_tag_patterns:
209 209 % for tag in svn_tag_patterns:
210 210 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
211 211 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
212 212 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
213 213 % if kwargs.get('disabled') != 'disabled':
214 214 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
215 215 ${_('Delete')}
216 216 </span>
217 217 %endif
218 218 </div>
219 219 % endfor
220 220 % endif
221 221 </div>
222 222 % if kwargs.get('disabled') != 'disabled':
223 223 <div class="field tag_patterns">
224 224 <div class="input" >
225 225 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
226 226 </div>
227 227 </div>
228 228 %endif
229 229 </div>
230 230 </div>
231 231 % else:
232 232 ${h.hidden('new_svn_branch' + suffix, '')}
233 233 ${h.hidden('new_svn_tag' + suffix, '')}
234 234 % endif
235 235
236 236
237 237 % if display_globals or repo_type in ['hg', 'git']:
238 238 <div class="panel panel-default">
239 239 <div class="panel-heading" id="vcs-pull-requests-options">
240 240 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
241 241 </div>
242 242 <div class="panel-body">
243 243 <div class="checkbox">
244 244 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
245 245 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
246 246 </div>
247 247 <div class="label">
248 248 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
249 249 </div>
250 250 <div class="checkbox">
251 251 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
252 252 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
253 253 </div>
254 254 <div class="label">
255 255 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
256 256 </div>
257 257 </div>
258 258 </div>
259 259 % endif
260 260
261 261 % if display_globals or repo_type in ['hg', 'git', 'svn']:
262 262 <div class="panel panel-default">
263 263 <div class="panel-heading" id="vcs-pull-requests-options">
264 264 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
265 265 </div>
266 266 <div class="panel-body">
267 267 <div class="checkbox">
268 268 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
269 269 <label for="rhodecode_diff_cache${suffix}">${_('Enable caching diffs for pull requests cache and commits')}</label>
270 270 </div>
271 271 </div>
272 272 </div>
273 273 % endif
274 274
275 275 % if display_globals or repo_type in ['hg',]:
276 276 <div class="panel panel-default">
277 277 <div class="panel-heading" id="vcs-pull-requests-options">
278 278 <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ΒΆ</a></h3>
279 279 </div>
280 280 <div class="panel-body">
281 281 ## Specific HG settings
282 282 <div class="checkbox">
283 283 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
284 284 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
285 285 </div>
286 286 <div class="label">
287 287 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
288 288 </div>
289 289
290 290 <div class="checkbox">
291 291 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
292 292 <label for="rhodecode_hg_close_branch_before_merging{suffix}">${_('Close branch before merging it')}</label>
293 293 </div>
294 294 <div class="label">
295 295 <span class="help-block">${_('Close branch before merging it into destination branch. No effect when rebase strategy is use.')}</span>
296 296 </div>
297 297
298 298
299 299 </div>
300 300 </div>
301 301 % endif
302 302
303 303 % if display_globals or repo_type in ['git']:
304 304 <div class="panel panel-default">
305 305 <div class="panel-heading" id="vcs-pull-requests-options">
306 306 <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ΒΆ</a></h3>
307 307 </div>
308 308 <div class="panel-body">
309 309 ## <div class="checkbox">
310 310 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
311 311 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
312 312 ## </div>
313 313 ## <div class="label">
314 314 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
315 315 ## </div>
316 316
317 317 <div class="checkbox">
318 318 ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
319 319 <label for="rhodecode_git_close_branch_before_merging{suffix}">${_('Delete branch after merging it')}</label>
320 320 </div>
321 321 <div class="label">
322 322 <span class="help-block">${_('Delete branch after merging it into destination branch.')}</span>
323 323 </div>
324 324 </div>
325 325 </div>
326 326 % endif
327 327
328 328 <script type="text/javascript">
329 329
330 330 $(document).ready(function() {
331 331 /* On click handler for the `Generate Apache Config` button. It sends a
332 332 POST request to trigger the (re)generation of the mod_dav_svn config. */
333 333 $('#vcs_svn_generate_cfg').on('click', function(event) {
334 334 event.preventDefault();
335 alert('i cliked it !!')
336 335 var url = "${h.route_path('admin_settings_vcs_svn_generate_cfg')}";
337 336 var jqxhr = $.post(url, {'csrf_token': CSRF_TOKEN});
338 337 jqxhr.done(function(data) {
339 338 $.Topic('/notifications').publish(data);
340 339 });
341 340 });
342 341 });
343 342
344 343 </script>
345 344 </%def>
346 345
@@ -1,226 +1,226 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import pytest
21 21
22 22 from rhodecode.lib.config_utils import get_app_config
23 23 from rhodecode.tests.fixture import TestINI
24 24 from rhodecode.tests import TESTS_TMP_PATH
25 25 from rhodecode.tests.server_utils import RcVCSServer
26 26
27 27
@pytest.fixture(scope='session')
def vcsserver(request, vcsserver_port, vcsserver_factory):
    """
    Session scope VCSServer.

    Tests which need the VCSServer have to rely on this fixture in order
    to ensure it will be running.

    For specific needs, the fixture vcsserver_factory can be used. It allows to
    adjust the configuration file for the test run.

    Command line args:

    --without-vcsserver: Allows to switch this fixture off. You have to
    manually start the server.

    --vcsserver-port: Will expect the VCSServer to listen on this port.
    """
    # Respect the --without-vcsserver switch: return nothing and let the
    # developer run the server by hand.
    if not request.config.getoption('with_vcsserver'):
        return None
    return vcsserver_factory(request, vcsserver_port=vcsserver_port)
52 52
53 53
@pytest.fixture(scope='session')
def vcsserver_factory(tmpdir_factory):
    """
    Use this if you need a running vcsserver with a special configuration.
    """

    def factory(request, overrides=(), vcsserver_port=None,
                log_file=None, workers='2'):
        # Pick a free port unless the caller pinned one explicitly.
        if vcsserver_port is None:
            vcsserver_port = get_available_port()

        effective_overrides = list(overrides)
        effective_overrides.append({'server:main': {'port': vcsserver_port}})

        config_file = get_config(
            request.config,
            option_name='vcsserver_config_http',
            override_option_name='vcsserver_config_override',
            overrides=effective_overrides,
            basetemp=tmpdir_factory.getbasetemp().strpath,
            prefix='test_vcs_')

        server = RcVCSServer(config_file, log_file, workers)
        server.start()

        # Ensure the spawned process is torn down when the test ends.
        request.addfinalizer(server.shutdown)

        server.wait_until_ready()
        return server

    return factory
88 88
89 89
90 90 def _use_log_level(config):
91 91 level = config.getoption('test_loglevel') or 'critical'
92 92 return level.upper()
93 93
94 94
@pytest.fixture(scope='session')
def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
    """Generate the INI configuration file used by the test web app."""
    option_name = 'pyramid_config'
    log_level = _use_log_level(request.config)

    app_settings = {
        'cache_dir': '%(here)s/rc-tests/rc_data',
        'vcs.server': f'localhost:{vcsserver_port}',
        # johbo: We will always start the VCSServer on our own based on the
        # fixtures of the test cases. For the test run it must always be
        # off in the INI file.
        'vcs.start_server': 'false',

        'vcs.server.protocol': 'http',
        'vcs.scm_app_implementation': 'http',
        'vcs.svn.proxy.enabled': 'true',
        'vcs.hooks.protocol': 'http',
        'vcs.hooks.host': '*',
        'repo_store.path': TESTS_TMP_PATH,
        'app.service_api.token': 'service_secret_token',
    }
    console_handler = {
        'class': 'StreamHandler',
        'args': '(sys.stderr,)',
        'level': log_level,
    }
    overrides = [
        {'server:main': {'port': rcserver_port}},
        {'app:main': app_settings},
        {'handler_console': console_handler},
    ]

    return get_config(
        request.config, option_name=option_name,
        override_option_name=f'{option_name}_override',
        overrides=overrides,
        basetemp=tmpdir_factory.getbasetemp().strpath,
        prefix='test_rce_')
134 134
135 135
@pytest.fixture(scope='session')
def ini_settings(ini_config):
    """Parsed application settings loaded from the generated test INI file."""
    return get_app_config(ini_config)
140 140
141 141
def get_available_port(min_port=40000, max_port=55555):
    """Return a free TCP port within the [min_port, max_port] range."""
    # Imported lazily so this module can be loaded without the full app.
    from rhodecode.lib.utils2 import get_available_port as _find_free_port
    return _find_free_port(min_port, max_port)
145 145
146 146
@pytest.fixture(scope='session')
def rcserver_port(request):
    """Session-wide free port for the RhodeCode web server."""
    chosen_port = get_available_port()
    print(f'Using rhodecode port {chosen_port}')
    return chosen_port
152 152
153 153
@pytest.fixture(scope='session')
def vcsserver_port(request):
    """Port for the VCSServer: honor --vcsserver-port, else pick a free one."""
    chosen_port = request.config.getoption('--vcsserver-port')
    if chosen_port is None:
        chosen_port = get_available_port()
    print(f'Using vcsserver port {chosen_port}')
    return chosen_port
161 161
162 162
@pytest.fixture(scope='session')
def available_port_factory():
    """
    Returns a callable which returns free port numbers.

    NOTE(review): the previous signature annotated the return type as
    ``-> get_available_port`` — a *function object*, which is not a valid
    type and misleads type checkers. The fixture simply hands back the
    ``get_available_port`` helper, i.e. a zero-argument callable returning
    an ``int`` port number.
    """
    return get_available_port
169 169
170 170
@pytest.fixture()
def available_port(available_port_factory):
    """
    Gives you one free port for the current test.

    Uses "available_port_factory" to retrieve the port.
    """
    factory = available_port_factory
    return factory()
179 179
180 180
@pytest.fixture(scope='session')
def testini_factory(tmpdir_factory, ini_config):
    """
    Factory to create an INI file based on TestINI.

    It will make sure to place the INI file in the correct directory.
    """
    base_dir = tmpdir_factory.getbasetemp().strpath
    return TestIniFactory(base_dir, ini_config)
190 190
191 191
class TestIniFactory(object):
    """Callable factory producing INI files derived from a template INI."""

    def __init__(self, basetemp, template_ini):
        # Directory where generated files are placed and the template to copy.
        self._basetemp = basetemp
        self._template_ini = template_ini

    def __call__(self, ini_params, new_file_prefix='test'):
        generated = TestINI(
            self._template_ini, ini_params=ini_params,
            new_file_prefix=new_file_prefix, dir=self._basetemp)
        return generated.create()
204 204
205 205
def get_config(
        config, option_name, override_option_name, overrides=None,
        basetemp=None, prefix='test'):
    """
    Find a configuration file and apply overrides for the given `prefix`.
    """
    # Command-line value wins over the ini-declared default.
    config_file = config.getoption(option_name) or config.getini(option_name)
    if not config_file:
        pytest.exit(
            "Configuration error, could not extract {}.".format(option_name))

    overrides = overrides or []
    cli_override = config.getoption(override_option_name)
    if cli_override:
        overrides.append(cli_override)

    temp_ini_file = TestINI(
        config_file, ini_params=overrides, new_file_prefix=prefix,
        dir=basetemp)
    return temp_ini_file.create()
@@ -1,359 +1,360 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import logging
21 21 import io
22 22
23 23 import mock
24 24 import msgpack
25 25 import pytest
26 26 import tempfile
27 27
28 28 from rhodecode.lib.hook_daemon import http_hooks_deamon
29 29 from rhodecode.lib.hook_daemon import celery_hooks_deamon
30 30 from rhodecode.lib.hook_daemon import hook_module
31 31 from rhodecode.lib.hook_daemon import base as hook_base
32 32 from rhodecode.lib.str_utils import safe_bytes
33 33 from rhodecode.tests.utils import assert_message_in_log
34 34 from rhodecode.lib.ext_json import json
35 35
36 36 test_proto = http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO
37 37
38 38
class TestHooks(object):
    def test_hooks_can_be_used_as_a_context_processor(self):
        """Entering a Hooks context must yield the Hooks instance itself."""
        hooks = hook_module.Hooks()
        with hooks as ctx:
            pass
        assert hooks == ctx
45 45
46 46
class TestHooksHttpHandler(object):
    """Tests for the HTTP hooks handler: request parsing, result/exception
    serialization and debug logging. Each test drives the handler through a
    fake in-memory socket (MockRequest/MockServer)."""

    def test_read_request_parses_method_name_and_arguments(self):
        data = {
            'method': 'test',
            'extras': {
                'param1': 1,
                'param2': 'a'
            }
        }
        request = self._generate_post_request(data)
        # `create=True` because Hooks has no real `test` attribute.
        hooks_patcher = mock.patch.object(
            hook_module.Hooks, data['method'], create=True, return_value=1)

        with hooks_patcher as hooks_mock:
            handler = http_hooks_deamon.HooksHttpHandler
            handler.DEFAULT_HOOKS_PROTO = test_proto
            handler.wbufsize = 10240
            MockServer(handler, request)

        hooks_mock.assert_called_once_with(data['extras'])

    def test_hooks_serialized_result_is_returned(self):
        request = self._generate_post_request({})
        rpc_method = 'test'
        hook_result = {
            'first': 'one',
            'second': 2
        }
        extras = {}

        # patching our _read to return test method and proto used
        read_patcher = mock.patch.object(
            http_hooks_deamon.HooksHttpHandler, '_read_request',
            return_value=(test_proto, rpc_method, extras))

        # patch Hooks instance to return hook_result data on 'test' call
        hooks_patcher = mock.patch.object(
            hook_module.Hooks, rpc_method, create=True,
            return_value=hook_result)

        with read_patcher, hooks_patcher:
            handler = http_hooks_deamon.HooksHttpHandler
            handler.DEFAULT_HOOKS_PROTO = test_proto
            handler.wbufsize = 10240
            server = MockServer(handler, request)

        expected_result = http_hooks_deamon.HooksHttpHandler.serialize_data(hook_result)

        # The serialized payload is the last line written to the response.
        server.request.output_stream.seek(0)
        assert server.request.output_stream.readlines()[-1] == expected_result

    def test_exception_is_returned_in_response(self):
        request = self._generate_post_request({})
        rpc_method = 'test'

        read_patcher = mock.patch.object(
            http_hooks_deamon.HooksHttpHandler, '_read_request',
            return_value=(test_proto, rpc_method, {}))

        hooks_patcher = mock.patch.object(
            hook_module.Hooks, rpc_method, create=True,
            side_effect=Exception('Test exception'))

        with read_patcher, hooks_patcher:
            handler = http_hooks_deamon.HooksHttpHandler
            handler.DEFAULT_HOOKS_PROTO = test_proto
            handler.wbufsize = 10240
            server = MockServer(handler, request)

        # Skip HTTP status + headers (first 5 lines); the rest is the body.
        server.request.output_stream.seek(0)
        data = server.request.output_stream.readlines()
        msgpack_data = b''.join(data[5:])
        org_exc = http_hooks_deamon.HooksHttpHandler.deserialize_data(msgpack_data)
        expected_result = {
            'exception': 'Exception',
            'exception_traceback': org_exc['exception_traceback'],
            'exception_args': ['Test exception']
        }
        assert org_exc == expected_result

    def test_log_message_writes_to_debug_log(self, caplog):
        ip_port = ('0.0.0.0', 8888)
        handler = http_hooks_deamon.HooksHttpHandler(MockRequest('POST /'), ip_port, mock.Mock())
        fake_date = '1/Nov/2015 00:00:00'
        # Pin the timestamp so the expected message is deterministic.
        date_patcher = mock.patch.object(
            handler, 'log_date_time_string', return_value=fake_date)

        with date_patcher, caplog.at_level(logging.DEBUG):
            handler.log_message('Some message %d, %s', 123, 'string')

        expected_message = f"HOOKS: client={ip_port} - - [{fake_date}] Some message 123, string"

        assert_message_in_log(
            caplog.records, expected_message,
            levelno=logging.DEBUG, module='http_hooks_deamon')

    def _generate_post_request(self, data, proto=test_proto):
        # Build a raw HTTP POST body in either msgpack or JSON encoding.
        if proto == http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO:
            payload = msgpack.packb(data)
        else:
            payload = json.dumps(data)

        return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % (
            len(payload), payload)
151 151
152 152
class TestThreadedHookCallbackDaemon(object):
    """
    Lifecycle tests for ThreadedHookCallbackDaemon.

    NOTE(review): this class was previously named `ThreadedHookCallbackDaemon`
    (shadowing the production class name). pytest only collects classes whose
    name starts with `Test`, so these tests silently never ran. Renamed to
    `TestThreadedHookCallbackDaemon` so they are collected.
    """

    def test_constructor_calls_prepare(self):
        # Construction must trigger the one-time _prepare() setup hook.
        prepare_daemon_patcher = mock.patch.object(
            http_hooks_deamon.ThreadedHookCallbackDaemon, '_prepare')
        with prepare_daemon_patcher as prepare_daemon_mock:
            http_hooks_deamon.ThreadedHookCallbackDaemon()
        prepare_daemon_mock.assert_called_once_with()

    def test_run_is_called_on_context_start(self):
        patchers = mock.patch.multiple(
            http_hooks_deamon.ThreadedHookCallbackDaemon,
            _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)

        with patchers as mocks:
            daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
            with daemon as daemon_context:
                pass
            mocks['_run'].assert_called_once_with()
            assert daemon_context == daemon

    def test_stop_is_called_on_context_exit(self):
        patchers = mock.patch.multiple(
            http_hooks_deamon.ThreadedHookCallbackDaemon,
            _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)

        with patchers as mocks:
            daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
            with daemon as daemon_context:
                # _stop must not fire until the context is exited.
                assert mocks['_stop'].call_count == 0

            mocks['_stop'].assert_called_once_with()
            assert daemon_context == daemon
185 185
186 186
class TestHttpHooksCallbackDaemon(object):
    """Tests for HttpHooksCallbackDaemon: port/uri preparation, background
    thread start, and cleanup on context exit. The `tcp_server` fixture
    replaces the real TCPServer with a mock."""

    def test_hooks_callback_generates_new_port(self, caplog):
        with caplog.at_level(logging.DEBUG):
            daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
            assert daemon._daemon.server_address == ('127.0.0.1', 8881)

        # With no host/port given the daemon binds an ephemeral address.
        with caplog.at_level(logging.DEBUG):
            daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host=None, port=None)
            assert daemon._daemon.server_address[1] in range(0, 66000)
            assert daemon._daemon.server_address[0] != '127.0.0.1'

    def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
        with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
            daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
            assert daemon._daemon == tcp_server

        _, port = tcp_server.server_address

        msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
              f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
        assert_message_in_log(
            caplog.records, msg, levelno=logging.DEBUG, module='http_hooks_deamon')

    def test_prepare_inits_hooks_uri_and_logs_it(
            self, tcp_server, caplog):
        with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
            daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)

        _, port = tcp_server.server_address
        expected_uri = '{}:{}'.format('127.0.0.1', port)
        assert daemon.hooks_uri == expected_uri

        msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
              f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"

        assert_message_in_log(
            caplog.records, msg,
            levelno=logging.DEBUG, module='http_hooks_deamon')

    def test_run_creates_a_thread(self, tcp_server):
        thread = mock.Mock()

        with self._tcp_patcher(tcp_server):
            daemon = http_hooks_deamon.HttpHooksCallbackDaemon()

        with self._thread_patcher(thread) as thread_mock:
            daemon._run()

        # The serving loop must run in a daemonized background thread.
        thread_mock.assert_called_once_with(
            target=tcp_server.serve_forever,
            kwargs={'poll_interval': daemon.POLL_INTERVAL})
        assert thread.daemon is True
        thread.start.assert_called_once_with()

    def test_run_logs(self, tcp_server, caplog):

        with self._tcp_patcher(tcp_server):
            daemon = http_hooks_deamon.HttpHooksCallbackDaemon()

        with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
            daemon._run()

        assert_message_in_log(
            caplog.records,
            'Running thread-based loop of callback daemon in background',
            levelno=logging.DEBUG, module='http_hooks_deamon')

    def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
        thread = mock.Mock()

        with self._tcp_patcher(tcp_server):
            daemon = http_hooks_deamon.HttpHooksCallbackDaemon()

        with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
            with daemon:
                assert daemon._daemon == tcp_server
                assert daemon._callback_thread == thread

        # Exiting the context must shut the server down and join the thread.
        assert daemon._daemon is None
        assert daemon._callback_thread is None
        tcp_server.shutdown.assert_called_with()
        thread.join.assert_called_once_with()

        assert_message_in_log(
            caplog.records, 'Waiting for background thread to finish.',
            levelno=logging.DEBUG, module='http_hooks_deamon')

    def _tcp_patcher(self, tcp_server):
        # Replace the real TCPServer so no socket is actually bound.
        return mock.patch.object(
            http_hooks_deamon, 'TCPServer', return_value=tcp_server)

    def _thread_patcher(self, thread):
        # Replace threading.Thread so no real thread is spawned.
        return mock.patch.object(
            http_hooks_deamon.threading, 'Thread', return_value=thread)
281 281
282 282
class TestPrepareHooksDaemon(object):
    """Tests for hook_base.prepare_callback_daemon protocol dispatch:
    'celery' and 'http' select their daemon classes; anything else raises."""

    @pytest.mark.parametrize('protocol', ('celery',))
    def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(
            self, protocol):
        # A minimal INI with celery broker/backend is enough for dispatch.
        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
            temp_file.write("[app:main]\ncelery.broker_url = redis://redis/0\n"
                            "celery.result_backend = redis://redis/0")
            temp_file.flush()
            expected_extras = {'config': temp_file.name}
            callback, extras = hook_base.prepare_callback_daemon(
                expected_extras, protocol=protocol, host='')
            assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon)

    @pytest.mark.parametrize('protocol, expected_class', (
        ('http', http_hooks_deamon.HttpHooksCallbackDaemon),
    ))
    def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
            self, protocol, expected_class):
        expected_extras = {
            'extra1': 'value1',
            'txn_id': 'txnid2',
            'hooks_protocol': protocol.lower(),
            'task_backend': '',
            'task_queue': '',
            'repo_store': '/var/opt/rhodecode_repo_store'
        }
        callback, extras = hook_base.prepare_callback_daemon(
            expected_extras.copy(), protocol=protocol, host='127.0.0.1',
            txn_id='txnid2')
        assert isinstance(callback, expected_class)
        # hooks_uri and time are generated per-call; normalize before compare.
        extras.pop('hooks_uri')
        expected_extras['time'] = extras['time']
        assert extras == expected_extras

    @pytest.mark.parametrize('protocol', (
        'invalid',
        'Http',
        'HTTP',
    ))
    def test_raises_on_invalid_protocol(self, protocol):
        # Protocol matching is case-sensitive; anything but 'http'/'celery' fails.
        expected_extras = {
            'extra1': 'value1',
            'hooks_protocol': protocol.lower()
        }
        with pytest.raises(Exception):
            callback, extras = hook_base.prepare_callback_daemon(
                expected_extras.copy(),
                protocol=protocol, host='127.0.0.1')
331 332
332 333
class MockRequest(object):
    """Fake socket-like request: feeds a canned payload in and captures the
    handler's response in an inspectable in-memory stream."""

    def __init__(self, request):
        self.request = request
        self.input_stream = io.BytesIO(safe_bytes(self.request))
        # Make the output stream un-closable so tests can inspect it afterwards.
        self.output_stream = io.BytesIO()
        self.output_stream.close = lambda: None

    def makefile(self, mode, *args, **kwargs):
        if mode == 'wb':
            return self.output_stream
        return self.input_stream
343 344
344 345
class MockServer(object):
    """Minimal stand-in for a TCP server: wraps a raw request and runs the
    given handler class against it once."""

    def __init__(self, handler_cls, request):
        address = ('0.0.0.0', 8888)
        self.request = MockRequest(request)
        self.server_address = address
        self.handler = handler_cls(self.request, address, self)
352 353
353 354
@pytest.fixture()
def tcp_server():
    """Mock TCP server with a fixed address and write-buffer size."""
    fake_server = mock.Mock()
    fake_server.server_address = ('127.0.0.1', 8881)
    fake_server.wbufsize = 1024
    return fake_server
@@ -1,221 +1,229 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20
21 21 import os
22 22 import time
23 23 import tempfile
24 24 import pytest
25 25 import subprocess
26 26 import logging
27 27 from urllib.request import urlopen
28 28 from urllib.error import URLError
29 29 import configparser
30 30
31 31
32 32 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
33 33 from rhodecode.tests.utils import is_url_reachable
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 37
def get_port(pyramid_config):
    """Read the `[server:main] port` value (as a string) from an INI file."""
    parser = configparser.ConfigParser()
    parser.read(pyramid_config)
    return parser.get('server:main', 'port')
42 42
43 43
def get_host_url(pyramid_config):
    """Construct the host url using the port in the test configuration."""
    configured_port = get_port(pyramid_config)
    return f'127.0.0.1:{configured_port}'
48 48
49 49
def assert_no_running_instance(url):
    """Fail the test run if something already answers at `url`."""
    if not is_url_reachable(url):
        return
    print(f"Hint: Usually this means another instance of server "
          f"is running in the background at {url}.")
    pytest.fail(f"Port is not free at {url}, cannot start server at")
55 55
56 56
class ServerBase(object):
    """Base class for test-run server processes (web app / VCSServer).

    Reads the `[server:main]` section of the given INI file and provides
    helpers to build the command line, wait for readiness via a status URL,
    inspect the log file and shut the process down.
    """
    _args = []
    log_file_name = 'NOT_DEFINED.log'
    status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'

    def __init__(self, config_file, log_file):
        self.config_file = config_file
        config = configparser.ConfigParser()
        config.read(config_file)

        # Snapshot of the [server:main] section (host, port, ...).
        self._config = {k: v for k, v in config['server:main'].items()}

        self._args = []
        # Fall back to a file in the system temp dir if no log file was given.
        self.log_file = log_file or os.path.join(
            tempfile.gettempdir(), self.log_file_name)
        self.process = None
        self.server_out = None
        log.info("Using the {} configuration:{}".format(
            self.__class__.__name__, config_file))

        if not os.path.isfile(config_file):
            raise RuntimeError(f'Failed to get config at {config_file}')

    @property
    def command(self):
        # Full command line as a single string, for log messages.
        return ' '.join(self._args)

    @property
    def bind_addr(self):
        return '{host}:{port}'.format(**self._config)

    @property
    def http_url(self):
        template = 'http://{host}:{port}/'
        return template.format(**self._config)

    def host_url(self):
        host = get_host_url(self.config_file)
        return f'http://{host}'

    def get_rc_log(self):
        # Return the full contents of the server's log file.
        with open(self.log_file) as f:
            return f.read()

    def assert_message_in_server_logs(self, message):
        server_logs = self.get_rc_log()
        assert message in server_logs

    def wait_until_ready(self, timeout=30):
        """Poll the status URL until the server answers or `timeout` elapses."""
        host = self._config['host']
        port = self._config['port']
        status_url = self.status_url_tmpl.format(host=host, port=port)
        start = time.time()

        while time.time() - start < timeout:
            try:
                urlopen(status_url)
                break
            except URLError:
                time.sleep(0.2)
        else:
            # while/else: only reached when the loop never hit `break`.
            pytest.fail(
                "Starting the {} failed or took more than {} "
                "seconds. cmd: `{}`".format(
                    self.__class__.__name__, timeout, self.command))

        log.info('Server of {} ready at url {}'.format(
            self.__class__.__name__, status_url))

    def shutdown(self):
        # Kill the child process and finalize its captured output.
        self.process.kill()
        self.server_out.flush()
        self.server_out.close()

    def get_log_file_with_port(self):
        # Inject the configured port before the '.log' suffix so parallel
        # servers do not clobber each other's logs.
        log_file = list(self.log_file.partition('.log'))
        log_file.insert(1, get_port(self.config_file))
        log_file = ''.join(log_file)
        return log_file
136 136
137 137
class RcVCSServer(ServerBase):
    """
    Represents a running VCSServer instance.
    """

    log_file_name = 'rc-vcsserver.log'
    status_url_tmpl = 'http://{host}:{port}/status'

    def __init__(self, config_file, log_file=None, workers='2'):
        super(RcVCSServer, self).__init__(config_file, log_file)
        # gunicorn command line; 'gthread' workers replaced 'gevent' in this
        # change — the threaded worker model is what the test servers use now.
        self._args = [
            'gunicorn',
            '--bind', self.bind_addr,
            '--worker-class', 'gthread',
            '--backlog', '16',
            '--timeout', '300',
            '--workers', workers,
            '--paste', self.config_file]

    def start(self):
        """Spawn the gunicorn process, capturing output into the log file."""
        env = os.environ.copy()

        self.log_file = self.get_log_file_with_port()
        self.server_out = open(self.log_file, 'w')

        host_url = self.host_url()
        # Fail fast if a stale instance already occupies the port.
        assert_no_running_instance(host_url)

        print(f'rhodecode-vcsserver starting at: {host_url}')
        print(f'rhodecode-vcsserver command: {self.command}')
        print(f'rhodecode-vcsserver logfile: {self.log_file}')

        self.process = subprocess.Popen(
            self._args, bufsize=0, env=env,
            stdout=self.server_out, stderr=self.server_out)
173 173
174 174
class RcWebServer(ServerBase):
    """
    Represents a running RCE web server used as a test fixture.
    """

    log_file_name = 'rc-web.log'
    status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'

    def __init__(self, config_file, log_file=None, workers='1'):
        super(RcWebServer, self).__init__(config_file, log_file)
        # gunicorn command line; 'gthread' workers replaced 'gevent' here.
        self._args = [
            'gunicorn',
            '--bind', self.bind_addr,
            '--worker-class', 'gthread',
            '--backlog', '16',
            '--timeout', '300',
            '--workers', workers,
            '--paste', self.config_file]

    def start(self):
        """Spawn the gunicorn process, capturing output into the log file."""
        env = os.environ.copy()
        # Signal the app not to relocate its temp path during tests.
        env['RC_NO_TMP_PATH'] = '1'

        self.log_file = self.get_log_file_with_port()
        self.server_out = open(self.log_file, 'w')

        host_url = self.host_url()
        assert_no_running_instance(host_url)

        print(f'rhodecode-web starting at: {host_url}')
        print(f'rhodecode-web command: {self.command}')
        print(f'rhodecode-web logfile: {self.log_file}')

        self.process = subprocess.Popen(
            self._args, bufsize=0, env=env,
            stdout=self.server_out, stderr=self.server_out)

    def repo_clone_url(self, repo_name, **kwargs):
        """Build an authenticated clone URL for `repo_name`; `kwargs` may
        override user/passwd/host/cloned_repo."""
        params = {
            'user': TEST_USER_ADMIN_LOGIN,
            'passwd': TEST_USER_ADMIN_PASS,
            'host': get_host_url(self.config_file),
            'cloned_repo': repo_name,
        }
        params.update(**kwargs)
        _url = f"http://{params['user']}:{params['passwd']}@{params['host']}/{params['cloned_repo']}"
        return _url

    def repo_clone_credentials(self, **kwargs):
        """Return the (user, password) pair used for clone operations;
        `kwargs` may override either value."""
        params = {
            'user': TEST_USER_ADMIN_LOGIN,
            'passwd': TEST_USER_ADMIN_PASS,
        }
        params.update(**kwargs)
        return params['user'], params['passwd']
@@ -1,195 +1,214 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 Base for test suite for making push/pull operations.
22 22
23 23 .. important::
24 24
25 25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 26 to redirect things to stderr instead of stdout.
27 27 """
28 28
29 from os.path import join as jn
30 from subprocess import Popen, PIPE
29
31 30 import logging
32 31 import os
33 32 import tempfile
33 import subprocess
34 34
35 35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.tests import GIT_REPO, HG_REPO
36 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
37 37
38 38 DEBUG = True
39 39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 40 REPO_GROUP = 'a_repo_group'
41 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
42 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
41 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
42 GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}'
43 SVN_REPO_WITH_GROUP = f'{REPO_GROUP}/{SVN_REPO}'
43 44
44 45 log = logging.getLogger(__name__)
45 46
46 47
47 48 class Command(object):
48 49
49 50 def __init__(self, cwd):
50 51 self.cwd = cwd
51 52 self.process = None
52 53
53 54 def execute(self, cmd, *args):
54 55 """
55 56 Runs command on the system with given ``args``.
56 57 """
57 58
58 59 command = cmd + ' ' + ' '.join(args)
59 60 if DEBUG:
60 61 log.debug('*** CMD %s ***', command)
61 62
62 63 env = dict(os.environ)
63 64 # Delete coverage variables, as they make the test fail for Mercurial
64 65 for key in env.keys():
65 66 if key.startswith('COV_CORE_'):
66 67 del env[key]
67 68
68 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
69 cwd=self.cwd, env=env)
69 self.process = subprocess.Popen(
70 command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
71 cwd=self.cwd, env=env)
70 72 stdout, stderr = self.process.communicate()
71 73
72 74 stdout = safe_str(stdout)
73 75 stderr = safe_str(stderr)
74 76
75 77 if DEBUG:
76 78 log.debug('STDOUT:%s', stdout)
77 79 log.debug('STDERR:%s', stderr)
78 80 return stdout, stderr
79 81
80 82 def assert_returncode_success(self):
81 83 assert self.process.returncode == 0
82 84
83 85
84 86 def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
85 87 full_name = 'Marcin KuΕΊminski'
86 88 email = 'me@email.com'
87 89 git_ident = f"git config user.name {full_name} && git config user.email {email}"
88 cwd = path = jn(dest)
90 cwd = path = os.path.join(dest)
89 91
90 92 tags = tags or []
91 added_file = jn(path, '{}_setup.py'.format(next(tempfile._RandomNameSequence())))
92 Command(cwd).execute('touch %s' % added_file)
93 Command(cwd).execute('%s add %s' % (vcs, added_file))
93 name_sequence = next(tempfile._RandomNameSequence())
94 added_file = os.path.join(path, f'{name_sequence}_setup.py')
95
96 Command(cwd).execute(f'touch {added_file}')
97 Command(cwd).execute(f'{vcs} add {added_file}')
94 98 author_str = 'Marcin KuΕΊminski <me@email.com>'
95 99
96 100 for i in range(kwargs.get('files_no', 3)):
97 101 cmd = f"""echo 'added_line{i}' >> {added_file}"""
98 102 Command(cwd).execute(cmd)
99 103
100 104 if vcs == 'hg':
101 105 cmd = f"""hg commit -m 'committed new {i}' -u '{author_str}' {added_file} """
102 106 elif vcs == 'git':
103 107 cmd = f"""{git_ident} && git commit -m 'committed new {i}' {added_file}"""
104 108 Command(cwd).execute(cmd)
105 109
106 110 for tag in tags:
107 111 if vcs == 'hg':
108 112 Command(cwd).execute(
109 113 f"""hg tag -m "{tag['commit']}" -u "{author_str}" """,
110 114 tag['name'])
111 115 elif vcs == 'git':
112 116 if tag['commit']:
113 117 # annotated tag
114 118 _stdout, _stderr = Command(cwd).execute(
115 119 f"""{git_ident} && git tag -a {tag['name']} -m "{tag['commit']}" """
116 120 )
117 121 else:
118 122 # lightweight tag
119 123 _stdout, _stderr = Command(cwd).execute(
120 124 f"""{git_ident} && git tag {tag['name']}"""
121 125 )
122 126
123 127
124 128 def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
125 129 new_branch=False, **kwargs):
126 130 """
127 131 Generate some files, add it to DEST repo and push back
128 132 vcs is git or hg and defines what VCS we want to make those files for
129 133 """
130 134 git_ident = "git config user.name Marcin KuΕΊminski && git config user.email me@email.com"
131 cwd = jn(dest)
135 cwd = os.path.join(dest)
132 136
133 137 # commit some stuff into this repo
134 138 _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)
135 139
136 140 default_target_branch = {
137 141 'git': 'master',
138 142 'hg': 'default'
139 143 }.get(vcs)
140 144
141 145 target_branch = target_branch or default_target_branch
142 146
143 147 # PUSH it back
144 148 stdout = stderr = None
145 149 if vcs == 'hg':
146 150 maybe_new_branch = ''
147 151 if new_branch:
148 152 maybe_new_branch = '--new-branch'
149 153 stdout, stderr = Command(cwd).execute(
150 'hg push --traceback --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
154 f'hg push --traceback --verbose {maybe_new_branch} -r {target_branch} {clone_url}'
151 155 )
152 156 elif vcs == 'git':
153 157 stdout, stderr = Command(cwd).execute(
154 """{} &&
155 git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
158 f'{git_ident} && git push --verbose --tags {clone_url} {target_branch}'
159 )
160 elif vcs == 'svn':
161 stdout, stderr = Command(cwd).execute(
162 f'svn ci -m "pushing to {target_branch}"'
156 163 )
157 164
158 165 return stdout, stderr
159 166
160 167
161 168 def _check_proper_git_push(
162 169 stdout, stderr, branch='master', should_set_default_branch=False):
163 170 # Note: Git is writing most information to stderr intentionally
164 171 assert 'fatal' not in stderr
165 172 assert 'rejected' not in stderr
166 173 assert 'Pushing to' in stderr
167 174 assert '%s -> %s' % (branch, branch) in stderr
168 175
169 176 if should_set_default_branch:
170 177 assert "Setting default branch to %s" % branch in stderr
171 178 else:
172 179 assert "Setting default branch" not in stderr
173 180
174 181
175 182 def _check_proper_hg_push(stdout, stderr, branch='default'):
176 183 assert 'pushing to' in stdout
177 184 assert 'searching for changes' in stdout
178 185
179 186 assert 'abort:' not in stderr
180 187
181 188
189 def _check_proper_svn_push(stdout, stderr):
190 assert 'pushing to' in stdout
191 assert 'searching for changes' in stdout
192
193 assert 'abort:' not in stderr
194
195
182 196 def _check_proper_clone(stdout, stderr, vcs):
183 197 if vcs == 'hg':
184 198 assert 'requesting all changes' in stdout
185 199 assert 'adding changesets' in stdout
186 200 assert 'adding manifests' in stdout
187 201 assert 'adding file changes' in stdout
188 202
189 203 assert stderr == ''
190 204
191 205 if vcs == 'git':
192 206 assert '' == stdout
193 207 assert 'Cloning into' in stderr
194 208 assert 'abort:' not in stderr
195 209 assert 'fatal:' not in stderr
210
211 if vcs == 'svn':
212 assert 'dupa' in stdout
213
214
@@ -1,307 +1,311 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 py.test config for test suite for making push/pull operations.
22 22
23 23 .. important::
24 24
25 25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 26 to redirect things to stderr instead of stdout.
27 27 """
28 28
29 29 import os
30 30 import tempfile
31 31 import textwrap
32 32 import pytest
33 33 import logging
34 34 import requests
35 35
36 36 from rhodecode import events
37 37 from rhodecode.lib.str_utils import safe_bytes
38 38 from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \
39 39 UserToRepoBranchPermission, User
40 40 from rhodecode.model.integration import IntegrationModel
41 41 from rhodecode.model.db import Repository
42 42 from rhodecode.model.meta import Session
43 43 from rhodecode.integrations.types.webhook import WebhookIntegrationType
44 44
45 from rhodecode.tests import GIT_REPO, HG_REPO
45 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
46 46 from rhodecode.tests.conftest import HTTPBIN_DOMAIN, HTTPBIN_POST
47 47 from rhodecode.tests.fixture import Fixture
48 48 from rhodecode.tests.server_utils import RcWebServer
49 49
50 50
51 51 REPO_GROUP = 'a_repo_group'
52 52 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
53 53 GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}'
54 SVN_REPO_WITH_GROUP = f'{REPO_GROUP}/{SVN_REPO}'
54 55
55 56 log = logging.getLogger(__name__)
56 57
57 58
58 59 def check_httpbin_connection():
60 log.debug('Checking if HTTPBIN_DOMAIN: %s is available', HTTPBIN_DOMAIN)
59 61 try:
60 response = requests.get(HTTPBIN_DOMAIN)
62 response = requests.get(HTTPBIN_DOMAIN, timeout=5)
61 63 return response.status_code == 200
62 64 except Exception as e:
63 65 print(e)
64 66
65 67 return False
66 68
67 69
68 70 @pytest.fixture(scope="module")
69 71 def rcextensions(request, db_connection, tmpdir_factory):
70 72 """
71 73 Installs a testing rcextensions pack to ensure they work as expected.
72 74 """
73 75 init_content = textwrap.dedent("""
74 76 # Forward import the example rcextensions to make it
75 77 # active for our tests.
76 78 from rhodecode.tests.other.example_rcextensions import *
77 79 """)
78 80
79 81 # Note: rcextensions are looked up based on the path of the ini file
80 82 root_path = tmpdir_factory.getbasetemp()
81 83 rcextensions_path = root_path.join('rcextensions')
82 84 init_path = rcextensions_path.join('__init__.py')
83 85
84 86 if rcextensions_path.check():
85 87 pytest.fail(
86 88 "Path for rcextensions already exists, please clean up before "
87 89 "test run this path: %s" % (rcextensions_path, ))
88 90 else:
89 91 request.addfinalizer(rcextensions_path.remove)
90 92 init_path.write_binary(safe_bytes(init_content), ensure=True)
91 93
92 94
93 95 @pytest.fixture(scope="module")
94 96 def repos(request, db_connection):
95 97 """Create a copy of each test repo in a repo group."""
96 98 fixture = Fixture()
97 99 repo_group = fixture.create_repo_group(REPO_GROUP)
98 100 repo_group_id = repo_group.group_id
99 101 fixture.create_fork(HG_REPO, HG_REPO,
100 102 repo_name_full=HG_REPO_WITH_GROUP,
101 103 repo_group=repo_group_id)
102 104 fixture.create_fork(GIT_REPO, GIT_REPO,
103 105 repo_name_full=GIT_REPO_WITH_GROUP,
104 106 repo_group=repo_group_id)
107 fixture.create_fork(SVN_REPO, SVN_REPO,
108 repo_name_full=SVN_REPO_WITH_GROUP,
109 repo_group=repo_group_id)
105 110
106 111 @request.addfinalizer
107 112 def cleanup():
108 113 fixture.destroy_repo(HG_REPO_WITH_GROUP)
109 114 fixture.destroy_repo(GIT_REPO_WITH_GROUP)
115 fixture.destroy_repo(SVN_REPO_WITH_GROUP)
110 116 fixture.destroy_repo_group(repo_group_id)
111 117
112 118
113 119 @pytest.fixture(scope="module")
114 120 def rc_web_server_config_modification():
115 121 return []
116 122
117 123
118 124 @pytest.fixture(scope="module")
119 125 def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification):
120 126 """
121 127 Configuration file used for the fixture `rc_web_server`.
122 128 """
123 129
124 130 def factory(rcweb_port, vcsserver_port):
125 131 custom_params = [
126 132 {'handler_console': {'level': 'DEBUG'}},
127 133 {'server:main': {'port': rcweb_port}},
128 134 {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}}
129 135 ]
130 136 custom_params.extend(rc_web_server_config_modification)
131 137 return testini_factory(custom_params)
132 138 return factory
133 139
134 140
135 141 @pytest.fixture(scope="module")
136 142 def rc_web_server(
137 143 request, vcsserver_factory, available_port_factory,
138 144 rc_web_server_config_factory, repos, rcextensions):
139 145 """
140 146 Run the web server as a subprocess. with its own instance of vcsserver
141 147 """
142 rcweb_port = available_port_factory()
143 log.info('Using rcweb ops test port {}'.format(rcweb_port))
148 rcweb_port: int = available_port_factory()
149 log.info('Using rcweb ops test port %s', rcweb_port)
144 150
145 vcsserver_port = available_port_factory()
146 log.info('Using vcsserver ops test port {}'.format(vcsserver_port))
151 vcsserver_port: int = available_port_factory()
152 log.info('Using vcsserver ops test port %s', vcsserver_port)
147 153
148 154 vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log')
149 155 vcsserver_factory(
150 156 request, vcsserver_port=vcsserver_port,
151 157 log_file=vcs_log,
152 158 overrides=(
153 159 {'server:main': {'workers': 2}},
154 160 {'server:main': {'graceful_timeout': 10}},
155 161 ))
156 162
157 163 rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log')
158 164 rc_web_server_config = rc_web_server_config_factory(
159 165 rcweb_port=rcweb_port,
160 166 vcsserver_port=vcsserver_port)
161 167 server = RcWebServer(rc_web_server_config, log_file=rc_log)
162 168 server.start()
163 169
164 170 @request.addfinalizer
165 171 def cleanup():
166 172 server.shutdown()
167 173
168 174 server.wait_until_ready()
169 175 return server
170 176
171 177
172 178 @pytest.fixture()
173 179 def disable_locking(baseapp):
174 180 r = Repository.get_by_repo_name(GIT_REPO)
175 181 Repository.unlock(r)
176 182 r.enable_locking = False
177 183 Session().add(r)
178 184 Session().commit()
179 185
180 186 r = Repository.get_by_repo_name(HG_REPO)
181 187 Repository.unlock(r)
182 188 r.enable_locking = False
183 189 Session().add(r)
184 190 Session().commit()
185 191
186 192
187 193 @pytest.fixture()
188 194 def fs_repo_only(request, rhodecode_fixtures):
189 195 def fs_repo_fabric(repo_name, repo_type):
190 196 rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
191 197 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False)
192 198
193 199 def cleanup():
194 200 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True)
195 201 rhodecode_fixtures.destroy_repo_on_filesystem(repo_name)
196 202
197 203 request.addfinalizer(cleanup)
198 204
199 205 return fs_repo_fabric
200 206
201 207
202 208 @pytest.fixture()
203 209 def enable_webhook_push_integration(request):
204 210 integration = Integration()
205 211 integration.integration_type = WebhookIntegrationType.key
206 212 Session().add(integration)
207 213
208 214 settings = dict(
209 215 url=HTTPBIN_POST,
210 216 secret_token='secret',
211 217 username=None,
212 218 password=None,
213 219 custom_header_key=None,
214 220 custom_header_val=None,
215 221 method_type='post',
216 222 events=[events.RepoPushEvent.name],
217 223 log_data=True
218 224 )
219 225
220 226 IntegrationModel().update_integration(
221 227 integration,
222 228 name='IntegrationWebhookTest',
223 229 enabled=True,
224 230 settings=settings,
225 231 repo=None,
226 232 repo_group=None,
227 233 child_repos_only=False,
228 234 )
229 235 Session().commit()
230 236 integration_id = integration.integration_id
231 237
232 238 @request.addfinalizer
233 239 def cleanup():
234 240 integration = Integration.get(integration_id)
235 241 Session().delete(integration)
236 242 Session().commit()
237 243
238 244
239 245 @pytest.fixture()
240 246 def branch_permission_setter(request):
241 247 """
242 248
243 249 def my_test(branch_permission_setter)
244 250 branch_permission_setter(repo_name, username, pattern='*', permission='branch.push')
245 251
246 252 """
247 253
248 254 rule_id = None
249 255 write_perm_id = None
250 256 write_perm = None
251 257 rule = None
252 258
253 259 def _branch_permissions_setter(
254 260 repo_name, username, pattern='*', permission='branch.push_force'):
255 261 global rule_id, write_perm_id
256 262 global rule, write_perm
257 263
258 264 repo = Repository.get_by_repo_name(repo_name)
259 265 repo_id = repo.repo_id
260 266
261 267 user = User.get_by_username(username)
262 268 user_id = user.user_id
263 269
264 270 rule_perm_obj = Permission.get_by_key(permission)
265 271
266 272 # add new entry, based on existing perm entry
267 273 perm = UserRepoToPerm.query() \
268 274 .filter(UserRepoToPerm.repository_id == repo_id) \
269 275 .filter(UserRepoToPerm.user_id == user_id) \
270 276 .first()
271 277
272 278 if not perm:
273 279 # such user isn't defined in Permissions for repository
274 280 # we now on-the-fly add new permission
275 281
276 282 write_perm = UserRepoToPerm()
277 283 write_perm.permission = Permission.get_by_key('repository.write')
278 284 write_perm.repository_id = repo_id
279 285 write_perm.user_id = user_id
280 286 Session().add(write_perm)
281 287 Session().flush()
282 288
283 289 perm = write_perm
284 290
285 291 rule = UserToRepoBranchPermission()
286 292 rule.rule_to_perm_id = perm.repo_to_perm_id
287 293 rule.branch_pattern = pattern
288 294 rule.rule_order = 10
289 295 rule.permission = rule_perm_obj
290 296 rule.repository_id = repo_id
291 297 Session().add(rule)
292 298 Session().commit()
293 299
294 300 return rule
295 301
296 302 @request.addfinalizer
297 303 def cleanup():
298 304 if rule:
299 305 Session().delete(rule)
300 306 Session().commit()
301 307 if write_perm:
302 308 Session().delete(write_perm)
303 309 Session().commit()
304 310
305 311 return _branch_permissions_setter
306
307
@@ -1,65 +1,65 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 Test suite for making push/pull operations, on specially modified INI files
22 22
23 23 .. important::
24 24
25 25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 26 to redirect things to stderr instead of stdout.
27 27 """
28 28
29 29 import os
30 30
31 31 from rhodecode.lib.vcs.backends.git.repository import GitRepository
32 32 from rhodecode.lib.vcs.nodes import FileNode
33 33 from rhodecode.tests import GIT_REPO
34 34 from rhodecode.tests.vcs_operations import Command
35 from .test_vcs_operations import _check_proper_clone, _check_proper_git_push
35 from .test_vcs_operations_git import _check_proper_clone, _check_proper_git_push
36 36
37 37
38 38 def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
39 39 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
40 40 cmd = Command('/tmp')
41 41 stdout, stderr = cmd.execute(
42 42 'git -c http.postBuffer=1024 clone', clone_url, tmpdir.strpath)
43 43 _check_proper_clone(stdout, stderr, 'git')
44 44 cmd.assert_returncode_success()
45 45
46 46
47 47 def test_git_push_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
48 48 empty_repo = backend_git.create_repo()
49 49
50 50 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
51 51
52 52 cmd = Command(tmpdir.strpath)
53 53 cmd.execute('git clone', clone_url)
54 54
55 55 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
56 56 repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello'))
57 57 repo.in_memory_commit.commit(
58 58 message='Commit on branch Master',
59 59 author='Automatic test <automatic@rhodecode.com>',
60 60 branch='master')
61 61
62 62 repo_cmd = Command(repo.path)
63 63 stdout, stderr = repo_cmd.execute(
64 64 f'git -c http.postBuffer=1024 push --verbose {clone_url} master')
65 65 _check_proper_git_push(stdout, stderr, branch='master')
@@ -1,376 +1,199 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 Test suite for making push/pull operations, on specially modified INI files
22 22
23 23 .. important::
24 24
25 25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 26 to redirect things to stderr instead of stdout.
27 27 """
28 28
29 29
30 30 import time
31 import logging
32
33 31 import pytest
34 32
35 from rhodecode.lib import rc_cache
36 from rhodecode.model.auth_token import AuthTokenModel
37 from rhodecode.model.db import Repository, UserIpMap, CacheKey
33 from rhodecode.model.db import Repository, UserIpMap
38 34 from rhodecode.model.meta import Session
39 35 from rhodecode.model.repo import RepoModel
40 36 from rhodecode.model.user import UserModel
41 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
42 from rhodecode.tests.utils import assert_message_in_log
37 from rhodecode.tests import (GIT_REPO, TEST_USER_ADMIN_LOGIN)
38
43 39
44 40 from rhodecode.tests.vcs_operations import (
45 41 Command, _check_proper_clone, _check_proper_git_push,
46 _add_files_and_push, HG_REPO_WITH_GROUP, GIT_REPO_WITH_GROUP)
42 _add_files_and_push, GIT_REPO_WITH_GROUP)
47 43
48 44
49 45 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
50 46 class TestVCSOperations(object):
51 47
52 def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
53 clone_url = rc_web_server.repo_clone_url(HG_REPO)
54 stdout, stderr = Command('/tmp').execute(
55 'hg clone', clone_url, tmpdir.strpath)
56 _check_proper_clone(stdout, stderr, 'hg')
57
58 def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
59 clone_url = rc_web_server.repo_clone_url(HG_REPO)
60 stdout, stderr = Command('/tmp').execute(
61 'hg clone --pull', clone_url, tmpdir.strpath)
62 _check_proper_clone(stdout, stderr, 'hg')
63
64 def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
65 clone_url = rc_web_server.repo_clone_url(HG_REPO)
66 stdout, stderr = Command('/tmp').execute(
67 'hg clone --pull --stream', clone_url, tmpdir.strpath)
68 assert 'files to transfer,' in stdout
69 assert 'transferred 1.' in stdout
70 assert '114 files updated,' in stdout
71
72 48 def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
73 49 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
74 50 cmd = Command('/tmp')
75 51 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
76 52 _check_proper_clone(stdout, stderr, 'git')
77 53 cmd.assert_returncode_success()
78 54
79 55 def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir):
80 56 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
81 57 cmd = Command('/tmp')
82 58 stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
83 59 _check_proper_clone(stdout, stderr, 'git')
84 60 cmd.assert_returncode_success()
85 61
86 def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
87 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
88 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
89 stdout, stderr = Command('/tmp').execute(
90 'hg clone', clone_url, tmpdir.strpath)
91 _check_proper_clone(stdout, stderr, 'hg')
92
93 62 def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
94 63 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
95 64 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
96 65 cmd = Command('/tmp')
97 66 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
98 67 _check_proper_clone(stdout, stderr, 'git')
99 68 cmd.assert_returncode_success()
100 69
101 def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
102 clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
103 stdout, stderr = Command('/tmp').execute(
104 'hg clone', clone_url, tmpdir.strpath)
105 _check_proper_clone(stdout, stderr, 'hg')
106
107 70 def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
108 71 clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
109 72 cmd = Command('/tmp')
110 73 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
111 74 _check_proper_clone(stdout, stderr, 'git')
112 75 cmd.assert_returncode_success()
113 76
114 77 def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir):
115 78 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
116 79 cmd = Command('/tmp')
117 80 stdout, stderr = cmd.execute(
118 81 'git clone --depth=1', clone_url, tmpdir.strpath)
119 82
120 83 assert '' == stdout
121 84 assert 'Cloning into' in stderr
122 85 cmd.assert_returncode_success()
123 86
124 def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
125 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
126 stdout, stderr = Command('/tmp').execute(
127 'hg clone', clone_url, tmpdir.strpath)
128 assert 'abort: authorization failed' in stderr
129 87
130 88 def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
131 89 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
132 90 stdout, stderr = Command('/tmp').execute(
133 91 'git clone', clone_url, tmpdir.strpath)
134 92 assert 'fatal: Authentication failed' in stderr
135 93
136 94 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
137 95 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
138 96 stdout, stderr = Command('/tmp').execute(
139 97 'hg clone', clone_url, tmpdir.strpath)
140 98 assert 'HTTP Error 404: Not Found' in stderr
141 99
142 def test_clone_hg_repo_as_git(self, rc_web_server, tmpdir):
143 clone_url = rc_web_server.repo_clone_url(HG_REPO)
144 stdout, stderr = Command('/tmp').execute(
145 'git clone', clone_url, tmpdir.strpath)
146 assert 'not found' in stderr
147
148 100 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
149 101 clone_url = rc_web_server.repo_clone_url('trololo')
150 102 stdout, stderr = Command('/tmp').execute(
151 103 'hg clone', clone_url, tmpdir.strpath)
152 104 assert 'HTTP Error 404: Not Found' in stderr
153 105
154 106 def test_clone_non_existing_path_git(self, rc_web_server, tmpdir):
155 107 clone_url = rc_web_server.repo_clone_url('trololo')
156 108 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
157 109 assert 'not found' in stderr
158 110
159 def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
160 clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
161 stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
162 assert 'HTTP Error 404: Not Found' in stderr
163
164 111 def test_clone_git_with_slashes(self, rc_web_server, tmpdir):
165 112 clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO)
166 113 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
167 114 assert 'not found' in stderr
168 115
169 def test_clone_existing_path_hg_not_in_database(
170 self, rc_web_server, tmpdir, fs_repo_only):
171
172 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
173 clone_url = rc_web_server.repo_clone_url(db_name)
174 stdout, stderr = Command('/tmp').execute(
175 'hg clone', clone_url, tmpdir.strpath)
176 assert 'HTTP Error 404: Not Found' in stderr
177
178 116 def test_clone_existing_path_git_not_in_database(
179 117 self, rc_web_server, tmpdir, fs_repo_only):
180 118 db_name = fs_repo_only('not-in-db-git', repo_type='git')
181 119 clone_url = rc_web_server.repo_clone_url(db_name)
182 120 stdout, stderr = Command('/tmp').execute(
183 121 'git clone', clone_url, tmpdir.strpath)
184 122 assert 'not found' in stderr
185 123
186 def test_clone_existing_path_hg_not_in_database_different_scm(
187 self, rc_web_server, tmpdir, fs_repo_only):
188 db_name = fs_repo_only('not-in-db-git', repo_type='git')
189 clone_url = rc_web_server.repo_clone_url(db_name)
190 stdout, stderr = Command('/tmp').execute(
191 'hg clone', clone_url, tmpdir.strpath)
192 assert 'HTTP Error 404: Not Found' in stderr
193
194 124 def test_clone_existing_path_git_not_in_database_different_scm(
195 125 self, rc_web_server, tmpdir, fs_repo_only):
196 126 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
197 127 clone_url = rc_web_server.repo_clone_url(db_name)
198 128 stdout, stderr = Command('/tmp').execute(
199 129 'git clone', clone_url, tmpdir.strpath)
200 130 assert 'not found' in stderr
201 131
202 def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
203 repo = user_util.create_repo()
204 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
205
206 # Damage repo by removing it's folder
207 RepoModel()._delete_filesystem_repo(repo)
208
209 stdout, stderr = Command('/tmp').execute(
210 'hg clone', clone_url, tmpdir.strpath)
211 assert 'HTTP Error 404: Not Found' in stderr
212
213 132 def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util):
214 133 repo = user_util.create_repo(repo_type='git')
215 134 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
216 135
217 136 # Damage repo by removing it's folder
218 137 RepoModel()._delete_filesystem_repo(repo)
219 138
220 139 stdout, stderr = Command('/tmp').execute(
221 140 'git clone', clone_url, tmpdir.strpath)
222 141 assert 'not found' in stderr
223 142
224 def test_push_new_file_hg(self, rc_web_server, tmpdir):
225 clone_url = rc_web_server.repo_clone_url(HG_REPO)
226 stdout, stderr = Command('/tmp').execute(
227 'hg clone', clone_url, tmpdir.strpath)
228
229 stdout, stderr = _add_files_and_push(
230 'hg', tmpdir.strpath, clone_url=clone_url)
231
232 assert 'pushing to' in stdout
233 assert 'size summary' in stdout
234
235 143 def test_push_new_file_git(self, rc_web_server, tmpdir):
236 144 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
237 145 stdout, stderr = Command('/tmp').execute(
238 146 'git clone', clone_url, tmpdir.strpath)
239 147
240 148 # commit some stuff into this repo
241 149 stdout, stderr = _add_files_and_push(
242 150 'git', tmpdir.strpath, clone_url=clone_url)
243 151
244 152 _check_proper_git_push(stdout, stderr)
245 153
246 def test_push_invalidates_cache(self, rc_web_server, tmpdir):
247 hg_repo = Repository.get_by_repo_name(HG_REPO)
248
249 # init cache objects
250 CacheKey.delete_all_cache()
251
252 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id)
253
254 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
255
256 with inv_context_manager as invalidation_context:
257 # __enter__ will create and register cache objects
258 pass
259
260 cache_keys = hg_repo.cache_keys
261 assert cache_keys != []
262 old_ids = [x.cache_state_uid for x in cache_keys]
263
264 # clone to init cache
265 clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
266 stdout, stderr = Command('/tmp').execute(
267 'hg clone', clone_url, tmpdir.strpath)
268
269 cache_keys = hg_repo.cache_keys
270 assert cache_keys != []
271 for key in cache_keys:
272 assert key.cache_active is True
273
274 # PUSH that should trigger invalidation cache
275 stdout, stderr = _add_files_and_push(
276 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
277
278 # flush...
279 Session().commit()
280 hg_repo = Repository.get_by_repo_name(HG_REPO)
281 cache_keys = hg_repo.cache_keys
282 assert cache_keys != []
283 new_ids = [x.cache_state_uid for x in cache_keys]
284 assert new_ids != old_ids
285
286 def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
287 clone_url = rc_web_server.repo_clone_url(HG_REPO)
288 stdout, stderr = Command('/tmp').execute(
289 'hg clone', clone_url, tmpdir.strpath)
290
291 push_url = rc_web_server.repo_clone_url(
292 HG_REPO, user='bad', passwd='name')
293 stdout, stderr = _add_files_and_push(
294 'hg', tmpdir.strpath, clone_url=push_url)
295
296 assert 'abort: authorization failed' in stderr
297
298 154 def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
299 155 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
300 156 stdout, stderr = Command('/tmp').execute(
301 157 'git clone', clone_url, tmpdir.strpath)
302 158
303 159 push_url = rc_web_server.repo_clone_url(
304 160 GIT_REPO, user='bad', passwd='name')
305 161 stdout, stderr = _add_files_and_push(
306 162 'git', tmpdir.strpath, clone_url=push_url)
307 163
308 164 assert 'fatal: Authentication failed' in stderr
309 165
310 def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
311 clone_url = rc_web_server.repo_clone_url(HG_REPO)
312 stdout, stderr = Command('/tmp').execute(
313 'hg clone', clone_url, tmpdir.strpath)
314
315 stdout, stderr = _add_files_and_push(
316 'hg', tmpdir.strpath,
317 clone_url=rc_web_server.repo_clone_url('not-existing'))
318
319 assert 'HTTP Error 404: Not Found' in stderr
320
321 166 def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
322 167 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
323 168 stdout, stderr = Command('/tmp').execute(
324 169 'git clone', clone_url, tmpdir.strpath)
325 170
326 171 stdout, stderr = _add_files_and_push(
327 172 'git', tmpdir.strpath,
328 173 clone_url=rc_web_server.repo_clone_url('not-existing'))
329 174
330 175 assert 'not found' in stderr
331 176
332 def test_ip_restriction_hg(self, rc_web_server, tmpdir):
333 user_model = UserModel()
334 try:
335 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
336 Session().commit()
337 time.sleep(2)
338 clone_url = rc_web_server.repo_clone_url(HG_REPO)
339 stdout, stderr = Command('/tmp').execute(
340 'hg clone', clone_url, tmpdir.strpath)
341 assert 'abort: HTTP Error 403: Forbidden' in stderr
342 finally:
343 # release IP restrictions
344 for ip in UserIpMap.getAll():
345 UserIpMap.delete(ip.ip_id)
346 Session().commit()
347
348 time.sleep(2)
349
350 stdout, stderr = Command('/tmp').execute(
351 'hg clone', clone_url, tmpdir.strpath)
352 _check_proper_clone(stdout, stderr, 'hg')
353
354 177 def test_ip_restriction_git(self, rc_web_server, tmpdir):
355 178 user_model = UserModel()
356 179 try:
357 180 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
358 181 Session().commit()
359 182 time.sleep(2)
360 183 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
361 184 stdout, stderr = Command('/tmp').execute(
362 185 'git clone', clone_url, tmpdir.strpath)
363 186 msg = "The requested URL returned error: 403"
364 187 assert msg in stderr
365 188 finally:
366 189 # release IP restrictions
367 190 for ip in UserIpMap.getAll():
368 191 UserIpMap.delete(ip.ip_id)
369 192 Session().commit()
370 193
371 194 time.sleep(2)
372 195
373 196 cmd = Command('/tmp')
374 197 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
375 198 cmd.assert_returncode_success()
376 199 _check_proper_clone(stdout, stderr, 'git')
@@ -1,87 +1,90 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 Test suite for making push/pull operations, on specially modified INI files
22 22
23 23 .. important::
24 24
25 25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 26 to redirect things to stderr instead of stdout.
27 27 """
28 28
29 29 import pytest
30 30
31 31 from rhodecode.tests import GIT_REPO, HG_REPO
32 32 from rhodecode.tests.vcs_operations import Command, _add_files_and_push
33 33 from rhodecode.tests.vcs_operations.conftest import check_httpbin_connection
34 34
35 35
36 36 connection_available = pytest.mark.skipif(
37 37 not check_httpbin_connection(), reason="No outside internet connection available")
38 38
39 39
40 40 @pytest.mark.usefixtures(
41 41 "disable_locking", "disable_anonymous_user",
42 42 "enable_webhook_push_integration")
43 43 class TestVCSOperationsOnCustomIniConfig(object):
44 44
45 @connection_available
45 46 def test_push_tag_with_commit_hg(self, rc_web_server, tmpdir):
46 47 clone_url = rc_web_server.repo_clone_url(HG_REPO)
47 48 stdout, stderr = Command('/tmp').execute(
48 49 'hg clone', clone_url, tmpdir.strpath)
49 50
50 51 push_url = rc_web_server.repo_clone_url(HG_REPO)
51 52 _add_files_and_push(
52 53 'hg', tmpdir.strpath, clone_url=push_url,
53 54 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
54 55
55 56 rc_log = rc_web_server.get_rc_log()
56 57 assert 'ERROR' not in rc_log
57 58 assert "{'name': 'v1.0.0'," in rc_log
58 59
60 @connection_available
59 61 def test_push_tag_with_commit_git(
60 62 self, rc_web_server, tmpdir):
61 63 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
62 64 stdout, stderr = Command('/tmp').execute(
63 65 'git clone', clone_url, tmpdir.strpath)
64 66
65 67 push_url = rc_web_server.repo_clone_url(GIT_REPO)
66 68 _add_files_and_push(
67 69 'git', tmpdir.strpath, clone_url=push_url,
68 70 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
69 71
70 72 rc_log = rc_web_server.get_rc_log()
71 73 assert 'ERROR' not in rc_log
72 74 assert "{'name': 'v1.0.0'," in rc_log
73 75
76 @connection_available
74 77 def test_push_tag_with_no_commit_git(
75 78 self, rc_web_server, tmpdir):
76 79 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
77 80 stdout, stderr = Command('/tmp').execute(
78 81 'git clone', clone_url, tmpdir.strpath)
79 82
80 83 push_url = rc_web_server.repo_clone_url(GIT_REPO)
81 84 _add_files_and_push(
82 85 'git', tmpdir.strpath, clone_url=push_url,
83 86 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
84 87
85 88 rc_log = rc_web_server.get_rc_log()
86 89 assert 'ERROR' not in rc_log
87 90 assert "{'name': 'v1.0.0'," in rc_log
@@ -1,138 +1,171 b''
1 1 #
2 2
3 3 ; #################################
4 4 ; RHODECODE VCSSERVER CONFIGURATION
5 5 ; #################################
6 6
7 7 [server:main]
8 8 ; COMMON HOST/IP CONFIG
9 9 host = 127.0.0.1
10 port = 9900
10 port = 10010
11 11
12 12
13 13 ; ###########################
14 14 ; GUNICORN APPLICATION SERVER
15 15 ; ###########################
16 16
17 17 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
18 18
19 19 ; Module to use, this setting shouldn't be changed
20 20 use = egg:gunicorn#main
21 21
22 22 [app:main]
23 23 ; The %(here)s variable will be replaced with the absolute path of parent directory
24 24 ; of this file
25 ; Each option in the app:main can be override by an environmental variable
26 ;
27 ;To override an option:
28 ;
29 ;RC_<KeyName>
30 ;Everything should be uppercase, . and - should be replaced by _.
31 ;For example, if you have these configuration settings:
32 ;rc_cache.repo_object.backend = foo
33 ;can be overridden by
34 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
35
25 36 use = egg:rhodecode-vcsserver
26 37
27 38 ; Pyramid default locales, we need this to be set
28 39 pyramid.default_locale_name = en
29 40
30 41 ; default locale used by VCS systems
31 42 locale = en_US.UTF-8
32 43
33 ; path to binaries for vcsserver, it should be set by the installer
34 ; at installation time, e.g /home/user/vcsserver-1/profile/bin
35 ; it can also be a path to nix-build output in case of development
44 ; path to binaries (hg,git,svn) for vcsserver, it should be set by the installer
45 ; at installation time, e.g /home/user/.rccontrol/vcsserver-1/profile/bin
46 ; or /usr/local/bin/rhodecode_bin/vcs_bin
36 47 core.binary_dir =
37 48
49 ; Redis connection settings for svn integrations logic
50 ; This connection string needs to be the same on ce and vcsserver
51 vcs.svn.redis_conn = redis://redis:6379/0
52
38 53 ; Custom exception store path, defaults to TMPDIR
39 54 ; This is used to store exception from RhodeCode in shared directory
40 55 #exception_tracker.store_path =
41 56
42 57 ; #############
43 58 ; DOGPILE CACHE
44 59 ; #############
45 60
46 61 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
47 62 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
48 63 cache_dir = %(here)s/data
49 64
50 65 ; ***************************************
51 66 ; `repo_object` cache, default file based
52 67 ; ***************************************
53 68
54 69 ; `repo_object` cache settings for vcs methods for repositories
55 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
70 #rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
56 71
57 72 ; cache auto-expires after N seconds
58 73 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
59 rc_cache.repo_object.expiration_time = 2592000
74 #rc_cache.repo_object.expiration_time = 2592000
60 75
61 76 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
62 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db
77 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db
63 78
64 79 ; ***********************************************************
65 80 ; `repo_object` cache with redis backend
66 81 ; recommended for larger instance, and for better performance
67 82 ; ***********************************************************
68 83
69 84 ; `repo_object` cache settings for vcs methods for repositories
70 85 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
71 86
72 87 ; cache auto-expires after N seconds
73 88 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
74 89 #rc_cache.repo_object.expiration_time = 2592000
75 90
76 91 ; redis_expiration_time needs to be greater then expiration_time
77 92 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
78 93
79 94 #rc_cache.repo_object.arguments.host = localhost
80 95 #rc_cache.repo_object.arguments.port = 6379
81 96 #rc_cache.repo_object.arguments.db = 5
82 97 #rc_cache.repo_object.arguments.socket_timeout = 30
83 98 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
84 99 #rc_cache.repo_object.arguments.distributed_lock = true
85 100
86 # legacy cache regions, please don't change
87 beaker.cache.regions = repo_object
88 beaker.cache.repo_object.type = memorylru
89 beaker.cache.repo_object.max_items = 100
90 # cache auto-expires after N seconds
91 beaker.cache.repo_object.expire = 300
92 beaker.cache.repo_object.enabled = true
101 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
102 #rc_cache.repo_object.arguments.lock_auto_renewal = true
103
104 ; Statsd client config, this is used to send metrics to statsd
105 ; We recommend setting statsd_exported and scrape them using Promethues
106 #statsd.enabled = false
107 #statsd.statsd_host = 0.0.0.0
108 #statsd.statsd_port = 8125
109 #statsd.statsd_prefix =
110 #statsd.statsd_ipv6 = false
93 111
112 ; configure logging automatically at server startup set to false
113 ; to use the below custom logging config.
114 ; RC_LOGGING_FORMATTER
115 ; RC_LOGGING_LEVEL
116 ; env variables can control the settings for logging in case of autoconfigure
94 117
118 #logging.autoconfigure = true
119
120 ; specify your own custom logging config file to configure logging
121 #logging.logging_conf_file = /path/to/custom_logging.ini
95 122
96 123 ; #####################
97 124 ; LOGGING CONFIGURATION
98 125 ; #####################
126
99 127 [loggers]
100 128 keys = root, vcsserver
101 129
102 130 [handlers]
103 131 keys = console
104 132
105 133 [formatters]
106 keys = generic
134 keys = generic, json
107 135
108 136 ; #######
109 137 ; LOGGERS
110 138 ; #######
111 139 [logger_root]
112 140 level = NOTSET
113 141 handlers = console
114 142
115 143 [logger_vcsserver]
116 level = DEBUG
144 level = INFO
117 145 handlers =
118 146 qualname = vcsserver
119 147 propagate = 1
120 148
121
122 149 ; ########
123 150 ; HANDLERS
124 151 ; ########
125 152
126 153 [handler_console]
127 154 class = StreamHandler
128 155 args = (sys.stderr, )
129 156 level = DEBUG
157 ; To enable JSON formatted logs replace 'generic' with 'json'
158 ; This allows sending properly formatted logs to grafana loki or elasticsearch
130 159 formatter = generic
131 160
132 161 ; ##########
133 162 ; FORMATTERS
134 163 ; ##########
135 164
136 165 [formatter_generic]
137 166 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
138 167 datefmt = %Y-%m-%d %H:%M:%S
168
169 [formatter_json]
170 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
171 class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
General Comments 0
You need to be logged in to leave comments. Login now