##// END OF EJS Templates
fix(svn): svn events fixes and change the way how we handle the events
super-admin -
r5459:7f730862 default
parent child Browse files
Show More
@@ -0,0 +1,132 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import logging
20 import redis
21
22 from ..lib import rc_cache
23 from ..lib.ext_json import json
24
25
26 log = logging.getLogger(__name__)
27
28 redis_client = None
29
30
class RedisTxnClient:
    """
    Thin wrapper around a redis connection used to persist svn transaction
    (txn) metadata between the phases of an svn commit.

    A single connection pool is created from ``url``; the reader handle is
    currently an alias of the writer handle.
    """

    def __init__(self, url):
        self.url = url
        self._create_client(url)

    def _create_client(self, url):
        # one pool shared by both handles; reader is the same client object
        connection_pool = redis.ConnectionPool.from_url(url)
        self.writer_client = redis.StrictRedis(
            connection_pool=connection_pool
        )
        self.reader_client = self.writer_client

    def set(self, key, value, expire=60 * 60 * 24):
        # NOTE: redis ``ex`` is in *seconds*. The previous default of
        # ``24 * 60000`` (~16.6 days) looked like a milliseconds/seconds
        # mix-up; txn metadata is short-lived, so default to 24 hours.
        self.writer_client.set(key, value, ex=expire)

    def get(self, key):
        return self.reader_client.get(key)

    def delete(self, key):
        self.writer_client.delete(key)
53
def get_redis_client(url=''):
    """
    Return the module-wide :class:`RedisTxnClient` singleton, creating it on
    first use. When no ``url`` is given, the connection string is read from
    the ``vcs.svn.redis_conn`` entry of the application config.
    """
    global redis_client

    if redis_client is None:
        if not url:
            from rhodecode import CONFIG
            url = CONFIG['vcs.svn.redis_conn']
        redis_client = RedisTxnClient(url)

    return redis_client
64
65
def extract_svn_txn_id(data: bytes):
    """
    Helper method for extraction of svn txn_id from submitted XML data during
    POST operations.

    :param data: raw request body bytes (a DAV XML document).
    :return: the txn_id string, or None when it cannot be found.
    """
    # stdlib ElementTree is sufficient here and avoids the hard lxml
    # dependency; fromstring/namespace-qualified tags behave the same way
    import re
    from xml.etree import ElementTree as etree

    try:
        root = etree.fromstring(data)
        pat = re.compile(r'/txn/(?P<txn_id>.*)')
        # only direct children of the root are inspected, matching the
        # structure of the DAV merge request body
        for el in root:
            if el.tag == '{DAV:}source':
                for sub_el in el:
                    if sub_el.tag == '{DAV:}href':
                        # guard against empty <href/> elements (text is None)
                        match = pat.search(sub_el.text or '')
                        if match:
                            svn_tx_id = match.groupdict()['txn_id']
                            return svn_tx_id
    except Exception:
        log.exception('Failed to extract txn_id')
87
88
def get_txn_id_data_key(repo_path, svn_txn_id):
    """Build the storage key under which metadata for ``svn_txn_id`` lives."""
    log.debug('svn-txn-id: %s, obtaining data path', svn_txn_id)
    hashed_repo = rc_cache.utils.compute_key_from_params(repo_path)
    storage_key = '{}.{}.svn_txn_id'.format(hashed_repo, svn_txn_id)
    log.debug('computed final key: %s', storage_key)

    return storage_key
96
97
def store_txn_id_data(repo_path, svn_txn_id, data_dict):
    """Serialize ``data_dict`` to JSON and store it under the txn key."""
    log.debug('svn-txn-id: %s, storing data', svn_txn_id)

    # an empty txn_id would produce a useless key; refuse to store
    if not svn_txn_id:
        log.warning('Cannot store txn_id because it is empty')
        return

    redis_conn = get_redis_client()
    storage_key = get_txn_id_data_key(repo_path, svn_txn_id)
    redis_conn.set(storage_key, json.dumps(data_dict))
110
111
def get_txn_id_from_store(repo_path, svn_txn_id, rm_on_read=False):
    """
    Reads txn_id from store and if present returns the data for callback manager.

    :param repo_path: repository path used to compute the storage key.
    :param svn_txn_id: svn transaction id the data was stored under.
    :param rm_on_read: when True the key is deleted after being read.
    :return: stored metadata dict, or ``{}`` when missing/unreadable.
    """
    log.debug('svn-txn-id: %s, retrieving data', svn_txn_id)
    redis_conn = get_redis_client()

    store_key = get_txn_id_data_key(repo_path, svn_txn_id)
    data = {}
    try:
        # single fetch: a stray duplicate ``get`` call was removed here
        raw_data = redis_conn.get(store_key)
        if raw_data is not None:
            data = json.loads(raw_data)
        else:
            # missing key is an expected condition, not an error
            log.debug('No txn_id metadata found under %s', store_key)
    except Exception:
        log.exception('Failed to get txn_id metadata')

    if rm_on_read:
        log.debug('Cleaning up txn_id at %s', store_key)
        redis_conn.delete(store_key)

    return data
@@ -0,0 +1,226 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
21 Test suite for making push/pull operations, on specially modified INI files
22
23 .. important::
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
27 """
28
29
30 import time
31
32 import pytest
33
34 from rhodecode.lib import rc_cache
35 from rhodecode.model.db import Repository, UserIpMap, CacheKey
36 from rhodecode.model.meta import Session
37 from rhodecode.model.repo import RepoModel
38 from rhodecode.model.user import UserModel
39 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
40
41 from rhodecode.tests.vcs_operations import (
42 Command, _check_proper_clone, _add_files_and_push, HG_REPO_WITH_GROUP)
43
44
@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
class TestVCSOperations(object):
    """
    End-to-end clone/push tests driving the real ``hg`` command line client
    (via :class:`Command`) against a running test web server
    (``rc_web_server`` fixture). The assertions match exact client output
    strings, so test bodies are kept verbatim.
    """

    def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
        # plain http clone as admin must succeed
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
        # --pull forces the pull-based clone protocol
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone --pull', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
        # stream clones print transfer statistics instead of the regular
        # clone summary; the file count below is tied to the fixture repo
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone --pull --stream', clone_url, tmpdir.strpath)
        assert 'files to transfer,' in stdout
        assert 'transferred 1.' in stdout
        assert '114 files updated,' in stdout

    def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
        # repositories are also reachable via the `_<repo_id>` alias url
        repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
        clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
        # clone of a repository that lives inside a repo group
        clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')

    def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
        # a bad password must be rejected by the vcs http auth layer
        clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'abort: authorization failed' in stderr

    def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
        # cloning a git repository with the hg client must 404, not leak data
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
        # unknown repository name -> 404
        clone_url = rc_web_server.repo_clone_url('trololo')
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
        # a leading double slash in the repo name must not resolve
        clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_existing_path_hg_not_in_database(
            self, rc_web_server, tmpdir, fs_repo_only):
        # repo exists on the filesystem but not in the database -> 404
        db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
        clone_url = rc_web_server.repo_clone_url(db_name)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_existing_path_hg_not_in_database_different_scm(
            self, rc_web_server, tmpdir, fs_repo_only):
        # same as above, but the on-disk repo is of a different scm type
        db_name = fs_repo_only('not-in-db-git', repo_type='git')
        clone_url = rc_web_server.repo_clone_url(db_name)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
        # repo registered in the database but missing on disk -> 404
        repo = user_util.create_repo()
        clone_url = rc_web_server.repo_clone_url(repo.repo_name)

        # Damage repo by removing it's folder
        RepoModel()._delete_filesystem_repo(repo)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)
        assert 'HTTP Error 404: Not Found' in stderr

    def test_push_new_file_hg(self, rc_web_server, tmpdir):
        # clone, commit a new file locally, push it back
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=clone_url)

        assert 'pushing to' in stdout
        assert 'size summary' in stdout

    def test_push_invalidates_cache(self, rc_web_server, tmpdir):
        # a push must rotate the repo cache keys (cache_state_uid changes)
        hg_repo = Repository.get_by_repo_name(HG_REPO)

        # init cache objects
        CacheKey.delete_all_cache()

        repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id)

        inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)

        with inv_context_manager as invalidation_context:
            # __enter__ will create and register cache objects
            pass

        cache_keys = hg_repo.cache_keys
        assert cache_keys != []
        old_ids = [x.cache_state_uid for x in cache_keys]

        # clone to init cache
        clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        cache_keys = hg_repo.cache_keys
        assert cache_keys != []
        for key in cache_keys:
            assert key.cache_active is True

        # PUSH that should trigger invalidation cache
        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)

        # flush...
        Session().commit()
        hg_repo = Repository.get_by_repo_name(HG_REPO)
        cache_keys = hg_repo.cache_keys
        assert cache_keys != []
        new_ids = [x.cache_state_uid for x in cache_keys]
        assert new_ids != old_ids

    def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
        # clone with good credentials, then push with bad ones -> rejected
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(
            HG_REPO, user='bad', passwd='name')
        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=push_url)

        assert 'abort: authorization failed' in stderr

    def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
        # pushing to a repo url that does not exist -> 404
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath,
            clone_url=rc_web_server.repo_clone_url('not-existing'))

        assert 'HTTP Error 404: Not Found' in stderr

    def test_ip_restriction_hg(self, rc_web_server, tmpdir):
        # with an IP allow-list that excludes the test client, access is 403;
        # after the restriction is removed the clone succeeds.
        # NOTE(review): the sleep(2) calls presumably wait for the auth cache
        # to expire — confirm against the permission-cache TTL
        user_model = UserModel()
        try:
            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
            Session().commit()
            time.sleep(2)
            clone_url = rc_web_server.repo_clone_url(HG_REPO)
            stdout, stderr = Command('/tmp').execute(
                'hg clone', clone_url, tmpdir.strpath)
            assert 'abort: HTTP Error 403: Forbidden' in stderr
        finally:
            # release IP restrictions
            for ip in UserIpMap.getAll():
                UserIpMap.delete(ip.ip_id)
            Session().commit()

            time.sleep(2)

            stdout, stderr = Command('/tmp').execute(
                'hg clone', clone_url, tmpdir.strpath)
            _check_proper_clone(stdout, stderr, 'hg')
@@ -0,0 +1,197 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 """
20 Test suite for making push/pull operations, on specially modified INI files
21
22 .. important::
23
24 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 to redirect things to stderr instead of stdout.
26 """
27
28
29 import time
30 import pytest
31
32 from rhodecode.model.db import Repository, UserIpMap
33 from rhodecode.model.meta import Session
34 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import (SVN_REPO, TEST_USER_ADMIN_LOGIN)
37
38
39 from rhodecode.tests.vcs_operations import (
40 Command, _check_proper_clone, _check_proper_svn_push,
41 _add_files_and_push, SVN_REPO_WITH_GROUP)
42
43
@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
class TestVCSOperations(object):
    """
    End-to-end checkout/commit tests driving the real ``svn`` command line
    client (via :class:`Command`) against a running test web server
    (``rc_web_server`` fixture). ``--non-interactive`` plus explicit
    credentials keep the svn client from prompting.
    """

    def test_clone_svn_repo_by_admin(self, rc_web_server, tmpdir):
        # plain http checkout as admin must succeed
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)
        username, password = rc_web_server.repo_clone_credentials()

        cmd = Command('/tmp')

        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'svn')
        cmd.assert_returncode_success()

    def test_clone_svn_repo_by_id_by_admin(self, rc_web_server, tmpdir):
        # repositories are also reachable via the `_<repo_id>` alias url
        repo_id = Repository.get_by_repo_name(SVN_REPO).repo_id
        username, password = rc_web_server.repo_clone_credentials()

        clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
        cmd = Command('/tmp')
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'svn')
        cmd.assert_returncode_success()

    def test_clone_svn_repo_with_group_by_admin(self, rc_web_server, tmpdir):
        # checkout of a repository that lives inside a repo group
        clone_url = rc_web_server.repo_clone_url(SVN_REPO_WITH_GROUP)
        username, password = rc_web_server.repo_clone_credentials()

        cmd = Command('/tmp')
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'svn')
        cmd.assert_returncode_success()

    def test_clone_wrong_credentials_svn(self, rc_web_server, tmpdir):
        # a bad password must be rejected by the vcs http auth layer.
        # NOTE(review): 'fatal:' is a git-style message prefix — confirm the
        # svn client actually emits this exact string on auth failure
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)
        username, password = rc_web_server.repo_clone_credentials()
        password = 'bad-password'

        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)
        assert 'fatal: Authentication failed' in stderr

    def test_clone_svn_with_slashes(self, rc_web_server, tmpdir):
        # a leading double slash in the repo name must not resolve
        clone_url = rc_web_server.repo_clone_url('//' + SVN_REPO)
        stdout, stderr = Command('/tmp').execute('svn checkout', clone_url)
        assert 'not found' in stderr

    def test_clone_existing_path_svn_not_in_database(
            self, rc_web_server, tmpdir, fs_repo_only):
        # repo exists on the filesystem but not in the database -> not found
        db_name = fs_repo_only('not-in-db-git', repo_type='git')
        clone_url = rc_web_server.repo_clone_url(db_name)
        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'

        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)
        assert 'not found' in stderr

    def test_clone_existing_path_svn_not_in_database_different_scm(
            self, rc_web_server, tmpdir, fs_repo_only):
        # same as above, but the on-disk repo is of a different scm type
        db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
        clone_url = rc_web_server.repo_clone_url(db_name)

        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)
        assert 'not found' in stderr

    def test_clone_non_existing_store_path_svn(self, rc_web_server, tmpdir, user_util):
        # repo registered in the database but missing on disk -> not found
        repo = user_util.create_repo(repo_type='git')
        clone_url = rc_web_server.repo_clone_url(repo.repo_name)

        # Damage repo by removing it's folder
        RepoModel()._delete_filesystem_repo(repo)

        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)
        assert 'not found' in stderr

    def test_push_new_file_svn(self, rc_web_server, tmpdir):
        # checkout, commit a new file locally, push (svn commit) it back
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)
        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'

        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)

        # commit some stuff into this repo
        stdout, stderr = _add_files_and_push(
            'svn', tmpdir.strpath, clone_url=clone_url)

        _check_proper_svn_push(stdout, stderr)

    def test_push_wrong_credentials_svn(self, rc_web_server, tmpdir):
        # checkout with default credentials, then commit with bad ones.
        # NOTE(review): 'fatal:' is a git-style message prefix — confirm the
        # svn client actually emits this exact string on auth failure
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)

        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'
        stdout, stderr = Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(
            SVN_REPO, user='bad', passwd='name')
        stdout, stderr = _add_files_and_push(
            'svn', tmpdir.strpath, clone_url=push_url)

        assert 'fatal: Authentication failed' in stderr

    def test_push_back_to_wrong_url_svn(self, rc_web_server, tmpdir):
        # committing to a repo url that does not exist -> not found
        clone_url = rc_web_server.repo_clone_url(SVN_REPO)
        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'
        Command('/tmp').execute(
            f'svn checkout {auth}', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'svn', tmpdir.strpath,
            clone_url=rc_web_server.repo_clone_url('not-existing'))

        assert 'not found' in stderr

    def test_ip_restriction_svn(self, rc_web_server, tmpdir):
        # with an IP allow-list that excludes the test client, access is 403;
        # after the restriction is removed the checkout succeeds.
        # NOTE(review): the sleep(2) calls presumably wait for the auth cache
        # to expire — confirm against the permission-cache TTL
        user_model = UserModel()
        username, password = '', ''
        auth = f'--non-interactive --username={username} --password={password}'

        try:
            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
            Session().commit()
            time.sleep(2)
            clone_url = rc_web_server.repo_clone_url(SVN_REPO)

            stdout, stderr = Command('/tmp').execute(
                f'svn checkout {auth}', clone_url, tmpdir.strpath)
            msg = "The requested URL returned error: 403"
            assert msg in stderr
        finally:
            # release IP restrictions
            for ip in UserIpMap.getAll():
                UserIpMap.delete(ip.ip_id)
            Session().commit()

            time.sleep(2)

            cmd = Command('/tmp')
            stdout, stderr = cmd.execute(f'svn checkout {auth}', clone_url, tmpdir.strpath)
            cmd.assert_returncode_success()
            _check_proper_clone(stdout, stderr, 'svn')
@@ -1,852 +1,856 b''
1
1
2 ; #########################################
2 ; #########################################
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 ; #########################################
4 ; #########################################
5
5
6 [DEFAULT]
6 [DEFAULT]
7 ; Debug flag sets all loggers to debug, and enables request tracking
7 ; Debug flag sets all loggers to debug, and enables request tracking
8 debug = true
8 debug = true
9
9
10 ; ########################################################################
10 ; ########################################################################
11 ; EMAIL CONFIGURATION
11 ; EMAIL CONFIGURATION
12 ; These settings will be used by the RhodeCode mailing system
12 ; These settings will be used by the RhodeCode mailing system
13 ; ########################################################################
13 ; ########################################################################
14
14
15 ; prefix all emails subjects with given prefix, helps filtering out emails
15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 #email_prefix = [RhodeCode]
16 #email_prefix = [RhodeCode]
17
17
18 ; email FROM address all mails will be sent
18 ; email FROM address all mails will be sent
19 #app_email_from = rhodecode-noreply@localhost
19 #app_email_from = rhodecode-noreply@localhost
20
20
21 #smtp_server = mail.server.com
21 #smtp_server = mail.server.com
22 #smtp_username =
22 #smtp_username =
23 #smtp_password =
23 #smtp_password =
24 #smtp_port =
24 #smtp_port =
25 #smtp_use_tls = false
25 #smtp_use_tls = false
26 #smtp_use_ssl = true
26 #smtp_use_ssl = true
27
27
28 [server:main]
28 [server:main]
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 host = 127.0.0.1
31 host = 127.0.0.1
32 port = 10020
32 port = 10020
33
33
34
34
35 ; ###########################
35 ; ###########################
36 ; GUNICORN APPLICATION SERVER
36 ; GUNICORN APPLICATION SERVER
37 ; ###########################
37 ; ###########################
38
38
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40
40
41 ; Module to use, this setting shouldn't be changed
41 ; Module to use, this setting shouldn't be changed
42 use = egg:gunicorn#main
42 use = egg:gunicorn#main
43
43
44 ; Prefix middleware for RhodeCode.
44 ; Prefix middleware for RhodeCode.
45 ; recommended when using proxy setup.
45 ; recommended when using proxy setup.
46 ; allows to set RhodeCode under a prefix in server.
46 ; allows to set RhodeCode under a prefix in server.
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 ; And set your prefix like: `prefix = /custom_prefix`
48 ; And set your prefix like: `prefix = /custom_prefix`
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 ; to make your cookies only work on prefix url
50 ; to make your cookies only work on prefix url
51 [filter:proxy-prefix]
51 [filter:proxy-prefix]
52 use = egg:PasteDeploy#prefix
52 use = egg:PasteDeploy#prefix
53 prefix = /
53 prefix = /
54
54
55 [app:main]
55 [app:main]
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 ; of this file
57 ; of this file
58 ; Each option in the app:main can be overridden by an environment variable
58 ; Each option in the app:main can be overridden by an environment variable
59 ;
59 ;
60 ;To override an option:
60 ;To override an option:
61 ;
61 ;
62 ;RC_<KeyName>
62 ;RC_<KeyName>
63 ;Everything should be uppercase, . and - should be replaced by _.
63 ;Everything should be uppercase, . and - should be replaced by _.
64 ;For example, if you have these configuration settings:
64 ;For example, if you have these configuration settings:
65 ;rc_cache.repo_object.backend = foo
65 ;rc_cache.repo_object.backend = foo
66 ;can be overridden by
66 ;can be overridden by
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68
68
69 use = egg:rhodecode-enterprise-ce
69 use = egg:rhodecode-enterprise-ce
70
70
71 ; enable proxy prefix middleware, defined above
71 ; enable proxy prefix middleware, defined above
72 #filter-with = proxy-prefix
72 #filter-with = proxy-prefix
73
73
74 ; #############
74 ; #############
75 ; DEBUG OPTIONS
75 ; DEBUG OPTIONS
76 ; #############
76 ; #############
77
77
78 pyramid.reload_templates = true
78 pyramid.reload_templates = true
79
79
80 # During development we want to have the debug toolbar enabled
80 # During development we want to have the debug toolbar enabled
81 pyramid.includes =
81 pyramid.includes =
82 pyramid_debugtoolbar
82 pyramid_debugtoolbar
83
83
84 debugtoolbar.hosts = 0.0.0.0/0
84 debugtoolbar.hosts = 0.0.0.0/0
85 debugtoolbar.exclude_prefixes =
85 debugtoolbar.exclude_prefixes =
86 /css
86 /css
87 /fonts
87 /fonts
88 /images
88 /images
89 /js
89 /js
90
90
91 ## RHODECODE PLUGINS ##
91 ## RHODECODE PLUGINS ##
92 rhodecode.includes =
92 rhodecode.includes =
93 rhodecode.api
93 rhodecode.api
94
94
95
95
96 # api prefix url
96 # api prefix url
97 rhodecode.api.url = /_admin/api
97 rhodecode.api.url = /_admin/api
98
98
99 ; enable debug style page
99 ; enable debug style page
100 debug_style = true
100 debug_style = true
101
101
102 ; #################
102 ; #################
103 ; END DEBUG OPTIONS
103 ; END DEBUG OPTIONS
104 ; #################
104 ; #################
105
105
106 ; encryption key used to encrypt social plugin tokens,
106 ; encryption key used to encrypt social plugin tokens,
107 ; remote_urls with credentials etc, if not set it defaults to
107 ; remote_urls with credentials etc, if not set it defaults to
108 ; `beaker.session.secret`
108 ; `beaker.session.secret`
109 #rhodecode.encrypted_values.secret =
109 #rhodecode.encrypted_values.secret =
110
110
111 ; decryption strict mode (enabled by default). It controls if decryption raises
111 ; decryption strict mode (enabled by default). It controls if decryption raises
112 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
112 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 #rhodecode.encrypted_values.strict = false
113 #rhodecode.encrypted_values.strict = false
114
114
115 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
115 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
116 ; fernet is safer, and we strongly recommend switching to it.
116 ; fernet is safer, and we strongly recommend switching to it.
117 ; Due to backward compatibility aes is used as default.
117 ; Due to backward compatibility aes is used as default.
118 #rhodecode.encrypted_values.algorithm = fernet
118 #rhodecode.encrypted_values.algorithm = fernet
119
119
120 ; Return gzipped responses from RhodeCode (static files/application)
120 ; Return gzipped responses from RhodeCode (static files/application)
121 gzip_responses = false
121 gzip_responses = false
122
122
123 ; Auto-generate javascript routes file on startup
123 ; Auto-generate javascript routes file on startup
124 generate_js_files = false
124 generate_js_files = false
125
125
126 ; System global default language.
126 ; System global default language.
127 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
127 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 lang = en
128 lang = en
129
129
130 ; Perform a full repository scan and import on each server start.
130 ; Perform a full repository scan and import on each server start.
131 ; Setting this to true could lead to very long startup time.
131 ; Setting this to true could lead to very long startup time.
132 startup.import_repos = false
132 startup.import_repos = false
133
133
134 ; URL at which the application is running. This is used for Bootstrapping
134 ; URL at which the application is running. This is used for Bootstrapping
135 ; requests in context when no web request is available. Used in ishell, or
135 ; requests in context when no web request is available. Used in ishell, or
136 ; SSH calls. Set this for events to receive proper url for SSH calls.
136 ; SSH calls. Set this for events to receive proper url for SSH calls.
137 app.base_url = http://rhodecode.local
137 app.base_url = http://rhodecode.local
138
138
139 ; Host at which the Service API is running.
139 ; Host at which the Service API is running.
140 app.service_api.host = http://rhodecode.local:10020
140 app.service_api.host = http://rhodecode.local:10020
141
141
142 ; Secret for Service API authentication.
142 ; Secret for Service API authentication.
143 app.service_api.token =
143 app.service_api.token =
144
144
145 ; Unique application ID. Should be a random unique string for security.
145 ; Unique application ID. Should be a random unique string for security.
146 app_instance_uuid = rc-production
146 app_instance_uuid = rc-production
147
147
148 ; Cut off limit for large diffs (size in bytes). If overall diff size on
148 ; Cut off limit for large diffs (size in bytes). If overall diff size on
149 ; commit, or pull request exceeds this limit this diff will be displayed
149 ; commit, or pull request exceeds this limit this diff will be displayed
150 ; partially. E.g 512000 == 512Kb
150 ; partially. E.g 512000 == 512Kb
151 cut_off_limit_diff = 512000
151 cut_off_limit_diff = 512000
152
152
153 ; Cut off limit for large files inside diffs (size in bytes). Each individual
153 ; Cut off limit for large files inside diffs (size in bytes). Each individual
154 ; file inside diff which exceeds this limit will be displayed partially.
154 ; file inside diff which exceeds this limit will be displayed partially.
155 ; E.g 128000 == 128Kb
155 ; E.g 128000 == 128Kb
156 cut_off_limit_file = 128000
156 cut_off_limit_file = 128000
157
157
158 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
158 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
159 vcs_full_cache = true
159 vcs_full_cache = true
160
160
161 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
161 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
162 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
162 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
163 force_https = false
163 force_https = false
164
164
165 ; use Strict-Transport-Security headers
165 ; use Strict-Transport-Security headers
166 use_htsts = false
166 use_htsts = false
167
167
168 ; Set to true if your repos are exposed using the dumb protocol
168 ; Set to true if your repos are exposed using the dumb protocol
169 git_update_server_info = false
169 git_update_server_info = false
170
170
171 ; RSS/ATOM feed options
171 ; RSS/ATOM feed options
172 rss_cut_off_limit = 256000
172 rss_cut_off_limit = 256000
173 rss_items_per_page = 10
173 rss_items_per_page = 10
174 rss_include_diff = false
174 rss_include_diff = false
175
175
176 ; gist URL alias, used to create nicer urls for gist. This should be an
176 ; gist URL alias, used to create nicer urls for gist. This should be an
177 ; url that does rewrites to _admin/gists/{gistid}.
177 ; url that does rewrites to _admin/gists/{gistid}.
178 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
178 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
179 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
179 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
180 gist_alias_url =
180 gist_alias_url =
181
181
182 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
182 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
183 ; used for access.
183 ; used for access.
184 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
184 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
184 ; came from the logged in user who owns this authentication token.
184 ; came from the logged in user who owns this authentication token.
186 ; Additionally @TOKEN syntax can be used to bind the view to specific
186 ; Additionally @TOKEN syntax can be used to bind the view to specific
187 ; authentication token. Such view would be only accessible when used together
187 ; authentication token. Such view would be only accessible when used together
188 ; with this authentication token
188 ; with this authentication token
189 ; list of all views can be found under `/_admin/permissions/auth_token_access`
189 ; list of all views can be found under `/_admin/permissions/auth_token_access`
190 ; The list should be "," separated and on a single line.
190 ; The list should be "," separated and on a single line.
191 ; Most common views to enable:
191 ; Most common views to enable:
192
192
193 # RepoCommitsView:repo_commit_download
193 # RepoCommitsView:repo_commit_download
194 # RepoCommitsView:repo_commit_patch
194 # RepoCommitsView:repo_commit_patch
195 # RepoCommitsView:repo_commit_raw
195 # RepoCommitsView:repo_commit_raw
196 # RepoCommitsView:repo_commit_raw@TOKEN
196 # RepoCommitsView:repo_commit_raw@TOKEN
197 # RepoFilesView:repo_files_diff
197 # RepoFilesView:repo_files_diff
198 # RepoFilesView:repo_archivefile
198 # RepoFilesView:repo_archivefile
199 # RepoFilesView:repo_file_raw
199 # RepoFilesView:repo_file_raw
200 # GistView:*
200 # GistView:*
201 api_access_controllers_whitelist =
201 api_access_controllers_whitelist =
202
202
203 ; Default encoding used to convert from and to unicode
203 ; Default encoding used to convert from and to unicode
204 ; can be also a comma separated list of encoding in case of mixed encodings
204 ; can be also a comma separated list of encoding in case of mixed encodings
205 default_encoding = UTF-8
205 default_encoding = UTF-8
206
206
207 ; instance-id prefix
207 ; instance-id prefix
208 ; a prefix key for this instance used for cache invalidation when running
208 ; a prefix key for this instance used for cache invalidation when running
209 ; multiple instances of RhodeCode, make sure it's globally unique for
209 ; multiple instances of RhodeCode, make sure it's globally unique for
210 ; all running RhodeCode instances. Leave empty if you don't use it
210 ; all running RhodeCode instances. Leave empty if you don't use it
211 instance_id =
211 instance_id =
212
212
213 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
213 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
214 ; of an authentication plugin also if it is disabled by its settings.
214 ; of an authentication plugin also if it is disabled by its settings.
215 ; This could be useful if you are unable to log in to the system due to broken
215 ; This could be useful if you are unable to log in to the system due to broken
216 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
216 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
217 ; module to log in again and fix the settings.
217 ; module to log in again and fix the settings.
218 ; Available builtin plugin IDs (hash is part of the ID):
218 ; Available builtin plugin IDs (hash is part of the ID):
219 ; egg:rhodecode-enterprise-ce#rhodecode
219 ; egg:rhodecode-enterprise-ce#rhodecode
220 ; egg:rhodecode-enterprise-ce#pam
220 ; egg:rhodecode-enterprise-ce#pam
221 ; egg:rhodecode-enterprise-ce#ldap
221 ; egg:rhodecode-enterprise-ce#ldap
222 ; egg:rhodecode-enterprise-ce#jasig_cas
222 ; egg:rhodecode-enterprise-ce#jasig_cas
223 ; egg:rhodecode-enterprise-ce#headers
223 ; egg:rhodecode-enterprise-ce#headers
224 ; egg:rhodecode-enterprise-ce#crowd
224 ; egg:rhodecode-enterprise-ce#crowd
225
225
226 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
226 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
227
227
228 ; Flag to control loading of legacy plugins in py:/path format
228 ; Flag to control loading of legacy plugins in py:/path format
229 auth_plugin.import_legacy_plugins = true
229 auth_plugin.import_legacy_plugins = true
230
230
231 ; alternative return HTTP header for failed authentication. Default HTTP
231 ; alternative return HTTP header for failed authentication. Default HTTP
232 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
232 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
233 ; handling that causing a series of failed authentication calls.
233 ; handling that causing a series of failed authentication calls.
234 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
234 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
235 ; This will be served instead of default 401 on bad authentication
235 ; This will be served instead of default 401 on bad authentication
236 auth_ret_code =
236 auth_ret_code =
237
237
238 ; use special detection method when serving auth_ret_code, instead of serving
238 ; use special detection method when serving auth_ret_code, instead of serving
239 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
239 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
240 ; and then serve auth_ret_code to clients
240 ; and then serve auth_ret_code to clients
241 auth_ret_code_detection = false
241 auth_ret_code_detection = false
242
242
243 ; locking return code. When repository is locked return this HTTP code. 2XX
243 ; locking return code. When repository is locked return this HTTP code. 2XX
244 ; codes don't break the transactions while 4XX codes do
244 ; codes don't break the transactions while 4XX codes do
245 lock_ret_code = 423
245 lock_ret_code = 423
246
246
247 ; Filesystem location where repositories should be stored
247 ; Filesystem location where repositories should be stored
248 repo_store.path = /var/opt/rhodecode_repo_store
248 repo_store.path = /var/opt/rhodecode_repo_store
249
249
250 ; allows to setup custom hooks in settings page
250 ; allows to setup custom hooks in settings page
251 allow_custom_hooks_settings = true
251 allow_custom_hooks_settings = true
252
252
253 ; Generated license token required for EE edition license.
253 ; Generated license token required for EE edition license.
254 ; New generated token value can be found in Admin > settings > license page.
254 ; New generated token value can be found in Admin > settings > license page.
255 license_token =
255 license_token =
256
256
257 ; This flag hides sensitive information on the license page such as token, and license data
257 ; This flag hides sensitive information on the license page such as token, and license data
258 license.hide_license_info = false
258 license.hide_license_info = false
259
259
260 ; supervisor connection uri, for managing supervisor and logs.
260 ; supervisor connection uri, for managing supervisor and logs.
261 supervisor.uri =
261 supervisor.uri =
262
262
263 ; supervisord group name/id we only want this RC instance to handle
263 ; supervisord group name/id we only want this RC instance to handle
264 supervisor.group_id = dev
264 supervisor.group_id = dev
265
265
266 ; Display extended labs settings
266 ; Display extended labs settings
267 labs_settings_active = true
267 labs_settings_active = true
268
268
269 ; Custom exception store path, defaults to TMPDIR
269 ; Custom exception store path, defaults to TMPDIR
270 ; This is used to store exception from RhodeCode in shared directory
270 ; This is used to store exception from RhodeCode in shared directory
271 #exception_tracker.store_path =
271 #exception_tracker.store_path =
272
272
273 ; Send email with exception details when it happens
273 ; Send email with exception details when it happens
274 #exception_tracker.send_email = false
274 #exception_tracker.send_email = false
275
275
276 ; Comma separated list of recipients for exception emails,
276 ; Comma separated list of recipients for exception emails,
277 ; e.g admin@rhodecode.com,devops@rhodecode.com
277 ; e.g admin@rhodecode.com,devops@rhodecode.com
278 ; Can be left empty, then emails will be sent to ALL super-admins
278 ; Can be left empty, then emails will be sent to ALL super-admins
279 #exception_tracker.send_email_recipients =
279 #exception_tracker.send_email_recipients =
280
280
281 ; optional prefix to Add to email Subject
281 ; optional prefix to Add to email Subject
282 #exception_tracker.email_prefix = [RHODECODE ERROR]
282 #exception_tracker.email_prefix = [RHODECODE ERROR]
283
283
284 ; File store configuration. This is used to store and serve uploaded files
284 ; File store configuration. This is used to store and serve uploaded files
285 file_store.enabled = true
285 file_store.enabled = true
286
286
287 ; Storage backend, available options are: local
287 ; Storage backend, available options are: local
288 file_store.backend = local
288 file_store.backend = local
289
289
290 ; path to store the uploaded binaries and artifacts
290 ; path to store the uploaded binaries and artifacts
291 file_store.storage_path = /var/opt/rhodecode_data/file_store
291 file_store.storage_path = /var/opt/rhodecode_data/file_store
292
292
293
293
294 ; Redis url to acquire/check generation of archives locks
294 ; Redis url to acquire/check generation of archives locks
295 archive_cache.locking.url = redis://redis:6379/1
295 archive_cache.locking.url = redis://redis:6379/1
296
296
297 ; Storage backend, only 'filesystem' and 'objectstore' are available now
297 ; Storage backend, only 'filesystem' and 'objectstore' are available now
298 archive_cache.backend.type = filesystem
298 archive_cache.backend.type = filesystem
299
299
300 ; url for s3 compatible storage that allows to upload artifacts
300 ; url for s3 compatible storage that allows to upload artifacts
301 ; e.g http://minio:9000
301 ; e.g http://minio:9000
302 archive_cache.objectstore.url = http://s3-minio:9000
302 archive_cache.objectstore.url = http://s3-minio:9000
303
303
304 ; key for s3 auth
304 ; key for s3 auth
305 archive_cache.objectstore.key = key
305 archive_cache.objectstore.key = key
306
306
307 ; secret for s3 auth
307 ; secret for s3 auth
308 archive_cache.objectstore.secret = secret
308 archive_cache.objectstore.secret = secret
309
309
310 ;region for s3 storage
310 ;region for s3 storage
311 archive_cache.objectstore.region = eu-central-1
311 archive_cache.objectstore.region = eu-central-1
312
312
313 ; number of sharded buckets to create to distribute archives across
313 ; number of sharded buckets to create to distribute archives across
314 ; default is 8 shards
314 ; default is 8 shards
315 archive_cache.objectstore.bucket_shards = 8
315 archive_cache.objectstore.bucket_shards = 8
316
316
317 ; a top-level bucket to put all other shards in
317 ; a top-level bucket to put all other shards in
318 ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
318 ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
319 archive_cache.objectstore.bucket = rhodecode-archive-cache
319 archive_cache.objectstore.bucket = rhodecode-archive-cache
320
320
321 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
321 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
322 archive_cache.objectstore.retry = false
322 archive_cache.objectstore.retry = false
323
323
324 ; number of seconds to wait for next try using retry
324 ; number of seconds to wait for next try using retry
325 archive_cache.objectstore.retry_backoff = 1
325 archive_cache.objectstore.retry_backoff = 1
326
326
327 ; how many tries to do a retry fetch from this backend
327 ; how many tries to do a retry fetch from this backend
328 archive_cache.objectstore.retry_attempts = 10
328 archive_cache.objectstore.retry_attempts = 10
329
329
330 ; Default is $cache_dir/archive_cache if not set
330 ; Default is $cache_dir/archive_cache if not set
331 ; Generated repo archives will be cached at this location
331 ; Generated repo archives will be cached at this location
332 ; and served from the cache during subsequent requests for the same archive of
332 ; and served from the cache during subsequent requests for the same archive of
333 ; the repository. This path is important to be shared across filesystems and with
333 ; the repository. This path is important to be shared across filesystems and with
334 ; RhodeCode and vcsserver
334 ; RhodeCode and vcsserver
335 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
335 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
336
336
337 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
337 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
338 archive_cache.filesystem.cache_size_gb = 1
338 archive_cache.filesystem.cache_size_gb = 1
339
339
340 ; Eviction policy used to clear out after cache_size_gb limit is reached
340 ; Eviction policy used to clear out after cache_size_gb limit is reached
341 archive_cache.filesystem.eviction_policy = least-recently-stored
341 archive_cache.filesystem.eviction_policy = least-recently-stored
342
342
343 ; By default cache uses sharding technique, this specifies how many shards are there
343 ; By default cache uses sharding technique, this specifies how many shards are there
344 ; default is 8 shards
344 ; default is 8 shards
345 archive_cache.filesystem.cache_shards = 8
345 archive_cache.filesystem.cache_shards = 8
346
346
347 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
347 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
348 archive_cache.filesystem.retry = false
348 archive_cache.filesystem.retry = false
349
349
350 ; number of seconds to wait for next try using retry
350 ; number of seconds to wait for next try using retry
351 archive_cache.filesystem.retry_backoff = 1
351 archive_cache.filesystem.retry_backoff = 1
352
352
353 ; how many tries to do a retry fetch from this backend
353 ; how many tries to do a retry fetch from this backend
354 archive_cache.filesystem.retry_attempts = 10
354 archive_cache.filesystem.retry_attempts = 10
355
355
356
356
357 ; #############
357 ; #############
358 ; CELERY CONFIG
358 ; CELERY CONFIG
359 ; #############
359 ; #############
360
360
361 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
361 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
362
362
363 use_celery = true
363 use_celery = true
364
364
365 ; path to store schedule database
365 ; path to store schedule database
366 #celerybeat-schedule.path =
366 #celerybeat-schedule.path =
367
367
368 ; connection url to the message broker (default redis)
368 ; connection url to the message broker (default redis)
369 celery.broker_url = redis://redis:6379/8
369 celery.broker_url = redis://redis:6379/8
370
370
371 ; results backend to get results for (default redis)
371 ; results backend to get results for (default redis)
372 celery.result_backend = redis://redis:6379/8
372 celery.result_backend = redis://redis:6379/8
373
373
374 ; rabbitmq example
374 ; rabbitmq example
375 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
375 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
376
376
377 ; maximum tasks to execute before worker restart
377 ; maximum tasks to execute before worker restart
378 celery.max_tasks_per_child = 20
378 celery.max_tasks_per_child = 20
379
379
380 ; tasks will never be sent to the queue, but executed locally instead.
380 ; tasks will never be sent to the queue, but executed locally instead.
381 celery.task_always_eager = false
381 celery.task_always_eager = false
382
382
383 ; #############
383 ; #############
384 ; DOGPILE CACHE
384 ; DOGPILE CACHE
385 ; #############
385 ; #############
386
386
387 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
387 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
388 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
388 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
389 cache_dir = /var/opt/rhodecode_data
389 cache_dir = /var/opt/rhodecode_data
390
390
391 ; *********************************************
391 ; *********************************************
392 ; `sql_cache_short` cache for heavy SQL queries
392 ; `sql_cache_short` cache for heavy SQL queries
393 ; Only supported backend is `memory_lru`
393 ; Only supported backend is `memory_lru`
394 ; *********************************************
394 ; *********************************************
395 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
395 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
396 rc_cache.sql_cache_short.expiration_time = 30
396 rc_cache.sql_cache_short.expiration_time = 30
397
397
398
398
399 ; *****************************************************
399 ; *****************************************************
400 ; `cache_repo_longterm` cache for repo object instances
400 ; `cache_repo_longterm` cache for repo object instances
401 ; Only supported backend is `memory_lru`
401 ; Only supported backend is `memory_lru`
402 ; *****************************************************
402 ; *****************************************************
403 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
403 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
404 ; by default we use 30 Days, cache is still invalidated on push
404 ; by default we use 30 Days, cache is still invalidated on push
405 rc_cache.cache_repo_longterm.expiration_time = 2592000
405 rc_cache.cache_repo_longterm.expiration_time = 2592000
406 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
406 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
407 rc_cache.cache_repo_longterm.max_size = 10000
407 rc_cache.cache_repo_longterm.max_size = 10000
408
408
409
409
410 ; *********************************************
410 ; *********************************************
411 ; `cache_general` cache for general purpose use
411 ; `cache_general` cache for general purpose use
412 ; for simplicity use rc.file_namespace backend,
412 ; for simplicity use rc.file_namespace backend,
413 ; for performance and scale use rc.redis
413 ; for performance and scale use rc.redis
414 ; *********************************************
414 ; *********************************************
415 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
415 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
416 rc_cache.cache_general.expiration_time = 43200
416 rc_cache.cache_general.expiration_time = 43200
417 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
417 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
418 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
418 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
419
419
420 ; alternative `cache_general` redis backend with distributed lock
420 ; alternative `cache_general` redis backend with distributed lock
421 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
421 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
422 #rc_cache.cache_general.expiration_time = 300
422 #rc_cache.cache_general.expiration_time = 300
423
423
424 ; redis_expiration_time needs to be greater than expiration_time
424 ; redis_expiration_time needs to be greater than expiration_time
425 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
425 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
426
426
427 #rc_cache.cache_general.arguments.host = localhost
427 #rc_cache.cache_general.arguments.host = localhost
428 #rc_cache.cache_general.arguments.port = 6379
428 #rc_cache.cache_general.arguments.port = 6379
429 #rc_cache.cache_general.arguments.db = 0
429 #rc_cache.cache_general.arguments.db = 0
430 #rc_cache.cache_general.arguments.socket_timeout = 30
430 #rc_cache.cache_general.arguments.socket_timeout = 30
431 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
431 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
432 #rc_cache.cache_general.arguments.distributed_lock = true
432 #rc_cache.cache_general.arguments.distributed_lock = true
433
433
434 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
434 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
435 #rc_cache.cache_general.arguments.lock_auto_renewal = true
435 #rc_cache.cache_general.arguments.lock_auto_renewal = true
436
436
437 ; *************************************************
437 ; *************************************************
438 ; `cache_perms` cache for permission tree, auth TTL
438 ; `cache_perms` cache for permission tree, auth TTL
439 ; for simplicity use rc.file_namespace backend,
439 ; for simplicity use rc.file_namespace backend,
440 ; for performance and scale use rc.redis
440 ; for performance and scale use rc.redis
441 ; *************************************************
441 ; *************************************************
442 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
442 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
443 rc_cache.cache_perms.expiration_time = 3600
443 rc_cache.cache_perms.expiration_time = 3600
444 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
444 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
445 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
445 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
446
446
447 ; alternative `cache_perms` redis backend with distributed lock
447 ; alternative `cache_perms` redis backend with distributed lock
448 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
448 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
449 #rc_cache.cache_perms.expiration_time = 300
449 #rc_cache.cache_perms.expiration_time = 300
450
450
451 ; redis_expiration_time needs to be greater than expiration_time
451 ; redis_expiration_time needs to be greater than expiration_time
452 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
452 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
453
453
454 #rc_cache.cache_perms.arguments.host = localhost
454 #rc_cache.cache_perms.arguments.host = localhost
455 #rc_cache.cache_perms.arguments.port = 6379
455 #rc_cache.cache_perms.arguments.port = 6379
456 #rc_cache.cache_perms.arguments.db = 0
456 #rc_cache.cache_perms.arguments.db = 0
457 #rc_cache.cache_perms.arguments.socket_timeout = 30
457 #rc_cache.cache_perms.arguments.socket_timeout = 30
458 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
458 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
459 #rc_cache.cache_perms.arguments.distributed_lock = true
459 #rc_cache.cache_perms.arguments.distributed_lock = true
460
460
461 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
461 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
462 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
462 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
463
463
464 ; ***************************************************
464 ; ***************************************************
465 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
465 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
466 ; for simplicity use rc.file_namespace backend,
466 ; for simplicity use rc.file_namespace backend,
467 ; for performance and scale use rc.redis
467 ; for performance and scale use rc.redis
468 ; ***************************************************
468 ; ***************************************************
469 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
469 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
470 rc_cache.cache_repo.expiration_time = 2592000
470 rc_cache.cache_repo.expiration_time = 2592000
471 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
471 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
472 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
472 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
473
473
474 ; alternative `cache_repo` redis backend with distributed lock
474 ; alternative `cache_repo` redis backend with distributed lock
475 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
475 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
476 #rc_cache.cache_repo.expiration_time = 2592000
476 #rc_cache.cache_repo.expiration_time = 2592000
477
477
478 ; redis_expiration_time needs to be greater than expiration_time
478 ; redis_expiration_time needs to be greater than expiration_time
479 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
479 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
480
480
481 #rc_cache.cache_repo.arguments.host = localhost
481 #rc_cache.cache_repo.arguments.host = localhost
482 #rc_cache.cache_repo.arguments.port = 6379
482 #rc_cache.cache_repo.arguments.port = 6379
483 #rc_cache.cache_repo.arguments.db = 1
483 #rc_cache.cache_repo.arguments.db = 1
484 #rc_cache.cache_repo.arguments.socket_timeout = 30
484 #rc_cache.cache_repo.arguments.socket_timeout = 30
485 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
485 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
486 #rc_cache.cache_repo.arguments.distributed_lock = true
486 #rc_cache.cache_repo.arguments.distributed_lock = true
487
487
488 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
488 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
489 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
489 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
490
490
491 ; ##############
491 ; ##############
492 ; BEAKER SESSION
492 ; BEAKER SESSION
493 ; ##############
493 ; ##############
494
494
495 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
495 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
496 ; types are file, ext:redis, ext:database, ext:memcached
496 ; types are file, ext:redis, ext:database, ext:memcached
497 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
497 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
498 #beaker.session.type = file
498 #beaker.session.type = file
499 #beaker.session.data_dir = %(here)s/data/sessions
499 #beaker.session.data_dir = %(here)s/data/sessions
500
500
501 ; Redis based sessions
501 ; Redis based sessions
502 beaker.session.type = ext:redis
502 beaker.session.type = ext:redis
503 beaker.session.url = redis://redis:6379/2
503 beaker.session.url = redis://redis:6379/2
504
504
505 ; DB based session, fast, and allows easy management over logged in users
505 ; DB based session, fast, and allows easy management over logged in users
506 #beaker.session.type = ext:database
506 #beaker.session.type = ext:database
507 #beaker.session.table_name = db_session
507 #beaker.session.table_name = db_session
508 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
508 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
509 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
509 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
510 #beaker.session.sa.pool_recycle = 3600
510 #beaker.session.sa.pool_recycle = 3600
511 #beaker.session.sa.echo = false
511 #beaker.session.sa.echo = false
512
512
513 beaker.session.key = rhodecode
513 beaker.session.key = rhodecode
514 beaker.session.secret = develop-rc-uytcxaz
514 beaker.session.secret = develop-rc-uytcxaz
515 beaker.session.lock_dir = /data_ramdisk/lock
515 beaker.session.lock_dir = /data_ramdisk/lock
516
516
517 ; Secure encrypted cookie. Requires AES and AES python libraries
517 ; Secure encrypted cookie. Requires AES and AES python libraries
518 ; you must disable beaker.session.secret to use this
518 ; you must disable beaker.session.secret to use this
519 #beaker.session.encrypt_key = key_for_encryption
519 #beaker.session.encrypt_key = key_for_encryption
520 #beaker.session.validate_key = validation_key
520 #beaker.session.validate_key = validation_key
521
521
522 ; Sets session as invalid (also logging out user) if it has not been
522 ; Sets session as invalid (also logging out user) if it has not been
523 ; accessed for given amount of time in seconds
523 ; accessed for given amount of time in seconds
524 beaker.session.timeout = 2592000
524 beaker.session.timeout = 2592000
525 beaker.session.httponly = true
525 beaker.session.httponly = true
526
526
527 ; Path to use for the cookie. Set to prefix if you use prefix middleware
527 ; Path to use for the cookie. Set to prefix if you use prefix middleware
528 #beaker.session.cookie_path = /custom_prefix
528 #beaker.session.cookie_path = /custom_prefix
529
529
530 ; Set https secure cookie
530 ; Set https secure cookie
531 beaker.session.secure = false
531 beaker.session.secure = false
532
532
533 ; default cookie expiration time in seconds, set to `true` to set expire
533 ; default cookie expiration time in seconds, set to `true` to set expire
534 ; at browser close
534 ; at browser close
535 #beaker.session.cookie_expires = 3600
535 #beaker.session.cookie_expires = 3600
536
536
537 ; #############################
537 ; #############################
538 ; SEARCH INDEXING CONFIGURATION
538 ; SEARCH INDEXING CONFIGURATION
539 ; #############################
539 ; #############################
540
540
541 ; Full text search indexer is available in rhodecode-tools under
541 ; Full text search indexer is available in rhodecode-tools under
542 ; `rhodecode-tools index` command
542 ; `rhodecode-tools index` command
543
543
544 ; WHOOSH Backend, doesn't require additional services to run
544 ; WHOOSH Backend, doesn't require additional services to run
545 ; it works well with a few dozen repos
545 ; it works well with a few dozen repos
546 search.module = rhodecode.lib.index.whoosh
546 search.module = rhodecode.lib.index.whoosh
547 search.location = %(here)s/data/index
547 search.location = %(here)s/data/index
548
548
549 ; ####################
549 ; ####################
550 ; CHANNELSTREAM CONFIG
550 ; CHANNELSTREAM CONFIG
551 ; ####################
551 ; ####################
552
552
553 ; channelstream enables persistent connections and live notification
553 ; channelstream enables persistent connections and live notification
554 ; in the system. It's also used by the chat system
554 ; in the system. It's also used by the chat system
555
555
556 channelstream.enabled = true
556 channelstream.enabled = true
557
557
558 ; server address for channelstream server on the backend
558 ; server address for channelstream server on the backend
559 channelstream.server = channelstream:9800
559 channelstream.server = channelstream:9800
560
560
561 ; location of the channelstream server from outside world
561 ; location of the channelstream server from outside world
562 ; use ws:// for http or wss:// for https. This address needs to be handled
562 ; use ws:// for http or wss:// for https. This address needs to be handled
563 ; by external HTTP server such as Nginx or Apache
563 ; by external HTTP server such as Nginx or Apache
564 ; see Nginx/Apache configuration examples in our docs
564 ; see Nginx/Apache configuration examples in our docs
565 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
565 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
566 channelstream.secret = ENV_GENERATED
566 channelstream.secret = ENV_GENERATED
567 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
567 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
568
568
569 ; Internal application path that Javascript uses to connect into.
569 ; Internal application path that Javascript uses to connect into.
570 ; If you use proxy-prefix the prefix should be added before /_channelstream
570 ; If you use proxy-prefix the prefix should be added before /_channelstream
571 channelstream.proxy_path = /_channelstream
571 channelstream.proxy_path = /_channelstream
572
572
573
573
574 ; ##############################
574 ; ##############################
575 ; MAIN RHODECODE DATABASE CONFIG
575 ; MAIN RHODECODE DATABASE CONFIG
576 ; ##############################
576 ; ##############################
577
577
578 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
578 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
579 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
579 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
580 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
580 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
581 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
581 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
582 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
582 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
583
583
584 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
584 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
585
585
586 ; see sqlalchemy docs for other advanced settings
586 ; see sqlalchemy docs for other advanced settings
587 ; print the sql statements to output
587 ; print the sql statements to output
588 sqlalchemy.db1.echo = false
588 sqlalchemy.db1.echo = false
589
589
590 ; recycle the connections after this amount of seconds
590 ; recycle the connections after this amount of seconds
591 sqlalchemy.db1.pool_recycle = 3600
591 sqlalchemy.db1.pool_recycle = 3600
592
592
593 ; the number of connections to keep open inside the connection pool.
593 ; the number of connections to keep open inside the connection pool.
594 ; 0 indicates no limit
594 ; 0 indicates no limit
595 ; the general calculus with gevent is:
595 ; the general calculus with gevent is:
596 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
596 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
597 ; then increase pool size + max overflow so that they add up to 500.
597 ; then increase pool size + max overflow so that they add up to 500.
598 #sqlalchemy.db1.pool_size = 5
598 #sqlalchemy.db1.pool_size = 5
599
599
600 ; The number of connections to allow in connection pool "overflow", that is
600 ; The number of connections to allow in connection pool "overflow", that is
601 ; connections that can be opened above and beyond the pool_size setting,
601 ; connections that can be opened above and beyond the pool_size setting,
602 ; which defaults to five.
602 ; which defaults to five.
603 #sqlalchemy.db1.max_overflow = 10
603 #sqlalchemy.db1.max_overflow = 10
604
604
605 ; Connection check ping, used to detect broken database connections
605 ; Connection check ping, used to detect broken database connections
606 ; could be enabled to better handle cases if MySQL has gone away errors
606 ; could be enabled to better handle cases if MySQL has gone away errors
607 #sqlalchemy.db1.ping_connection = true
607 #sqlalchemy.db1.ping_connection = true
608
608
609 ; ##########
609 ; ##########
610 ; VCS CONFIG
610 ; VCS CONFIG
611 ; ##########
611 ; ##########
612 vcs.server.enable = true
612 vcs.server.enable = true
613 vcs.server = vcsserver:10010
613 vcs.server = vcsserver:10010
614
614
615 ; Web server connectivity protocol, responsible for web based VCS operations
615 ; Web server connectivity protocol, responsible for web based VCS operations
616 ; Available protocols are:
616 ; Available protocols are:
617 ; `http` - use http-rpc backend (default)
617 ; `http` - use http-rpc backend (default)
618 vcs.server.protocol = http
618 vcs.server.protocol = http
619
619
620 ; Push/Pull operations protocol, available options are:
620 ; Push/Pull operations protocol, available options are:
621 ; `http` - use http-rpc backend (default)
621 ; `http` - use http-rpc backend (default)
622 vcs.scm_app_implementation = http
622 vcs.scm_app_implementation = http
623
623
624 ; Push/Pull operations hooks protocol, available options are:
624 ; Push/Pull operations hooks protocol, available options are:
625 ; `http` - use http-rpc backend (default)
625 ; `http` - use http-rpc backend (default)
626 ; `celery` - use celery based hooks
626 ; `celery` - use celery based hooks
627 vcs.hooks.protocol = http
627 vcs.hooks.protocol = http
628
628
629 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
629 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
630 ; accessible via network.
630 ; accessible via network.
631 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
631 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
632 vcs.hooks.host = *
632 vcs.hooks.host = *
633
633
634 ; Start VCSServer with this instance as a subprocess, useful for development
634 ; Start VCSServer with this instance as a subprocess, useful for development
635 vcs.start_server = false
635 vcs.start_server = false
636
636
637 ; List of enabled VCS backends, available options are:
637 ; List of enabled VCS backends, available options are:
638 ; `hg` - mercurial
638 ; `hg` - mercurial
639 ; `git` - git
639 ; `git` - git
640 ; `svn` - subversion
640 ; `svn` - subversion
641 vcs.backends = hg, git, svn
641 vcs.backends = hg, git, svn
642
642
643 ; Wait this number of seconds before killing connection to the vcsserver
643 ; Wait this number of seconds before killing connection to the vcsserver
644 vcs.connection_timeout = 3600
644 vcs.connection_timeout = 3600
645
645
646 ; Cache flag to cache vcsserver remote calls locally
646 ; Cache flag to cache vcsserver remote calls locally
647 ; It uses cache_region `cache_repo`
647 ; It uses cache_region `cache_repo`
648 vcs.methods.cache = true
648 vcs.methods.cache = true
649
649
650 ; ####################################################
650 ; ####################################################
651 ; Subversion proxy support (mod_dav_svn)
651 ; Subversion proxy support (mod_dav_svn)
652 ; Maps RhodeCode repo groups into SVN paths for Apache
652 ; Maps RhodeCode repo groups into SVN paths for Apache
653 ; ####################################################
653 ; ####################################################
654
654
655 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
655 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
656 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
656 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
657 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
657 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
658 #vcs.svn.compatible_version = 1.8
658 #vcs.svn.compatible_version = 1.8
659
659
660 ; Redis connection settings for svn integrations logic
661 ; This connection string needs to be the same on CE and vcsserver
662 vcs.svn.redis_conn = redis://redis:6379/0
663
660 ; Enable SVN proxy of requests over HTTP
664 ; Enable SVN proxy of requests over HTTP
661 vcs.svn.proxy.enabled = true
665 vcs.svn.proxy.enabled = true
662
666
663 ; host to connect to running SVN subsystem
667 ; host to connect to running SVN subsystem
664 vcs.svn.proxy.host = http://svn:8090
668 vcs.svn.proxy.host = http://svn:8090
665
669
666 ; Enable or disable the config file generation.
670 ; Enable or disable the config file generation.
667 svn.proxy.generate_config = true
671 svn.proxy.generate_config = true
668
672
669 ; Generate config file with `SVNListParentPath` set to `On`.
673 ; Generate config file with `SVNListParentPath` set to `On`.
670 svn.proxy.list_parent_path = true
674 svn.proxy.list_parent_path = true
671
675
672 ; Set location and file name of generated config file.
676 ; Set location and file name of generated config file.
673 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
677 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
674
678
675 ; alternative mod_dav config template. This needs to be a valid mako template
679 ; alternative mod_dav config template. This needs to be a valid mako template
676 ; Example template can be found in the source code:
680 ; Example template can be found in the source code:
677 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
681 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
678 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
682 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
679
683
680 ; Used as a prefix to the `Location` block in the generated config file.
684 ; Used as a prefix to the `Location` block in the generated config file.
681 ; In most cases it should be set to `/`.
685 ; In most cases it should be set to `/`.
682 svn.proxy.location_root = /
686 svn.proxy.location_root = /
683
687
684 ; Command to reload the mod dav svn configuration on change.
688 ; Command to reload the mod dav svn configuration on change.
685 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
689 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
686 ; Make sure user who runs RhodeCode process is allowed to reload Apache
690 ; Make sure user who runs RhodeCode process is allowed to reload Apache
687 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
691 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
688
692
689 ; If the timeout expires before the reload command finishes, the command will
693 ; If the timeout expires before the reload command finishes, the command will
690 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
694 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
691 #svn.proxy.reload_timeout = 10
695 #svn.proxy.reload_timeout = 10
692
696
693 ; ####################
697 ; ####################
694 ; SSH Support Settings
698 ; SSH Support Settings
695 ; ####################
699 ; ####################
696
700
697 ; Defines if a custom authorized_keys file should be created and written on
701 ; Defines if a custom authorized_keys file should be created and written on
698 ; any change of user SSH keys. Setting this to false also disables the possibility
702 ; any change of user SSH keys. Setting this to false also disables the possibility
699 ; of adding SSH keys by users from the web interface. Super admins can still
703 ; of adding SSH keys by users from the web interface. Super admins can still
700 ; manage SSH Keys.
704 ; manage SSH Keys.
701 ssh.generate_authorized_keyfile = true
705 ssh.generate_authorized_keyfile = true
702
706
703 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
707 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
704 # ssh.authorized_keys_ssh_opts =
708 # ssh.authorized_keys_ssh_opts =
705
709
706 ; Path to the authorized_keys file where the generated entries are placed.
710 ; Path to the authorized_keys file where the generated entries are placed.
707 ; It is possible to have multiple key files specified in `sshd_config` e.g.
711 ; It is possible to have multiple key files specified in `sshd_config` e.g.
708 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
712 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
709 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
713 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
710
714
711 ; Command to execute the SSH wrapper. The binary is available in the
715 ; Command to execute the SSH wrapper. The binary is available in the
712 ; RhodeCode installation directory.
716 ; RhodeCode installation directory.
713 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
717 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
714 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
718 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
715 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
719 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
716
720
717 ; Allow shell when executing the ssh-wrapper command
721 ; Allow shell when executing the ssh-wrapper command
718 ssh.wrapper_cmd_allow_shell = false
722 ssh.wrapper_cmd_allow_shell = false
719
723
720 ; Enables logging, and detailed output sent back to the client during SSH
724 ; Enables logging, and detailed output sent back to the client during SSH
721 ; operations. Useful for debugging, shouldn't be used in production.
725 ; operations. Useful for debugging, shouldn't be used in production.
722 ssh.enable_debug_logging = true
726 ssh.enable_debug_logging = true
723
727
724 ; Paths to binary executables, by default they are the names, but we can
728 ; Paths to binary executables, by default they are the names, but we can
725 ; override them if we want to use a custom one
729 ; override them if we want to use a custom one
726 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
730 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
727 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
731 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
728 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
732 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
729
733
730 ; Enables SSH key generator web interface. Disabling this still allows users
734 ; Enables SSH key generator web interface. Disabling this still allows users
731 ; to add their own keys.
735 ; to add their own keys.
732 ssh.enable_ui_key_generator = true
736 ssh.enable_ui_key_generator = true
733
737
734 ; Statsd client config, this is used to send metrics to statsd
738 ; Statsd client config, this is used to send metrics to statsd
735 ; We recommend setting statsd_exported and scrape them using Prometheus
739 ; We recommend setting statsd_exported and scrape them using Prometheus
736 #statsd.enabled = false
740 #statsd.enabled = false
737 #statsd.statsd_host = 0.0.0.0
741 #statsd.statsd_host = 0.0.0.0
738 #statsd.statsd_port = 8125
742 #statsd.statsd_port = 8125
739 #statsd.statsd_prefix =
743 #statsd.statsd_prefix =
740 #statsd.statsd_ipv6 = false
744 #statsd.statsd_ipv6 = false
741
745
742 ; configure logging automatically at server startup set to false
746 ; configure logging automatically at server startup set to false
743 ; to use the below custom logging config.
747 ; to use the below custom logging config.
744 ; RC_LOGGING_FORMATTER
748 ; RC_LOGGING_FORMATTER
745 ; RC_LOGGING_LEVEL
749 ; RC_LOGGING_LEVEL
746 ; env variables can control the settings for logging in case of autoconfigure
750 ; env variables can control the settings for logging in case of autoconfigure
747
751
748 #logging.autoconfigure = true
752 #logging.autoconfigure = true
749
753
750 ; specify your own custom logging config file to configure logging
754 ; specify your own custom logging config file to configure logging
751 #logging.logging_conf_file = /path/to/custom_logging.ini
755 #logging.logging_conf_file = /path/to/custom_logging.ini
752
756
753 ; Dummy marker to add new entries after.
757 ; Dummy marker to add new entries after.
754 ; Add any custom entries below. Please don't remove this marker.
758 ; Add any custom entries below. Please don't remove this marker.
755 custom.conf = 1
759 custom.conf = 1
756
760
757
761
758 ; #####################
762 ; #####################
759 ; LOGGING CONFIGURATION
763 ; LOGGING CONFIGURATION
760 ; #####################
764 ; #####################
761
765
762 [loggers]
766 [loggers]
763 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
767 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
764
768
765 [handlers]
769 [handlers]
766 keys = console, console_sql
770 keys = console, console_sql
767
771
768 [formatters]
772 [formatters]
769 keys = generic, json, color_formatter, color_formatter_sql
773 keys = generic, json, color_formatter, color_formatter_sql
770
774
771 ; #######
775 ; #######
772 ; LOGGERS
776 ; LOGGERS
773 ; #######
777 ; #######
774 [logger_root]
778 [logger_root]
775 level = NOTSET
779 level = NOTSET
776 handlers = console
780 handlers = console
777
781
778 [logger_sqlalchemy]
782 [logger_sqlalchemy]
779 level = INFO
783 level = INFO
780 handlers = console_sql
784 handlers = console_sql
781 qualname = sqlalchemy.engine
785 qualname = sqlalchemy.engine
782 propagate = 0
786 propagate = 0
783
787
784 [logger_beaker]
788 [logger_beaker]
785 level = DEBUG
789 level = DEBUG
786 handlers =
790 handlers =
787 qualname = beaker.container
791 qualname = beaker.container
788 propagate = 1
792 propagate = 1
789
793
790 [logger_rhodecode]
794 [logger_rhodecode]
791 level = DEBUG
795 level = DEBUG
792 handlers =
796 handlers =
793 qualname = rhodecode
797 qualname = rhodecode
794 propagate = 1
798 propagate = 1
795
799
796 [logger_ssh_wrapper]
800 [logger_ssh_wrapper]
797 level = DEBUG
801 level = DEBUG
798 handlers =
802 handlers =
799 qualname = ssh_wrapper
803 qualname = ssh_wrapper
800 propagate = 1
804 propagate = 1
801
805
802 [logger_celery]
806 [logger_celery]
803 level = DEBUG
807 level = DEBUG
804 handlers =
808 handlers =
805 qualname = celery
809 qualname = celery
806
810
807
811
808 ; ########
812 ; ########
809 ; HANDLERS
813 ; HANDLERS
810 ; ########
814 ; ########
811
815
812 [handler_console]
816 [handler_console]
813 class = StreamHandler
817 class = StreamHandler
814 args = (sys.stderr, )
818 args = (sys.stderr, )
815 level = DEBUG
819 level = DEBUG
816 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
820 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
817 ; This allows sending properly formatted logs to grafana loki or elasticsearch
821 ; This allows sending properly formatted logs to grafana loki or elasticsearch
818 formatter = color_formatter
822 formatter = color_formatter
819
823
820 [handler_console_sql]
824 [handler_console_sql]
821 ; "level = DEBUG" logs SQL queries and results.
825 ; "level = DEBUG" logs SQL queries and results.
822 ; "level = INFO" logs SQL queries.
826 ; "level = INFO" logs SQL queries.
823 ; "level = WARN" logs neither. (Recommended for production systems.)
827 ; "level = WARN" logs neither. (Recommended for production systems.)
824 class = StreamHandler
828 class = StreamHandler
825 args = (sys.stderr, )
829 args = (sys.stderr, )
826 level = WARN
830 level = WARN
827 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
831 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
828 ; This allows sending properly formatted logs to grafana loki or elasticsearch
832 ; This allows sending properly formatted logs to grafana loki or elasticsearch
829 formatter = color_formatter_sql
833 formatter = color_formatter_sql
830
834
831 ; ##########
835 ; ##########
832 ; FORMATTERS
836 ; FORMATTERS
833 ; ##########
837 ; ##########
834
838
835 [formatter_generic]
839 [formatter_generic]
836 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
840 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
837 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
841 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
838 datefmt = %Y-%m-%d %H:%M:%S
842 datefmt = %Y-%m-%d %H:%M:%S
839
843
840 [formatter_color_formatter]
844 [formatter_color_formatter]
841 class = rhodecode.lib.logging_formatter.ColorFormatter
845 class = rhodecode.lib.logging_formatter.ColorFormatter
842 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
846 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
843 datefmt = %Y-%m-%d %H:%M:%S
847 datefmt = %Y-%m-%d %H:%M:%S
844
848
845 [formatter_color_formatter_sql]
849 [formatter_color_formatter_sql]
846 class = rhodecode.lib.logging_formatter.ColorFormatterSql
850 class = rhodecode.lib.logging_formatter.ColorFormatterSql
847 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
851 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
848 datefmt = %Y-%m-%d %H:%M:%S
852 datefmt = %Y-%m-%d %H:%M:%S
849
853
850 [formatter_json]
854 [formatter_json]
851 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
855 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
852 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
856 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,820 +1,824 b''
1
1
2 ; #########################################
2 ; #########################################
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 ; #########################################
4 ; #########################################
5
5
6 [DEFAULT]
6 [DEFAULT]
7 ; Debug flag sets all loggers to debug, and enables request tracking
7 ; Debug flag sets all loggers to debug, and enables request tracking
8 debug = false
8 debug = false
9
9
10 ; ########################################################################
10 ; ########################################################################
11 ; EMAIL CONFIGURATION
11 ; EMAIL CONFIGURATION
12 ; These settings will be used by the RhodeCode mailing system
12 ; These settings will be used by the RhodeCode mailing system
13 ; ########################################################################
13 ; ########################################################################
14
14
15 ; prefix all emails subjects with given prefix, helps filtering out emails
15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 #email_prefix = [RhodeCode]
16 #email_prefix = [RhodeCode]
17
17
18 ; email FROM address all mails will be sent
18 ; email FROM address all mails will be sent
19 #app_email_from = rhodecode-noreply@localhost
19 #app_email_from = rhodecode-noreply@localhost
20
20
21 #smtp_server = mail.server.com
21 #smtp_server = mail.server.com
22 #smtp_username =
22 #smtp_username =
23 #smtp_password =
23 #smtp_password =
24 #smtp_port =
24 #smtp_port =
25 #smtp_use_tls = false
25 #smtp_use_tls = false
26 #smtp_use_ssl = true
26 #smtp_use_ssl = true
27
27
28 [server:main]
28 [server:main]
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 host = 127.0.0.1
31 host = 127.0.0.1
32 port = 10020
32 port = 10020
33
33
34
34
35 ; ###########################
35 ; ###########################
36 ; GUNICORN APPLICATION SERVER
36 ; GUNICORN APPLICATION SERVER
37 ; ###########################
37 ; ###########################
38
38
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40
40
41 ; Module to use, this setting shouldn't be changed
41 ; Module to use, this setting shouldn't be changed
42 use = egg:gunicorn#main
42 use = egg:gunicorn#main
43
43
44 ; Prefix middleware for RhodeCode.
44 ; Prefix middleware for RhodeCode.
45 ; recommended when using proxy setup.
45 ; recommended when using proxy setup.
46 ; allows to set RhodeCode under a prefix in server.
46 ; allows to set RhodeCode under a prefix in server.
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 ; And set your prefix like: `prefix = /custom_prefix`
48 ; And set your prefix like: `prefix = /custom_prefix`
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 ; to make your cookies only work on prefix url
50 ; to make your cookies only work on prefix url
51 [filter:proxy-prefix]
51 [filter:proxy-prefix]
52 use = egg:PasteDeploy#prefix
52 use = egg:PasteDeploy#prefix
53 prefix = /
53 prefix = /
54
54
55 [app:main]
55 [app:main]
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 ; of this file
57 ; of this file
58 ; Each option in the app:main can be overridden by an environment variable
58 ; Each option in the app:main can be overridden by an environment variable
59 ;
59 ;
60 ;To override an option:
60 ;To override an option:
61 ;
61 ;
62 ;RC_<KeyName>
62 ;RC_<KeyName>
63 ;Everything should be uppercase, . and - should be replaced by _.
63 ;Everything should be uppercase, . and - should be replaced by _.
64 ;For example, if you have these configuration settings:
64 ;For example, if you have these configuration settings:
65 ;rc_cache.repo_object.backend = foo
65 ;rc_cache.repo_object.backend = foo
66 ;can be overridden by
66 ;can be overridden by
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68
68
69 use = egg:rhodecode-enterprise-ce
69 use = egg:rhodecode-enterprise-ce
70
70
71 ; enable proxy prefix middleware, defined above
71 ; enable proxy prefix middleware, defined above
72 #filter-with = proxy-prefix
72 #filter-with = proxy-prefix
73
73
74 ; encryption key used to encrypt social plugin tokens,
74 ; encryption key used to encrypt social plugin tokens,
75 ; remote_urls with credentials etc, if not set it defaults to
75 ; remote_urls with credentials etc, if not set it defaults to
76 ; `beaker.session.secret`
76 ; `beaker.session.secret`
77 #rhodecode.encrypted_values.secret =
77 #rhodecode.encrypted_values.secret =
78
78
79 ; decryption strict mode (enabled by default). It controls if decryption raises
79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 #rhodecode.encrypted_values.strict = false
81 #rhodecode.encrypted_values.strict = false
82
82
83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 ; fernet is safer, and we strongly recommend switching to it.
84 ; fernet is safer, and we strongly recommend switching to it.
85 ; Due to backward compatibility aes is used as default.
85 ; Due to backward compatibility aes is used as default.
86 #rhodecode.encrypted_values.algorithm = fernet
86 #rhodecode.encrypted_values.algorithm = fernet
87
87
88 ; Return gzipped responses from RhodeCode (static files/application)
88 ; Return gzipped responses from RhodeCode (static files/application)
89 gzip_responses = false
89 gzip_responses = false
90
90
91 ; Auto-generate javascript routes file on startup
91 ; Auto-generate javascript routes file on startup
92 generate_js_files = false
92 generate_js_files = false
93
93
94 ; System global default language.
94 ; System global default language.
95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 lang = en
96 lang = en
97
97
98 ; Perform a full repository scan and import on each server start.
98 ; Perform a full repository scan and import on each server start.
99 ; Setting this to true could lead to a very long startup time.
99 ; Setting this to true could lead to a very long startup time.
100 startup.import_repos = false
100 startup.import_repos = false
101
101
102 ; URL at which the application is running. This is used for Bootstrapping
102 ; URL at which the application is running. This is used for Bootstrapping
103 ; requests in context when no web request is available. Used in ishell, or
103 ; requests in context when no web request is available. Used in ishell, or
104 ; SSH calls. Set this for events to receive proper url for SSH calls.
104 ; SSH calls. Set this for events to receive proper url for SSH calls.
105 app.base_url = http://rhodecode.local
105 app.base_url = http://rhodecode.local
106
106
107 ; Host at which the Service API is running.
107 ; Host at which the Service API is running.
108 app.service_api.host = http://rhodecode.local:10020
108 app.service_api.host = http://rhodecode.local:10020
109
109
110 ; Secret for Service API authentication.
110 ; Secret for Service API authentication.
111 app.service_api.token =
111 app.service_api.token =
112
112
113 ; Unique application ID. Should be a random unique string for security.
113 ; Unique application ID. Should be a random unique string for security.
114 app_instance_uuid = rc-production
114 app_instance_uuid = rc-production
115
115
116 ; Cut off limit for large diffs (size in bytes). If overall diff size on
116 ; Cut off limit for large diffs (size in bytes). If overall diff size on
117 ; commit, or pull request exceeds this limit this diff will be displayed
117 ; commit, or pull request exceeds this limit this diff will be displayed
118 ; partially. E.g 512000 == 512Kb
118 ; partially. E.g 512000 == 512Kb
119 cut_off_limit_diff = 512000
119 cut_off_limit_diff = 512000
120
120
121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
122 ; file inside diff which exceeds this limit will be displayed partially.
122 ; file inside diff which exceeds this limit will be displayed partially.
123 ; E.g 128000 == 128Kb
123 ; E.g 128000 == 128Kb
124 cut_off_limit_file = 128000
124 cut_off_limit_file = 128000
125
125
126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
127 vcs_full_cache = true
127 vcs_full_cache = true
128
128
129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
131 force_https = false
131 force_https = false
132
132
133 ; use Strict-Transport-Security headers
133 ; use Strict-Transport-Security headers
134 use_htsts = false
134 use_htsts = false
135
135
136 ; Set to true if your repos are exposed using the dumb protocol
136 ; Set to true if your repos are exposed using the dumb protocol
137 git_update_server_info = false
137 git_update_server_info = false
138
138
139 ; RSS/ATOM feed options
139 ; RSS/ATOM feed options
140 rss_cut_off_limit = 256000
140 rss_cut_off_limit = 256000
141 rss_items_per_page = 10
141 rss_items_per_page = 10
142 rss_include_diff = false
142 rss_include_diff = false
143
143
144 ; gist URL alias, used to create nicer urls for gist. This should be an
144 ; gist URL alias, used to create nicer urls for gist. This should be an
145 ; url that does rewrites to _admin/gists/{gistid}.
145 ; url that does rewrites to _admin/gists/{gistid}.
146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
147 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
147 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
148 gist_alias_url =
148 gist_alias_url =
149
149
150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
151 ; used for access.
151 ; used for access.
152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
153 ; came from the logged-in user who owns this authentication token.
153 ; came from the logged-in user who owns this authentication token.
154 ; Additionally @TOKEN syntax can be used to bind the view to a specific
154 ; Additionally @TOKEN syntax can be used to bind the view to a specific
155 ; authentication token. Such view would be only accessible when used together
155 ; authentication token. Such view would be only accessible when used together
156 ; with this authentication token
156 ; with this authentication token
157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
158 ; The list should be "," separated and on a single line.
158 ; The list should be "," separated and on a single line.
159 ; Most common views to enable:
159 ; Most common views to enable:
160
160
161 # RepoCommitsView:repo_commit_download
161 # RepoCommitsView:repo_commit_download
162 # RepoCommitsView:repo_commit_patch
162 # RepoCommitsView:repo_commit_patch
163 # RepoCommitsView:repo_commit_raw
163 # RepoCommitsView:repo_commit_raw
164 # RepoCommitsView:repo_commit_raw@TOKEN
164 # RepoCommitsView:repo_commit_raw@TOKEN
165 # RepoFilesView:repo_files_diff
165 # RepoFilesView:repo_files_diff
166 # RepoFilesView:repo_archivefile
166 # RepoFilesView:repo_archivefile
167 # RepoFilesView:repo_file_raw
167 # RepoFilesView:repo_file_raw
168 # GistView:*
168 # GistView:*
169 api_access_controllers_whitelist =
169 api_access_controllers_whitelist =
170
170
171 ; Default encoding used to convert from and to unicode
171 ; Default encoding used to convert from and to unicode
172 ; can be also a comma separated list of encoding in case of mixed encodings
172 ; can be also a comma separated list of encoding in case of mixed encodings
173 default_encoding = UTF-8
173 default_encoding = UTF-8
174
174
175 ; instance-id prefix
175 ; instance-id prefix
176 ; a prefix key for this instance used for cache invalidation when running
176 ; a prefix key for this instance used for cache invalidation when running
177 ; multiple instances of RhodeCode, make sure it's globally unique for
177 ; multiple instances of RhodeCode, make sure it's globally unique for
178 ; all running RhodeCode instances. Leave empty if you don't use it
178 ; all running RhodeCode instances. Leave empty if you don't use it
179 instance_id =
179 instance_id =
180
180
181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
182 ; of an authentication plugin also if it is disabled by it's settings.
182 ; of an authentication plugin also if it is disabled by it's settings.
183 ; This could be useful if you are unable to log in to the system due to broken
183 ; This could be useful if you are unable to log in to the system due to broken
184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
185 ; module to log in again and fix the settings.
185 ; module to log in again and fix the settings.
186 ; Available builtin plugin IDs (hash is part of the ID):
186 ; Available builtin plugin IDs (hash is part of the ID):
187 ; egg:rhodecode-enterprise-ce#rhodecode
187 ; egg:rhodecode-enterprise-ce#rhodecode
188 ; egg:rhodecode-enterprise-ce#pam
188 ; egg:rhodecode-enterprise-ce#pam
189 ; egg:rhodecode-enterprise-ce#ldap
189 ; egg:rhodecode-enterprise-ce#ldap
190 ; egg:rhodecode-enterprise-ce#jasig_cas
190 ; egg:rhodecode-enterprise-ce#jasig_cas
191 ; egg:rhodecode-enterprise-ce#headers
191 ; egg:rhodecode-enterprise-ce#headers
192 ; egg:rhodecode-enterprise-ce#crowd
192 ; egg:rhodecode-enterprise-ce#crowd
193
193
194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
195
195
196 ; Flag to control loading of legacy plugins in py:/path format
196 ; Flag to control loading of legacy plugins in py:/path format
197 auth_plugin.import_legacy_plugins = true
197 auth_plugin.import_legacy_plugins = true
198
198
199 ; alternative return HTTP header for failed authentication. Default HTTP
199 ; alternative return HTTP header for failed authentication. Default HTTP
200 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
200 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
201 ; handling that causing a series of failed authentication calls.
201 ; handling that causing a series of failed authentication calls.
202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
203 ; This will be served instead of default 401 on bad authentication
203 ; This will be served instead of default 401 on bad authentication
204 auth_ret_code =
204 auth_ret_code =
205
205
206 ; use special detection method when serving auth_ret_code, instead of serving
206 ; use special detection method when serving auth_ret_code, instead of serving
207 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
207 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
208 ; and then serve auth_ret_code to clients
208 ; and then serve auth_ret_code to clients
209 auth_ret_code_detection = false
209 auth_ret_code_detection = false
210
210
211 ; locking return code. When repository is locked return this HTTP code. 2XX
211 ; locking return code. When repository is locked return this HTTP code. 2XX
212 ; codes don't break the transactions while 4XX codes do
212 ; codes don't break the transactions while 4XX codes do
213 lock_ret_code = 423
213 lock_ret_code = 423
214
214
215 ; Filesystem location where repositories should be stored
215 ; Filesystem location where repositories should be stored
216 repo_store.path = /var/opt/rhodecode_repo_store
216 repo_store.path = /var/opt/rhodecode_repo_store
217
217
218 ; allows to setup custom hooks in settings page
218 ; allows to setup custom hooks in settings page
219 allow_custom_hooks_settings = true
219 allow_custom_hooks_settings = true
220
220
221 ; Generated license token required for EE edition license.
221 ; Generated license token required for EE edition license.
222 ; New generated token value can be found in Admin > settings > license page.
222 ; New generated token value can be found in Admin > settings > license page.
223 license_token =
223 license_token =
224
224
225 ; This flag hides sensitive information on the license page such as token, and license data
225 ; This flag hides sensitive information on the license page such as token, and license data
226 license.hide_license_info = false
226 license.hide_license_info = false
227
227
228 ; supervisor connection uri, for managing supervisor and logs.
228 ; supervisor connection uri, for managing supervisor and logs.
229 supervisor.uri =
229 supervisor.uri =
230
230
231 ; supervisord group name/id we only want this RC instance to handle
231 ; supervisord group name/id we only want this RC instance to handle
232 supervisor.group_id = prod
232 supervisor.group_id = prod
233
233
234 ; Display extended labs settings
234 ; Display extended labs settings
235 labs_settings_active = true
235 labs_settings_active = true
236
236
237 ; Custom exception store path, defaults to TMPDIR
237 ; Custom exception store path, defaults to TMPDIR
238 ; This is used to store exception from RhodeCode in shared directory
238 ; This is used to store exception from RhodeCode in shared directory
239 #exception_tracker.store_path =
239 #exception_tracker.store_path =
240
240
241 ; Send email with exception details when it happens
241 ; Send email with exception details when it happens
242 #exception_tracker.send_email = false
242 #exception_tracker.send_email = false
243
243
244 ; Comma separated list of recipients for exception emails,
244 ; Comma separated list of recipients for exception emails,
245 ; e.g admin@rhodecode.com,devops@rhodecode.com
245 ; e.g admin@rhodecode.com,devops@rhodecode.com
246 ; Can be left empty, then emails will be sent to ALL super-admins
246 ; Can be left empty, then emails will be sent to ALL super-admins
247 #exception_tracker.send_email_recipients =
247 #exception_tracker.send_email_recipients =
248
248
249 ; optional prefix to Add to email Subject
249 ; optional prefix to Add to email Subject
250 #exception_tracker.email_prefix = [RHODECODE ERROR]
250 #exception_tracker.email_prefix = [RHODECODE ERROR]
251
251
252 ; File store configuration. This is used to store and serve uploaded files
252 ; File store configuration. This is used to store and serve uploaded files
253 file_store.enabled = true
253 file_store.enabled = true
254
254
255 ; Storage backend, available options are: local
255 ; Storage backend, available options are: local
256 file_store.backend = local
256 file_store.backend = local
257
257
258 ; path to store the uploaded binaries and artifacts
258 ; path to store the uploaded binaries and artifacts
259 file_store.storage_path = /var/opt/rhodecode_data/file_store
259 file_store.storage_path = /var/opt/rhodecode_data/file_store
260
260
261
261
262 ; Redis url to acquire/check generation of archives locks
262 ; Redis url to acquire/check generation of archives locks
263 archive_cache.locking.url = redis://redis:6379/1
263 archive_cache.locking.url = redis://redis:6379/1
264
264
265 ; Storage backend, only 'filesystem' and 'objectstore' are available now
265 ; Storage backend, only 'filesystem' and 'objectstore' are available now
266 archive_cache.backend.type = filesystem
266 archive_cache.backend.type = filesystem
267
267
268 ; url for s3 compatible storage that allows to upload artifacts
268 ; url for s3 compatible storage that allows to upload artifacts
269 ; e.g http://minio:9000
269 ; e.g http://minio:9000
270 archive_cache.objectstore.url = http://s3-minio:9000
270 archive_cache.objectstore.url = http://s3-minio:9000
271
271
272 ; key for s3 auth
272 ; key for s3 auth
273 archive_cache.objectstore.key = key
273 archive_cache.objectstore.key = key
274
274
275 ; secret for s3 auth
275 ; secret for s3 auth
276 archive_cache.objectstore.secret = secret
276 archive_cache.objectstore.secret = secret
277
277
278 ;region for s3 storage
278 ;region for s3 storage
279 archive_cache.objectstore.region = eu-central-1
279 archive_cache.objectstore.region = eu-central-1
280
280
281 ; number of sharded buckets to create to distribute archives across
281 ; number of sharded buckets to create to distribute archives across
282 ; default is 8 shards
282 ; default is 8 shards
283 archive_cache.objectstore.bucket_shards = 8
283 archive_cache.objectstore.bucket_shards = 8
284
284
285 ; a top-level bucket to put all other shards in
285 ; a top-level bucket to put all other shards in
286 ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
286 ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
287 archive_cache.objectstore.bucket = rhodecode-archive-cache
287 archive_cache.objectstore.bucket = rhodecode-archive-cache
288
288
289 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
289 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
290 archive_cache.objectstore.retry = false
290 archive_cache.objectstore.retry = false
291
291
292 ; number of seconds to wait for next try using retry
292 ; number of seconds to wait for next try using retry
293 archive_cache.objectstore.retry_backoff = 1
293 archive_cache.objectstore.retry_backoff = 1
294
294
295 ; how many tries to do for a retry fetch from this backend
295 ; how many tries to do for a retry fetch from this backend
296 archive_cache.objectstore.retry_attempts = 10
296 archive_cache.objectstore.retry_attempts = 10
297
297
298 ; Default is $cache_dir/archive_cache if not set
298 ; Default is $cache_dir/archive_cache if not set
299 ; Generated repo archives will be cached at this location
299 ; Generated repo archives will be cached at this location
300 ; and served from the cache during subsequent requests for the same archive of
300 ; and served from the cache during subsequent requests for the same archive of
301 ; the repository. This path is important to be shared across filesystems and with
301 ; the repository. This path is important to be shared across filesystems and with
302 ; RhodeCode and vcsserver
302 ; RhodeCode and vcsserver
303 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
303 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
304
304
305 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
305 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
306 archive_cache.filesystem.cache_size_gb = 40
306 archive_cache.filesystem.cache_size_gb = 40
307
307
308 ; Eviction policy used to clear out after cache_size_gb limit is reached
308 ; Eviction policy used to clear out after cache_size_gb limit is reached
309 archive_cache.filesystem.eviction_policy = least-recently-stored
309 archive_cache.filesystem.eviction_policy = least-recently-stored
310
310
311 ; By default cache uses sharding technique, this specifies how many shards are there
311 ; By default cache uses sharding technique, this specifies how many shards are there
312 ; default is 8 shards
312 ; default is 8 shards
313 archive_cache.filesystem.cache_shards = 8
313 archive_cache.filesystem.cache_shards = 8
314
314
315 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
315 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
316 archive_cache.filesystem.retry = false
316 archive_cache.filesystem.retry = false
317
317
318 ; number of seconds to wait for next try using retry
318 ; number of seconds to wait for next try using retry
319 archive_cache.filesystem.retry_backoff = 1
319 archive_cache.filesystem.retry_backoff = 1
320
320
321 ; how many tries to do for a retry fetch from this backend
321 ; how many tries to do for a retry fetch from this backend
322 archive_cache.filesystem.retry_attempts = 10
322 archive_cache.filesystem.retry_attempts = 10
323
323
324
324
325 ; #############
325 ; #############
326 ; CELERY CONFIG
326 ; CELERY CONFIG
327 ; #############
327 ; #############
328
328
329 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
329 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
330
330
331 use_celery = true
331 use_celery = true
332
332
333 ; path to store schedule database
333 ; path to store schedule database
334 #celerybeat-schedule.path =
334 #celerybeat-schedule.path =
335
335
336 ; connection url to the message broker (default redis)
336 ; connection url to the message broker (default redis)
337 celery.broker_url = redis://redis:6379/8
337 celery.broker_url = redis://redis:6379/8
338
338
339 ; results backend to get results for (default redis)
339 ; results backend to get results for (default redis)
340 celery.result_backend = redis://redis:6379/8
340 celery.result_backend = redis://redis:6379/8
341
341
342 ; rabbitmq example
342 ; rabbitmq example
343 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
343 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
344
344
345 ; maximum tasks to execute before worker restart
345 ; maximum tasks to execute before worker restart
346 celery.max_tasks_per_child = 20
346 celery.max_tasks_per_child = 20
347
347
348 ; tasks will never be sent to the queue, but executed locally instead.
348 ; tasks will never be sent to the queue, but executed locally instead.
349 celery.task_always_eager = false
349 celery.task_always_eager = false
350
350
351 ; #############
351 ; #############
352 ; DOGPILE CACHE
352 ; DOGPILE CACHE
353 ; #############
353 ; #############
354
354
355 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
355 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
356 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
356 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
357 cache_dir = /var/opt/rhodecode_data
357 cache_dir = /var/opt/rhodecode_data
358
358
359 ; *********************************************
359 ; *********************************************
360 ; `sql_cache_short` cache for heavy SQL queries
360 ; `sql_cache_short` cache for heavy SQL queries
361 ; Only supported backend is `memory_lru`
361 ; Only supported backend is `memory_lru`
362 ; *********************************************
362 ; *********************************************
363 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
363 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
364 rc_cache.sql_cache_short.expiration_time = 30
364 rc_cache.sql_cache_short.expiration_time = 30
365
365
366
366
367 ; *****************************************************
367 ; *****************************************************
368 ; `cache_repo_longterm` cache for repo object instances
368 ; `cache_repo_longterm` cache for repo object instances
369 ; Only supported backend is `memory_lru`
369 ; Only supported backend is `memory_lru`
370 ; *****************************************************
370 ; *****************************************************
371 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
371 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
372 ; by default we use 30 Days, cache is still invalidated on push
372 ; by default we use 30 Days, cache is still invalidated on push
373 rc_cache.cache_repo_longterm.expiration_time = 2592000
373 rc_cache.cache_repo_longterm.expiration_time = 2592000
374 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
374 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
375 rc_cache.cache_repo_longterm.max_size = 10000
375 rc_cache.cache_repo_longterm.max_size = 10000
376
376
377
377
378 ; *********************************************
378 ; *********************************************
379 ; `cache_general` cache for general purpose use
379 ; `cache_general` cache for general purpose use
380 ; for simplicity use rc.file_namespace backend,
380 ; for simplicity use rc.file_namespace backend,
381 ; for performance and scale use rc.redis
381 ; for performance and scale use rc.redis
382 ; *********************************************
382 ; *********************************************
383 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
383 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
384 rc_cache.cache_general.expiration_time = 43200
384 rc_cache.cache_general.expiration_time = 43200
385 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
385 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
386 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
386 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
387
387
388 ; alternative `cache_general` redis backend with distributed lock
388 ; alternative `cache_general` redis backend with distributed lock
389 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
389 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
390 #rc_cache.cache_general.expiration_time = 300
390 #rc_cache.cache_general.expiration_time = 300
391
391
392 ; redis_expiration_time needs to be greater than expiration_time
392 ; redis_expiration_time needs to be greater than expiration_time
393 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
393 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
394
394
395 #rc_cache.cache_general.arguments.host = localhost
395 #rc_cache.cache_general.arguments.host = localhost
396 #rc_cache.cache_general.arguments.port = 6379
396 #rc_cache.cache_general.arguments.port = 6379
397 #rc_cache.cache_general.arguments.db = 0
397 #rc_cache.cache_general.arguments.db = 0
398 #rc_cache.cache_general.arguments.socket_timeout = 30
398 #rc_cache.cache_general.arguments.socket_timeout = 30
399 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
399 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
400 #rc_cache.cache_general.arguments.distributed_lock = true
400 #rc_cache.cache_general.arguments.distributed_lock = true
401
401
402 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
402 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
403 #rc_cache.cache_general.arguments.lock_auto_renewal = true
403 #rc_cache.cache_general.arguments.lock_auto_renewal = true
404
404
405 ; *************************************************
405 ; *************************************************
406 ; `cache_perms` cache for permission tree, auth TTL
406 ; `cache_perms` cache for permission tree, auth TTL
407 ; for simplicity use rc.file_namespace backend,
407 ; for simplicity use rc.file_namespace backend,
408 ; for performance and scale use rc.redis
408 ; for performance and scale use rc.redis
409 ; *************************************************
409 ; *************************************************
410 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
410 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
411 rc_cache.cache_perms.expiration_time = 3600
411 rc_cache.cache_perms.expiration_time = 3600
412 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
412 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
413 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
413 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
414
414
415 ; alternative `cache_perms` redis backend with distributed lock
415 ; alternative `cache_perms` redis backend with distributed lock
416 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
416 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
417 #rc_cache.cache_perms.expiration_time = 300
417 #rc_cache.cache_perms.expiration_time = 300
418
418
419 ; redis_expiration_time needs to be greater than expiration_time
419 ; redis_expiration_time needs to be greater than expiration_time
420 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
420 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
421
421
422 #rc_cache.cache_perms.arguments.host = localhost
422 #rc_cache.cache_perms.arguments.host = localhost
423 #rc_cache.cache_perms.arguments.port = 6379
423 #rc_cache.cache_perms.arguments.port = 6379
424 #rc_cache.cache_perms.arguments.db = 0
424 #rc_cache.cache_perms.arguments.db = 0
425 #rc_cache.cache_perms.arguments.socket_timeout = 30
425 #rc_cache.cache_perms.arguments.socket_timeout = 30
426 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
426 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
427 #rc_cache.cache_perms.arguments.distributed_lock = true
427 #rc_cache.cache_perms.arguments.distributed_lock = true
428
428
429 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
429 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
430 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
430 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
431
431
432 ; ***************************************************
432 ; ***************************************************
433 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
433 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
434 ; for simplicity use rc.file_namespace backend,
434 ; for simplicity use rc.file_namespace backend,
435 ; for performance and scale use rc.redis
435 ; for performance and scale use rc.redis
436 ; ***************************************************
436 ; ***************************************************
437 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
437 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
438 rc_cache.cache_repo.expiration_time = 2592000
438 rc_cache.cache_repo.expiration_time = 2592000
439 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
439 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
440 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
440 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
441
441
442 ; alternative `cache_repo` redis backend with distributed lock
442 ; alternative `cache_repo` redis backend with distributed lock
443 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
443 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
444 #rc_cache.cache_repo.expiration_time = 2592000
444 #rc_cache.cache_repo.expiration_time = 2592000
445
445
446 ; redis_expiration_time needs to be greater than expiration_time
446 ; redis_expiration_time needs to be greater than expiration_time
447 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
447 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
448
448
449 #rc_cache.cache_repo.arguments.host = localhost
449 #rc_cache.cache_repo.arguments.host = localhost
450 #rc_cache.cache_repo.arguments.port = 6379
450 #rc_cache.cache_repo.arguments.port = 6379
451 #rc_cache.cache_repo.arguments.db = 1
451 #rc_cache.cache_repo.arguments.db = 1
452 #rc_cache.cache_repo.arguments.socket_timeout = 30
452 #rc_cache.cache_repo.arguments.socket_timeout = 30
453 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
453 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
454 #rc_cache.cache_repo.arguments.distributed_lock = true
454 #rc_cache.cache_repo.arguments.distributed_lock = true
455
455
456 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
456 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
457 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
457 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
458
458
459 ; ##############
459 ; ##############
460 ; BEAKER SESSION
460 ; BEAKER SESSION
461 ; ##############
461 ; ##############
462
462
463 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
463 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
464 ; types are file, ext:redis, ext:database, ext:memcached
464 ; types are file, ext:redis, ext:database, ext:memcached
465 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
465 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
466 #beaker.session.type = file
466 #beaker.session.type = file
467 #beaker.session.data_dir = %(here)s/data/sessions
467 #beaker.session.data_dir = %(here)s/data/sessions
468
468
469 ; Redis based sessions
469 ; Redis based sessions
470 beaker.session.type = ext:redis
470 beaker.session.type = ext:redis
471 beaker.session.url = redis://redis:6379/2
471 beaker.session.url = redis://redis:6379/2
472
472
473 ; DB based session, fast, and allows easy management over logged in users
473 ; DB based session, fast, and allows easy management over logged in users
474 #beaker.session.type = ext:database
474 #beaker.session.type = ext:database
475 #beaker.session.table_name = db_session
475 #beaker.session.table_name = db_session
476 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
476 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
477 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
477 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
478 #beaker.session.sa.pool_recycle = 3600
478 #beaker.session.sa.pool_recycle = 3600
479 #beaker.session.sa.echo = false
479 #beaker.session.sa.echo = false
480
480
481 beaker.session.key = rhodecode
481 beaker.session.key = rhodecode
482 beaker.session.secret = production-rc-uytcxaz
482 beaker.session.secret = production-rc-uytcxaz
483 beaker.session.lock_dir = /data_ramdisk/lock
483 beaker.session.lock_dir = /data_ramdisk/lock
484
484
485 ; Secure encrypted cookie. Requires AES and AES python libraries
485 ; Secure encrypted cookie. Requires AES and AES python libraries
486 ; you must disable beaker.session.secret to use this
486 ; you must disable beaker.session.secret to use this
487 #beaker.session.encrypt_key = key_for_encryption
487 #beaker.session.encrypt_key = key_for_encryption
488 #beaker.session.validate_key = validation_key
488 #beaker.session.validate_key = validation_key
489
489
490 ; Sets session as invalid (also logging out user) if it has not been
490 ; Sets session as invalid (also logging out user) if it has not been
491 ; accessed for given amount of time in seconds
491 ; accessed for given amount of time in seconds
492 beaker.session.timeout = 2592000
492 beaker.session.timeout = 2592000
493 beaker.session.httponly = true
493 beaker.session.httponly = true
494
494
495 ; Path to use for the cookie. Set to prefix if you use prefix middleware
495 ; Path to use for the cookie. Set to prefix if you use prefix middleware
496 #beaker.session.cookie_path = /custom_prefix
496 #beaker.session.cookie_path = /custom_prefix
497
497
498 ; Set https secure cookie
498 ; Set https secure cookie
499 beaker.session.secure = false
499 beaker.session.secure = false
500
500
501 ; default cookie expiration time in seconds, set to `true` to set expire
501 ; default cookie expiration time in seconds, set to `true` to set expire
502 ; at browser close
502 ; at browser close
503 #beaker.session.cookie_expires = 3600
503 #beaker.session.cookie_expires = 3600
504
504
505 ; #############################
505 ; #############################
506 ; SEARCH INDEXING CONFIGURATION
506 ; SEARCH INDEXING CONFIGURATION
507 ; #############################
507 ; #############################
508
508
509 ; Full text search indexer is available in rhodecode-tools under
509 ; Full text search indexer is available in rhodecode-tools under
510 ; `rhodecode-tools index` command
510 ; `rhodecode-tools index` command
511
511
512 ; WHOOSH Backend, doesn't require additional services to run
512 ; WHOOSH Backend, doesn't require additional services to run
513 ; it works good with few dozen repos
513 ; it works good with few dozen repos
514 search.module = rhodecode.lib.index.whoosh
514 search.module = rhodecode.lib.index.whoosh
515 search.location = %(here)s/data/index
515 search.location = %(here)s/data/index
516
516
517 ; ####################
517 ; ####################
518 ; CHANNELSTREAM CONFIG
518 ; CHANNELSTREAM CONFIG
519 ; ####################
519 ; ####################
520
520
521 ; channelstream enables persistent connections and live notification
521 ; channelstream enables persistent connections and live notification
522 ; in the system. It's also used by the chat system
522 ; in the system. It's also used by the chat system
523
523
524 channelstream.enabled = true
524 channelstream.enabled = true
525
525
526 ; server address for channelstream server on the backend
526 ; server address for channelstream server on the backend
527 channelstream.server = channelstream:9800
527 channelstream.server = channelstream:9800
528
528
529 ; location of the channelstream server from outside world
529 ; location of the channelstream server from outside world
530 ; use ws:// for http or wss:// for https. This address needs to be handled
530 ; use ws:// for http or wss:// for https. This address needs to be handled
531 ; by external HTTP server such as Nginx or Apache
531 ; by external HTTP server such as Nginx or Apache
532 ; see Nginx/Apache configuration examples in our docs
532 ; see Nginx/Apache configuration examples in our docs
533 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
533 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
534 channelstream.secret = ENV_GENERATED
534 channelstream.secret = ENV_GENERATED
535 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
535 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
536
536
537 ; Internal application path that Javascript uses to connect into.
537 ; Internal application path that Javascript uses to connect into.
538 ; If you use proxy-prefix the prefix should be added before /_channelstream
538 ; If you use proxy-prefix the prefix should be added before /_channelstream
539 channelstream.proxy_path = /_channelstream
539 channelstream.proxy_path = /_channelstream
540
540
541
541
542 ; ##############################
542 ; ##############################
543 ; MAIN RHODECODE DATABASE CONFIG
543 ; MAIN RHODECODE DATABASE CONFIG
544 ; ##############################
544 ; ##############################
545
545
546 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
546 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
547 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
547 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
548 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
548 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
549 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
549 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
550 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
550 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
551
551
552 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
552 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
553
553
554 ; see sqlalchemy docs for other advanced settings
554 ; see sqlalchemy docs for other advanced settings
555 ; print the sql statements to output
555 ; print the sql statements to output
556 sqlalchemy.db1.echo = false
556 sqlalchemy.db1.echo = false
557
557
558 ; recycle the connections after this amount of seconds
558 ; recycle the connections after this amount of seconds
559 sqlalchemy.db1.pool_recycle = 3600
559 sqlalchemy.db1.pool_recycle = 3600
560
560
561 ; the number of connections to keep open inside the connection pool.
561 ; the number of connections to keep open inside the connection pool.
562 ; 0 indicates no limit
562 ; 0 indicates no limit
563 ; the general calculus with gevent is:
563 ; the general calculus with gevent is:
564 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
564 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
565 ; then increase pool size + max overflow so that they add up to 500.
565 ; then increase pool size + max overflow so that they add up to 500.
566 #sqlalchemy.db1.pool_size = 5
566 #sqlalchemy.db1.pool_size = 5
567
567
568 ; The number of connections to allow in connection pool "overflow", that is
568 ; The number of connections to allow in connection pool "overflow", that is
569 ; connections that can be opened above and beyond the pool_size setting,
569 ; connections that can be opened above and beyond the pool_size setting,
570 ; which defaults to five.
570 ; which defaults to five.
571 #sqlalchemy.db1.max_overflow = 10
571 #sqlalchemy.db1.max_overflow = 10
572
572
573 ; Connection check ping, used to detect broken database connections
573 ; Connection check ping, used to detect broken database connections
574 ; could be enabled to better handle cases if MySQL has gone away errors
574 ; could be enabled to better handle cases if MySQL has gone away errors
575 #sqlalchemy.db1.ping_connection = true
575 #sqlalchemy.db1.ping_connection = true
576
576
577 ; ##########
577 ; ##########
578 ; VCS CONFIG
578 ; VCS CONFIG
579 ; ##########
579 ; ##########
580 vcs.server.enable = true
580 vcs.server.enable = true
581 vcs.server = vcsserver:10010
581 vcs.server = vcsserver:10010
582
582
583 ; Web server connectivity protocol, responsible for web based VCS operations
583 ; Web server connectivity protocol, responsible for web based VCS operations
584 ; Available protocols are:
584 ; Available protocols are:
585 ; `http` - use http-rpc backend (default)
585 ; `http` - use http-rpc backend (default)
586 vcs.server.protocol = http
586 vcs.server.protocol = http
587
587
588 ; Push/Pull operations protocol, available options are:
588 ; Push/Pull operations protocol, available options are:
589 ; `http` - use http-rpc backend (default)
589 ; `http` - use http-rpc backend (default)
590 vcs.scm_app_implementation = http
590 vcs.scm_app_implementation = http
591
591
592 ; Push/Pull operations hooks protocol, available options are:
592 ; Push/Pull operations hooks protocol, available options are:
593 ; `http` - use http-rpc backend (default)
593 ; `http` - use http-rpc backend (default)
594 ; `celery` - use celery based hooks
594 ; `celery` - use celery based hooks
595 vcs.hooks.protocol = http
595 vcs.hooks.protocol = http
596
596
597 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
597 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
598 ; accessible via network.
598 ; accessible via network.
599 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
599 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
600 vcs.hooks.host = *
600 vcs.hooks.host = *
601
601
602 ; Start VCSServer with this instance as a subprocess, useful for development
602 ; Start VCSServer with this instance as a subprocess, useful for development
603 vcs.start_server = false
603 vcs.start_server = false
604
604
605 ; List of enabled VCS backends, available options are:
605 ; List of enabled VCS backends, available options are:
606 ; `hg` - mercurial
606 ; `hg` - mercurial
607 ; `git` - git
607 ; `git` - git
608 ; `svn` - subversion
608 ; `svn` - subversion
609 vcs.backends = hg, git, svn
609 vcs.backends = hg, git, svn
610
610
611 ; Wait this number of seconds before killing connection to the vcsserver
611 ; Wait this number of seconds before killing connection to the vcsserver
612 vcs.connection_timeout = 3600
612 vcs.connection_timeout = 3600
613
613
614 ; Cache flag to cache vcsserver remote calls locally
614 ; Cache flag to cache vcsserver remote calls locally
615 ; It uses cache_region `cache_repo`
615 ; It uses cache_region `cache_repo`
616 vcs.methods.cache = true
616 vcs.methods.cache = true
617
617
618 ; ####################################################
618 ; ####################################################
619 ; Subversion proxy support (mod_dav_svn)
619 ; Subversion proxy support (mod_dav_svn)
620 ; Maps RhodeCode repo groups into SVN paths for Apache
620 ; Maps RhodeCode repo groups into SVN paths for Apache
621 ; ####################################################
621 ; ####################################################
622
622
623 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
623 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
624 ; Set a numeric version for your current SVN e.g. 1.8, or 1.12
624 ; Set a numeric version for your current SVN e.g. 1.8, or 1.12
625 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
625 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
626 #vcs.svn.compatible_version = 1.8
626 #vcs.svn.compatible_version = 1.8
627
627
628 ; Redis connection settings for svn integrations logic
629 ; This connection string needs to be the same on ce and vcsserver
630 vcs.svn.redis_conn = redis://redis:6379/0
631
628 ; Enable SVN proxy of requests over HTTP
632 ; Enable SVN proxy of requests over HTTP
629 vcs.svn.proxy.enabled = true
633 vcs.svn.proxy.enabled = true
630
634
631 ; host to connect to running SVN subsystem
635 ; host to connect to running SVN subsystem
632 vcs.svn.proxy.host = http://svn:8090
636 vcs.svn.proxy.host = http://svn:8090
633
637
634 ; Enable or disable the config file generation.
638 ; Enable or disable the config file generation.
635 svn.proxy.generate_config = true
639 svn.proxy.generate_config = true
636
640
637 ; Generate config file with `SVNListParentPath` set to `On`.
641 ; Generate config file with `SVNListParentPath` set to `On`.
638 svn.proxy.list_parent_path = true
642 svn.proxy.list_parent_path = true
639
643
640 ; Set location and file name of generated config file.
644 ; Set location and file name of generated config file.
641 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
645 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
642
646
643 ; alternative mod_dav config template. This needs to be a valid mako template
647 ; alternative mod_dav config template. This needs to be a valid mako template
644 ; Example template can be found in the source code:
648 ; Example template can be found in the source code:
645 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
649 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
646 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
650 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
647
651
648 ; Used as a prefix to the `Location` block in the generated config file.
652 ; Used as a prefix to the `Location` block in the generated config file.
649 ; In most cases it should be set to `/`.
653 ; In most cases it should be set to `/`.
650 svn.proxy.location_root = /
654 svn.proxy.location_root = /
651
655
652 ; Command to reload the mod dav svn configuration on change.
656 ; Command to reload the mod dav svn configuration on change.
653 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
657 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
654 ; Make sure user who runs RhodeCode process is allowed to reload Apache
658 ; Make sure user who runs RhodeCode process is allowed to reload Apache
655 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
659 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
656
660
657 ; If the timeout expires before the reload command finishes, the command will
661 ; If the timeout expires before the reload command finishes, the command will
658 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
662 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
659 #svn.proxy.reload_timeout = 10
663 #svn.proxy.reload_timeout = 10
660
664
661 ; ####################
665 ; ####################
662 ; SSH Support Settings
666 ; SSH Support Settings
663 ; ####################
667 ; ####################
664
668
665 ; Defines if a custom authorized_keys file should be created and written on
669 ; Defines if a custom authorized_keys file should be created and written on
666 ; any change of user SSH keys. Setting this to false also disables the possibility
670 ; any change of user SSH keys. Setting this to false also disables the possibility
667 ; of adding SSH keys by users from web interface. Super admins can still
671 ; of adding SSH keys by users from web interface. Super admins can still
668 ; manage SSH Keys.
672 ; manage SSH Keys.
669 ssh.generate_authorized_keyfile = true
673 ssh.generate_authorized_keyfile = true
670
674
671 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
675 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
672 # ssh.authorized_keys_ssh_opts =
676 # ssh.authorized_keys_ssh_opts =
673
677
674 ; Path to the authorized_keys file where the generated entries are placed.
678 ; Path to the authorized_keys file where the generated entries are placed.
675 ; It is possible to have multiple key files specified in `sshd_config` e.g.
679 ; It is possible to have multiple key files specified in `sshd_config` e.g.
676 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
680 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
677 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
681 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
678
682
679 ; Command to execute the SSH wrapper. The binary is available in the
683 ; Command to execute the SSH wrapper. The binary is available in the
680 ; RhodeCode installation directory.
684 ; RhodeCode installation directory.
681 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
685 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
682 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
686 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
683 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
687 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
684
688
685 ; Allow shell when executing the ssh-wrapper command
689 ; Allow shell when executing the ssh-wrapper command
686 ssh.wrapper_cmd_allow_shell = false
690 ssh.wrapper_cmd_allow_shell = false
687
691
688 ; Enables logging, and detailed output send back to the client during SSH
692 ; Enables logging, and detailed output send back to the client during SSH
689 ; operations. Useful for debugging, shouldn't be used in production.
693 ; operations. Useful for debugging, shouldn't be used in production.
690 ssh.enable_debug_logging = false
694 ssh.enable_debug_logging = false
691
695
692 ; Paths to binary executable, by default they are the names, but we can
696 ; Paths to binary executable, by default they are the names, but we can
693 ; override them if we want to use a custom one
697 ; override them if we want to use a custom one
694 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
698 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
695 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
699 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
696 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
700 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
697
701
698 ; Enables SSH key generator web interface. Disabling this still allows users
702 ; Enables SSH key generator web interface. Disabling this still allows users
699 ; to add their own keys.
703 ; to add their own keys.
700 ssh.enable_ui_key_generator = true
704 ssh.enable_ui_key_generator = true
701
705
702 ; Statsd client config, this is used to send metrics to statsd
706 ; Statsd client config, this is used to send metrics to statsd
703 ; We recommend setting statsd_exported and scrape them using Prometheus
707 ; We recommend setting statsd_exported and scrape them using Prometheus
704 #statsd.enabled = false
708 #statsd.enabled = false
705 #statsd.statsd_host = 0.0.0.0
709 #statsd.statsd_host = 0.0.0.0
706 #statsd.statsd_port = 8125
710 #statsd.statsd_port = 8125
707 #statsd.statsd_prefix =
711 #statsd.statsd_prefix =
708 #statsd.statsd_ipv6 = false
712 #statsd.statsd_ipv6 = false
709
713
710 ; configure logging automatically at server startup set to false
714 ; configure logging automatically at server startup set to false
711 ; to use the below custom logging config.
715 ; to use the below custom logging config.
712 ; RC_LOGGING_FORMATTER
716 ; RC_LOGGING_FORMATTER
713 ; RC_LOGGING_LEVEL
717 ; RC_LOGGING_LEVEL
714 ; env variables can control the settings for logging in case of autoconfigure
718 ; env variables can control the settings for logging in case of autoconfigure
715
719
716 #logging.autoconfigure = true
720 #logging.autoconfigure = true
717
721
718 ; specify your own custom logging config file to configure logging
722 ; specify your own custom logging config file to configure logging
719 #logging.logging_conf_file = /path/to/custom_logging.ini
723 #logging.logging_conf_file = /path/to/custom_logging.ini
720
724
721 ; Dummy marker to add new entries after.
725 ; Dummy marker to add new entries after.
722 ; Add any custom entries below. Please don't remove this marker.
726 ; Add any custom entries below. Please don't remove this marker.
723 custom.conf = 1
727 custom.conf = 1
724
728
725
729
726 ; #####################
730 ; #####################
727 ; LOGGING CONFIGURATION
731 ; LOGGING CONFIGURATION
728 ; #####################
732 ; #####################
729
733
730 [loggers]
734 [loggers]
731 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
735 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
732
736
733 [handlers]
737 [handlers]
734 keys = console, console_sql
738 keys = console, console_sql
735
739
736 [formatters]
740 [formatters]
737 keys = generic, json, color_formatter, color_formatter_sql
741 keys = generic, json, color_formatter, color_formatter_sql
738
742
739 ; #######
743 ; #######
740 ; LOGGERS
744 ; LOGGERS
741 ; #######
745 ; #######
742 [logger_root]
746 [logger_root]
743 level = NOTSET
747 level = NOTSET
744 handlers = console
748 handlers = console
745
749
746 [logger_sqlalchemy]
750 [logger_sqlalchemy]
747 level = INFO
751 level = INFO
748 handlers = console_sql
752 handlers = console_sql
749 qualname = sqlalchemy.engine
753 qualname = sqlalchemy.engine
750 propagate = 0
754 propagate = 0
751
755
752 [logger_beaker]
756 [logger_beaker]
753 level = DEBUG
757 level = DEBUG
754 handlers =
758 handlers =
755 qualname = beaker.container
759 qualname = beaker.container
756 propagate = 1
760 propagate = 1
757
761
758 [logger_rhodecode]
762 [logger_rhodecode]
759 level = DEBUG
763 level = DEBUG
760 handlers =
764 handlers =
761 qualname = rhodecode
765 qualname = rhodecode
762 propagate = 1
766 propagate = 1
763
767
764 [logger_ssh_wrapper]
768 [logger_ssh_wrapper]
765 level = DEBUG
769 level = DEBUG
766 handlers =
770 handlers =
767 qualname = ssh_wrapper
771 qualname = ssh_wrapper
768 propagate = 1
772 propagate = 1
769
773
770 [logger_celery]
774 [logger_celery]
771 level = DEBUG
775 level = DEBUG
772 handlers =
776 handlers =
773 qualname = celery
777 qualname = celery
774
778
775
779
776 ; ########
780 ; ########
777 ; HANDLERS
781 ; HANDLERS
778 ; ########
782 ; ########
779
783
780 [handler_console]
784 [handler_console]
781 class = StreamHandler
785 class = StreamHandler
782 args = (sys.stderr, )
786 args = (sys.stderr, )
783 level = INFO
787 level = INFO
784 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
788 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
785 ; This allows sending properly formatted logs to grafana loki or elasticsearch
789 ; This allows sending properly formatted logs to grafana loki or elasticsearch
786 formatter = generic
790 formatter = generic
787
791
788 [handler_console_sql]
792 [handler_console_sql]
789 ; "level = DEBUG" logs SQL queries and results.
793 ; "level = DEBUG" logs SQL queries and results.
790 ; "level = INFO" logs SQL queries.
794 ; "level = INFO" logs SQL queries.
791 ; "level = WARN" logs neither. (Recommended for production systems.)
795 ; "level = WARN" logs neither. (Recommended for production systems.)
792 class = StreamHandler
796 class = StreamHandler
793 args = (sys.stderr, )
797 args = (sys.stderr, )
794 level = WARN
798 level = WARN
795 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
799 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
796 ; This allows sending properly formatted logs to grafana loki or elasticsearch
800 ; This allows sending properly formatted logs to grafana loki or elasticsearch
797 formatter = generic
801 formatter = generic
798
802
799 ; ##########
803 ; ##########
800 ; FORMATTERS
804 ; FORMATTERS
801 ; ##########
805 ; ##########
802
806
803 [formatter_generic]
807 [formatter_generic]
804 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
808 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
805 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
809 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
806 datefmt = %Y-%m-%d %H:%M:%S
810 datefmt = %Y-%m-%d %H:%M:%S
807
811
808 [formatter_color_formatter]
812 [formatter_color_formatter]
809 class = rhodecode.lib.logging_formatter.ColorFormatter
813 class = rhodecode.lib.logging_formatter.ColorFormatter
810 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
814 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
811 datefmt = %Y-%m-%d %H:%M:%S
815 datefmt = %Y-%m-%d %H:%M:%S
812
816
813 [formatter_color_formatter_sql]
817 [formatter_color_formatter_sql]
814 class = rhodecode.lib.logging_formatter.ColorFormatterSql
818 class = rhodecode.lib.logging_formatter.ColorFormatterSql
815 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
819 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
816 datefmt = %Y-%m-%d %H:%M:%S
820 datefmt = %Y-%m-%d %H:%M:%S
817
821
818 [formatter_json]
822 [formatter_json]
819 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
823 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
820 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
824 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,223 +1,224 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import tempfile
20 import tempfile
21 import logging
21 import logging
22
22
23 from pyramid.settings import asbool
23 from pyramid.settings import asbool
24
24
25 from rhodecode.config.settings_maker import SettingsMaker
25 from rhodecode.config.settings_maker import SettingsMaker
26 from rhodecode.config import utils as config_utils
26 from rhodecode.config import utils as config_utils
27
27
28 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
29
29
30
30
31 def sanitize_settings_and_apply_defaults(global_config, settings):
31 def sanitize_settings_and_apply_defaults(global_config, settings):
32 """
32 """
33 Applies settings defaults and does all type conversion.
33 Applies settings defaults and does all type conversion.
34
34
35 We would move all settings parsing and preparation into this place, so that
35 We would move all settings parsing and preparation into this place, so that
36 we have only one place left which deals with this part. The remaining parts
36 we have only one place left which deals with this part. The remaining parts
37 of the application would start to rely fully on well-prepared settings.
37 of the application would start to rely fully on well-prepared settings.
38
38
39 This piece would later be split up per topic to avoid a big fat monster
39 This piece would later be split up per topic to avoid a big fat monster
40 function.
40 function.
41 """
41 """
42 jn = os.path.join
42 jn = os.path.join
43
43
44 global_settings_maker = SettingsMaker(global_config)
44 global_settings_maker = SettingsMaker(global_config)
45 global_settings_maker.make_setting('debug', default=False, parser='bool')
45 global_settings_maker.make_setting('debug', default=False, parser='bool')
46 debug_enabled = asbool(global_config.get('debug'))
46 debug_enabled = asbool(global_config.get('debug'))
47
47
48 settings_maker = SettingsMaker(settings)
48 settings_maker = SettingsMaker(settings)
49
49
50 settings_maker.make_setting(
50 settings_maker.make_setting(
51 'logging.autoconfigure',
51 'logging.autoconfigure',
52 default=False,
52 default=False,
53 parser='bool')
53 parser='bool')
54
54
55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
56 settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
56 settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
57
57
58 # Default includes, possible to change as a user
58 # Default includes, possible to change as a user
59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
60 log.debug(
60 log.debug(
61 "Using the following pyramid.includes: %s",
61 "Using the following pyramid.includes: %s",
62 pyramid_includes)
62 pyramid_includes)
63
63
64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
66
66
67 if 'mako.default_filters' not in settings:
67 if 'mako.default_filters' not in settings:
68 # set custom default filters if we don't have it defined
68 # set custom default filters if we don't have it defined
69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
70 settings['mako.default_filters'] = 'h_filter'
70 settings['mako.default_filters'] = 'h_filter'
71
71
72 if 'mako.directories' not in settings:
72 if 'mako.directories' not in settings:
73 mako_directories = settings.setdefault('mako.directories', [
73 mako_directories = settings.setdefault('mako.directories', [
74 # Base templates of the original application
74 # Base templates of the original application
75 'rhodecode:templates',
75 'rhodecode:templates',
76 ])
76 ])
77 log.debug(
77 log.debug(
78 "Using the following Mako template directories: %s",
78 "Using the following Mako template directories: %s",
79 mako_directories)
79 mako_directories)
80
80
81 # NOTE(marcink): fix redis requirement for schema of connection since 3.X
81 # NOTE(marcink): fix redis requirement for schema of connection since 3.X
82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
83 raw_url = settings['beaker.session.url']
83 raw_url = settings['beaker.session.url']
84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
85 settings['beaker.session.url'] = 'redis://' + raw_url
85 settings['beaker.session.url'] = 'redis://' + raw_url
86
86
87 settings_maker.make_setting('__file__', global_config.get('__file__'))
87 settings_maker.make_setting('__file__', global_config.get('__file__'))
88
88
89 # TODO: johbo: Re-think this, usually the call to config.include
89 # TODO: johbo: Re-think this, usually the call to config.include
90 # should allow to pass in a prefix.
90 # should allow to pass in a prefix.
91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
92
92
93 # Sanitize generic settings.
93 # Sanitize generic settings.
94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
95 settings_maker.make_setting('gzip_responses', False, parser='bool')
95 settings_maker.make_setting('gzip_responses', False, parser='bool')
96 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
96 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
97
97
98 # statsd
98 # statsd
99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
102 settings_maker.make_setting('statsd.statsd_prefix', '')
102 settings_maker.make_setting('statsd.statsd_prefix', '')
103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
104
104
105 settings_maker.make_setting('vcs.svn.compatible_version', '')
105 settings_maker.make_setting('vcs.svn.compatible_version', '')
106 settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0')
106 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
107 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
107 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
108 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
108 settings_maker.make_setting('vcs.hooks.protocol', 'http')
109 settings_maker.make_setting('vcs.hooks.protocol', 'http')
109 settings_maker.make_setting('vcs.hooks.host', '*')
110 settings_maker.make_setting('vcs.hooks.host', '*')
110 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
111 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
111 settings_maker.make_setting('vcs.server', '')
112 settings_maker.make_setting('vcs.server', '')
112 settings_maker.make_setting('vcs.server.protocol', 'http')
113 settings_maker.make_setting('vcs.server.protocol', 'http')
113 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
114 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
114 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
115 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
115 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
116 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
116 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
117 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
117 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
118 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
118
119
119 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
120 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
120
121
121 # repo_store path
122 # repo_store path
122 settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
123 settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
123 # Support legacy values of vcs.scm_app_implementation. Legacy
124 # Support legacy values of vcs.scm_app_implementation. Legacy
124 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
125 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
125 # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'.
126 # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'.
126 scm_app_impl = settings['vcs.scm_app_implementation']
127 scm_app_impl = settings['vcs.scm_app_implementation']
127 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
128 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
128 settings['vcs.scm_app_implementation'] = 'http'
129 settings['vcs.scm_app_implementation'] = 'http'
129
130
130 settings_maker.make_setting('appenlight', False, parser='bool')
131 settings_maker.make_setting('appenlight', False, parser='bool')
131
132
132 temp_store = tempfile.gettempdir()
133 temp_store = tempfile.gettempdir()
133 tmp_cache_dir = jn(temp_store, 'rc_cache')
134 tmp_cache_dir = jn(temp_store, 'rc_cache')
134
135
135 # save default, cache dir, and use it for all backends later.
136 # save default, cache dir, and use it for all backends later.
136 default_cache_dir = settings_maker.make_setting(
137 default_cache_dir = settings_maker.make_setting(
137 'cache_dir',
138 'cache_dir',
138 default=tmp_cache_dir, default_when_empty=True,
139 default=tmp_cache_dir, default_when_empty=True,
139 parser='dir:ensured')
140 parser='dir:ensured')
140
141
141 # exception store cache
142 # exception store cache
142 settings_maker.make_setting(
143 settings_maker.make_setting(
143 'exception_tracker.store_path',
144 'exception_tracker.store_path',
144 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
145 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
145 parser='dir:ensured'
146 parser='dir:ensured'
146 )
147 )
147
148
148 settings_maker.make_setting(
149 settings_maker.make_setting(
149 'celerybeat-schedule.path',
150 'celerybeat-schedule.path',
150 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
151 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
151 parser='file:ensured'
152 parser='file:ensured'
152 )
153 )
153
154
154 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
155 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
155 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
156 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
156
157
157 # sessions, ensure file since no-value is memory
158 # sessions, ensure file since no-value is memory
158 settings_maker.make_setting('beaker.session.type', 'file')
159 settings_maker.make_setting('beaker.session.type', 'file')
159 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
160 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
160
161
161 # cache_general
162 # cache_general
162 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
163 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
163 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
164 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
164 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
165 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
165
166
166 # cache_perms
167 # cache_perms
167 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
168 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
168 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
169 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
169 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
170 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
170
171
171 # cache_repo
172 # cache_repo
172 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
173 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
173 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
174 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
174 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
175 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
175
176
176 # cache_license
177 # cache_license
177 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
178 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
178 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
179 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
179 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
180 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
180
181
181 # cache_repo_longterm memory, 96H
182 # cache_repo_longterm memory, 96H
182 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
183 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
183 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
184 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
184 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
185 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
185
186
186 # sql_cache_short
187 # sql_cache_short
187 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
188 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
188 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
189 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
189 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
190 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
190
191
191 # archive_cache
192 # archive_cache
192 settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1')
193 settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1')
193 settings_maker.make_setting('archive_cache.backend.type', 'filesystem')
194 settings_maker.make_setting('archive_cache.backend.type', 'filesystem')
194
195
195 settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
196 settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
196 settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int')
197 settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int')
197 settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
198 settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
198 settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored')
199 settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored')
199
200
200 settings_maker.make_setting('archive_cache.filesystem.retry', False, parser='bool')
201 settings_maker.make_setting('archive_cache.filesystem.retry', False, parser='bool')
201 settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int')
202 settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int')
202 settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int')
203 settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int')
203
204
204 settings_maker.make_setting('archive_cache.objectstore.url', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
205 settings_maker.make_setting('archive_cache.objectstore.url', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
205 settings_maker.make_setting('archive_cache.objectstore.key', '')
206 settings_maker.make_setting('archive_cache.objectstore.key', '')
206 settings_maker.make_setting('archive_cache.objectstore.secret', '')
207 settings_maker.make_setting('archive_cache.objectstore.secret', '')
207 settings_maker.make_setting('archive_cache.objectstore.region', 'eu-central-1')
208 settings_maker.make_setting('archive_cache.objectstore.region', 'eu-central-1')
208 settings_maker.make_setting('archive_cache.objectstore.bucket', 'rhodecode-archive-cache', default_when_empty=True,)
209 settings_maker.make_setting('archive_cache.objectstore.bucket', 'rhodecode-archive-cache', default_when_empty=True,)
209 settings_maker.make_setting('archive_cache.objectstore.bucket_shards', 8, parser='int')
210 settings_maker.make_setting('archive_cache.objectstore.bucket_shards', 8, parser='int')
210
211
211 settings_maker.make_setting('archive_cache.objectstore.cache_size_gb', 10, parser='float')
212 settings_maker.make_setting('archive_cache.objectstore.cache_size_gb', 10, parser='float')
212 settings_maker.make_setting('archive_cache.objectstore.eviction_policy', 'least-recently-stored')
213 settings_maker.make_setting('archive_cache.objectstore.eviction_policy', 'least-recently-stored')
213
214
214 settings_maker.make_setting('archive_cache.objectstore.retry', False, parser='bool')
215 settings_maker.make_setting('archive_cache.objectstore.retry', False, parser='bool')
215 settings_maker.make_setting('archive_cache.objectstore.retry_backoff', 1, parser='int')
216 settings_maker.make_setting('archive_cache.objectstore.retry_backoff', 1, parser='int')
216 settings_maker.make_setting('archive_cache.objectstore.retry_attempts', 10, parser='int')
217 settings_maker.make_setting('archive_cache.objectstore.retry_attempts', 10, parser='int')
217
218
218 settings_maker.env_expand()
219 settings_maker.env_expand()
219
220
220 # configure instance id
221 # configure instance id
221 config_utils.set_instance_id(settings)
222 config_utils.set_instance_id(settings)
222
223
223 return settings
224 return settings
@@ -1,355 +1,354 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20
20
21 from webhelpers2.html.builder import literal
21 from webhelpers2.html.builder import literal
22 from webhelpers2.html.tags import link_to
22 from webhelpers2.html.tags import link_to
23
23
24 from rhodecode.lib.utils2 import AttributeDict
24 from rhodecode.lib.utils2 import AttributeDict
25 from rhodecode.lib.vcs.backends.base import BaseCommit
25 from rhodecode.lib.vcs.backends.base import BaseCommit
26 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
26 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
27
27
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31
31
32 def action_parser(request, user_log, feed=False, parse_cs=False):
32 def action_parser(request, user_log, feed=False, parse_cs=False):
33 """
33 """
34 This helper will action_map the specified string action into translated
34 This helper will action_map the specified string action into translated
35 fancy names with icons and links
35 fancy names with icons and links
36
36
37 :param user_log: user log instance
37 :param user_log: user log instance
38 :param feed: use output for feeds (no html and fancy icons)
38 :param feed: use output for feeds (no html and fancy icons)
39 :param parse_cs: parse Changesets into VCS instances
39 :param parse_cs: parse Changesets into VCS instances
40 """
40 """
41 if user_log.version == 'v2':
41 if user_log.version == 'v2':
42 ap = AuditLogParser(request, user_log)
42 ap = AuditLogParser(request, user_log)
43 return ap.callbacks()
43 return ap.callbacks()
44 else:
44 else:
45 # old style
45 # old style
46 ap = ActionParser(request, user_log, feed=False, parse_commits=False)
46 ap = ActionParser(request, user_log, feed=False, parse_commits=False)
47 return ap.callbacks()
47 return ap.callbacks()
48
48
49
49
50 class ActionParser(object):
50 class ActionParser(object):
51
51
52 commits_limit = 3 # display this amount always
52 commits_limit = 3 # display this amount always
53 commits_top_limit = 50 # show up to this amount of commits hidden
53 commits_top_limit = 50 # show up to this amount of commits hidden
54
54
55 def __init__(self, request, user_log, feed=False, parse_commits=False):
55 def __init__(self, request, user_log, feed=False, parse_commits=False):
56 self.user_log = user_log
56 self.user_log = user_log
57 self.feed = feed
57 self.feed = feed
58 self.parse_commits = parse_commits
58 self.parse_commits = parse_commits
59 self.request = request
59 self.request = request
60
60
61 self.action = user_log.action
61 self.action = user_log.action
62 self.action_params = ' '
62 self.action_params = ' '
63 x = self.action.split(':', 1)
63 x = self.action.split(':', 1)
64 if len(x) > 1:
64 if len(x) > 1:
65 self.action, self.action_params = x
65 self.action, self.action_params = x
66
66
67 def callbacks(self):
67 def callbacks(self):
68 action_str = self.action_map.get(self.action, self.action)
68 action_str = self.action_map.get(self.action, self.action)
69 if self.feed:
69 if self.feed:
70 action = action_str[0].replace('[', '').replace(']', '')
70 action = action_str[0].replace('[', '').replace(']', '')
71 else:
71 else:
72 action = action_str[0]\
72 action = action_str[0]\
73 .replace('[', '<span class="journal_highlight">')\
73 .replace('[', '<span class="journal_highlight">')\
74 .replace(']', '</span>')
74 .replace(']', '</span>')
75
75
76 action_params_func = _no_params_func
76 action_params_func = _no_params_func
77 if callable(action_str[1]):
77 if callable(action_str[1]):
78 action_params_func = action_str[1]
78 action_params_func = action_str[1]
79
79
80 # returned callbacks we need to call to get
80 # returned callbacks we need to call to get
81 return [
81 return [
82 lambda: literal(action), action_params_func,
82 lambda: literal(action), action_params_func,
83 self.action_parser_icon]
83 self.action_parser_icon]
84
84
85 @property
85 @property
86 def action_map(self):
86 def action_map(self):
87 _ = self.request.translate
87 _ = self.request.translate
88 # action : translated str, callback(extractor), icon
88 # action : translated str, callback(extractor), icon
89 action_map = {
89 action_map = {
90 'user_deleted_repo': (
90 'user_deleted_repo': (
91 _('[deleted] repository'),
91 _('[deleted] repository'),
92 None, 'icon-trash'),
92 None, 'icon-trash'),
93 'user_created_repo': (
93 'user_created_repo': (
94 _('[created] repository'),
94 _('[created] repository'),
95 None, 'icon-plus icon-plus-colored'),
95 None, 'icon-plus icon-plus-colored'),
96 'user_created_fork': (
96 'user_created_fork': (
97 _('[created] repository as fork'),
97 _('[created] repository as fork'),
98 None, 'icon-code-fork'),
98 None, 'icon-code-fork'),
99 'user_forked_repo': (
99 'user_forked_repo': (
100 _('[forked] repository'),
100 _('[forked] repository'),
101 self.get_fork_name, 'icon-code-fork'),
101 self.get_fork_name, 'icon-code-fork'),
102 'user_updated_repo': (
102 'user_updated_repo': (
103 _('[updated] repository'),
103 _('[updated] repository'),
104 None, 'icon-pencil icon-pencil-colored'),
104 None, 'icon-pencil icon-pencil-colored'),
105 'user_downloaded_archive': (
105 'user_downloaded_archive': (
106 _('[downloaded] archive from repository'),
106 _('[downloaded] archive from repository'),
107 self.get_archive_name, 'icon-download-alt'),
107 self.get_archive_name, 'icon-download-alt'),
108 'admin_deleted_repo': (
108 'admin_deleted_repo': (
109 _('[delete] repository'),
109 _('[delete] repository'),
110 None, 'icon-trash'),
110 None, 'icon-trash'),
111 'admin_created_repo': (
111 'admin_created_repo': (
112 _('[created] repository'),
112 _('[created] repository'),
113 None, 'icon-plus icon-plus-colored'),
113 None, 'icon-plus icon-plus-colored'),
114 'admin_forked_repo': (
114 'admin_forked_repo': (
115 _('[forked] repository'),
115 _('[forked] repository'),
116 None, 'icon-code-fork icon-fork-colored'),
116 None, 'icon-code-fork icon-fork-colored'),
117 'admin_updated_repo': (
117 'admin_updated_repo': (
118 _('[updated] repository'),
118 _('[updated] repository'),
119 None, 'icon-pencil icon-pencil-colored'),
119 None, 'icon-pencil icon-pencil-colored'),
120 'admin_created_user': (
120 'admin_created_user': (
121 _('[created] user'),
121 _('[created] user'),
122 self.get_user_name, 'icon-user icon-user-colored'),
122 self.get_user_name, 'icon-user icon-user-colored'),
123 'admin_updated_user': (
123 'admin_updated_user': (
124 _('[updated] user'),
124 _('[updated] user'),
125 self.get_user_name, 'icon-user icon-user-colored'),
125 self.get_user_name, 'icon-user icon-user-colored'),
126 'admin_created_users_group': (
126 'admin_created_users_group': (
127 _('[created] user group'),
127 _('[created] user group'),
128 self.get_users_group, 'icon-pencil icon-pencil-colored'),
128 self.get_users_group, 'icon-pencil icon-pencil-colored'),
129 'admin_updated_users_group': (
129 'admin_updated_users_group': (
130 _('[updated] user group'),
130 _('[updated] user group'),
131 self.get_users_group, 'icon-pencil icon-pencil-colored'),
131 self.get_users_group, 'icon-pencil icon-pencil-colored'),
132 'user_commented_revision': (
132 'user_commented_revision': (
133 _('[commented] on commit in repository'),
133 _('[commented] on commit in repository'),
134 self.get_cs_links, 'icon-comment icon-comment-colored'),
134 self.get_cs_links, 'icon-comment icon-comment-colored'),
135 'user_commented_pull_request': (
135 'user_commented_pull_request': (
136 _('[commented] on pull request for'),
136 _('[commented] on pull request for'),
137 self.get_pull_request, 'icon-comment icon-comment-colored'),
137 self.get_pull_request, 'icon-comment icon-comment-colored'),
138 'user_closed_pull_request': (
138 'user_closed_pull_request': (
139 _('[closed] pull request for'),
139 _('[closed] pull request for'),
140 self.get_pull_request, 'icon-check'),
140 self.get_pull_request, 'icon-check'),
141 'user_merged_pull_request': (
141 'user_merged_pull_request': (
142 _('[merged] pull request for'),
142 _('[merged] pull request for'),
143 self.get_pull_request, 'icon-check'),
143 self.get_pull_request, 'icon-check'),
144 'push': (
144 'push': (
145 _('[pushed] into'),
145 _('[pushed] into'),
146 self.get_cs_links, 'icon-arrow-up'),
146 self.get_cs_links, 'icon-arrow-up'),
147 'push_local': (
147 'push_local': (
148 _('[committed via RhodeCode] into repository'),
148 _('[committed via RhodeCode] into repository'),
149 self.get_cs_links, 'icon-pencil icon-pencil-colored'),
149 self.get_cs_links, 'icon-pencil icon-pencil-colored'),
150 'push_remote': (
150 'push_remote': (
151 _('[pulled from remote] into repository'),
151 _('[pulled from remote] into repository'),
152 self.get_cs_links, 'icon-arrow-up'),
152 self.get_cs_links, 'icon-arrow-up'),
153 'pull': (
153 'pull': (
154 _('[pulled] from'),
154 _('[pulled] from'),
155 None, 'icon-arrow-down'),
155 None, 'icon-arrow-down'),
156 'started_following_repo': (
156 'started_following_repo': (
157 _('[started following] repository'),
157 _('[started following] repository'),
158 None, 'icon-heart icon-heart-colored'),
158 None, 'icon-heart icon-heart-colored'),
159 'stopped_following_repo': (
159 'stopped_following_repo': (
160 _('[stopped following] repository'),
160 _('[stopped following] repository'),
161 None, 'icon-heart-empty icon-heart-colored'),
161 None, 'icon-heart-empty icon-heart-colored'),
162 }
162 }
163 return action_map
163 return action_map
164
164
165 def get_fork_name(self):
165 def get_fork_name(self):
166 from rhodecode.lib import helpers as h
166 from rhodecode.lib import helpers as h
167 _ = self.request.translate
167 _ = self.request.translate
168 repo_name = self.action_params
168 repo_name = self.action_params
169 _url = h.route_path('repo_summary', repo_name=repo_name)
169 _url = h.route_path('repo_summary', repo_name=repo_name)
170 return _('fork name %s') % link_to(self.action_params, _url)
170 return _('fork name %s') % link_to(self.action_params, _url)
171
171
172 def get_user_name(self):
172 def get_user_name(self):
173 user_name = self.action_params
173 user_name = self.action_params
174 return user_name
174 return user_name
175
175
176 def get_users_group(self):
176 def get_users_group(self):
177 group_name = self.action_params
177 group_name = self.action_params
178 return group_name
178 return group_name
179
179
180 def get_pull_request(self):
180 def get_pull_request(self):
181 from rhodecode.lib import helpers as h
181 from rhodecode.lib import helpers as h
182 _ = self.request.translate
182 _ = self.request.translate
183 pull_request_id = self.action_params
183 pull_request_id = self.action_params
184 if self.is_deleted():
184 if self.is_deleted():
185 repo_name = self.user_log.repository_name
185 repo_name = self.user_log.repository_name
186 else:
186 else:
187 repo_name = self.user_log.repository.repo_name
187 repo_name = self.user_log.repository.repo_name
188 return link_to(
188 return link_to(
189 _('Pull request #%s') % pull_request_id,
189 _('Pull request #%s') % pull_request_id,
190 h.route_path('pullrequest_show', repo_name=repo_name,
190 h.route_path('pullrequest_show', repo_name=repo_name,
191 pull_request_id=pull_request_id))
191 pull_request_id=pull_request_id))
192
192
193 def get_archive_name(self):
193 def get_archive_name(self):
194 archive_name = self.action_params
194 archive_name = self.action_params
195 return archive_name
195 return archive_name
196
196
197 def action_parser_icon(self):
197 def action_parser_icon(self):
198 tmpl = """<i class="%s" alt="%s"></i>"""
198 tmpl = """<i class="%s" alt="%s"></i>"""
199 ico = self.action_map.get(self.action, ['', '', ''])[2]
199 ico = self.action_map.get(self.action, ['', '', ''])[2]
200 return literal(tmpl % (ico, self.action))
200 return literal(tmpl % (ico, self.action))
201
201
202 def get_cs_links(self):
202 def get_cs_links(self):
203 from rhodecode.lib import helpers as h
203 from rhodecode.lib import helpers as h
204 _ = self.request.translate
204 _ = self.request.translate
205 if self.is_deleted():
205 if self.is_deleted():
206 return self.action_params
206 return self.action_params
207
207
208 repo_name = self.user_log.repository.repo_name
208 repo_name = self.user_log.repository.repo_name
209 commit_ids = self.action_params.split(',')
209 commit_ids = self.action_params.split(',')
210 commits = self.get_commits(commit_ids)
210 commits = self.get_commits(commit_ids)
211
211
212 link_generator = (
212 link_generator = (
213 self.lnk(commit, repo_name)
213 self.lnk(commit, repo_name)
214 for commit in commits[:self.commits_limit])
214 for commit in commits[:self.commits_limit])
215 commit_links = [" " + ', '.join(link_generator)]
215 commit_links = [" " + ', '.join(link_generator)]
216 _op1, _name1 = _get_op(commit_ids[0])
216 _op1, _name1 = _get_op(commit_ids[0])
217 _op2, _name2 = _get_op(commit_ids[-1])
217 _op2, _name2 = _get_op(commit_ids[-1])
218
218
219 commit_id_range = '%s...%s' % (_name1, _name2)
219 commit_id_range = '%s...%s' % (_name1, _name2)
220
220
221 compare_view = (
221 compare_view = (
222 ' <div class="compare_view tooltip" title="%s">'
222 ' <div class="compare_view tooltip" title="%s">'
223 '<a href="%s">%s</a> </div>' % (
223 '<a href="%s">%s</a> </div>' % (
224 _('Show all combined commits %s->%s') % (
224 _('Show all combined commits %s->%s') % (
225 commit_ids[0][:12], commit_ids[-1][:12]
225 commit_ids[0][:12], commit_ids[-1][:12]
226 ),
226 ),
227 h.route_path(
227 h.route_path(
228 'repo_commit', repo_name=repo_name,
228 'repo_commit', repo_name=repo_name,
229 commit_id=commit_id_range), _('compare view')
229 commit_id=commit_id_range), _('compare view')
230 )
230 )
231 )
231 )
232
232
233 if len(commit_ids) > self.commits_limit:
233 if len(commit_ids) > self.commits_limit:
234 more_count = len(commit_ids) - self.commits_limit
234 more_count = len(commit_ids) - self.commits_limit
235 commit_links.append(
235 commit_links.append(
236 _(' and %(num)s more commits') % {'num': more_count}
236 _(' and %(num)s more commits') % {'num': more_count}
237 )
237 )
238
238
239 if len(commits) > 1:
239 if len(commits) > 1:
240 commit_links.append(compare_view)
240 commit_links.append(compare_view)
241 return ''.join(commit_links)
241 return ''.join(commit_links)
242
242
243 def get_commits(self, commit_ids):
243 def get_commits(self, commit_ids):
244 commits = []
244 commits = []
245 if not [v for v in commit_ids if v != '']:
245 if not [v for v in commit_ids if v != '']:
246 return commits
246 return commits
247
247
248 repo = None
248 repo = None
249 if self.parse_commits:
249 if self.parse_commits:
250 repo = self.user_log.repository.scm_instance()
250 repo = self.user_log.repository.scm_instance()
251
251
252 for commit_id in commit_ids[:self.commits_top_limit]:
252 for commit_id in commit_ids[:self.commits_top_limit]:
253 _op, _name = _get_op(commit_id)
253 _op, _name = _get_op(commit_id)
254
254
255 # we want parsed commits, or new log store format is bad
255 # we want parsed commits, or new log store format is bad
256 if self.parse_commits:
256 if self.parse_commits:
257 try:
257 try:
258 commit = repo.get_commit(commit_id=commit_id)
258 commit = repo.get_commit(commit_id=commit_id)
259 commits.append(commit)
259 commits.append(commit)
260 except CommitDoesNotExistError:
260 except CommitDoesNotExistError:
261 log.error(
261 log.error('cannot find commit id %s in this repository',
262 'cannot find commit id %s in this repository',
263 commit_id)
262 commit_id)
264 commits.append(commit_id)
263 commits.append(commit_id)
265 continue
264 continue
266 else:
265 else:
267 fake_commit = AttributeDict({
266 fake_commit = AttributeDict({
268 'short_id': commit_id[:12],
267 'short_id': commit_id[:12],
269 'raw_id': commit_id,
268 'raw_id': commit_id,
270 'message': '',
269 'message': '',
271 'op': _op,
270 'op': _op,
272 'ref_name': _name
271 'ref_name': _name
273 })
272 })
274 commits.append(fake_commit)
273 commits.append(fake_commit)
275
274
276 return commits
275 return commits
277
276
278 def lnk(self, commit_or_id, repo_name):
277 def lnk(self, commit_or_id, repo_name):
279 from rhodecode.lib.helpers import tooltip
278 from rhodecode.lib.helpers import tooltip
280 from rhodecode.lib import helpers as h
279 from rhodecode.lib import helpers as h
281 _ = self.request.translate
280 _ = self.request.translate
282 title = ''
281 title = ''
283 lazy_cs = True
282 lazy_cs = True
284 if isinstance(commit_or_id, (BaseCommit, AttributeDict)):
283 if isinstance(commit_or_id, (BaseCommit, AttributeDict)):
285 lazy_cs = True
284 lazy_cs = True
286 if (getattr(commit_or_id, 'op', None) and
285 if (getattr(commit_or_id, 'op', None) and
287 getattr(commit_or_id, 'ref_name', None)):
286 getattr(commit_or_id, 'ref_name', None)):
288 lazy_cs = False
287 lazy_cs = False
289 lbl = '?'
288 lbl = '?'
290 if commit_or_id.op == 'delete_branch':
289 if commit_or_id.op == 'delete_branch':
291 lbl = '%s' % _('Deleted branch: %s') % commit_or_id.ref_name
290 lbl = '%s' % _('Deleted branch: %s') % commit_or_id.ref_name
292 title = ''
291 title = ''
293 elif commit_or_id.op == 'tag':
292 elif commit_or_id.op == 'tag':
294 lbl = '%s' % _('Created tag: %s') % commit_or_id.ref_name
293 lbl = '%s' % _('Created tag: %s') % commit_or_id.ref_name
295 title = ''
294 title = ''
296 _url = '#'
295 _url = '#'
297
296
298 else:
297 else:
299 lbl = '%s' % (commit_or_id.short_id[:8])
298 lbl = '%s' % (commit_or_id.short_id[:8])
300 _url = h.route_path('repo_commit', repo_name=repo_name,
299 _url = h.route_path('repo_commit', repo_name=repo_name,
301 commit_id=commit_or_id.raw_id)
300 commit_id=commit_or_id.raw_id)
302 title = tooltip(commit_or_id.message)
301 title = tooltip(commit_or_id.message)
303 else:
302 else:
304 # commit cannot be found/striped/removed etc.
303 # commit cannot be found/striped/removed etc.
305 lbl = ('%s' % commit_or_id)[:12]
304 lbl = ('%s' % commit_or_id)[:12]
306 _url = '#'
305 _url = '#'
307 title = _('Commit not found')
306 title = _('Commit not found')
308 if self.parse_commits:
307 if self.parse_commits:
309 return link_to(lbl, _url, title=title, class_='tooltip')
308 return link_to(lbl, _url, title=title, class_='tooltip')
310 return link_to(lbl, _url, raw_id=commit_or_id.raw_id, repo_name=repo_name,
309 return link_to(lbl, _url, raw_id=commit_or_id.raw_id, repo_name=repo_name,
311 class_='lazy-cs' if lazy_cs else '')
310 class_='lazy-cs' if lazy_cs else '')
312
311
313 def is_deleted(self):
312 def is_deleted(self):
314 return self.user_log.repository is None
313 return self.user_log.repository is None
315
314
316
315
317 class AuditLogParser(object):
316 class AuditLogParser(object):
318 def __init__(self, request, audit_log_entry):
317 def __init__(self, request, audit_log_entry):
319 self.audit_log_entry = audit_log_entry
318 self.audit_log_entry = audit_log_entry
320 self.request = request
319 self.request = request
321
320
322 def get_icon(self, action):
321 def get_icon(self, action):
323 return 'icon-rhodecode'
322 return 'icon-rhodecode'
324
323
325 def callbacks(self):
324 def callbacks(self):
326 action_str = self.audit_log_entry.action
325 action_str = self.audit_log_entry.action
327
326
328 def callback():
327 def callback():
329 # returned callbacks we need to call to get
328 # returned callbacks we need to call to get
330 action = action_str \
329 action = action_str \
331 .replace('[', '<span class="journal_highlight">')\
330 .replace('[', '<span class="journal_highlight">')\
332 .replace(']', '</span>')
331 .replace(']', '</span>')
333 return literal(action)
332 return literal(action)
334
333
335 def icon():
334 def icon():
336 tmpl = """<i class="%s" alt="%s"></i>"""
335 tmpl = """<i class="%s" alt="%s"></i>"""
337 ico = self.get_icon(action_str)
336 ico = self.get_icon(action_str)
338 return literal(tmpl % (ico, action_str))
337 return literal(tmpl % (ico, action_str))
339
338
340 action_params_func = _no_params_func
339 action_params_func = _no_params_func
341
340
342 return [
341 return [
343 callback, action_params_func, icon]
342 callback, action_params_func, icon]
344
343
345
344
346 def _no_params_func():
345 def _no_params_func():
347 return ""
346 return ""
348
347
349
348
350 def _get_op(commit_id):
349 def _get_op(commit_id):
351 _op = None
350 _op = None
352 _name = commit_id
351 _name = commit_id
353 if len(commit_id.split('=>')) == 2:
352 if len(commit_id.split('=>')) == 2:
354 _op, _name = commit_id.split('=>')
353 _op, _name = commit_id.split('=>')
355 return _op, _name
354 return _op, _name
@@ -1,115 +1,89 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18 import os
19 import os
19 import time
20 import time
20 import logging
21 import logging
21 import tempfile
22
22
23 from rhodecode.lib.config_utils import get_config
23 from rhodecode.lib.config_utils import get_config
24 from rhodecode.lib.ext_json import json
24
25 from rhodecode.lib.svn_txn_utils import get_txn_id_from_store
25
26
26 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
27
28
28
29
29 class BaseHooksCallbackDaemon:
30 class BaseHooksCallbackDaemon:
30 """
31 """
31 Basic context manager for actions that don't require some extra
32 Basic context manager for actions that don't require some extra
32 """
33 """
33 def __init__(self):
34 def __init__(self):
34 pass
35 pass
35
36
36 def __enter__(self):
37 def __enter__(self):
37 log.debug('Running `%s` callback daemon', self.__class__.__name__)
38 log.debug('Running `%s` callback daemon', self.__class__.__name__)
38 return self
39 return self
39
40
40 def __exit__(self, exc_type, exc_val, exc_tb):
41 def __exit__(self, exc_type, exc_val, exc_tb):
41 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
42 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
42
43
43
44
44 class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
45 class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
45
46
46 def __init__(self, module):
47 def __init__(self, module):
47 super().__init__()
48 super().__init__()
48 self.hooks_module = module
49 self.hooks_module = module
49
50
50
51 def __repr__(self):
51 def get_txn_id_data_path(txn_id):
52 return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
52 import rhodecode
53
54 root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
55 final_dir = os.path.join(root, 'svn_txn_id')
56
57 if not os.path.isdir(final_dir):
58 os.makedirs(final_dir)
59 return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
60
61
62 def store_txn_id_data(txn_id, data_dict):
63 if not txn_id:
64 log.warning('Cannot store txn_id because it is empty')
65 return
66
67 path = get_txn_id_data_path(txn_id)
68 try:
69 with open(path, 'wb') as f:
70 f.write(json.dumps(data_dict))
71 except Exception:
72 log.exception('Failed to write txn_id metadata')
73
74
75 def get_txn_id_from_store(txn_id):
76 """
77 Reads txn_id from store and if present returns the data for callback manager
78 """
79 path = get_txn_id_data_path(txn_id)
80 try:
81 with open(path, 'rb') as f:
82 return json.loads(f.read())
83 except Exception:
84 return {}
85
53
86
54
87 def prepare_callback_daemon(extras, protocol, host, txn_id=None):
55 def prepare_callback_daemon(extras, protocol, host, txn_id=None):
88 txn_details = get_txn_id_from_store(txn_id)
56
89 port = txn_details.get('port', 0)
90 match protocol:
57 match protocol:
91 case 'http':
58 case 'http':
92 from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
59 from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
60 port = 0
61 if txn_id:
62 # read txn-id to re-use the PORT for callback daemon
63 repo_path = os.path.join(extras['repo_store'], extras['repository'])
64 txn_details = get_txn_id_from_store(repo_path, txn_id)
65 port = txn_details.get('port', 0)
66
93 callback_daemon = HttpHooksCallbackDaemon(
67 callback_daemon = HttpHooksCallbackDaemon(
94 txn_id=txn_id, host=host, port=port)
68 txn_id=txn_id, host=host, port=port)
95 case 'celery':
69 case 'celery':
96 from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
70 from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
97 callback_daemon = CeleryHooksCallbackDaemon(get_config(extras['config']))
71 callback_daemon = CeleryHooksCallbackDaemon(get_config(extras['config']))
98 case 'local':
72 case 'local':
99 from rhodecode.lib.hook_daemon.hook_module import Hooks
73 from rhodecode.lib.hook_daemon.hook_module import Hooks
100 callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
74 callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
101 case _:
75 case _:
102 log.error('Unsupported callback daemon protocol "%s"', protocol)
76 log.error('Unsupported callback daemon protocol "%s"', protocol)
103 raise Exception('Unsupported callback daemon protocol.')
77 raise Exception('Unsupported callback daemon protocol.')
104
78
105 extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
79 extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
106 extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
80 extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
107 extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
81 extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
108 extras['hooks_protocol'] = protocol
82 extras['hooks_protocol'] = protocol
109 extras['time'] = time.time()
83 extras['time'] = time.time()
110
84
111 # register txn_id
85 # register txn_id
112 extras['txn_id'] = txn_id
86 extras['txn_id'] = txn_id
113 log.debug('Prepared a callback daemon: %s',
87 log.debug('Prepared a callback daemon: %s',
114 callback_daemon.__class__.__name__)
88 callback_daemon.__class__.__name__)
115 return callback_daemon, extras
89 return callback_daemon, extras
@@ -1,30 +1,33 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon
19 from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon
20
20
21
21
22 class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
22 class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
23 """
23 """
24 Context manger for achieving a compatibility with celery backend
24 Context manger for achieving a compatibility with celery backend
25 """
25 """
26
26
27 def __init__(self, config):
27 def __init__(self, config):
28 # TODO: replace this with settings bootstrapped...
28 # TODO: replace this with settings bootstrapped...
29 self.task_queue = config.get('app:main', 'celery.broker_url')
29 self.task_queue = config.get('app:main', 'celery.broker_url')
30 self.task_backend = config.get('app:main', 'celery.result_backend')
30 self.task_backend = config.get('app:main', 'celery.result_backend')
31
32 def __repr__(self):
33 return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
@@ -1,280 +1,287 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20 import logging
20 import logging
21 import traceback
21 import traceback
22 import threading
22 import threading
23 import socket
23 import socket
24 import msgpack
24 import msgpack
25 import gevent
25 import gevent
26
26
27 from http.server import BaseHTTPRequestHandler
27 from http.server import BaseHTTPRequestHandler
28 from socketserver import TCPServer
28 from socketserver import TCPServer
29
29
30 from rhodecode.model import meta
30 from rhodecode.model import meta
31 from rhodecode.lib.ext_json import json
31 from rhodecode.lib.ext_json import json
32 from rhodecode.lib import rc_cache
32 from rhodecode.lib import rc_cache
33 from rhodecode.lib.hook_daemon.base import get_txn_id_data_path
33 from rhodecode.lib.svn_txn_utils import get_txn_id_data_key
34 from rhodecode.lib.hook_daemon.hook_module import Hooks
34 from rhodecode.lib.hook_daemon.hook_module import Hooks
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
39 class HooksHttpHandler(BaseHTTPRequestHandler):
39 class HooksHttpHandler(BaseHTTPRequestHandler):
40
40
41 JSON_HOOKS_PROTO = 'json.v1'
41 JSON_HOOKS_PROTO = 'json.v1'
42 MSGPACK_HOOKS_PROTO = 'msgpack.v1'
42 MSGPACK_HOOKS_PROTO = 'msgpack.v1'
43 # starting with RhodeCode 5.0.0 MsgPack is the default, prior it used json
43 # starting with RhodeCode 5.0.0 MsgPack is the default, prior it used json
44 DEFAULT_HOOKS_PROTO = MSGPACK_HOOKS_PROTO
44 DEFAULT_HOOKS_PROTO = MSGPACK_HOOKS_PROTO
45
45
46 @classmethod
46 @classmethod
47 def serialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
47 def serialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
48 if proto == cls.MSGPACK_HOOKS_PROTO:
48 if proto == cls.MSGPACK_HOOKS_PROTO:
49 return msgpack.packb(data)
49 return msgpack.packb(data)
50 return json.dumps(data)
50 return json.dumps(data)
51
51
52 @classmethod
52 @classmethod
53 def deserialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
53 def deserialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
54 if proto == cls.MSGPACK_HOOKS_PROTO:
54 if proto == cls.MSGPACK_HOOKS_PROTO:
55 return msgpack.unpackb(data)
55 return msgpack.unpackb(data)
56 return json.loads(data)
56 return json.loads(data)
57
57
58 def do_POST(self):
58 def do_POST(self):
59 hooks_proto, method, extras = self._read_request()
59 hooks_proto, method, extras = self._read_request()
60 log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
60 log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
61
61
62 txn_id = getattr(self.server, 'txn_id', None)
62 txn_id = getattr(self.server, 'txn_id', None)
63 if txn_id:
63 if txn_id:
64 log.debug('Computing TXN_ID based on `%s`:`%s`',
64 log.debug('Computing TXN_ID based on `%s`:`%s`',
65 extras['repository'], extras['txn_id'])
65 extras['repository'], extras['txn_id'])
66 computed_txn_id = rc_cache.utils.compute_key_from_params(
66 computed_txn_id = rc_cache.utils.compute_key_from_params(
67 extras['repository'], extras['txn_id'])
67 extras['repository'], extras['txn_id'])
68 if txn_id != computed_txn_id:
68 if txn_id != computed_txn_id:
69 raise Exception(
69 raise Exception(
70 'TXN ID fail: expected {} got {} instead'.format(
70 'TXN ID fail: expected {} got {} instead'.format(
71 txn_id, computed_txn_id))
71 txn_id, computed_txn_id))
72
72
73 request = getattr(self.server, 'request', None)
73 request = getattr(self.server, 'request', None)
74 try:
74 try:
75 hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
75 hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
76 result = self._call_hook_method(hooks, method, extras)
76 result = self._call_hook_method(hooks, method, extras)
77
77
78 except Exception as e:
78 except Exception as e:
79 exc_tb = traceback.format_exc()
79 exc_tb = traceback.format_exc()
80 result = {
80 result = {
81 'exception': e.__class__.__name__,
81 'exception': e.__class__.__name__,
82 'exception_traceback': exc_tb,
82 'exception_traceback': exc_tb,
83 'exception_args': e.args
83 'exception_args': e.args
84 }
84 }
85 self._write_response(hooks_proto, result)
85 self._write_response(hooks_proto, result)
86
86
87 def _read_request(self):
87 def _read_request(self):
88 length = int(self.headers['Content-Length'])
88 length = int(self.headers['Content-Length'])
89 # respect sent headers, fallback to OLD proto for compatability
89 # respect sent headers, fallback to OLD proto for compatability
90 hooks_proto = self.headers.get('rc-hooks-protocol') or self.JSON_HOOKS_PROTO
90 hooks_proto = self.headers.get('rc-hooks-protocol') or self.JSON_HOOKS_PROTO
91 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
91 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
92 # support for new vcsserver msgpack based protocol hooks
92 # support for new vcsserver msgpack based protocol hooks
93 body = self.rfile.read(length)
93 body = self.rfile.read(length)
94 data = self.deserialize_data(body)
94 data = self.deserialize_data(body)
95 else:
95 else:
96 body = self.rfile.read(length)
96 body = self.rfile.read(length)
97 data = self.deserialize_data(body)
97 data = self.deserialize_data(body)
98
98
99 return hooks_proto, data['method'], data['extras']
99 return hooks_proto, data['method'], data['extras']
100
100
101 def _write_response(self, hooks_proto, result):
101 def _write_response(self, hooks_proto, result):
102 self.send_response(200)
102 self.send_response(200)
103 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
103 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
104 self.send_header("Content-type", "application/msgpack")
104 self.send_header("Content-type", "application/msgpack")
105 self.end_headers()
105 self.end_headers()
106 data = self.serialize_data(result)
106 data = self.serialize_data(result)
107 self.wfile.write(data)
107 self.wfile.write(data)
108 else:
108 else:
109 self.send_header("Content-type", "text/json")
109 self.send_header("Content-type", "text/json")
110 self.end_headers()
110 self.end_headers()
111 data = self.serialize_data(result)
111 data = self.serialize_data(result)
112 self.wfile.write(data)
112 self.wfile.write(data)
113
113
114 def _call_hook_method(self, hooks, method, extras):
114 def _call_hook_method(self, hooks, method, extras):
115 try:
115 try:
116 result = getattr(hooks, method)(extras)
116 result = getattr(hooks, method)(extras)
117 finally:
117 finally:
118 meta.Session.remove()
118 meta.Session.remove()
119 return result
119 return result
120
120
121 def log_message(self, format, *args):
121 def log_message(self, format, *args):
122 """
122 """
123 This is an overridden method of BaseHTTPRequestHandler which logs using
123 This is an overridden method of BaseHTTPRequestHandler which logs using
124 a logging library instead of writing directly to stderr.
124 a logging library instead of writing directly to stderr.
125 """
125 """
126
126
127 message = format % args
127 message = format % args
128
128
129 log.debug(
129 log.debug(
130 "HOOKS: client=%s - - [%s] %s", self.client_address,
130 "HOOKS: client=%s - - [%s] %s", self.client_address,
131 self.log_date_time_string(), message)
131 self.log_date_time_string(), message)
132
132
133
133
134 class ThreadedHookCallbackDaemon(object):
134 class ThreadedHookCallbackDaemon(object):
135
135
136 _callback_thread = None
136 _callback_thread = None
137 _daemon = None
137 _daemon = None
138 _done = False
138 _done = False
139 use_gevent = False
139 use_gevent = False
140
140
141 def __init__(self, txn_id=None, host=None, port=None):
141 def __init__(self, txn_id=None, host=None, port=None):
142 self._prepare(txn_id=txn_id, host=host, port=port)
142 self._prepare(txn_id=txn_id, host=host, port=port)
143 if self.use_gevent:
143 if self.use_gevent:
144 self._run_func = self._run_gevent
144 self._run_func = self._run_gevent
145 self._stop_func = self._stop_gevent
145 self._stop_func = self._stop_gevent
146 else:
146 else:
147 self._run_func = self._run
147 self._run_func = self._run
148 self._stop_func = self._stop
148 self._stop_func = self._stop
149
149
150 def __enter__(self):
150 def __enter__(self):
151 log.debug('Running `%s` callback daemon', self.__class__.__name__)
151 log.debug('Running `%s` callback daemon', self.__class__.__name__)
152 self._run_func()
152 self._run_func()
153 return self
153 return self
154
154
155 def __exit__(self, exc_type, exc_val, exc_tb):
155 def __exit__(self, exc_type, exc_val, exc_tb):
156 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
156 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
157 self._stop_func()
157 self._stop_func()
158
158
159 def _prepare(self, txn_id=None, host=None, port=None):
159 def _prepare(self, txn_id=None, host=None, port=None):
160 raise NotImplementedError()
160 raise NotImplementedError()
161
161
162 def _run(self):
162 def _run(self):
163 raise NotImplementedError()
163 raise NotImplementedError()
164
164
165 def _stop(self):
165 def _stop(self):
166 raise NotImplementedError()
166 raise NotImplementedError()
167
167
168 def _run_gevent(self):
168 def _run_gevent(self):
169 raise NotImplementedError()
169 raise NotImplementedError()
170
170
171 def _stop_gevent(self):
171 def _stop_gevent(self):
172 raise NotImplementedError()
172 raise NotImplementedError()
173
173
174
174
175 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
175 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
176 """
176 """
177 Context manager which will run a callback daemon in a background thread.
177 Context manager which will run a callback daemon in a background thread.
178 """
178 """
179
179
180 hooks_uri = None
180 hooks_uri = None
181
181
182 # From Python docs: Polling reduces our responsiveness to a shutdown
182 # From Python docs: Polling reduces our responsiveness to a shutdown
183 # request and wastes cpu at all other times.
183 # request and wastes cpu at all other times.
184 POLL_INTERVAL = 0.01
184 POLL_INTERVAL = 0.01
185
185
186 use_gevent = False
186 use_gevent = False
187
187
188 def __repr__(self):
189 return f'HttpHooksCallbackDaemon(hooks_uri={self.hooks_uri})'
190
188 @property
191 @property
189 def _hook_prefix(self):
192 def _hook_prefix(self):
190 return 'HOOKS: {} '.format(self.hooks_uri)
193 return f'HOOKS: {self.hooks_uri} '
191
194
192 def get_hostname(self):
195 def get_hostname(self):
193 return socket.gethostname() or '127.0.0.1'
196 return socket.gethostname() or '127.0.0.1'
194
197
195 def get_available_port(self, min_port=20000, max_port=65535):
198 def get_available_port(self, min_port=20000, max_port=65535):
196 from rhodecode.lib.utils2 import get_available_port as _get_port
199 from rhodecode.lib.utils2 import get_available_port as _get_port
197 return _get_port(min_port, max_port)
200 return _get_port(min_port, max_port)
198
201
199 def _prepare(self, txn_id=None, host=None, port=None):
202 def _prepare(self, txn_id=None, host=None, port=None):
200 from pyramid.threadlocal import get_current_request
203 from pyramid.threadlocal import get_current_request
201
204
202 if not host or host == "*":
205 if not host or host == "*":
203 host = self.get_hostname()
206 host = self.get_hostname()
204 if not port:
207 if not port:
205 port = self.get_available_port()
208 port = self.get_available_port()
206
209
207 server_address = (host, port)
210 server_address = (host, port)
208 self.hooks_uri = '{}:{}'.format(host, port)
211 self.hooks_uri = f'{host}:{port}'
209 self.txn_id = txn_id
212 self.txn_id = txn_id
210 self._done = False
213 self._done = False
211
214
212 log.debug(
215 log.debug(
213 "%s Preparing HTTP callback daemon registering hook object: %s",
216 "%s Preparing HTTP callback daemon registering hook object: %s",
214 self._hook_prefix, HooksHttpHandler)
217 self._hook_prefix, HooksHttpHandler)
215
218
216 self._daemon = TCPServer(server_address, HooksHttpHandler)
219 self._daemon = TCPServer(server_address, HooksHttpHandler)
217 # inject transaction_id for later verification
220 # inject transaction_id for later verification
218 self._daemon.txn_id = self.txn_id
221 self._daemon.txn_id = self.txn_id
219
222
220 # pass the WEB app request into daemon
223 # pass the WEB app request into daemon
221 self._daemon.request = get_current_request()
224 self._daemon.request = get_current_request()
222
225
223 def _run(self):
226 def _run(self):
224 log.debug("Running thread-based loop of callback daemon in background")
227 log.debug("Running thread-based loop of callback daemon in background")
225 callback_thread = threading.Thread(
228 callback_thread = threading.Thread(
226 target=self._daemon.serve_forever,
229 target=self._daemon.serve_forever,
227 kwargs={'poll_interval': self.POLL_INTERVAL})
230 kwargs={'poll_interval': self.POLL_INTERVAL})
228 callback_thread.daemon = True
231 callback_thread.daemon = True
229 callback_thread.start()
232 callback_thread.start()
230 self._callback_thread = callback_thread
233 self._callback_thread = callback_thread
231
234
232 def _run_gevent(self):
235 def _run_gevent(self):
233 log.debug("Running gevent-based loop of callback daemon in background")
236 log.debug("Running gevent-based loop of callback daemon in background")
234 # create a new greenlet for the daemon's serve_forever method
237 # create a new greenlet for the daemon's serve_forever method
235 callback_greenlet = gevent.spawn(
238 callback_greenlet = gevent.spawn(
236 self._daemon.serve_forever,
239 self._daemon.serve_forever,
237 poll_interval=self.POLL_INTERVAL)
240 poll_interval=self.POLL_INTERVAL)
238
241
239 # store reference to greenlet
242 # store reference to greenlet
240 self._callback_greenlet = callback_greenlet
243 self._callback_greenlet = callback_greenlet
241
244
242 # switch to this greenlet
245 # switch to this greenlet
243 gevent.sleep(0.01)
246 gevent.sleep(0.01)
244
247
245 def _stop(self):
248 def _stop(self):
246 log.debug("Waiting for background thread to finish.")
249 log.debug("Waiting for background thread to finish.")
247 self._daemon.shutdown()
250 self._daemon.shutdown()
248 self._callback_thread.join()
251 self._callback_thread.join()
249 self._daemon = None
252 self._daemon = None
250 self._callback_thread = None
253 self._callback_thread = None
251 if self.txn_id:
254 if self.txn_id:
252 txn_id_file = get_txn_id_data_path(self.txn_id)
255 #TODO: figure out the repo_path...
256 repo_path = ''
257 txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
253 log.debug('Cleaning up TXN ID %s', txn_id_file)
258 log.debug('Cleaning up TXN ID %s', txn_id_file)
254 if os.path.isfile(txn_id_file):
259 if os.path.isfile(txn_id_file):
255 os.remove(txn_id_file)
260 os.remove(txn_id_file)
256
261
257 log.debug("Background thread done.")
262 log.debug("Background thread done.")
258
263
259 def _stop_gevent(self):
264 def _stop_gevent(self):
260 log.debug("Waiting for background greenlet to finish.")
265 log.debug("Waiting for background greenlet to finish.")
261
266
262 # if greenlet exists and is running
267 # if greenlet exists and is running
263 if self._callback_greenlet and not self._callback_greenlet.dead:
268 if self._callback_greenlet and not self._callback_greenlet.dead:
264 # shutdown daemon if it exists
269 # shutdown daemon if it exists
265 if self._daemon:
270 if self._daemon:
266 self._daemon.shutdown()
271 self._daemon.shutdown()
267
272
268 # kill the greenlet
273 # kill the greenlet
269 self._callback_greenlet.kill()
274 self._callback_greenlet.kill()
270
275
271 self._daemon = None
276 self._daemon = None
272 self._callback_greenlet = None
277 self._callback_greenlet = None
273
278
274 if self.txn_id:
279 if self.txn_id:
275 txn_id_file = get_txn_id_data_path(self.txn_id)
280 #TODO: figure out the repo_path...
281 repo_path = ''
282 txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
276 log.debug('Cleaning up TXN ID %s', txn_id_file)
283 log.debug('Cleaning up TXN ID %s', txn_id_file)
277 if os.path.isfile(txn_id_file):
284 if os.path.isfile(txn_id_file):
278 os.remove(txn_id_file)
285 os.remove(txn_id_file)
279
286
280 log.debug("Background greenlet done.")
287 log.debug("Background greenlet done.")
@@ -1,242 +1,258 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import base64
20 import re
21 import os
21 import logging
22 import logging
22 import urllib.request
23 import urllib.request
23 import urllib.parse
24 import urllib.parse
24 import urllib.error
25 import urllib.error
25 import urllib.parse
26 import urllib.parse
26
27
27 import requests
28 import requests
28 from pyramid.httpexceptions import HTTPNotAcceptable
29 from pyramid.httpexceptions import HTTPNotAcceptable
29
30
30 from rhodecode import ConfigGet
31 from rhodecode import ConfigGet
31 from rhodecode.lib import rc_cache
32 from rhodecode.lib.middleware import simplevcs
32 from rhodecode.lib.middleware import simplevcs
33 from rhodecode.lib.middleware.utils import get_path_info
33 from rhodecode.lib.middleware.utils import get_path_info
34 from rhodecode.lib.utils import is_valid_repo
34 from rhodecode.lib.utils import is_valid_repo
35 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.type_utils import str2bool
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.hook_daemon.base import store_txn_id_data
39
36
40 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
41
38
42
39
43 class SimpleSvnApp(object):
40 class SimpleSvnApp(object):
44 IGNORED_HEADERS = [
41 IGNORED_HEADERS = [
45 'connection', 'keep-alive', 'content-encoding',
42 'connection', 'keep-alive', 'content-encoding',
46 'transfer-encoding', 'content-length']
43 'transfer-encoding', 'content-length']
47 rc_extras = {}
44 rc_extras = {}
48
45
49 def __init__(self, config):
46 def __init__(self, config):
50 self.config = config
47 self.config = config
51 self.session = requests.Session()
48 self.session = requests.Session()
52
49
53 def __call__(self, environ, start_response):
50 def __call__(self, environ, start_response):
54 request_headers = self._get_request_headers(environ)
51 request_headers = self._get_request_headers(environ)
55 data_io = environ['wsgi.input']
52 data_io = environ['wsgi.input']
56 req_method: str = environ['REQUEST_METHOD']
53 req_method: str = environ['REQUEST_METHOD']
57 has_content_length: bool = 'CONTENT_LENGTH' in environ
54 has_content_length: bool = 'CONTENT_LENGTH' in environ
58
55
59 path_info = self._get_url(
56 path_info = self._get_url(
60 self.config.get('subversion_http_server_url', ''), get_path_info(environ))
57 self.config.get('subversion_http_server_url', ''), get_path_info(environ))
61 transfer_encoding = environ.get('HTTP_TRANSFER_ENCODING', '')
58 transfer_encoding = environ.get('HTTP_TRANSFER_ENCODING', '')
62 log.debug('Handling: %s method via `%s` has_content_length:%s', req_method, path_info, has_content_length)
59 log.debug('Handling: %s method via `%s` has_content_length:%s', req_method, path_info, has_content_length)
63
60
64 # stream control flag, based on request and content type...
61 # stream control flag, based on request and content type...
65 stream = False
62 stream = False
66
67 if req_method in ['MKCOL'] or has_content_length:
63 if req_method in ['MKCOL'] or has_content_length:
68 data_processed = False
69 # read chunk to check if we have txn-with-props
70 initial_data: bytes = data_io.read(1024)
71 if initial_data.startswith(b'(create-txn-with-props'):
72 data_io = initial_data + data_io.read()
73 # store on-the-fly our rc_extra using svn revision properties
74 # those can be read later on in hooks executed so we have a way
75 # to pass in the data into svn hooks
76 rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras))
77 rc_data_len = str(len(rc_data))
78 # header defines data length, and serialized data
79 skel = b' rc-scm-extras %b %b' % (safe_bytes(rc_data_len), safe_bytes(rc_data))
80 data_io = data_io[:-2] + skel + b'))'
81 data_processed = True
82
83 if not data_processed:
84 # NOTE(johbo): Avoid that we end up with sending the request in chunked
64 # NOTE(johbo): Avoid that we end up with sending the request in chunked
85 # transfer encoding (mainly on Gunicorn). If we know the content
65 # transfer encoding (mainly on Gunicorn). If we know the content
86 # length, then we should transfer the payload in one request.
66 # length, then we should transfer the payload in one request.
87 data_io = initial_data + data_io.read()
67 data_io = data_io.read()
88
68
89 if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked':
69 if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked':
90 # NOTE(marcink): when getting/uploading files, we want to STREAM content
70 # NOTE(marcink): when getting/uploading files, we want to STREAM content
91 # back to the client/proxy instead of buffering it here...
71 # back to the client/proxy instead of buffering it here...
92 stream = True
72 stream = True
93
73
94 stream = stream
74 stream = stream
95 log.debug('Calling SVN PROXY at `%s`, using method:%s. Stream: %s',
75 log.debug('Calling SVN PROXY at `%s`, using method:%s. Stream: %s',
96 path_info, req_method, stream)
76 path_info, req_method, stream)
97
77
98 call_kwargs = dict(
78 call_kwargs = dict(
99 data=data_io,
79 data=data_io,
100 headers=request_headers,
80 headers=request_headers,
101 stream=stream
81 stream=stream
102 )
82 )
103 if req_method in ['HEAD', 'DELETE']:
83 if req_method in ['HEAD', 'DELETE']:
84 # NOTE(marcink): HEAD might be deprecated for SVN 1.14+ protocol
104 del call_kwargs['data']
85 del call_kwargs['data']
105
86
106 try:
87 try:
107 response = self.session.request(
88 response = self.session.request(
108 req_method, path_info, **call_kwargs)
89 req_method, path_info, **call_kwargs)
109 except requests.ConnectionError:
90 except requests.ConnectionError:
110 log.exception('ConnectionError occurred for endpoint %s', path_info)
91 log.exception('ConnectionError occurred for endpoint %s', path_info)
111 raise
92 raise
112
93
113 if response.status_code not in [200, 401]:
94 if response.status_code not in [200, 401]:
114 text = '\n{}'.format(safe_str(response.text)) if response.text else ''
95 text = '\n{}'.format(safe_str(response.text)) if response.text else ''
115 if response.status_code >= 500:
96 if response.status_code >= 500:
116 log.error('Got SVN response:%s with text:`%s`', response, text)
97 log.error('Got SVN response:%s with text:`%s`', response, text)
117 else:
98 else:
118 log.debug('Got SVN response:%s with text:`%s`', response, text)
99 log.debug('Got SVN response:%s with text:`%s`', response, text)
119 else:
100 else:
120 log.debug('got response code: %s', response.status_code)
101 log.debug('got response code: %s', response.status_code)
121
102
122 response_headers = self._get_response_headers(response.headers)
103 response_headers = self._get_response_headers(response.headers)
123
124 if response.headers.get('SVN-Txn-name'):
125 svn_tx_id = response.headers.get('SVN-Txn-name')
126 txn_id = rc_cache.utils.compute_key_from_params(
127 self.config['repository'], svn_tx_id)
128 port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1])
129 store_txn_id_data(txn_id, {'port': port})
130
131 start_response(f'{response.status_code} {response.reason}', response_headers)
104 start_response(f'{response.status_code} {response.reason}', response_headers)
132 return response.iter_content(chunk_size=1024)
105 return response.iter_content(chunk_size=1024)
133
106
134 def _get_url(self, svn_http_server, path):
107 def _get_url(self, svn_http_server, path):
135 svn_http_server_url = (svn_http_server or '').rstrip('/')
108 svn_http_server_url = (svn_http_server or '').rstrip('/')
136 url_path = urllib.parse.urljoin(svn_http_server_url + '/', (path or '').lstrip('/'))
109 url_path = urllib.parse.urljoin(svn_http_server_url + '/', (path or '').lstrip('/'))
137 url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'")
110 url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'")
138 return url_path
111 return url_path
139
112
113 def _get_txn_id(self, environ):
114 url = environ['RAW_URI']
115
116 # Define the regex pattern
117 pattern = r'/txr/([^/]+)/'
118
119 # Search for the pattern in the URL
120 match = re.search(pattern, url)
121
122 # Check if a match is found and extract the captured group
123 if match:
124 txn_id = match.group(1)
125 return txn_id
126
140 def _get_request_headers(self, environ):
127 def _get_request_headers(self, environ):
141 headers = {}
128 headers = {}
142 whitelist = {
129 whitelist = {
143 'Authorization': {}
130 'Authorization': {}
144 }
131 }
145 for key in environ:
132 for key in environ:
146 if key in whitelist:
133 if key in whitelist:
147 headers[key] = environ[key]
134 headers[key] = environ[key]
148 elif not key.startswith('HTTP_'):
135 elif not key.startswith('HTTP_'):
149 continue
136 continue
150 else:
137 else:
151 new_key = key.split('_')
138 new_key = key.split('_')
152 new_key = [k.capitalize() for k in new_key[1:]]
139 new_key = [k.capitalize() for k in new_key[1:]]
153 new_key = '-'.join(new_key)
140 new_key = '-'.join(new_key)
154 headers[new_key] = environ[key]
141 headers[new_key] = environ[key]
155
142
156 if 'CONTENT_TYPE' in environ:
143 if 'CONTENT_TYPE' in environ:
157 headers['Content-Type'] = environ['CONTENT_TYPE']
144 headers['Content-Type'] = environ['CONTENT_TYPE']
158
145
159 if 'CONTENT_LENGTH' in environ:
146 if 'CONTENT_LENGTH' in environ:
160 headers['Content-Length'] = environ['CONTENT_LENGTH']
147 headers['Content-Length'] = environ['CONTENT_LENGTH']
161
148
162 return headers
149 return headers
163
150
164 def _get_response_headers(self, headers):
151 def _get_response_headers(self, headers):
165 headers = [
152 headers = [
166 (h, headers[h])
153 (h, headers[h])
167 for h in headers
154 for h in headers
168 if h.lower() not in self.IGNORED_HEADERS
155 if h.lower() not in self.IGNORED_HEADERS
169 ]
156 ]
170
157
171 return headers
158 return headers
172
159
173
160
174 class DisabledSimpleSvnApp(object):
161 class DisabledSimpleSvnApp(object):
175 def __init__(self, config):
162 def __init__(self, config):
176 self.config = config
163 self.config = config
177
164
178 def __call__(self, environ, start_response):
165 def __call__(self, environ, start_response):
179 reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled'
166 reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled'
180 log.warning(reason)
167 log.warning(reason)
181 return HTTPNotAcceptable(reason)(environ, start_response)
168 return HTTPNotAcceptable(reason)(environ, start_response)
182
169
183
170
184 class SimpleSvn(simplevcs.SimpleVCS):
171 class SimpleSvn(simplevcs.SimpleVCS):
172 """
173 details: https://svn.apache.org/repos/asf/subversion/trunk/notes/http-and-webdav/webdav-protocol
174
175 Read Commands : (OPTIONS, PROPFIND, GET, REPORT)
176
177 GET: fetch info about resources
178 PROPFIND: Used to retrieve properties of resources.
179 REPORT: Used for specialized queries to the repository. E.g History etc...
180 OPTIONS: request is sent to an SVN server, the server responds with information about the available HTTP
181 methods and other server capabilities.
182
183 Write Commands : (MKACTIVITY, PROPPATCH, PUT, CHECKOUT, MKCOL, MOVE,
184 -------------- COPY, DELETE, LOCK, UNLOCK, MERGE)
185
186 With the exception of LOCK/UNLOCK, every write command performs some
187 sort of DeltaV commit operation. In DeltaV, a commit always starts
188 by creating a transaction (MKACTIVITY), applies a log message
189 (PROPPATCH), does some other write methods, and then ends by
190 committing the transaction (MERGE). If the MERGE fails, the client
191 may try to remove the transaction with a DELETE.
192
193 PROPPATCH: Used to set and/or remove properties on resources.
194 MKCOL: Creates a new collection (directory).
195 DELETE: Removes a resource.
196 COPY and MOVE: Used for copying and moving resources.
197 MERGE: Used to merge changes from different branches.
198 CHECKOUT, CHECKIN, UNCHECKOUT: DeltaV methods for managing working resources and versions.
199 """
185
200
186 SCM = 'svn'
201 SCM = 'svn'
187 READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')
202 READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')
188 DEFAULT_HTTP_SERVER = 'http://localhost:8090'
203 WRITE_COMMANDS = ('MERGE', 'POST', 'PUT', 'COPY', 'MOVE', 'DELETE', 'MKCOL')
204 DEFAULT_HTTP_SERVER = 'http://svn:8090'
189
205
190 def _get_repository_name(self, environ):
206 def _get_repository_name(self, environ):
191 """
207 """
192 Gets repository name out of PATH_INFO header
208 Gets repository name out of PATH_INFO header
193
209
194 :param environ: environ where PATH_INFO is stored
210 :param environ: environ where PATH_INFO is stored
195 """
211 """
196 path = get_path_info(environ).split('!')
212 path = get_path_info(environ).split('!')
197 repo_name = path[0].strip('/')
213 repo_name = path[0].strip('/')
198
214
199 # SVN includes the whole path in it's requests, including
215 # SVN includes the whole path in it's requests, including
200 # subdirectories inside the repo. Therefore we have to search for
216 # subdirectories inside the repo. Therefore we have to search for
201 # the repo root directory.
217 # the repo root directory.
202 if not is_valid_repo(
218 if not is_valid_repo(
203 repo_name, self.base_path, explicit_scm=self.SCM):
219 repo_name, self.base_path, explicit_scm=self.SCM):
204 current_path = ''
220 current_path = ''
205 for component in repo_name.split('/'):
221 for component in repo_name.split('/'):
206 current_path += component
222 current_path += component
207 if is_valid_repo(
223 if is_valid_repo(
208 current_path, self.base_path, explicit_scm=self.SCM):
224 current_path, self.base_path, explicit_scm=self.SCM):
209 return current_path
225 return current_path
210 current_path += '/'
226 current_path += '/'
211
227
212 return repo_name
228 return repo_name
213
229
214 def _get_action(self, environ):
230 def _get_action(self, environ):
215 return (
231 return (
216 'pull'
232 'pull'
217 if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS
233 if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS
218 else 'push')
234 else 'push')
219
235
220 def _should_use_callback_daemon(self, extras, environ, action):
236 def _should_use_callback_daemon(self, extras, environ, action):
221 # only MERGE command triggers hooks, so we don't want to start
237 # only PUT & MERGE command triggers hooks, so we don't want to start
222 # hooks server too many times. POST however starts the svn transaction
238 # hooks server too many times. POST however starts the svn transaction
223 # so we also need to run the init of callback daemon of POST
239 # so we also need to run the init of callback daemon of POST
224 if environ['REQUEST_METHOD'] in ['MERGE', 'POST']:
240 if environ['REQUEST_METHOD'] not in self.READ_ONLY_COMMANDS:
225 return True
241 return True
226 return False
242 return False
227
243
228 def _create_wsgi_app(self, repo_path, repo_name, config):
244 def _create_wsgi_app(self, repo_path, repo_name, config):
229 if self._is_svn_enabled():
245 if self._is_svn_enabled():
230 return SimpleSvnApp(config)
246 return SimpleSvnApp(config)
231 # we don't have http proxy enabled return dummy request handler
247 # we don't have http proxy enabled return dummy request handler
232 return DisabledSimpleSvnApp(config)
248 return DisabledSimpleSvnApp(config)
233
249
234 def _is_svn_enabled(self):
250 def _is_svn_enabled(self):
235 return ConfigGet().get_bool('vcs.svn.proxy.enabled')
251 return ConfigGet().get_bool('vcs.svn.proxy.enabled')
236
252
237 def _create_config(self, extras, repo_name, scheme='http'):
253 def _create_config(self, extras, repo_name, scheme='http'):
238 server_url = ConfigGet().get_str('vcs.svn.proxy.host')
254 server_url = ConfigGet().get_str('vcs.svn.proxy.host')
239 server_url = server_url or self.DEFAULT_HTTP_SERVER
255 server_url = server_url or self.DEFAULT_HTTP_SERVER
240
256
241 extras['subversion_http_server_url'] = server_url
257 extras['subversion_http_server_url'] = server_url
242 return extras
258 return extras
@@ -1,694 +1,683 b''
1
1
2
2
3 # Copyright (C) 2014-2023 RhodeCode GmbH
3 # Copyright (C) 2014-2023 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 It's implemented with basic auth function
23 It's implemented with basic auth function
24 """
24 """
25
25
26 import os
26 import os
27 import re
27 import re
28 import io
29 import logging
28 import logging
30 import importlib
29 import importlib
31 from functools import wraps
30 from functools import wraps
32 from lxml import etree
33
31
34 import time
32 import time
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
33 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36
34
37 from pyramid.httpexceptions import (
35 from pyramid.httpexceptions import (
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
36 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 from zope.cachedescriptors.property import Lazy as LazyProperty
37 from zope.cachedescriptors.property import Lazy as LazyProperty
40
38
41 import rhodecode
39 import rhodecode
42 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
40 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
43 from rhodecode.lib import rc_cache
41 from rhodecode.lib import rc_cache
42 from rhodecode.lib.svn_txn_utils import store_txn_id_data
44 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
43 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 from rhodecode.lib.base import (
44 from rhodecode.lib.base import (
46 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
45 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
46 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
47 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
49 from rhodecode.lib.middleware import appenlight
48 from rhodecode.lib.middleware import appenlight
50 from rhodecode.lib.middleware.utils import scm_app_http
49 from rhodecode.lib.middleware.utils import scm_app_http
51 from rhodecode.lib.str_utils import safe_bytes
50 from rhodecode.lib.str_utils import safe_bytes, safe_int
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
51 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
52 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 from rhodecode.lib.vcs.backends import base
54 from rhodecode.lib.vcs.backends import base
56
55
57 from rhodecode.model import meta
56 from rhodecode.model import meta
58 from rhodecode.model.db import User, Repository, PullRequest
57 from rhodecode.model.db import User, Repository, PullRequest
59 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.pull_request import PullRequestModel
59 from rhodecode.model.pull_request import PullRequestModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
60 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62
61
63 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
64
63
65
64
66 def extract_svn_txn_id(acl_repo_name, data: bytes):
67 """
68 Helper method for extraction of svn txn_id from submitted XML data during
69 POST operations
70 """
71
72 try:
73 root = etree.fromstring(data)
74 pat = re.compile(r'/txn/(?P<txn_id>.*)')
75 for el in root:
76 if el.tag == '{DAV:}source':
77 for sub_el in el:
78 if sub_el.tag == '{DAV:}href':
79 match = pat.search(sub_el.text)
80 if match:
81 svn_tx_id = match.groupdict()['txn_id']
82 txn_id = rc_cache.utils.compute_key_from_params(
83 acl_repo_name, svn_tx_id)
84 return txn_id
85 except Exception:
86 log.exception('Failed to extract txn_id')
87
88
89 def initialize_generator(factory):
65 def initialize_generator(factory):
90 """
66 """
91 Initializes the returned generator by draining its first element.
67 Initializes the returned generator by draining its first element.
92
68
93 This can be used to give a generator an initializer, which is the code
69 This can be used to give a generator an initializer, which is the code
94 up to the first yield statement. This decorator enforces that the first
70 up to the first yield statement. This decorator enforces that the first
95 produced element has the value ``"__init__"`` to make its special
71 produced element has the value ``"__init__"`` to make its special
96 purpose very explicit in the using code.
72 purpose very explicit in the using code.
97 """
73 """
98
74
99 @wraps(factory)
75 @wraps(factory)
100 def wrapper(*args, **kwargs):
76 def wrapper(*args, **kwargs):
101 gen = factory(*args, **kwargs)
77 gen = factory(*args, **kwargs)
102 try:
78 try:
103 init = next(gen)
79 init = next(gen)
104 except StopIteration:
80 except StopIteration:
105 raise ValueError('Generator must yield at least one element.')
81 raise ValueError('Generator must yield at least one element.')
106 if init != "__init__":
82 if init != "__init__":
107 raise ValueError('First yielded element must be "__init__".')
83 raise ValueError('First yielded element must be "__init__".')
108 return gen
84 return gen
109 return wrapper
85 return wrapper
110
86
111
87
112 class SimpleVCS(object):
88 class SimpleVCS(object):
113 """Common functionality for SCM HTTP handlers."""
89 """Common functionality for SCM HTTP handlers."""
114
90
115 SCM = 'unknown'
91 SCM = 'unknown'
116
92
117 acl_repo_name = None
93 acl_repo_name = None
118 url_repo_name = None
94 url_repo_name = None
119 vcs_repo_name = None
95 vcs_repo_name = None
120 rc_extras = {}
96 rc_extras = {}
121
97
122 # We have to handle requests to shadow repositories different than requests
98 # We have to handle requests to shadow repositories different than requests
123 # to normal repositories. Therefore we have to distinguish them. To do this
99 # to normal repositories. Therefore we have to distinguish them. To do this
124 # we use this regex which will match only on URLs pointing to shadow
100 # we use this regex which will match only on URLs pointing to shadow
125 # repositories.
101 # repositories.
126 shadow_repo_re = re.compile(
102 shadow_repo_re = re.compile(
127 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
103 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
128 '(?P<target>{slug_pat})/' # target repo
104 '(?P<target>{slug_pat})/' # target repo
129 'pull-request/(?P<pr_id>\\d+)/' # pull request
105 'pull-request/(?P<pr_id>\\d+)/' # pull request
130 'repository$' # shadow repo
106 'repository$' # shadow repo
131 .format(slug_pat=SLUG_RE.pattern))
107 .format(slug_pat=SLUG_RE.pattern))
132
108
133 def __init__(self, config, registry):
109 def __init__(self, config, registry):
134 self.registry = registry
110 self.registry = registry
135 self.config = config
111 self.config = config
136 # re-populated by specialized middleware
112 # re-populated by specialized middleware
137 self.repo_vcs_config = base.Config()
113 self.repo_vcs_config = base.Config()
138
114
139 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
115 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
140 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
116 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
141
117
142 # authenticate this VCS request using authfunc
118 # authenticate this VCS request using authfunc
143 auth_ret_code_detection = \
119 auth_ret_code_detection = \
144 str2bool(self.config.get('auth_ret_code_detection', False))
120 str2bool(self.config.get('auth_ret_code_detection', False))
145 self.authenticate = BasicAuth(
121 self.authenticate = BasicAuth(
146 '', authenticate, registry, config.get('auth_ret_code'),
122 '', authenticate, registry, config.get('auth_ret_code'),
147 auth_ret_code_detection, rc_realm=realm)
123 auth_ret_code_detection, rc_realm=realm)
148 self.ip_addr = '0.0.0.0'
124 self.ip_addr = '0.0.0.0'
149
125
150 @LazyProperty
126 @LazyProperty
151 def global_vcs_config(self):
127 def global_vcs_config(self):
152 try:
128 try:
153 return VcsSettingsModel().get_ui_settings_as_config_obj()
129 return VcsSettingsModel().get_ui_settings_as_config_obj()
154 except Exception:
130 except Exception:
155 return base.Config()
131 return base.Config()
156
132
157 @property
133 @property
158 def base_path(self):
134 def base_path(self):
159 settings_path = self.config.get('repo_store.path')
135 settings_path = self.config.get('repo_store.path')
160
136
161 if not settings_path:
137 if not settings_path:
162 raise ValueError('FATAL: repo_store.path is empty')
138 raise ValueError('FATAL: repo_store.path is empty')
163 return settings_path
139 return settings_path
164
140
165 def set_repo_names(self, environ):
141 def set_repo_names(self, environ):
166 """
142 """
167 This will populate the attributes acl_repo_name, url_repo_name,
143 This will populate the attributes acl_repo_name, url_repo_name,
168 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
144 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
169 shadow) repositories all names are equal. In case of requests to a
145 shadow) repositories all names are equal. In case of requests to a
170 shadow repository the acl-name points to the target repo of the pull
146 shadow repository the acl-name points to the target repo of the pull
171 request and the vcs-name points to the shadow repo file system path.
147 request and the vcs-name points to the shadow repo file system path.
172 The url-name is always the URL used by the vcs client program.
148 The url-name is always the URL used by the vcs client program.
173
149
174 Example in case of a shadow repo:
150 Example in case of a shadow repo:
175 acl_repo_name = RepoGroup/MyRepo
151 acl_repo_name = RepoGroup/MyRepo
176 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
152 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
177 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
153 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
178 """
154 """
179 # First we set the repo name from URL for all attributes. This is the
155 # First we set the repo name from URL for all attributes. This is the
180 # default if handling normal (non shadow) repo requests.
156 # default if handling normal (non shadow) repo requests.
181 self.url_repo_name = self._get_repository_name(environ)
157 self.url_repo_name = self._get_repository_name(environ)
182 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
158 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
183 self.is_shadow_repo = False
159 self.is_shadow_repo = False
184
160
185 # Check if this is a request to a shadow repository.
161 # Check if this is a request to a shadow repository.
186 match = self.shadow_repo_re.match(self.url_repo_name)
162 match = self.shadow_repo_re.match(self.url_repo_name)
187 if match:
163 if match:
188 match_dict = match.groupdict()
164 match_dict = match.groupdict()
189
165
190 # Build acl repo name from regex match.
166 # Build acl repo name from regex match.
191 acl_repo_name = safe_str('{groups}{target}'.format(
167 acl_repo_name = safe_str('{groups}{target}'.format(
192 groups=match_dict['groups'] or '',
168 groups=match_dict['groups'] or '',
193 target=match_dict['target']))
169 target=match_dict['target']))
194
170
195 # Retrieve pull request instance by ID from regex match.
171 # Retrieve pull request instance by ID from regex match.
196 pull_request = PullRequest.get(match_dict['pr_id'])
172 pull_request = PullRequest.get(match_dict['pr_id'])
197
173
198 # Only proceed if we got a pull request and if acl repo name from
174 # Only proceed if we got a pull request and if acl repo name from
199 # URL equals the target repo name of the pull request.
175 # URL equals the target repo name of the pull request.
200 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
176 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
201
177
202 # Get file system path to shadow repository.
178 # Get file system path to shadow repository.
203 workspace_id = PullRequestModel()._workspace_id(pull_request)
179 workspace_id = PullRequestModel()._workspace_id(pull_request)
204 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
180 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
205
181
206 # Store names for later usage.
182 # Store names for later usage.
207 self.vcs_repo_name = vcs_repo_name
183 self.vcs_repo_name = vcs_repo_name
208 self.acl_repo_name = acl_repo_name
184 self.acl_repo_name = acl_repo_name
209 self.is_shadow_repo = True
185 self.is_shadow_repo = True
210
186
211 log.debug('Setting all VCS repository names: %s', {
187 log.debug('Setting all VCS repository names: %s', {
212 'acl_repo_name': self.acl_repo_name,
188 'acl_repo_name': self.acl_repo_name,
213 'url_repo_name': self.url_repo_name,
189 'url_repo_name': self.url_repo_name,
214 'vcs_repo_name': self.vcs_repo_name,
190 'vcs_repo_name': self.vcs_repo_name,
215 })
191 })
216
192
217 @property
193 @property
218 def scm_app(self):
194 def scm_app(self):
219 custom_implementation = self.config['vcs.scm_app_implementation']
195 custom_implementation = self.config['vcs.scm_app_implementation']
220 if custom_implementation == 'http':
196 if custom_implementation == 'http':
221 log.debug('Using HTTP implementation of scm app.')
197 log.debug('Using HTTP implementation of scm app.')
222 scm_app_impl = scm_app_http
198 scm_app_impl = scm_app_http
223 else:
199 else:
224 log.debug('Using custom implementation of scm_app: "{}"'.format(
200 log.debug('Using custom implementation of scm_app: "{}"'.format(
225 custom_implementation))
201 custom_implementation))
226 scm_app_impl = importlib.import_module(custom_implementation)
202 scm_app_impl = importlib.import_module(custom_implementation)
227 return scm_app_impl
203 return scm_app_impl
228
204
229 def _get_by_id(self, repo_name):
205 def _get_by_id(self, repo_name):
230 """
206 """
231 Gets a special pattern _<ID> from clone url and tries to replace it
207 Gets a special pattern _<ID> from clone url and tries to replace it
232 with a repository_name for support of _<ID> non changeable urls
208 with a repository_name for support of _<ID> non changeable urls
233 """
209 """
234
210
235 data = repo_name.split('/')
211 data = repo_name.split('/')
236 if len(data) >= 2:
212 if len(data) >= 2:
237 from rhodecode.model.repo import RepoModel
213 from rhodecode.model.repo import RepoModel
238 by_id_match = RepoModel().get_repo_by_id(repo_name)
214 by_id_match = RepoModel().get_repo_by_id(repo_name)
239 if by_id_match:
215 if by_id_match:
240 data[1] = by_id_match.repo_name
216 data[1] = by_id_match.repo_name
241
217
242 # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO
218 # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO
243 # and we use this data
219 # and we use this data
244 maybe_new_path = '/'.join(data)
220 maybe_new_path = '/'.join(data)
245 return safe_bytes(maybe_new_path).decode('latin1')
221 return safe_bytes(maybe_new_path).decode('latin1')
246
222
247 def _invalidate_cache(self, repo_name):
223 def _invalidate_cache(self, repo_name):
248 """
224 """
249 Set's cache for this repository for invalidation on next access
225 Set's cache for this repository for invalidation on next access
250
226
251 :param repo_name: full repo name, also a cache key
227 :param repo_name: full repo name, also a cache key
252 """
228 """
253 ScmModel().mark_for_invalidation(repo_name)
229 ScmModel().mark_for_invalidation(repo_name)
254
230
255 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
231 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
256 db_repo = Repository.get_by_repo_name(repo_name)
232 db_repo = Repository.get_by_repo_name(repo_name)
257 if not db_repo:
233 if not db_repo:
258 log.debug('Repository `%s` not found inside the database.',
234 log.debug('Repository `%s` not found inside the database.',
259 repo_name)
235 repo_name)
260 return False
236 return False
261
237
262 if db_repo.repo_type != scm_type:
238 if db_repo.repo_type != scm_type:
263 log.warning(
239 log.warning(
264 'Repository `%s` have incorrect scm_type, expected %s got %s',
240 'Repository `%s` have incorrect scm_type, expected %s got %s',
265 repo_name, db_repo.repo_type, scm_type)
241 repo_name, db_repo.repo_type, scm_type)
266 return False
242 return False
267
243
268 config = db_repo._config
244 config = db_repo._config
269 config.set('extensions', 'largefiles', '')
245 config.set('extensions', 'largefiles', '')
270 return is_valid_repo(
246 return is_valid_repo(
271 repo_name, base_path,
247 repo_name, base_path,
272 explicit_scm=scm_type, expect_scm=scm_type, config=config)
248 explicit_scm=scm_type, expect_scm=scm_type, config=config)
273
249
274 def valid_and_active_user(self, user):
250 def valid_and_active_user(self, user):
275 """
251 """
276 Checks if that user is not empty, and if it's actually object it checks
252 Checks if that user is not empty, and if it's actually object it checks
277 if he's active.
253 if he's active.
278
254
279 :param user: user object or None
255 :param user: user object or None
280 :return: boolean
256 :return: boolean
281 """
257 """
282 if user is None:
258 if user is None:
283 return False
259 return False
284
260
285 elif user.active:
261 elif user.active:
286 return True
262 return True
287
263
288 return False
264 return False
289
265
290 @property
266 @property
291 def is_shadow_repo_dir(self):
267 def is_shadow_repo_dir(self):
292 return os.path.isdir(self.vcs_repo_name)
268 return os.path.isdir(self.vcs_repo_name)
293
269
294 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
270 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
295 plugin_id='', plugin_cache_active=False, cache_ttl=0):
271 plugin_id='', plugin_cache_active=False, cache_ttl=0):
296 """
272 """
297 Checks permissions using action (push/pull) user and repository
273 Checks permissions using action (push/pull) user and repository
298 name. If plugin_cache and ttl is set it will use the plugin which
274 name. If plugin_cache and ttl is set it will use the plugin which
299 authenticated the user to store the cached permissions result for N
275 authenticated the user to store the cached permissions result for N
300 amount of seconds as in cache_ttl
276 amount of seconds as in cache_ttl
301
277
302 :param action: push or pull action
278 :param action: push or pull action
303 :param user: user instance
279 :param user: user instance
304 :param repo_name: repository name
280 :param repo_name: repository name
305 """
281 """
306
282
307 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
283 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
308 plugin_id, plugin_cache_active, cache_ttl)
284 plugin_id, plugin_cache_active, cache_ttl)
309
285
310 user_id = user.user_id
286 user_id = user.user_id
311 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
287 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
312 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
288 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
313
289
314 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
290 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
315 expiration_time=cache_ttl,
291 expiration_time=cache_ttl,
316 condition=plugin_cache_active)
292 condition=plugin_cache_active)
317 def compute_perm_vcs(
293 def compute_perm_vcs(
318 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
294 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
319
295
320 log.debug('auth: calculating permission access now...')
296 log.debug('auth: calculating permission access now...')
321 # check IP
297 # check IP
322 inherit = user.inherit_default_permissions
298 inherit = user.inherit_default_permissions
323 ip_allowed = AuthUser.check_ip_allowed(
299 ip_allowed = AuthUser.check_ip_allowed(
324 user_id, ip_addr, inherit_from_default=inherit)
300 user_id, ip_addr, inherit_from_default=inherit)
325 if ip_allowed:
301 if ip_allowed:
326 log.info('Access for IP:%s allowed', ip_addr)
302 log.info('Access for IP:%s allowed', ip_addr)
327 else:
303 else:
328 return False
304 return False
329
305
330 if action == 'push':
306 if action == 'push':
331 perms = ('repository.write', 'repository.admin')
307 perms = ('repository.write', 'repository.admin')
332 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
308 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
333 return False
309 return False
334
310
335 else:
311 else:
336 # any other action need at least read permission
312 # any other action need at least read permission
337 perms = (
313 perms = (
338 'repository.read', 'repository.write', 'repository.admin')
314 'repository.read', 'repository.write', 'repository.admin')
339 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
315 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
340 return False
316 return False
341
317
342 return True
318 return True
343
319
344 start = time.time()
320 start = time.time()
345 log.debug('Running plugin `%s` permissions check', plugin_id)
321 log.debug('Running plugin `%s` permissions check', plugin_id)
346
322
347 # for environ based auth, password can be empty, but then the validation is
323 # for environ based auth, password can be empty, but then the validation is
348 # on the server that fills in the env data needed for authentication
324 # on the server that fills in the env data needed for authentication
349 perm_result = compute_perm_vcs(
325 perm_result = compute_perm_vcs(
350 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
326 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
351
327
352 auth_time = time.time() - start
328 auth_time = time.time() - start
353 log.debug('Permissions for plugin `%s` completed in %.4fs, '
329 log.debug('Permissions for plugin `%s` completed in %.4fs, '
354 'expiration time of fetched cache %.1fs.',
330 'expiration time of fetched cache %.1fs.',
355 plugin_id, auth_time, cache_ttl)
331 plugin_id, auth_time, cache_ttl)
356
332
357 return perm_result
333 return perm_result
358
334
359 def _get_http_scheme(self, environ):
335 def _get_http_scheme(self, environ):
360 try:
336 try:
361 return environ['wsgi.url_scheme']
337 return environ['wsgi.url_scheme']
362 except Exception:
338 except Exception:
363 log.exception('Failed to read http scheme')
339 log.exception('Failed to read http scheme')
364 return 'http'
340 return 'http'
365
341
366 def _check_ssl(self, environ, start_response):
342 def _check_ssl(self, environ, start_response):
367 """
343 """
368 Checks the SSL check flag and returns False if SSL is not present
344 Checks the SSL check flag and returns False if SSL is not present
369 and required True otherwise
345 and required True otherwise
370 """
346 """
371 org_proto = environ['wsgi._org_proto']
347 org_proto = environ['wsgi._org_proto']
372 # check if we have SSL required ! if not it's a bad request !
348 # check if we have SSL required ! if not it's a bad request !
373 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
349 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
374 if require_ssl and org_proto == 'http':
350 if require_ssl and org_proto == 'http':
375 log.debug(
351 log.debug(
376 'Bad request: detected protocol is `%s` and '
352 'Bad request: detected protocol is `%s` and '
377 'SSL/HTTPS is required.', org_proto)
353 'SSL/HTTPS is required.', org_proto)
378 return False
354 return False
379 return True
355 return True
380
356
381 def _get_default_cache_ttl(self):
357 def _get_default_cache_ttl(self):
382 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
358 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
383 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
359 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
384 plugin_settings = plugin.get_settings()
360 plugin_settings = plugin.get_settings()
385 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
361 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
386 plugin_settings) or (False, 0)
362 plugin_settings) or (False, 0)
387 return plugin_cache_active, cache_ttl
363 return plugin_cache_active, cache_ttl
388
364
389 def __call__(self, environ, start_response):
365 def __call__(self, environ, start_response):
390 try:
366 try:
391 return self._handle_request(environ, start_response)
367 return self._handle_request(environ, start_response)
392 except Exception:
368 except Exception:
393 log.exception("Exception while handling request")
369 log.exception("Exception while handling request")
394 appenlight.track_exception(environ)
370 appenlight.track_exception(environ)
395 return HTTPInternalServerError()(environ, start_response)
371 return HTTPInternalServerError()(environ, start_response)
396 finally:
372 finally:
397 meta.Session.remove()
373 meta.Session.remove()
398
374
399 def _handle_request(self, environ, start_response):
375 def _handle_request(self, environ, start_response):
400 if not self._check_ssl(environ, start_response):
376 if not self._check_ssl(environ, start_response):
401 reason = ('SSL required, while RhodeCode was unable '
377 reason = ('SSL required, while RhodeCode was unable '
402 'to detect this as SSL request')
378 'to detect this as SSL request')
403 log.debug('User not allowed to proceed, %s', reason)
379 log.debug('User not allowed to proceed, %s', reason)
404 return HTTPNotAcceptable(reason)(environ, start_response)
380 return HTTPNotAcceptable(reason)(environ, start_response)
405
381
406 if not self.url_repo_name:
382 if not self.url_repo_name:
407 log.warning('Repository name is empty: %s', self.url_repo_name)
383 log.warning('Repository name is empty: %s', self.url_repo_name)
408 # failed to get repo name, we fail now
384 # failed to get repo name, we fail now
409 return HTTPNotFound()(environ, start_response)
385 return HTTPNotFound()(environ, start_response)
410 log.debug('Extracted repo name is %s', self.url_repo_name)
386 log.debug('Extracted repo name is %s', self.url_repo_name)
411
387
412 ip_addr = get_ip_addr(environ)
388 ip_addr = get_ip_addr(environ)
413 user_agent = get_user_agent(environ)
389 user_agent = get_user_agent(environ)
414 username = None
390 username = None
415
391
416 # skip passing error to error controller
392 # skip passing error to error controller
417 environ['pylons.status_code_redirect'] = True
393 environ['pylons.status_code_redirect'] = True
418
394
419 # ======================================================================
395 # ======================================================================
420 # GET ACTION PULL or PUSH
396 # GET ACTION PULL or PUSH
421 # ======================================================================
397 # ======================================================================
422 action = self._get_action(environ)
398 action = self._get_action(environ)
423
399
424 # ======================================================================
400 # ======================================================================
425 # Check if this is a request to a shadow repository of a pull request.
401 # Check if this is a request to a shadow repository of a pull request.
426 # In this case only pull action is allowed.
402 # In this case only pull action is allowed.
427 # ======================================================================
403 # ======================================================================
428 if self.is_shadow_repo and action != 'pull':
404 if self.is_shadow_repo and action != 'pull':
429 reason = 'Only pull action is allowed for shadow repositories.'
405 reason = 'Only pull action is allowed for shadow repositories.'
430 log.debug('User not allowed to proceed, %s', reason)
406 log.debug('User not allowed to proceed, %s', reason)
431 return HTTPNotAcceptable(reason)(environ, start_response)
407 return HTTPNotAcceptable(reason)(environ, start_response)
432
408
433 # Check if the shadow repo actually exists, in case someone refers
409 # Check if the shadow repo actually exists, in case someone refers
434 # to it, and it has been deleted because of successful merge.
410 # to it, and it has been deleted because of successful merge.
435 if self.is_shadow_repo and not self.is_shadow_repo_dir:
411 if self.is_shadow_repo and not self.is_shadow_repo_dir:
436 log.debug(
412 log.debug(
437 'Shadow repo detected, and shadow repo dir `%s` is missing',
413 'Shadow repo detected, and shadow repo dir `%s` is missing',
438 self.is_shadow_repo_dir)
414 self.is_shadow_repo_dir)
439 return HTTPNotFound()(environ, start_response)
415 return HTTPNotFound()(environ, start_response)
440
416
441 # ======================================================================
417 # ======================================================================
442 # CHECK ANONYMOUS PERMISSION
418 # CHECK ANONYMOUS PERMISSION
443 # ======================================================================
419 # ======================================================================
444 detect_force_push = False
420 detect_force_push = False
445 check_branch_perms = False
421 check_branch_perms = False
446 if action in ['pull', 'push']:
422 if action in ['pull', 'push']:
447 user_obj = anonymous_user = User.get_default_user()
423 user_obj = anonymous_user = User.get_default_user()
448 auth_user = user_obj.AuthUser()
424 auth_user = user_obj.AuthUser()
449 username = anonymous_user.username
425 username = anonymous_user.username
450 if anonymous_user.active:
426 if anonymous_user.active:
451 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
427 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
452 # ONLY check permissions if the user is activated
428 # ONLY check permissions if the user is activated
453 anonymous_perm = self._check_permission(
429 anonymous_perm = self._check_permission(
454 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
430 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
455 plugin_id='anonymous_access',
431 plugin_id='anonymous_access',
456 plugin_cache_active=plugin_cache_active,
432 plugin_cache_active=plugin_cache_active,
457 cache_ttl=cache_ttl,
433 cache_ttl=cache_ttl,
458 )
434 )
459 else:
435 else:
460 anonymous_perm = False
436 anonymous_perm = False
461
437
462 if not anonymous_user.active or not anonymous_perm:
438 if not anonymous_user.active or not anonymous_perm:
463 if not anonymous_user.active:
439 if not anonymous_user.active:
464 log.debug('Anonymous access is disabled, running '
440 log.debug('Anonymous access is disabled, running '
465 'authentication')
441 'authentication')
466
442
467 if not anonymous_perm:
443 if not anonymous_perm:
468 log.debug('Not enough credentials to access repo: `%s` '
444 log.debug('Not enough credentials to access repo: `%s` '
469 'repository as anonymous user', self.acl_repo_name)
445 'repository as anonymous user', self.acl_repo_name)
470
446
471
472 username = None
447 username = None
473 # ==============================================================
448 # ==============================================================
474 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
449 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
475 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
450 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
476 # ==============================================================
451 # ==============================================================
477
452
478 # try to auth based on environ, container auth methods
453 # try to auth based on environ, container auth methods
479 log.debug('Running PRE-AUTH for container|headers based authentication')
454 log.debug('Running PRE-AUTH for container|headers based authentication')
480
455
481 # headers auth, by just reading special headers and bypass the auth with user/passwd
456 # headers auth, by just reading special headers and bypass the auth with user/passwd
482 pre_auth = authenticate(
457 pre_auth = authenticate(
483 '', '', environ, VCS_TYPE, registry=self.registry,
458 '', '', environ, VCS_TYPE, registry=self.registry,
484 acl_repo_name=self.acl_repo_name)
459 acl_repo_name=self.acl_repo_name)
485
460
486 if pre_auth and pre_auth.get('username'):
461 if pre_auth and pre_auth.get('username'):
487 username = pre_auth['username']
462 username = pre_auth['username']
488 log.debug('PRE-AUTH got `%s` as username', username)
463 log.debug('PRE-AUTH got `%s` as username', username)
489 if pre_auth:
464 if pre_auth:
490 log.debug('PRE-AUTH successful from %s',
465 log.debug('PRE-AUTH successful from %s',
491 pre_auth.get('auth_data', {}).get('_plugin'))
466 pre_auth.get('auth_data', {}).get('_plugin'))
492
467
493 # If not authenticated by the container, running basic auth
468 # If not authenticated by the container, running basic auth
494 # before inject the calling repo_name for special scope checks
469 # before inject the calling repo_name for special scope checks
495 self.authenticate.acl_repo_name = self.acl_repo_name
470 self.authenticate.acl_repo_name = self.acl_repo_name
496
471
497 plugin_cache_active, cache_ttl = False, 0
472 plugin_cache_active, cache_ttl = False, 0
498 plugin = None
473 plugin = None
499
474
500 # regular auth chain
475 # regular auth chain
501 if not username:
476 if not username:
502 self.authenticate.realm = self.authenticate.get_rc_realm()
477 self.authenticate.realm = self.authenticate.get_rc_realm()
503
478
504 try:
479 try:
505 auth_result = self.authenticate(environ)
480 auth_result = self.authenticate(environ)
506 except (UserCreationError, NotAllowedToCreateUserError) as e:
481 except (UserCreationError, NotAllowedToCreateUserError) as e:
507 log.error(e)
482 log.error(e)
508 reason = safe_str(e)
483 reason = safe_str(e)
509 return HTTPNotAcceptable(reason)(environ, start_response)
484 return HTTPNotAcceptable(reason)(environ, start_response)
510
485
511 if isinstance(auth_result, dict):
486 if isinstance(auth_result, dict):
512 AUTH_TYPE.update(environ, 'basic')
487 AUTH_TYPE.update(environ, 'basic')
513 REMOTE_USER.update(environ, auth_result['username'])
488 REMOTE_USER.update(environ, auth_result['username'])
514 username = auth_result['username']
489 username = auth_result['username']
515 plugin = auth_result.get('auth_data', {}).get('_plugin')
490 plugin = auth_result.get('auth_data', {}).get('_plugin')
516 log.info(
491 log.info(
517 'MAIN-AUTH successful for user `%s` from %s plugin',
492 'MAIN-AUTH successful for user `%s` from %s plugin',
518 username, plugin)
493 username, plugin)
519
494
520 plugin_cache_active, cache_ttl = auth_result.get(
495 plugin_cache_active, cache_ttl = auth_result.get(
521 'auth_data', {}).get('_ttl_cache') or (False, 0)
496 'auth_data', {}).get('_ttl_cache') or (False, 0)
522 else:
497 else:
523 return auth_result.wsgi_application(environ, start_response)
498 return auth_result.wsgi_application(environ, start_response)
524
499
525 # ==============================================================
500 # ==============================================================
526 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
501 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
527 # ==============================================================
502 # ==============================================================
528 user = User.get_by_username(username)
503 user = User.get_by_username(username)
529 if not self.valid_and_active_user(user):
504 if not self.valid_and_active_user(user):
530 return HTTPForbidden()(environ, start_response)
505 return HTTPForbidden()(environ, start_response)
531 username = user.username
506 username = user.username
532 user_id = user.user_id
507 user_id = user.user_id
533
508
534 # check user attributes for password change flag
509 # check user attributes for password change flag
535 user_obj = user
510 user_obj = user
536 auth_user = user_obj.AuthUser()
511 auth_user = user_obj.AuthUser()
537 if user_obj and user_obj.username != User.DEFAULT_USER and \
512 if user_obj and user_obj.username != User.DEFAULT_USER and \
538 user_obj.user_data.get('force_password_change'):
513 user_obj.user_data.get('force_password_change'):
539 reason = 'password change required'
514 reason = 'password change required'
540 log.debug('User not allowed to authenticate, %s', reason)
515 log.debug('User not allowed to authenticate, %s', reason)
541 return HTTPNotAcceptable(reason)(environ, start_response)
516 return HTTPNotAcceptable(reason)(environ, start_response)
542
517
543 # check permissions for this repository
518 # check permissions for this repository
544 perm = self._check_permission(
519 perm = self._check_permission(
545 action, user, auth_user, self.acl_repo_name, ip_addr,
520 action, user, auth_user, self.acl_repo_name, ip_addr,
546 plugin, plugin_cache_active, cache_ttl)
521 plugin, plugin_cache_active, cache_ttl)
547 if not perm:
522 if not perm:
548 return HTTPForbidden()(environ, start_response)
523 return HTTPForbidden()(environ, start_response)
549 environ['rc_auth_user_id'] = str(user_id)
524 environ['rc_auth_user_id'] = str(user_id)
550
525
551 if action == 'push':
526 if action == 'push':
552 perms = auth_user.get_branch_permissions(self.acl_repo_name)
527 perms = auth_user.get_branch_permissions(self.acl_repo_name)
553 if perms:
528 if perms:
554 check_branch_perms = True
529 check_branch_perms = True
555 detect_force_push = True
530 detect_force_push = True
556
531
557 # extras are injected into UI object and later available
532 # extras are injected into UI object and later available
558 # in hooks executed by RhodeCode
533 # in hooks executed by RhodeCode
559 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
534 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
560
535
561 extras = vcs_operation_context(
536 extras = vcs_operation_context(
562 environ, repo_name=self.acl_repo_name, username=username,
537 environ, repo_name=self.acl_repo_name, username=username,
563 action=action, scm=self.SCM, check_locking=check_locking,
538 action=action, scm=self.SCM, check_locking=check_locking,
564 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
539 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
565 detect_force_push=detect_force_push
540 detect_force_push=detect_force_push
566 )
541 )
567
542
568 # ======================================================================
543 # ======================================================================
569 # REQUEST HANDLING
544 # REQUEST HANDLING
570 # ======================================================================
545 # ======================================================================
571 repo_path = os.path.join(
546 repo_path = os.path.join(
572 safe_str(self.base_path), safe_str(self.vcs_repo_name))
547 safe_str(self.base_path), safe_str(self.vcs_repo_name))
573 log.debug('Repository path is %s', repo_path)
548 log.debug('Repository path is %s', repo_path)
574
549
575 fix_PATH()
550 fix_PATH()
576
551
577 log.info(
552 log.info(
578 '%s action on %s repo "%s" by "%s" from %s %s',
553 '%s action on %s repo "%s" by "%s" from %s %s',
579 action, self.SCM, safe_str(self.url_repo_name),
554 action, self.SCM, safe_str(self.url_repo_name),
580 safe_str(username), ip_addr, user_agent)
555 safe_str(username), ip_addr, user_agent)
581
556
582 return self._generate_vcs_response(
557 return self._generate_vcs_response(
583 environ, start_response, repo_path, extras, action)
558 environ, start_response, repo_path, extras, action)
584
559
560 def _get_txn_id(self, environ):
561
562 for k in ['RAW_URI', 'HTTP_DESTINATION']:
563 url = environ.get(k)
564 if not url:
565 continue
566
567 # regex to search for svn-txn-id
568 pattern = r'/!svn/txr/([^/]+)/'
569
570 # Search for the pattern in the URL
571 match = re.search(pattern, url)
572
573 # Check if a match is found and extract the captured group
574 if match:
575 txn_id = match.group(1)
576 return txn_id
577
585 @initialize_generator
578 @initialize_generator
586 def _generate_vcs_response(
579 def _generate_vcs_response(
587 self, environ, start_response, repo_path, extras, action):
580 self, environ, start_response, repo_path, extras, action):
588 """
581 """
589 Returns a generator for the response content.
582 Returns a generator for the response content.
590
583
591 This method is implemented as a generator, so that it can trigger
584 This method is implemented as a generator, so that it can trigger
592 the cache validation after all content sent back to the client. It
585 the cache validation after all content sent back to the client. It
593 also handles the locking exceptions which will be triggered when
586 also handles the locking exceptions which will be triggered when
594 the first chunk is produced by the underlying WSGI application.
587 the first chunk is produced by the underlying WSGI application.
595 """
588 """
596
589 svn_txn_id = ''
597 txn_id = ''
590 if action == 'push':
598 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
591 svn_txn_id = self._get_txn_id(environ)
599 # case for SVN, we want to re-use the callback daemon port
600 # so we use the txn_id, for this we peek the body, and still save
601 # it as wsgi.input
602
603 stream = environ['wsgi.input']
604
605 if isinstance(stream, io.BytesIO):
606 data: bytes = stream.getvalue()
607 elif hasattr(stream, 'buf'): # most likely gunicorn.http.body.Body
608 data: bytes = stream.buf.getvalue()
609 else:
610 # fallback to the crudest way, copy the iterator
611 data = safe_bytes(stream.read())
612 environ['wsgi.input'] = io.BytesIO(data)
613
614 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
615
592
616 callback_daemon, extras = self._prepare_callback_daemon(
593 callback_daemon, extras = self._prepare_callback_daemon(
617 extras, environ, action, txn_id=txn_id)
594 extras, environ, action, txn_id=svn_txn_id)
595
596 if svn_txn_id:
597
598 port = safe_int(extras['hooks_uri'].split(':')[-1])
599 txn_id_data = extras.copy()
600 txn_id_data.update({'port': port})
601 txn_id_data.update({'req_method': environ['REQUEST_METHOD']})
602
603 full_repo_path = repo_path
604 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
605
618 log.debug('HOOKS extras is %s', extras)
606 log.debug('HOOKS extras is %s', extras)
619
607
620 http_scheme = self._get_http_scheme(environ)
608 http_scheme = self._get_http_scheme(environ)
621
609
622 config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
610 config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
623 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
611 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
624 with callback_daemon:
612 with callback_daemon:
625 app.rc_extras = extras
613 app.rc_extras = extras
626
614
627 try:
615 try:
628 response = app(environ, start_response)
616 response = app(environ, start_response)
629 finally:
617 finally:
630 # This statement works together with the decorator
618 # This statement works together with the decorator
631 # "initialize_generator" above. The decorator ensures that
619 # "initialize_generator" above. The decorator ensures that
632 # we hit the first yield statement before the generator is
620 # we hit the first yield statement before the generator is
633 # returned back to the WSGI server. This is needed to
621 # returned back to the WSGI server. This is needed to
634 # ensure that the call to "app" above triggers the
622 # ensure that the call to "app" above triggers the
635 # needed callback to "start_response" before the
623 # needed callback to "start_response" before the
636 # generator is actually used.
624 # generator is actually used.
637 yield "__init__"
625 yield "__init__"
638
626
639 # iter content
627 # iter content
640 for chunk in response:
628 for chunk in response:
641 yield chunk
629 yield chunk
642
630
643 try:
631 try:
644 # invalidate cache on push
632 # invalidate cache on push
645 if action == 'push':
633 if action == 'push':
646 self._invalidate_cache(self.url_repo_name)
634 self._invalidate_cache(self.url_repo_name)
647 finally:
635 finally:
648 meta.Session.remove()
636 meta.Session.remove()
649
637
650 def _get_repository_name(self, environ):
638 def _get_repository_name(self, environ):
651 """Get repository name out of the environmnent
639 """Get repository name out of the environmnent
652
640
653 :param environ: WSGI environment
641 :param environ: WSGI environment
654 """
642 """
655 raise NotImplementedError()
643 raise NotImplementedError()
656
644
657 def _get_action(self, environ):
645 def _get_action(self, environ):
658 """Map request commands into a pull or push command.
646 """Map request commands into a pull or push command.
659
647
660 :param environ: WSGI environment
648 :param environ: WSGI environment
661 """
649 """
662 raise NotImplementedError()
650 raise NotImplementedError()
663
651
664 def _create_wsgi_app(self, repo_path, repo_name, config):
652 def _create_wsgi_app(self, repo_path, repo_name, config):
665 """Return the WSGI app that will finally handle the request."""
653 """Return the WSGI app that will finally handle the request."""
666 raise NotImplementedError()
654 raise NotImplementedError()
667
655
668 def _create_config(self, extras, repo_name, scheme='http'):
656 def _create_config(self, extras, repo_name, scheme='http'):
669 """Create a safe config representation."""
657 """Create a safe config representation."""
670 raise NotImplementedError()
658 raise NotImplementedError()
671
659
672 def _should_use_callback_daemon(self, extras, environ, action):
660 def _should_use_callback_daemon(self, extras, environ, action):
673 if extras.get('is_shadow_repo'):
661 if extras.get('is_shadow_repo'):
674 # we don't want to execute hooks, and callback daemon for shadow repos
662 # we don't want to execute hooks, and callback daemon for shadow repos
675 return False
663 return False
676 return True
664 return True
677
665
678 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
666 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
679 protocol = vcs_settings.HOOKS_PROTOCOL
667 protocol = vcs_settings.HOOKS_PROTOCOL
668
680 if not self._should_use_callback_daemon(extras, environ, action):
669 if not self._should_use_callback_daemon(extras, environ, action):
681 # disable callback daemon for actions that don't require it
670 # disable callback daemon for actions that don't require it
682 protocol = 'local'
671 protocol = 'local'
683
672
684 return prepare_callback_daemon(
673 return prepare_callback_daemon(
685 extras, protocol=protocol,
674 extras, protocol=protocol,
686 host=vcs_settings.HOOKS_HOST, txn_id=txn_id)
675 host=vcs_settings.HOOKS_HOST, txn_id=txn_id)
687
676
688
677
689 def _should_check_locking(query_string):
678 def _should_check_locking(query_string):
690 # this is kind of hacky, but due to how mercurial handles client-server
679 # this is kind of hacky, but due to how mercurial handles client-server
691 # server see all operation on commit; bookmarks, phases and
680 # server see all operation on commit; bookmarks, phases and
692 # obsolescence marker in different transaction, we don't want to check
681 # obsolescence marker in different transaction, we don't want to check
693 # locking on those
682 # locking on those
694 return query_string not in ['cmd=listkeys']
683 return query_string not in ['cmd=listkeys']
@@ -1,421 +1,391 b''
1
1
2 /******************************************************************************
2 /******************************************************************************
3 * *
3 * *
4 * DO NOT CHANGE THIS FILE MANUALLY *
4 * DO NOT CHANGE THIS FILE MANUALLY *
5 * *
5 * *
6 * *
6 * *
7 * This file is automatically generated when the app starts up with *
7 * This file is automatically generated when the app starts up with *
8 * generate_js_files = true *
8 * generate_js_files = true *
9 * *
9 * *
10 * To add a route here pass jsroute=True to the route definition in the app *
10 * To add a route here pass jsroute=True to the route definition in the app *
11 * *
11 * *
12 ******************************************************************************/
12 ******************************************************************************/
13 function registerRCRoutes() {
13 function registerRCRoutes() {
14 // routes registration
14 // routes registration
15 pyroutes.register('admin_artifacts', '/_admin/artifacts', []);
15 pyroutes.register('admin_artifacts', '/_admin/_admin/artifacts', []);
16 pyroutes.register('admin_artifacts_data', '/_admin/artifacts-data', []);
16 pyroutes.register('admin_artifacts_delete', '/_admin/_admin/artifacts/%(uid)s/delete', ['uid']);
17 pyroutes.register('admin_artifacts_delete', '/_admin/artifacts/%(uid)s/delete', ['uid']);
17 pyroutes.register('admin_artifacts_show_all', '/_admin/_admin/artifacts', []);
18 pyroutes.register('admin_artifacts_show_all', '/_admin/artifacts', []);
18 pyroutes.register('admin_artifacts_show_info', '/_admin/_admin/artifacts/%(uid)s', ['uid']);
19 pyroutes.register('admin_artifacts_show_info', '/_admin/artifacts/%(uid)s', ['uid']);
19 pyroutes.register('admin_artifacts_update', '/_admin/_admin/artifacts/%(uid)s/update', ['uid']);
20 pyroutes.register('admin_artifacts_update', '/_admin/artifacts/%(uid)s/update', ['uid']);
21 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
20 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
22 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
21 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
23 pyroutes.register('admin_automation', '/_admin/automation', []);
22 pyroutes.register('admin_automation', '/_admin/_admin/automation', []);
24 pyroutes.register('admin_automation_update', '/_admin/automation/%(entry_id)s/update', ['entry_id']);
25 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
23 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
26 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
24 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
27 pyroutes.register('admin_home', '/_admin', []);
25 pyroutes.register('admin_home', '/_admin', []);
28 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
26 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
29 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
27 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
30 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
28 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
31 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
29 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
32 pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []);
33 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
30 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
34 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
31 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
35 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
32 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
36 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
33 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
37 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
34 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
38 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
35 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
39 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
36 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
40 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
37 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
41 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
38 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
42 pyroutes.register('admin_scheduler', '/_admin/scheduler', []);
39 pyroutes.register('admin_scheduler', '/_admin/_admin/scheduler', []);
43 pyroutes.register('admin_scheduler_show_tasks', '/_admin/scheduler/_tasks', []);
44 pyroutes.register('admin_settings', '/_admin/settings', []);
40 pyroutes.register('admin_settings', '/_admin/settings', []);
45 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
41 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
46 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
42 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
47 pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []);
43 pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []);
48 pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']);
44 pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']);
49 pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions_delete_all', []);
45 pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions_delete_all', []);
50 pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']);
46 pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']);
51 pyroutes.register('admin_settings_global', '/_admin/settings/global', []);
47 pyroutes.register('admin_settings_global', '/_admin/settings/global', []);
52 pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []);
48 pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []);
53 pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
49 pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
54 pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
50 pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
55 pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
51 pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
56 pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
52 pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
57 pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
53 pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
58 pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
54 pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
59 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
55 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
60 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
56 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
61 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
57 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
62 pyroutes.register('admin_settings_license', '/_admin/settings/license', []);
63 pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []);
64 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
58 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
65 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
59 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
66 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
60 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
67 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
61 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
68 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
62 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
69 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
63 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
70 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
64 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
71 pyroutes.register('admin_settings_scheduler_create', '/_admin/scheduler/create', []);
72 pyroutes.register('admin_settings_scheduler_delete', '/_admin/scheduler/%(schedule_id)s/delete', ['schedule_id']);
73 pyroutes.register('admin_settings_scheduler_edit', '/_admin/scheduler/%(schedule_id)s', ['schedule_id']);
74 pyroutes.register('admin_settings_scheduler_execute', '/_admin/scheduler/%(schedule_id)s/execute', ['schedule_id']);
75 pyroutes.register('admin_settings_scheduler_new', '/_admin/scheduler/new', []);
76 pyroutes.register('admin_settings_scheduler_update', '/_admin/scheduler/%(schedule_id)s/update', ['schedule_id']);
77 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
65 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
78 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
66 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
/**
 * pyroutes route-map registrations (auto-generated from the Pyramid route
 * configuration; do not hand-edit individual entries — regenerate instead).
 *
 * Each call registers a named server-side route with the client-side URL
 * builder: pyroutes.register(route_name, url_pattern, required_arg_names).
 * `%(arg)s` placeholders in url_pattern are substituted from the named
 * arguments listed in the third parameter.
 *
 * NOTE(review): the original chunk was a diff-view scrape — every statement
 * carried a gutter line number and appeared once per diff side. This body
 * reconstructs the clean list: gutter numbers stripped, old/new duplicates
 * collapsed to a single registration each, original order preserved.
 * Registration is last-write-wins keyed by route name, so collapsing the
 * duplicate calls leaves the resulting route table identical.
 */
pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
pyroutes.register('admin_settings_update', '/_admin/settings/update', []);
pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []);
pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []);
pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []);
pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
pyroutes.register('apiv2', '/_admin/api', []);
pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']);
pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']);
pyroutes.register('auth_home', '/_admin/auth*traverse', []);
pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
pyroutes.register('branch_remove', '/%(repo_name)s/branches/%(branch_name)s/remove', ['repo_name', 'branch_name']);
pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
pyroutes.register('channelstream_proxy', '/_channelstream', []);
pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
pyroutes.register('check_2fa', '/_admin/check_2fa', []);
pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']);
pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']);
pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']);
pyroutes.register('debug_style_home', '/_admin/debug_style', []);
pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']);
pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']);
pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']);
pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']);
pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']);
pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']);
pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']);
pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']);
pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']);
pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']);
pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']);
pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']);
pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']);
pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']);
pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']);
pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']);
pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']);
pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']);
pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']);
pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']);
pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']);
pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']);
pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']);
pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']);
pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']);
pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']);
pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']);
pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']);
pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']);
pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']);
pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
pyroutes.register('edit_user_auth_tokens_view', '/_admin/users/%(user_id)s/edit/auth_tokens/view', ['user_id']);
pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']);
pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']);
pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']);
pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']);
pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']);
pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']);
pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']);
pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']);
pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']);
pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']);
pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']);
pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']);
pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']);
pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']);
pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']);
pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']);
pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']);
pyroutes.register('favicon', '/favicon.ico', []);
pyroutes.register('file_preview', '/_file_preview', []);
pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/rev/%(revision)s', ['gist_id', 'revision']);
pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
pyroutes.register('gists_create', '/_admin/gists/create', []);
pyroutes.register('gists_new', '/_admin/gists/new', []);
pyroutes.register('gists_show', '/_admin/gists', []);
pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
pyroutes.register('global_integrations_home', '/_admin/integrations', []);
pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
pyroutes.register('goto_switcher_data', '/_goto_data', []);
pyroutes.register('home', '/', []);
pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']);
pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']);
pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']);
pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']);
pyroutes.register('hovercard_username', '/_hovercard/username/%(username)s', ['username']);
pyroutes.register('journal', '/_admin/journal', []);
pyroutes.register('journal_atom', '/_admin/journal/atom', []);
pyroutes.register('journal_public', '/_admin/public_journal', []);
pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
pyroutes.register('journal_rss', '/_admin/journal/rss', []);
pyroutes.register('login', '/_admin/login', []);
pyroutes.register('logout', '/_admin/logout', []);
pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []);
pyroutes.register('main_page_repos_data', '/_home_repos', []);
pyroutes.register('markup_preview', '/_markup_preview', []);
pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
pyroutes.register('my_account_auth_tokens_view', '/_admin/my_account/auth_tokens/view', []);
pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []);
pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []);
pyroutes.register('my_account_configure_2fa', '/_admin/my_account/configure_2fa', []);
pyroutes.register('my_account_configure_2fa_update', '/_admin/my_account/configure_2fa_update', []);
pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []);
pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []);
pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']);
pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
pyroutes.register('my_account_password', '/_admin/my_account/password', []);
pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
pyroutes.register('my_account_regenerate_2fa_recovery_codes', '/_admin/my_account/regenerate_recovery_codes', []);
pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
pyroutes.register('my_account_show_2fa_recovery_codes', '/_admin/my_account/recovery_codes', []);
pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []);
pyroutes.register('my_account_update', '/_admin/my_account/update', []);
pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []);
pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
pyroutes.register('notifications_mark_all_read', '/_admin/notifications_mark_all_read', []);
pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
pyroutes.register('notifications_show_all', '/_admin/notifications', []);
pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
pyroutes.register('ops_celery_error_test', '/_admin/ops/error-celery', []);
pyroutes.register('ops_error_test', '/_admin/ops/error', []);
pyroutes.register('ops_healthcheck', '/_admin/ops/status', []);
pyroutes.register('ops_ping', '/_admin/ops/ping', []);
pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']);
pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']);
pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']);
pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']);
247 pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']);
264 pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']);
248 pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']);
265 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
249 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
266 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
250 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
267 pyroutes.register('pullrequest_draft_comments_submit', '/%(repo_name)s/pull-request/%(pull_request_id)s/draft_comments_submit', ['repo_name', 'pull_request_id']);
268 pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']);
251 pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']);
269 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
252 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
270 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
253 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
271 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
254 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
272 pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']);
255 pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']);
273 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
256 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
274 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
257 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
275 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
258 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
276 pyroutes.register('pullrequest_todos', '/%(repo_name)s/pull-request/%(pull_request_id)s/todos', ['repo_name', 'pull_request_id']);
259 pyroutes.register('pullrequest_todos', '/%(repo_name)s/pull-request/%(pull_request_id)s/todos', ['repo_name', 'pull_request_id']);
277 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
260 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
278 pyroutes.register('register', '/_admin/register', []);
261 pyroutes.register('register', '/_admin/register', []);
279 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
262 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
280 pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']);
281 pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']);
282 pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']);
283 pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']);
284 pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']);
263 pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']);
285 pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']);
286 pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']);
287 pyroutes.register('repo_artifacts_stream_script', '/_file_store/stream-upload-script', []);
288 pyroutes.register('repo_artifacts_stream_store', '/_file_store/stream-upload', []);
289 pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']);
290 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
264 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
291 pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']);
292 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
265 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
293 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
266 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
294 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
267 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
295 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
268 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
296 pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']);
269 pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']);
297 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
270 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
298 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
271 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
299 pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']);
272 pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']);
300 pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/history_view/%(comment_history_id)s', ['repo_name', 'commit_id', 'comment_id', 'comment_history_id']);
273 pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/history_view/%(comment_history_id)s', ['repo_name', 'commit_id', 'comment_id', 'comment_history_id']);
301 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
274 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
302 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
275 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
303 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
276 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
304 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
277 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
305 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
278 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
306 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
279 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
307 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
280 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
308 pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']);
281 pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']);
309 pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']);
282 pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']);
310 pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
283 pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
311 pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
284 pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
312 pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
285 pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
313 pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
286 pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
314 pyroutes.register('repo_create', '/_admin/repos/create', []);
287 pyroutes.register('repo_create', '/_admin/repos/create', []);
315 pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
288 pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
316 pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
289 pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
317 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
290 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
318 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
291 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
319 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
292 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
320 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
293 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
321 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
294 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
322 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
295 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
323 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
296 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
324 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
297 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
325 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
298 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
326 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
299 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
327 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
300 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
328 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
301 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
329 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
302 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
330 pyroutes.register('repo_files_check_head', '/%(repo_name)s/check_head/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
303 pyroutes.register('repo_files_check_head', '/%(repo_name)s/check_head/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
331 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
304 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
332 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
305 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
333 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
306 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
334 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
307 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
335 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
308 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
336 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
309 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
337 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
310 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
338 pyroutes.register('repo_files_replace_binary', '/%(repo_name)s/replace_binary/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
311 pyroutes.register('repo_files_replace_binary', '/%(repo_name)s/replace_binary/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
339 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
312 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
340 pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
313 pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
341 pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
314 pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
342 pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
315 pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
343 pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
316 pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
344 pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
317 pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
345 pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
318 pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
346 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
319 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
347 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
320 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
348 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
321 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
349 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
322 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
350 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']);
323 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']);
351 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
324 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
352 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']);
325 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']);
353 pyroutes.register('repo_group_list_data', '/_repo_groups', []);
326 pyroutes.register('repo_group_list_data', '/_repo_groups', []);
354 pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
327 pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
355 pyroutes.register('repo_groups', '/_admin/repo_groups', []);
328 pyroutes.register('repo_groups', '/_admin/repo_groups', []);
356 pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []);
329 pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []);
357 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
330 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
358 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
331 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
359 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
332 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
360 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
333 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
361 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
334 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
362 pyroutes.register('repo_list_data', '/_repos', []);
335 pyroutes.register('repo_list_data', '/_repos', []);
363 pyroutes.register('repo_new', '/_admin/repos/new', []);
336 pyroutes.register('repo_new', '/_admin/repos/new', []);
364 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
337 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
365 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
338 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
366 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
339 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
367 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
340 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
368 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
341 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
369 pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']);
370 pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']);
371 pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']);
372 pyroutes.register('repo_settings_quick_actions', '/%(repo_name)s/settings/quick-action', ['repo_name']);
342 pyroutes.register('repo_settings_quick_actions', '/%(repo_name)s/settings/quick-action', ['repo_name']);
373 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
343 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
374 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
344 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
375 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
345 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
376 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
346 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
377 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
347 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
378 pyroutes.register('repos', '/_admin/repos', []);
348 pyroutes.register('repos', '/_admin/repos', []);
379 pyroutes.register('repos_data', '/_admin/repos_data', []);
349 pyroutes.register('repos_data', '/_admin/repos_data', []);
380 pyroutes.register('reset_password', '/_admin/password_reset', []);
350 pyroutes.register('reset_password', '/_admin/password_reset', []);
381 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
351 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
382 pyroutes.register('robots', '/robots.txt', []);
352 pyroutes.register('robots', '/robots.txt', []);
383 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']);
353 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']);
384 pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']);
354 pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']);
385 pyroutes.register('search', '/_admin/search', []);
355 pyroutes.register('search', '/_admin/search', []);
386 pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']);
356 pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']);
387 pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']);
357 pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']);
388 pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']);
358 pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']);
389 pyroutes.register('setup_2fa', '/_admin/setup_2fa', []);
359 pyroutes.register('setup_2fa', '/_admin/setup_2fa', []);
390 pyroutes.register('store_user_session_value', '/_store_session_attr', []);
360 pyroutes.register('store_user_session_value', '/_store_session_attr', []);
391 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
361 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
392 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
362 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
393 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
363 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
394 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
364 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
395 pyroutes.register('upload_file', '/_file_store/upload', []);
365 pyroutes.register('upload_file', '/_file_store/upload', []);
396 pyroutes.register('user_autocomplete_data', '/_users', []);
366 pyroutes.register('user_autocomplete_data', '/_users', []);
397 pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
367 pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
398 pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
368 pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
399 pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']);
369 pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']);
400 pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
370 pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
401 pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
371 pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
402 pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
372 pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
403 pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
373 pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
404 pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']);
374 pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']);
405 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
375 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
406 pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
376 pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
407 pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
377 pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
408 pyroutes.register('user_groups', '/_admin/user_groups', []);
378 pyroutes.register('user_groups', '/_admin/user_groups', []);
409 pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
379 pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
410 pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
380 pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
411 pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
381 pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
412 pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
382 pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
413 pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
383 pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
414 pyroutes.register('user_notice_dismiss', '/_admin/users/%(user_id)s/notice_dismiss', ['user_id']);
384 pyroutes.register('user_notice_dismiss', '/_admin/users/%(user_id)s/notice_dismiss', ['user_id']);
415 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
385 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
416 pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
386 pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
417 pyroutes.register('users', '/_admin/users', []);
387 pyroutes.register('users', '/_admin/users', []);
418 pyroutes.register('users_create', '/_admin/users/create', []);
388 pyroutes.register('users_create', '/_admin/users/create', []);
419 pyroutes.register('users_data', '/_admin/users_data', []);
389 pyroutes.register('users_data', '/_admin/users_data', []);
420 pyroutes.register('users_new', '/_admin/users/new', []);
390 pyroutes.register('users_new', '/_admin/users/new', []);
421 }
391 }
@@ -1,328 +1,328 b''
1 ## snippet for displaying issue tracker settings
1 ## snippet for displaying issue tracker settings
2 ## usage:
2 ## usage:
3 ## <%namespace name="its" file="/base/issue_tracker_settings.mako"/>
3 ## <%namespace name="its" file="/base/issue_tracker_settings.mako"/>
4 ## ${its.issue_tracker_settings_table(patterns, form_url, delete_url)}
4 ## ${its.issue_tracker_settings_table(patterns, form_url, delete_url)}
5 ## ${its.issue_tracker_settings_test(test_url)}
5 ## ${its.issue_tracker_settings_test(test_url)}
6
6
7 <%def name="issue_tracker_settings_table(patterns, form_url, delete_url)">
7 <%def name="issue_tracker_settings_table(patterns, form_url, delete_url)">
8 <%
8 <%
9 # Name/desc, pattern, issue prefix
9 # Name/desc, pattern, issue prefix
10 examples = [
10 examples = [
11 (
11 (
12 ' ',
12 ' ',
13 ' ',
13 ' ',
14 ' ',
14 ' ',
15 ' '
15 ' '
16 ),
16 ),
17
17
18 (
18 (
19 'Tickets with #123 (Redmine etc)',
19 'Tickets with #123 (Redmine etc)',
20 '(?<![a-zA-Z0-9_/]{1,10}-?)(#)(?P<issue_id>\d+)',
20 '(?<![a-zA-Z0-9_/]{1,10}-?)(#)(?P<issue_id>[0-9]+)',
21 'https://myissueserver.com/${repo}/issue/${issue_id}',
21 'https://myissueserver.com/${repo}/issue/${issue_id}',
22 ''
22 ''
23 ),
23 ),
24
24
25 (
25 (
26 'Redmine - Alternative',
26 'Redmine - Alternative',
27 '(?:issue-)(\d+)',
27 '(?:issue-)(\d+)',
28 'https://myissueserver.com/redmine/issue/${id}',
28 'https://myissueserver.com/redmine/issue/${id}',
29 ''
29 ''
30 ),
30 ),
31
31
32 (
32 (
33 'Redmine - Wiki',
33 'Redmine - Wiki',
34 '(?:wiki-)([a-zA-Z0-9]+)',
34 '(?:wiki-)([a-zA-Z0-9]+)',
35 'http://example.org/projects/${repo_name}/wiki/${id}',
35 'http://example.org/projects/${repo_name}/wiki/${id}',
36 'wiki-'
36 'wiki-'
37 ),
37 ),
38
38
39 (
39 (
40 'JIRA - All tickets',
40 'JIRA - All tickets',
41 # official JIRA ticket pattern
41 # official JIRA ticket pattern
42 '(?<![a-zA-Z0-9_/#]-?)(?P<issue_id>[A-Z]{1,6}-(?:[1-9][0-9]{0,7}))',
42 '(?<![a-zA-Z0-9_/#]-?)(?P<issue_id>[A-Z]{1,6}-(?:[1-9][0-9]{0,7}))',
43 'https://myjira.com/browse/${issue_id}',
43 'https://myjira.com/browse/${issue_id}',
44 ''
44 ''
45 ),
45 ),
46
46
47 (
47 (
48 'JIRA - Single project (JRA-XXXXXXXX)',
48 'JIRA - Single project (JRA-XXXXXXXX)',
49 '(?<![a-zA-Z0-9_/#]-?)(?P<issue_id>JRA-(?:[1-9][0-9]{0,7}))',
49 '(?<![a-zA-Z0-9_/#]-?)(?P<issue_id>JRA-(?:[1-9][0-9]{0,7}))',
50 'https://myjira.com/${issue_id}',
50 'https://myjira.com/${issue_id}',
51 ''
51 ''
52 ),
52 ),
53
53
54 (
54 (
55 'Confluence WIKI',
55 'Confluence WIKI',
56 '(?:conf-)([A-Z0-9]+)',
56 '(?:conf-)([A-Z0-9]+)',
57 'https://example.atlassian.net/display/wiki/${id}/${repo_name}',
57 'https://example.atlassian.net/display/wiki/${id}/${repo_name}',
58 'CONF-',
58 'CONF-',
59 ),
59 ),
60
60
61 (
61 (
62 'Pivotal Tracker',
62 'Pivotal Tracker',
63 '(?:pivot-)(?P<project_id>\d+)-(?P<story>\d+)',
63 '(?:pivot-)(?P<project_id>\d+)-(?P<story>[0-9]+)',
64 'https://www.pivotaltracker.com/s/projects/${project_id}/stories/${story}',
64 'https://www.pivotaltracker.com/s/projects/${project_id}/stories/${story}',
65 'PIV-',
65 'PIV-',
66 ),
66 ),
67
67
68 (
68 (
69 'Trello',
69 'Trello',
70 '(?:trello-)(?P<card_id>[a-zA-Z0-9]+)',
70 '(?:trello-)(?P<card_id>[a-zA-Z0-9]+)',
71 'https://trello.com/example.com/${card_id}',
71 'https://trello.com/example.com/${card_id}',
72 'TRELLO-',
72 'TRELLO-',
73 ),
73 ),
74 ]
74 ]
75 %>
75 %>
76
76
77 <table class="rctable issuetracker">
77 <table class="rctable issuetracker">
78 <tr>
78 <tr>
79 <th>${_('Description')}</th>
79 <th>${_('Description')}</th>
80 <th>${_('Pattern')}</th>
80 <th>${_('Pattern')}</th>
81 <th>${_('Url')}</th>
81 <th>${_('Url')}</th>
82 <th>${_('Extra Prefix')}</th>
82 <th>${_('Extra Prefix')}</th>
83 <th ></th>
83 <th ></th>
84 </tr>
84 </tr>
85 % for name, pat, url, pref in examples:
85 % for name, pat, url, pref in examples:
86 <tr class="it-examples" style="${'' if loop.index == 0 else 'display:none'}">
86 <tr class="it-examples" style="${'' if loop.index == 0 else 'display:none'}">
87 <td class="td-issue-tracker-name issue-tracker-example">${name}</td>
87 <td class="td-issue-tracker-name issue-tracker-example">${name}</td>
88 <td class="td-regex issue-tracker-example">${pat}</td>
88 <td class="td-regex issue-tracker-example">${pat}</td>
89 <td class="td-url issue-tracker-example">${url}</td>
89 <td class="td-url issue-tracker-example">${url}</td>
90 <td class="td-prefix issue-tracker-example">${pref}</td>
90 <td class="td-prefix issue-tracker-example">${pref}</td>
91 <td>
91 <td>
92 % if loop.index == 0:
92 % if loop.index == 0:
93 <a href="#showMore" onclick="$('.it-examples').toggle(); return false">${_('show examples')}</a>
93 <a href="#showMore" onclick="$('.it-examples').toggle(); return false">${_('show examples')}</a>
94 % else:
94 % else:
95 <a href="#copyToInput" onclick="copyToInput(this, '${h.str_json(name)}', '${h.str_json(pat)}', '${h.str_json(url)}', '${h.str_json(pref)}'); return false">copy to input</a>
95 <a href="#copyToInput" onclick="copyToInput(this, '${h.str_json(name)}', '${h.str_json(pat)}', '${h.str_json(url)}', '${h.str_json(pref)}'); return false">copy to input</a>
96 % endif
96 % endif
97 </td>
97 </td>
98 </tr>
98 </tr>
99 % endfor
99 % endfor
100
100
101 %for uid, entry in patterns:
101 %for uid, entry in patterns:
102 <tr id="entry_${uid}">
102 <tr id="entry_${uid}">
103 <td class="td-issue-tracker-name issuetracker_desc">
103 <td class="td-issue-tracker-name issuetracker_desc">
104 <span class="entry">
104 <span class="entry">
105 ${entry.desc}
105 ${entry.desc}
106 </span>
106 </span>
107 <span class="edit">
107 <span class="edit">
108 ${h.text('new_pattern_description_'+uid, class_='medium-inline', value=entry.desc or '')}
108 ${h.text('new_pattern_description_'+uid, class_='medium-inline', value=entry.desc or '')}
109 </span>
109 </span>
110 </td>
110 </td>
111 <td class="td-issue-tracker-regex issuetracker_pat">
111 <td class="td-issue-tracker-regex issuetracker_pat">
112 <span class="entry">
112 <span class="entry">
113 ${entry.pat}
113 ${entry.pat}
114 </span>
114 </span>
115 <span class="edit">
115 <span class="edit">
116 ${h.text('new_pattern_pattern_'+uid, class_='medium-inline', value=entry.pat or '')}
116 ${h.text('new_pattern_pattern_'+uid, class_='medium-inline', value=entry.pat or '')}
117 </span>
117 </span>
118 </td>
118 </td>
119 <td class="td-url issuetracker_url">
119 <td class="td-url issuetracker_url">
120 <span class="entry">
120 <span class="entry">
121 ${entry.url}
121 ${entry.url}
122 </span>
122 </span>
123 <span class="edit">
123 <span class="edit">
124 ${h.text('new_pattern_url_'+uid, class_='medium-inline', value=entry.url or '')}
124 ${h.text('new_pattern_url_'+uid, class_='medium-inline', value=entry.url or '')}
125 </span>
125 </span>
126 </td>
126 </td>
127 <td class="td-prefix issuetracker_pref">
127 <td class="td-prefix issuetracker_pref">
128 <span class="entry">
128 <span class="entry">
129 ${entry.pref}
129 ${entry.pref}
130 </span>
130 </span>
131 <span class="edit">
131 <span class="edit">
132 ${h.text('new_pattern_prefix_'+uid, class_='medium-inline', value=entry.pref or '')}
132 ${h.text('new_pattern_prefix_'+uid, class_='medium-inline', value=entry.pref or '')}
133 </span>
133 </span>
134 </td>
134 </td>
135 <td class="td-action">
135 <td class="td-action">
136 <div class="grid_edit">
136 <div class="grid_edit">
137 <span class="entry">
137 <span class="entry">
138 <a class="edit_issuetracker_entry" href="">${_('Edit')}</a>
138 <a class="edit_issuetracker_entry" href="">${_('Edit')}</a>
139 </span>
139 </span>
140 <span class="edit">
140 <span class="edit">
141 <input id="uid_${uid}" name="uid" type="hidden" value="${uid}">
141 <input id="uid_${uid}" name="uid" type="hidden" value="${uid}">
142 </span>
142 </span>
143 </div>
143 </div>
144 <div class="grid_delete">
144 <div class="grid_delete">
145 <span class="entry">
145 <span class="entry">
146 <a class="btn btn-link btn-danger delete_issuetracker_entry" data-desc="${entry.desc}" data-uid="${uid}">
146 <a class="btn btn-link btn-danger delete_issuetracker_entry" data-desc="${entry.desc}" data-uid="${uid}">
147 ${_('Delete')}
147 ${_('Delete')}
148 </a>
148 </a>
149 </span>
149 </span>
150 <span class="edit">
150 <span class="edit">
151 <a class="btn btn-link btn-danger edit_issuetracker_cancel" data-uid="${uid}">${_('Cancel')}</a>
151 <a class="btn btn-link btn-danger edit_issuetracker_cancel" data-uid="${uid}">${_('Cancel')}</a>
152 </span>
152 </span>
153 </div>
153 </div>
154 </td>
154 </td>
155 </tr>
155 </tr>
156 %endfor
156 %endfor
157 <tr id="last-row"></tr>
157 <tr id="last-row"></tr>
158 </table>
158 </table>
159 <p>
159 <p>
160 <a id="add_pattern" class="link">
160 <a id="add_pattern" class="link">
161 ${_('Add new')}
161 ${_('Add new')}
162 </a>
162 </a>
163 </p>
163 </p>
164
164
165 <script type="text/javascript">
165 <script type="text/javascript">
166 var newEntryLabel = $('label[for="new_entry"]');
166 var newEntryLabel = $('label[for="new_entry"]');
167
167
168 var resetEntry = function() {
168 var resetEntry = function() {
169 newEntryLabel.text("${_('New Entry')}:");
169 newEntryLabel.text("${_('New Entry')}:");
170 };
170 };
171
171
172 var delete_pattern = function(entry) {
172 var delete_pattern = function(entry) {
173 if (confirm("${_('Confirm to remove this pattern:')} "+$(entry).data('desc'))) {
173 if (confirm("${_('Confirm to remove this pattern:')} "+$(entry).data('desc'))) {
174 $.ajax({
174 $.ajax({
175 type: "POST",
175 type: "POST",
176 url: "${delete_url}",
176 url: "${delete_url}",
177 data: {
177 data: {
178 'csrf_token': CSRF_TOKEN,
178 'csrf_token': CSRF_TOKEN,
179 'uid':$(entry).data('uid')
179 'uid':$(entry).data('uid')
180 },
180 },
181 success: function(){
181 success: function(){
182 window.location.reload();
182 window.location.reload();
183 },
183 },
184 error: function(data, textStatus, errorThrown){
184 error: function(data, textStatus, errorThrown){
185 alert("Error while deleting entry.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(entry)[0].url));
185 alert("Error while deleting entry.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(entry)[0].url));
186 }
186 }
187 });
187 });
188 }
188 }
189 };
189 };
190
190
191 $('.delete_issuetracker_entry').on('click', function(e){
191 $('.delete_issuetracker_entry').on('click', function(e){
192 e.preventDefault();
192 e.preventDefault();
193 delete_pattern(this);
193 delete_pattern(this);
194 });
194 });
195
195
196 $('.edit_issuetracker_entry').on('click', function(e){
196 $('.edit_issuetracker_entry').on('click', function(e){
197 e.preventDefault();
197 e.preventDefault();
198 $(this).parents('tr').addClass('editopen');
198 $(this).parents('tr').addClass('editopen');
199 });
199 });
200
200
201 $('.edit_issuetracker_cancel').on('click', function(e){
201 $('.edit_issuetracker_cancel').on('click', function(e){
202 e.preventDefault();
202 e.preventDefault();
203 $(this).parents('tr').removeClass('editopen');
203 $(this).parents('tr').removeClass('editopen');
204 // Reset to original value
204 // Reset to original value
205 var uid = $(this).data('uid');
205 var uid = $(this).data('uid');
206 $('#'+uid+' input').each(function(e) {
206 $('#'+uid+' input').each(function(e) {
207 this.value = this.defaultValue;
207 this.value = this.defaultValue;
208 });
208 });
209 });
209 });
210
210
211 $('input#reset').on('click', function(e) {
211 $('input#reset').on('click', function(e) {
212 resetEntry();
212 resetEntry();
213 });
213 });
214
214
215 $('#add_pattern').on('click', function(e) {
215 $('#add_pattern').on('click', function(e) {
216 addNewPatternInput();
216 addNewPatternInput();
217 });
217 });
218
218
219 var copied = false;
219 var copied = false;
220 copyToInput = function (elem, name, pat, url, pref) {
220 copyToInput = function (elem, name, pat, url, pref) {
221 if (copied === false) {
221 if (copied === false) {
222 addNewPatternInput();
222 addNewPatternInput();
223 copied = true;
223 copied = true;
224 }
224 }
225 $(elem).hide();
225 $(elem).hide();
226 var load = function(text){
226 var load = function(text){
227 return text.replace(/["]/g, "")
227 return text.replace(/["]/g, "")
228 };
228 };
229 $('#description_1').val(load(name));
229 $('#description_1').val(load(name));
230 $('#pattern_1').val(load(pat));
230 $('#pattern_1').val(load(pat));
231 $('#url_1').val(load(url));
231 $('#url_1').val(load(url));
232 $('#prefix_1').val(load(pref));
232 $('#prefix_1').val(load(pref));
233
233
234 }
234 }
235
235
236 </script>
236 </script>
237 </%def>
237 </%def>
238
238
239 <%def name="issue_tracker_new_row()">
239 <%def name="issue_tracker_new_row()">
240 <table id="add-row-tmpl" style="display: none;">
240 <table id="add-row-tmpl" style="display: none;">
241 <tbody>
241 <tbody>
242 <tr class="new_pattern">
242 <tr class="new_pattern">
243 <td class="td-issue-tracker-name issuetracker_desc">
243 <td class="td-issue-tracker-name issuetracker_desc">
244 <span class="entry">
244 <span class="entry">
245 <input class="medium-inline" id="description_##UUID##" name="new_pattern_description_##UUID##" value="##DESCRIPTION##" type="text">
245 <input class="medium-inline" id="description_##UUID##" name="new_pattern_description_##UUID##" value="##DESCRIPTION##" type="text">
246 </span>
246 </span>
247 </td>
247 </td>
248 <td class="td-issue-tracker-regex issuetracker_pat">
248 <td class="td-issue-tracker-regex issuetracker_pat">
249 <span class="entry">
249 <span class="entry">
250 <input class="medium-inline" id="pattern_##UUID##" name="new_pattern_pattern_##UUID##" placeholder="Pattern"
250 <input class="medium-inline" id="pattern_##UUID##" name="new_pattern_pattern_##UUID##" placeholder="Pattern"
251 value="##PATTERN##" type="text">
251 value="##PATTERN##" type="text">
252 </span>
252 </span>
253 </td>
253 </td>
254 <td class="td-url issuetracker_url">
254 <td class="td-url issuetracker_url">
255 <span class="entry">
255 <span class="entry">
256 <input class="medium-inline" id="url_##UUID##" name="new_pattern_url_##UUID##" placeholder="Url" value="##URL##" type="text">
256 <input class="medium-inline" id="url_##UUID##" name="new_pattern_url_##UUID##" placeholder="Url" value="##URL##" type="text">
257 </span>
257 </span>
258 </td>
258 </td>
259 <td class="td-prefix issuetracker_pref">
259 <td class="td-prefix issuetracker_pref">
260 <span class="entry">
260 <span class="entry">
261 <input class="medium-inline" id="prefix_##UUID##" name="new_pattern_prefix_##UUID##" placeholder="Prefix" value="##PREFIX##" type="text">
261 <input class="medium-inline" id="prefix_##UUID##" name="new_pattern_prefix_##UUID##" placeholder="Prefix" value="##PREFIX##" type="text">
262 </span>
262 </span>
263 </td>
263 </td>
264 <td class="td-action">
264 <td class="td-action">
265 </td>
265 </td>
266 <input id="uid_##UUID##" name="uid_##UUID##" type="hidden" value="">
266 <input id="uid_##UUID##" name="uid_##UUID##" type="hidden" value="">
267 </tr>
267 </tr>
268 </tbody>
268 </tbody>
269 </table>
269 </table>
270 </%def>
270 </%def>
271
271
272 <%def name="issue_tracker_settings_test(test_url)">
272 <%def name="issue_tracker_settings_test(test_url)">
273 <div class="form-vertical">
273 <div class="form-vertical">
274 <div class="fields">
274 <div class="fields">
275 <div class="field">
275 <div class="field">
276 <div class='textarea-full'>
276 <div class='textarea-full'>
277 <textarea id="test_pattern_data" rows="12">
277 <textarea id="test_pattern_data" rows="12">
278 This is an example text for testing issue tracker patterns.
278 This is an example text for testing issue tracker patterns.
279 This commit fixes ticket #451 and ticket #910, reference for JRA-401.
279 This commit fixes ticket #451 and ticket #910, reference for JRA-401.
280 The following tickets will get mentioned:
280 The following tickets will get mentioned:
281 #123
281 #123
282 #456 and PROJ-101
282 #456 and PROJ-101
283 JRA-123 and #123
283 JRA-123 and #123
284 PROJ-456
284 PROJ-456
285
285
286 [my artifact](http://something.com/JRA-1234-build.zip)
286 [my artifact](http://something.com/JRA-1234-build.zip)
287
287
288 - #1001
288 - #1001
289 - JRA-998
289 - JRA-998
290
290
291 Open a pull request !101 to contribute!
291 Open a pull request !101 to contribute!
292 Added tag v1.3.0 for commit 0f3b629be725
292 Added tag v1.3.0 for commit 0f3b629be725
293
293
294 Add a test pattern here and hit preview to see the link.
294 Add a test pattern here and hit preview to see the link.
295 </textarea>
295 </textarea>
296 </div>
296 </div>
297 </div>
297 </div>
298 </div>
298 </div>
299 <div class="test_pattern_preview">
299 <div class="test_pattern_preview">
300 <div id="test_pattern" class="btn btn-small" >${_('Preview')}</div>
300 <div id="test_pattern" class="btn btn-small" >${_('Preview')}</div>
301 <p>${_('Test Pattern Preview')}</p>
301 <p>${_('Test Pattern Preview')}</p>
302 <div id="test_pattern_result" style="white-space: pre-wrap"></div>
302 <div id="test_pattern_result" style="white-space: pre-wrap"></div>
303 </div>
303 </div>
304 </div>
304 </div>
305
305
306 <script type="text/javascript">
306 <script type="text/javascript">
307 $('#test_pattern').on('click', function(e) {
307 $('#test_pattern').on('click', function(e) {
308 $.ajax({
308 $.ajax({
309 type: "POST",
309 type: "POST",
310 url: "${test_url}",
310 url: "${test_url}",
311 data: {
311 data: {
312 'test_text': $('#test_pattern_data').val(),
312 'test_text': $('#test_pattern_data').val(),
313 'csrf_token': CSRF_TOKEN
313 'csrf_token': CSRF_TOKEN
314 },
314 },
315 success: function(data){
315 success: function(data){
316 $('#test_pattern_result').html(data);
316 $('#test_pattern_result').html(data);
317 tooltipActivate();
317 tooltipActivate();
318 },
318 },
319 error: function(jqXHR, textStatus, errorThrown){
319 error: function(jqXHR, textStatus, errorThrown){
320 $('#test_pattern_result').html('Error: ' + errorThrown);
320 $('#test_pattern_result').html('Error: ' + errorThrown);
321 }
321 }
322 });
322 });
323 $('#test_pattern_result').show();
323 $('#test_pattern_result').show();
324 });
324 });
325 </script>
325 </script>
326 </%def>
326 </%def>
327
327
328
328
@@ -1,346 +1,345 b''
1 ## snippet for displaying vcs settings
1 ## snippet for displaying vcs settings
2 ## usage:
2 ## usage:
3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
4 ## ${vcss.vcs_settings_fields()}
4 ## ${vcss.vcs_settings_fields()}
5
5
6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, **kwargs)">
6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, **kwargs)">
7 % if display_globals:
7 % if display_globals:
8 <div class="panel panel-default">
8 <div class="panel panel-default">
9 <div class="panel-heading" id="general">
9 <div class="panel-heading" id="general">
10 <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ΒΆ</a></h3>
10 <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ΒΆ</a></h3>
11 </div>
11 </div>
12 <div class="panel-body">
12 <div class="panel-body">
13 <div class="field">
13 <div class="field">
14 <div class="checkbox">
14 <div class="checkbox">
15 ${h.checkbox('web_push_ssl' + suffix, 'True')}
15 ${h.checkbox('web_push_ssl' + suffix, 'True')}
16 <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
16 <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
17 </div>
17 </div>
18 <div class="label">
18 <div class="label">
19 <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If SSL certificate is missing it will return a HTTP Error 406: Not Acceptable.')}</span>
19 <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If SSL certificate is missing it will return a HTTP Error 406: Not Acceptable.')}</span>
20 </div>
20 </div>
21 </div>
21 </div>
22 </div>
22 </div>
23 </div>
23 </div>
24 % endif
24 % endif
25
25
26 % if display_globals or repo_type in ['git', 'hg']:
26 % if display_globals or repo_type in ['git', 'hg']:
27 <div class="panel panel-default">
27 <div class="panel panel-default">
28 <div class="panel-heading" id="vcs-hooks-options">
28 <div class="panel-heading" id="vcs-hooks-options">
29 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ΒΆ</a></h3>
29 <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ΒΆ</a></h3>
30 </div>
30 </div>
31 <div class="panel-body">
31 <div class="panel-body">
32 <div class="field">
32 <div class="field">
33 <div class="checkbox">
33 <div class="checkbox">
34 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
34 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
35 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
35 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
36 </div>
36 </div>
37
37
38 <div class="label">
38 <div class="label">
39 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
39 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
40 </div>
40 </div>
41 <div class="checkbox">
41 <div class="checkbox">
42 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
42 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
43 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
43 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
44 </div>
44 </div>
45 <div class="label">
45 <div class="label">
46 <span class="help-block">${_('Execute Built in pre/post push hooks. This also executes rcextensions hooks.')}</span>
46 <span class="help-block">${_('Execute Built in pre/post push hooks. This also executes rcextensions hooks.')}</span>
47 </div>
47 </div>
48 <div class="checkbox">
48 <div class="checkbox">
49 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
49 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
50 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
50 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
51 </div>
51 </div>
52 <div class="label">
52 <div class="label">
53 <span class="help-block">${_('Execute Built in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
53 <span class="help-block">${_('Execute Built in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
54 </div>
54 </div>
55 </div>
55 </div>
56 </div>
56 </div>
57 </div>
57 </div>
58 % endif
58 % endif
59
59
60 % if display_globals or repo_type in ['hg']:
60 % if display_globals or repo_type in ['hg']:
61 <div class="panel panel-default">
61 <div class="panel panel-default">
62 <div class="panel-heading" id="vcs-hg-options">
62 <div class="panel-heading" id="vcs-hg-options">
63 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ΒΆ</a></h3>
63 <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ΒΆ</a></h3>
64 </div>
64 </div>
65 <div class="panel-body">
65 <div class="panel-body">
66 <div class="checkbox">
66 <div class="checkbox">
67 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
67 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
68 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
68 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
69 </div>
69 </div>
70 <div class="label">
70 <div class="label">
71 % if display_globals:
71 % if display_globals:
72 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
72 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
73 % else:
73 % else:
74 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
74 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
75 % endif
75 % endif
76 </div>
76 </div>
77
77
78 % if display_globals:
78 % if display_globals:
79 <div class="field">
79 <div class="field">
80 <div class="input">
80 <div class="input">
81 ${h.text('largefiles_usercache' + suffix, size=59)}
81 ${h.text('largefiles_usercache' + suffix, size=59)}
82 </div>
82 </div>
83 </div>
83 </div>
84 <div class="label">
84 <div class="label">
85 <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
85 <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
86 </div>
86 </div>
87 % endif
87 % endif
88
88
89 <div class="checkbox">
89 <div class="checkbox">
90 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
90 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
91 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
91 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
92 </div>
92 </div>
93 <div class="label">
93 <div class="label">
94 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
94 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
95 </div>
95 </div>
96
96
97 <div class="checkbox">
97 <div class="checkbox">
98 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
98 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
99 <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label>
99 <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label>
100 </div>
100 </div>
101 <div class="label">
101 <div class="label">
102 % if display_globals:
102 % if display_globals:
103 <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span>
103 <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span>
104 % else:
104 % else:
105 <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span>
105 <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span>
106 % endif
106 % endif
107 </div>
107 </div>
108
108
109 </div>
109 </div>
110 </div>
110 </div>
111 % endif
111 % endif
112
112
113 % if display_globals or repo_type in ['git']:
113 % if display_globals or repo_type in ['git']:
114 <div class="panel panel-default">
114 <div class="panel panel-default">
115 <div class="panel-heading" id="vcs-git-options">
115 <div class="panel-heading" id="vcs-git-options">
116 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ΒΆ</a></h3>
116 <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ΒΆ</a></h3>
117 </div>
117 </div>
118 <div class="panel-body">
118 <div class="panel-body">
119 <div class="checkbox">
119 <div class="checkbox">
120 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
120 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
121 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
121 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
122 </div>
122 </div>
123 <div class="label">
123 <div class="label">
124 % if display_globals:
124 % if display_globals:
125 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
125 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
126 % else:
126 % else:
127 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
127 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
128 % endif
128 % endif
129 </div>
129 </div>
130
130
131 % if display_globals:
131 % if display_globals:
132 <div class="field">
132 <div class="field">
133 <div class="input">
133 <div class="input">
134 ${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
134 ${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
135 </div>
135 </div>
136 </div>
136 </div>
137 <div class="label">
137 <div class="label">
138 <span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
138 <span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
139 </div>
139 </div>
140 % endif
140 % endif
141 </div>
141 </div>
142 </div>
142 </div>
143 % endif
143 % endif
144
144
145 % if display_globals or repo_type in ['svn']:
145 % if display_globals or repo_type in ['svn']:
146 <div class="panel panel-default">
146 <div class="panel panel-default">
147 <div class="panel-heading" id="vcs-svn-options">
147 <div class="panel-heading" id="vcs-svn-options">
148 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ΒΆ</a></h3>
148 <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ΒΆ</a></h3>
149 </div>
149 </div>
150 <div class="panel-body">
150 <div class="panel-body">
151 % if display_globals:
151 % if display_globals:
152 <div class="field">
152 <div class="field">
153 <div class="content" >
153 <div class="content" >
154 <label>${_('mod_dav config')}</label><br/>
154 <label>${_('mod_dav config')}</label><br/>
155 <code>path: ${c.svn_config_path}</code>
155 <code>path: ${c.svn_config_path}</code>
156 </div>
156 </div>
157 <br/>
157 <br/>
158
158
159 <div>
159 <div>
160
160
161 % if c.svn_generate_config:
161 % if c.svn_generate_config:
162 <span class="buttons">
162 <span class="buttons">
163 <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Re-generate Apache Config')}</button>
163 <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Re-generate Apache Config')}</button>
164 </span>
164 </span>
165 % endif
165 % endif
166 </div>
166 </div>
167 </div>
167 </div>
168 % endif
168 % endif
169
169
170 <div class="field">
170 <div class="field">
171 <div class="content" >
171 <div class="content" >
172 <label>${_('Repository patterns')}</label><br/>
172 <label>${_('Repository patterns')}</label><br/>
173 </div>
173 </div>
174 </div>
174 </div>
175 <div class="label">
175 <div class="label">
176 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". Eg.: "/branches/*"')}</span>
176 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". Eg.: "/branches/*"')}</span>
177 </div>
177 </div>
178
178
179 <div class="field branch_patterns">
179 <div class="field branch_patterns">
180 <div class="input" >
180 <div class="input" >
181 <label>${_('Branches')}:</label><br/>
181 <label>${_('Branches')}:</label><br/>
182 </div>
182 </div>
183 % if svn_branch_patterns:
183 % if svn_branch_patterns:
184 % for branch in svn_branch_patterns:
184 % for branch in svn_branch_patterns:
185 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
185 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
186 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
186 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
187 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
187 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
188 % if kwargs.get('disabled') != 'disabled':
188 % if kwargs.get('disabled') != 'disabled':
189 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
189 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
190 ${_('Delete')}
190 ${_('Delete')}
191 </span>
191 </span>
192 % endif
192 % endif
193 </div>
193 </div>
194 % endfor
194 % endfor
195 %endif
195 %endif
196 </div>
196 </div>
197 % if kwargs.get('disabled') != 'disabled':
197 % if kwargs.get('disabled') != 'disabled':
198 <div class="field branch_patterns">
198 <div class="field branch_patterns">
199 <div class="input" >
199 <div class="input" >
200 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
200 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
201 </div>
201 </div>
202 </div>
202 </div>
203 % endif
203 % endif
204 <div class="field tag_patterns">
204 <div class="field tag_patterns">
205 <div class="input" >
205 <div class="input" >
206 <label>${_('Tags')}:</label><br/>
206 <label>${_('Tags')}:</label><br/>
207 </div>
207 </div>
208 % if svn_tag_patterns:
208 % if svn_tag_patterns:
209 % for tag in svn_tag_patterns:
209 % for tag in svn_tag_patterns:
210 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
210 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
211 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
211 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
212 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
212 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
213 % if kwargs.get('disabled') != 'disabled':
213 % if kwargs.get('disabled') != 'disabled':
214 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
214 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
215 ${_('Delete')}
215 ${_('Delete')}
216 </span>
216 </span>
217 %endif
217 %endif
218 </div>
218 </div>
219 % endfor
219 % endfor
220 % endif
220 % endif
221 </div>
221 </div>
222 % if kwargs.get('disabled') != 'disabled':
222 % if kwargs.get('disabled') != 'disabled':
223 <div class="field tag_patterns">
223 <div class="field tag_patterns">
224 <div class="input" >
224 <div class="input" >
225 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
225 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
226 </div>
226 </div>
227 </div>
227 </div>
228 %endif
228 %endif
229 </div>
229 </div>
230 </div>
230 </div>
231 % else:
231 % else:
232 ${h.hidden('new_svn_branch' + suffix, '')}
232 ${h.hidden('new_svn_branch' + suffix, '')}
233 ${h.hidden('new_svn_tag' + suffix, '')}
233 ${h.hidden('new_svn_tag' + suffix, '')}
234 % endif
234 % endif
235
235
236
236
237 % if display_globals or repo_type in ['hg', 'git']:
237 % if display_globals or repo_type in ['hg', 'git']:
238 <div class="panel panel-default">
238 <div class="panel panel-default">
239 <div class="panel-heading" id="vcs-pull-requests-options">
239 <div class="panel-heading" id="vcs-pull-requests-options">
240 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
240 <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
241 </div>
241 </div>
242 <div class="panel-body">
242 <div class="panel-body">
243 <div class="checkbox">
243 <div class="checkbox">
244 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
244 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
245 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
245 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
246 </div>
246 </div>
247 <div class="label">
247 <div class="label">
248 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
248 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
249 </div>
249 </div>
250 <div class="checkbox">
250 <div class="checkbox">
251 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
251 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
252 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
252 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
253 </div>
253 </div>
254 <div class="label">
254 <div class="label">
255 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
255 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
256 </div>
256 </div>
257 </div>
257 </div>
258 </div>
258 </div>
259 % endif
259 % endif
260
260
261 % if display_globals or repo_type in ['hg', 'git', 'svn']:
261 % if display_globals or repo_type in ['hg', 'git', 'svn']:
262 <div class="panel panel-default">
262 <div class="panel panel-default">
263 <div class="panel-heading" id="vcs-pull-requests-options">
263 <div class="panel-heading" id="vcs-pull-requests-options">
264 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
264 <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-pull-requests-options"> ΒΆ</a></h3>
265 </div>
265 </div>
266 <div class="panel-body">
266 <div class="panel-body">
267 <div class="checkbox">
267 <div class="checkbox">
268 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
268 ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)}
269 <label for="rhodecode_diff_cache${suffix}">${_('Enable caching diffs for pull requests cache and commits')}</label>
269 <label for="rhodecode_diff_cache${suffix}">${_('Enable caching diffs for pull requests cache and commits')}</label>
270 </div>
270 </div>
271 </div>
271 </div>
272 </div>
272 </div>
273 % endif
273 % endif
274
274
275 % if display_globals or repo_type in ['hg',]:
275 % if display_globals or repo_type in ['hg',]:
276 <div class="panel panel-default">
276 <div class="panel panel-default">
277 <div class="panel-heading" id="vcs-pull-requests-options">
277 <div class="panel-heading" id="vcs-pull-requests-options">
278 <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ΒΆ</a></h3>
278 <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ΒΆ</a></h3>
279 </div>
279 </div>
280 <div class="panel-body">
280 <div class="panel-body">
281 ## Specific HG settings
281 ## Specific HG settings
282 <div class="checkbox">
282 <div class="checkbox">
283 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
283 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
284 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
284 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
285 </div>
285 </div>
286 <div class="label">
286 <div class="label">
287 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
287 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
288 </div>
288 </div>
289
289
290 <div class="checkbox">
290 <div class="checkbox">
291 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
291 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
292 <label for="rhodecode_hg_close_branch_before_merging${suffix}">${_('Close branch before merging it')}</label>
292 <label for="rhodecode_hg_close_branch_before_merging${suffix}">${_('Close branch before merging it')}</label>
293 </div>
293 </div>
294 <div class="label">
294 <div class="label">
295 <span class="help-block">${_('Close branch before merging it into destination branch. No effect when rebase strategy is use.')}</span>
295 <span class="help-block">${_('Close branch before merging it into destination branch. No effect when rebase strategy is use.')}</span>
296 </div>
296 </div>
297
297
298
298
299 </div>
299 </div>
300 </div>
300 </div>
301 % endif
301 % endif
302
302
303 % if display_globals or repo_type in ['git']:
303 % if display_globals or repo_type in ['git']:
304 <div class="panel panel-default">
304 <div class="panel panel-default">
305 <div class="panel-heading" id="vcs-pull-requests-options">
305 <div class="panel-heading" id="vcs-pull-requests-options">
306 <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ΒΆ</a></h3>
306 <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ΒΆ</a></h3>
307 </div>
307 </div>
308 <div class="panel-body">
308 <div class="panel-body">
309 ## <div class="checkbox">
309 ## <div class="checkbox">
310 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
310 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
311 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
311 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
312 ## </div>
312 ## </div>
313 ## <div class="label">
313 ## <div class="label">
314 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
314 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
315 ## </div>
315 ## </div>
316
316
317 <div class="checkbox">
317 <div class="checkbox">
318 ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
318 ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
319 <label for="rhodecode_git_close_branch_before_merging${suffix}">${_('Delete branch after merging it')}</label>
319 <label for="rhodecode_git_close_branch_before_merging${suffix}">${_('Delete branch after merging it')}</label>
320 </div>
320 </div>
321 <div class="label">
321 <div class="label">
322 <span class="help-block">${_('Delete branch after merging it into destination branch.')}</span>
322 <span class="help-block">${_('Delete branch after merging it into destination branch.')}</span>
323 </div>
323 </div>
324 </div>
324 </div>
325 </div>
325 </div>
326 % endif
326 % endif
327
327
328 <script type="text/javascript">
328 <script type="text/javascript">
329
329
330 $(document).ready(function() {
330 $(document).ready(function() {
331 /* On click handler for the `Generate Apache Config` button. It sends a
331 /* On click handler for the `Generate Apache Config` button. It sends a
332 POST request to trigger the (re)generation of the mod_dav_svn config. */
332 POST request to trigger the (re)generation of the mod_dav_svn config. */
333 $('#vcs_svn_generate_cfg').on('click', function(event) {
333 $('#vcs_svn_generate_cfg').on('click', function(event) {
334 event.preventDefault();
334 event.preventDefault();
335 alert('i cliked it !!')
336 var url = "${h.route_path('admin_settings_vcs_svn_generate_cfg')}";
335 var url = "${h.route_path('admin_settings_vcs_svn_generate_cfg')}";
337 var jqxhr = $.post(url, {'csrf_token': CSRF_TOKEN});
336 var jqxhr = $.post(url, {'csrf_token': CSRF_TOKEN});
338 jqxhr.done(function(data) {
337 jqxhr.done(function(data) {
339 $.Topic('/notifications').publish(data);
338 $.Topic('/notifications').publish(data);
340 });
339 });
341 });
340 });
342 });
341 });
343
342
344 </script>
343 </script>
345 </%def>
344 </%def>
346
345
@@ -1,226 +1,226 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import pytest
20 import pytest
21
21
22 from rhodecode.lib.config_utils import get_app_config
22 from rhodecode.lib.config_utils import get_app_config
23 from rhodecode.tests.fixture import TestINI
23 from rhodecode.tests.fixture import TestINI
24 from rhodecode.tests import TESTS_TMP_PATH
24 from rhodecode.tests import TESTS_TMP_PATH
25 from rhodecode.tests.server_utils import RcVCSServer
25 from rhodecode.tests.server_utils import RcVCSServer
26
26
27
27
@pytest.fixture(scope='session')
def vcsserver(request, vcsserver_port, vcsserver_factory):
    """
    Session scope VCSServer.

    Tests which need the VCSServer have to rely on this fixture in order
    to ensure it will be running.

    For specific needs, the fixture vcsserver_factory can be used. It allows to
    adjust the configuration file for the test run.

    Command line args:

    --without-vcsserver: Allows to switch this fixture off. You have to
    manually start the server.

    --vcsserver-port: Will expect the VCSServer to listen on this port.
    """

    # Honour --without-vcsserver: in that case the user runs the server
    # manually and this fixture yields nothing.
    if not request.config.getoption('with_vcsserver'):
        return None

    return vcsserver_factory(request, vcsserver_port=vcsserver_port)
52
52
53
53
@pytest.fixture(scope='session')
def vcsserver_factory(tmpdir_factory):
    """
    Use this if you need a running vcsserver with a special configuration.
    """

    def factory(request, overrides=(), vcsserver_port=None,
                log_file=None, workers='2'):
        # Pick a free port unless the caller pinned one explicitly.
        if vcsserver_port is None:
            vcsserver_port = get_available_port()

        # Always force the chosen port into the server section.
        effective_overrides = list(overrides)
        effective_overrides.append({'server:main': {'port': vcsserver_port}})

        # Materialize a temporary INI file with all overrides applied.
        config_file = get_config(
            request.config,
            option_name='vcsserver_config_http',
            override_option_name='vcsserver_config_override',
            overrides=effective_overrides,
            basetemp=tmpdir_factory.getbasetemp().strpath,
            prefix='test_vcs_')

        server = RcVCSServer(config_file, log_file, workers)
        server.start()

        # Shut the subprocess down when the requesting test (session) ends.
        @request.addfinalizer
        def cleanup():
            server.shutdown()

        server.wait_until_ready()
        return server

    return factory
88
88
89
89
90 def _use_log_level(config):
90 def _use_log_level(config):
91 level = config.getoption('test_loglevel') or 'critical'
91 level = config.getoption('test_loglevel') or 'critical'
92 return level.upper()
92 return level.upper()
93
93
94
94
@pytest.fixture(scope='session')
def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
    """Generate the RhodeCode test INI file and return its path."""
    option_name = 'pyramid_config'
    log_level = _use_log_level(request.config)

    # Settings for the [app:main] section of the generated INI.
    app_settings = {
        'cache_dir': '%(here)s/rc-tests/rc_data',
        'vcs.server': f'localhost:{vcsserver_port}',
        # johbo: We will always start the VCSServer on our own based on the
        # fixtures of the test cases. For the test run it must always be
        # off in the INI file.
        'vcs.start_server': 'false',

        'vcs.server.protocol': 'http',
        'vcs.scm_app_implementation': 'http',
        'vcs.svn.proxy.enabled': 'true',
        'vcs.hooks.protocol': 'http',
        'vcs.hooks.host': '*',
        'repo_store.path': TESTS_TMP_PATH,
        'app.service_api.token': 'service_secret_token',
    }

    overrides = [
        {'server:main': {'port': rcserver_port}},
        {'app:main': app_settings},

        {'handler_console': {
            'class': 'StreamHandler',
            'args': '(sys.stderr,)',
            'level': log_level,
        }},
    ]

    return get_config(
        request.config, option_name=option_name,
        override_option_name=f'{option_name}_override',
        overrides=overrides,
        basetemp=tmpdir_factory.getbasetemp().strpath,
        prefix='test_rce_')
134
134
135
135
@pytest.fixture(scope='session')
def ini_settings(ini_config):
    """Parsed application settings loaded from the generated test INI file."""
    return get_app_config(ini_config)
140
140
141
141
def get_available_port(min_port=40000, max_port=55555):
    """Return a free TCP port number within ``[min_port, max_port]``."""
    # NOTE: local import kept deliberately, mirroring the original —
    # presumably to defer importing rhodecode at module load; confirm.
    from rhodecode.lib.utils2 import get_available_port as _find_port
    return _find_port(min_port, max_port)
145
145
146
146
@pytest.fixture(scope='session')
def rcserver_port(request):
    """One free port for the RhodeCode test server, fixed for the session."""
    chosen = get_available_port()
    print(f'Using rhodecode port {chosen}')
    return chosen
152
152
153
153
@pytest.fixture(scope='session')
def vcsserver_port(request):
    """VCSServer port: the --vcsserver-port option if given, else a free one."""
    chosen = request.config.getoption('--vcsserver-port')
    if chosen is None:
        chosen = get_available_port()
    print(f'Using vcsserver port {chosen}')
    return chosen
161
161
162
162
@pytest.fixture(scope='session')
def available_port_factory():
    """
    Returns a callable which returns free port numbers.

    The returned callable is ``get_available_port`` itself, i.e. a
    ``(min_port, max_port) -> int`` function with sensible defaults.
    The bogus return annotation ``-> get_available_port`` (a function
    object is not a type) has been removed.
    """
    return get_available_port
169
169
170
170
@pytest.fixture()
def available_port(available_port_factory):
    """
    Gives you one free port for the current test.

    Uses "available_port_factory" to retrieve the port.
    """
    allocate = available_port_factory
    return allocate()
179
179
180
180
@pytest.fixture(scope='session')
def testini_factory(tmpdir_factory, ini_config):
    """
    Factory to create an INI file based on TestINI.

    It will make sure to place the INI file in the correct directory.
    """
    base_dir = tmpdir_factory.getbasetemp().strpath
    return TestIniFactory(base_dir, ini_config)
190
190
191
191
class TestIniFactory(object):
    """Callable factory producing INI files derived from a template INI."""

    def __init__(self, basetemp, template_ini):
        # Remember the output directory and the template path for later calls.
        self._basetemp = basetemp
        self._template_ini = template_ini

    def __call__(self, ini_params, new_file_prefix='test'):
        """Create a new INI file with `ini_params` applied; return its path."""
        generated = TestINI(
            self._template_ini,
            ini_params=ini_params,
            new_file_prefix=new_file_prefix,
            dir=self._basetemp)
        return generated.create()
204
204
205
205
def get_config(
        config, option_name, override_option_name, overrides=None,
        basetemp=None, prefix='test'):
    """
    Find a configuration file and apply overrides for the given `prefix`.
    """
    # The CLI option wins over the ini-file default.
    config_file = config.getoption(option_name) or config.getini(option_name)
    if not config_file:
        pytest.exit(
            "Configuration error, could not extract {}.".format(option_name))

    # NOTE: `or []` (not `if None`) keeps the original semantics: a truthy
    # caller-supplied list is extended in place.
    overrides = overrides or []
    cli_override = config.getoption(override_option_name)
    if cli_override:
        overrides.append(cli_override)

    temp_ini_file = TestINI(
        config_file, ini_params=overrides, new_file_prefix=prefix,
        dir=basetemp)
    return temp_ini_file.create()
@@ -1,359 +1,360 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import logging
20 import logging
21 import io
21 import io
22
22
23 import mock
23 import mock
24 import msgpack
24 import msgpack
25 import pytest
25 import pytest
26 import tempfile
26 import tempfile
27
27
28 from rhodecode.lib.hook_daemon import http_hooks_deamon
28 from rhodecode.lib.hook_daemon import http_hooks_deamon
29 from rhodecode.lib.hook_daemon import celery_hooks_deamon
29 from rhodecode.lib.hook_daemon import celery_hooks_deamon
30 from rhodecode.lib.hook_daemon import hook_module
30 from rhodecode.lib.hook_daemon import hook_module
31 from rhodecode.lib.hook_daemon import base as hook_base
31 from rhodecode.lib.hook_daemon import base as hook_base
32 from rhodecode.lib.str_utils import safe_bytes
32 from rhodecode.lib.str_utils import safe_bytes
33 from rhodecode.tests.utils import assert_message_in_log
33 from rhodecode.tests.utils import assert_message_in_log
34 from rhodecode.lib.ext_json import json
34 from rhodecode.lib.ext_json import json
35
35
36 test_proto = http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO
36 test_proto = http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO
37
37
38
38
class TestHooks(object):
    """Sanity checks for the Hooks context-manager protocol."""

    def test_hooks_can_be_used_as_a_context_processor(self):
        # Entering the context must yield the Hooks instance itself.
        hooks = hook_module.Hooks()
        with hooks as yielded:
            pass
        assert hooks == yielded
45
45
46
46
47 class TestHooksHttpHandler(object):
47 class TestHooksHttpHandler(object):
48 def test_read_request_parses_method_name_and_arguments(self):
48 def test_read_request_parses_method_name_and_arguments(self):
49 data = {
49 data = {
50 'method': 'test',
50 'method': 'test',
51 'extras': {
51 'extras': {
52 'param1': 1,
52 'param1': 1,
53 'param2': 'a'
53 'param2': 'a'
54 }
54 }
55 }
55 }
56 request = self._generate_post_request(data)
56 request = self._generate_post_request(data)
57 hooks_patcher = mock.patch.object(
57 hooks_patcher = mock.patch.object(
58 hook_module.Hooks, data['method'], create=True, return_value=1)
58 hook_module.Hooks, data['method'], create=True, return_value=1)
59
59
60 with hooks_patcher as hooks_mock:
60 with hooks_patcher as hooks_mock:
61 handler = http_hooks_deamon.HooksHttpHandler
61 handler = http_hooks_deamon.HooksHttpHandler
62 handler.DEFAULT_HOOKS_PROTO = test_proto
62 handler.DEFAULT_HOOKS_PROTO = test_proto
63 handler.wbufsize = 10240
63 handler.wbufsize = 10240
64 MockServer(handler, request)
64 MockServer(handler, request)
65
65
66 hooks_mock.assert_called_once_with(data['extras'])
66 hooks_mock.assert_called_once_with(data['extras'])
67
67
68 def test_hooks_serialized_result_is_returned(self):
68 def test_hooks_serialized_result_is_returned(self):
69 request = self._generate_post_request({})
69 request = self._generate_post_request({})
70 rpc_method = 'test'
70 rpc_method = 'test'
71 hook_result = {
71 hook_result = {
72 'first': 'one',
72 'first': 'one',
73 'second': 2
73 'second': 2
74 }
74 }
75 extras = {}
75 extras = {}
76
76
77 # patching our _read to return test method and proto used
77 # patching our _read to return test method and proto used
78 read_patcher = mock.patch.object(
78 read_patcher = mock.patch.object(
79 http_hooks_deamon.HooksHttpHandler, '_read_request',
79 http_hooks_deamon.HooksHttpHandler, '_read_request',
80 return_value=(test_proto, rpc_method, extras))
80 return_value=(test_proto, rpc_method, extras))
81
81
82 # patch Hooks instance to return hook_result data on 'test' call
82 # patch Hooks instance to return hook_result data on 'test' call
83 hooks_patcher = mock.patch.object(
83 hooks_patcher = mock.patch.object(
84 hook_module.Hooks, rpc_method, create=True,
84 hook_module.Hooks, rpc_method, create=True,
85 return_value=hook_result)
85 return_value=hook_result)
86
86
87 with read_patcher, hooks_patcher:
87 with read_patcher, hooks_patcher:
88 handler = http_hooks_deamon.HooksHttpHandler
88 handler = http_hooks_deamon.HooksHttpHandler
89 handler.DEFAULT_HOOKS_PROTO = test_proto
89 handler.DEFAULT_HOOKS_PROTO = test_proto
90 handler.wbufsize = 10240
90 handler.wbufsize = 10240
91 server = MockServer(handler, request)
91 server = MockServer(handler, request)
92
92
93 expected_result = http_hooks_deamon.HooksHttpHandler.serialize_data(hook_result)
93 expected_result = http_hooks_deamon.HooksHttpHandler.serialize_data(hook_result)
94
94
95 server.request.output_stream.seek(0)
95 server.request.output_stream.seek(0)
96 assert server.request.output_stream.readlines()[-1] == expected_result
96 assert server.request.output_stream.readlines()[-1] == expected_result
97
97
98 def test_exception_is_returned_in_response(self):
98 def test_exception_is_returned_in_response(self):
99 request = self._generate_post_request({})
99 request = self._generate_post_request({})
100 rpc_method = 'test'
100 rpc_method = 'test'
101
101
102 read_patcher = mock.patch.object(
102 read_patcher = mock.patch.object(
103 http_hooks_deamon.HooksHttpHandler, '_read_request',
103 http_hooks_deamon.HooksHttpHandler, '_read_request',
104 return_value=(test_proto, rpc_method, {}))
104 return_value=(test_proto, rpc_method, {}))
105
105
106 hooks_patcher = mock.patch.object(
106 hooks_patcher = mock.patch.object(
107 hook_module.Hooks, rpc_method, create=True,
107 hook_module.Hooks, rpc_method, create=True,
108 side_effect=Exception('Test exception'))
108 side_effect=Exception('Test exception'))
109
109
110 with read_patcher, hooks_patcher:
110 with read_patcher, hooks_patcher:
111 handler = http_hooks_deamon.HooksHttpHandler
111 handler = http_hooks_deamon.HooksHttpHandler
112 handler.DEFAULT_HOOKS_PROTO = test_proto
112 handler.DEFAULT_HOOKS_PROTO = test_proto
113 handler.wbufsize = 10240
113 handler.wbufsize = 10240
114 server = MockServer(handler, request)
114 server = MockServer(handler, request)
115
115
116 server.request.output_stream.seek(0)
116 server.request.output_stream.seek(0)
117 data = server.request.output_stream.readlines()
117 data = server.request.output_stream.readlines()
118 msgpack_data = b''.join(data[5:])
118 msgpack_data = b''.join(data[5:])
119 org_exc = http_hooks_deamon.HooksHttpHandler.deserialize_data(msgpack_data)
119 org_exc = http_hooks_deamon.HooksHttpHandler.deserialize_data(msgpack_data)
120 expected_result = {
120 expected_result = {
121 'exception': 'Exception',
121 'exception': 'Exception',
122 'exception_traceback': org_exc['exception_traceback'],
122 'exception_traceback': org_exc['exception_traceback'],
123 'exception_args': ['Test exception']
123 'exception_args': ['Test exception']
124 }
124 }
125 assert org_exc == expected_result
125 assert org_exc == expected_result
126
126
127 def test_log_message_writes_to_debug_log(self, caplog):
127 def test_log_message_writes_to_debug_log(self, caplog):
128 ip_port = ('0.0.0.0', 8888)
128 ip_port = ('0.0.0.0', 8888)
129 handler = http_hooks_deamon.HooksHttpHandler(MockRequest('POST /'), ip_port, mock.Mock())
129 handler = http_hooks_deamon.HooksHttpHandler(MockRequest('POST /'), ip_port, mock.Mock())
130 fake_date = '1/Nov/2015 00:00:00'
130 fake_date = '1/Nov/2015 00:00:00'
131 date_patcher = mock.patch.object(
131 date_patcher = mock.patch.object(
132 handler, 'log_date_time_string', return_value=fake_date)
132 handler, 'log_date_time_string', return_value=fake_date)
133
133
134 with date_patcher, caplog.at_level(logging.DEBUG):
134 with date_patcher, caplog.at_level(logging.DEBUG):
135 handler.log_message('Some message %d, %s', 123, 'string')
135 handler.log_message('Some message %d, %s', 123, 'string')
136
136
137 expected_message = f"HOOKS: client={ip_port} - - [{fake_date}] Some message 123, string"
137 expected_message = f"HOOKS: client={ip_port} - - [{fake_date}] Some message 123, string"
138
138
139 assert_message_in_log(
139 assert_message_in_log(
140 caplog.records, expected_message,
140 caplog.records, expected_message,
141 levelno=logging.DEBUG, module='http_hooks_deamon')
141 levelno=logging.DEBUG, module='http_hooks_deamon')
142
142
143 def _generate_post_request(self, data, proto=test_proto):
143 def _generate_post_request(self, data, proto=test_proto):
144 if proto == http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO:
144 if proto == http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO:
145 payload = msgpack.packb(data)
145 payload = msgpack.packb(data)
146 else:
146 else:
147 payload = json.dumps(data)
147 payload = json.dumps(data)
148
148
149 return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % (
149 return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % (
150 len(payload), payload)
150 len(payload), payload)
151
151
152
152
153 class ThreadedHookCallbackDaemon(object):
153 class ThreadedHookCallbackDaemon(object):
154 def test_constructor_calls_prepare(self):
154 def test_constructor_calls_prepare(self):
155 prepare_daemon_patcher = mock.patch.object(
155 prepare_daemon_patcher = mock.patch.object(
156 http_hooks_deamon.ThreadedHookCallbackDaemon, '_prepare')
156 http_hooks_deamon.ThreadedHookCallbackDaemon, '_prepare')
157 with prepare_daemon_patcher as prepare_daemon_mock:
157 with prepare_daemon_patcher as prepare_daemon_mock:
158 http_hooks_deamon.ThreadedHookCallbackDaemon()
158 http_hooks_deamon.ThreadedHookCallbackDaemon()
159 prepare_daemon_mock.assert_called_once_with()
159 prepare_daemon_mock.assert_called_once_with()
160
160
161 def test_run_is_called_on_context_start(self):
161 def test_run_is_called_on_context_start(self):
162 patchers = mock.patch.multiple(
162 patchers = mock.patch.multiple(
163 http_hooks_deamon.ThreadedHookCallbackDaemon,
163 http_hooks_deamon.ThreadedHookCallbackDaemon,
164 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
164 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
165
165
166 with patchers as mocks:
166 with patchers as mocks:
167 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
167 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
168 with daemon as daemon_context:
168 with daemon as daemon_context:
169 pass
169 pass
170 mocks['_run'].assert_called_once_with()
170 mocks['_run'].assert_called_once_with()
171 assert daemon_context == daemon
171 assert daemon_context == daemon
172
172
173 def test_stop_is_called_on_context_exit(self):
173 def test_stop_is_called_on_context_exit(self):
174 patchers = mock.patch.multiple(
174 patchers = mock.patch.multiple(
175 http_hooks_deamon.ThreadedHookCallbackDaemon,
175 http_hooks_deamon.ThreadedHookCallbackDaemon,
176 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
176 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
177
177
178 with patchers as mocks:
178 with patchers as mocks:
179 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
179 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
180 with daemon as daemon_context:
180 with daemon as daemon_context:
181 assert mocks['_stop'].call_count == 0
181 assert mocks['_stop'].call_count == 0
182
182
183 mocks['_stop'].assert_called_once_with()
183 mocks['_stop'].assert_called_once_with()
184 assert daemon_context == daemon
184 assert daemon_context == daemon
185
185
186
186
187 class TestHttpHooksCallbackDaemon(object):
187 class TestHttpHooksCallbackDaemon(object):
188 def test_hooks_callback_generates_new_port(self, caplog):
188 def test_hooks_callback_generates_new_port(self, caplog):
189 with caplog.at_level(logging.DEBUG):
189 with caplog.at_level(logging.DEBUG):
190 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
190 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
191 assert daemon._daemon.server_address == ('127.0.0.1', 8881)
191 assert daemon._daemon.server_address == ('127.0.0.1', 8881)
192
192
193 with caplog.at_level(logging.DEBUG):
193 with caplog.at_level(logging.DEBUG):
194 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host=None, port=None)
194 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host=None, port=None)
195 assert daemon._daemon.server_address[1] in range(0, 66000)
195 assert daemon._daemon.server_address[1] in range(0, 66000)
196 assert daemon._daemon.server_address[0] != '127.0.0.1'
196 assert daemon._daemon.server_address[0] != '127.0.0.1'
197
197
198 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
198 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
199 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
199 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
200 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
200 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
201 assert daemon._daemon == tcp_server
201 assert daemon._daemon == tcp_server
202
202
203 _, port = tcp_server.server_address
203 _, port = tcp_server.server_address
204
204
205 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
205 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
206 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
206 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
207 assert_message_in_log(
207 assert_message_in_log(
208 caplog.records, msg, levelno=logging.DEBUG, module='http_hooks_deamon')
208 caplog.records, msg, levelno=logging.DEBUG, module='http_hooks_deamon')
209
209
210 def test_prepare_inits_hooks_uri_and_logs_it(
210 def test_prepare_inits_hooks_uri_and_logs_it(
211 self, tcp_server, caplog):
211 self, tcp_server, caplog):
212 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
212 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
213 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
213 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
214
214
215 _, port = tcp_server.server_address
215 _, port = tcp_server.server_address
216 expected_uri = '{}:{}'.format('127.0.0.1', port)
216 expected_uri = '{}:{}'.format('127.0.0.1', port)
217 assert daemon.hooks_uri == expected_uri
217 assert daemon.hooks_uri == expected_uri
218
218
219 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
219 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
220 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
220 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
221
221
222 assert_message_in_log(
222 assert_message_in_log(
223 caplog.records, msg,
223 caplog.records, msg,
224 levelno=logging.DEBUG, module='http_hooks_deamon')
224 levelno=logging.DEBUG, module='http_hooks_deamon')
225
225
226 def test_run_creates_a_thread(self, tcp_server):
226 def test_run_creates_a_thread(self, tcp_server):
227 thread = mock.Mock()
227 thread = mock.Mock()
228
228
229 with self._tcp_patcher(tcp_server):
229 with self._tcp_patcher(tcp_server):
230 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
230 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
231
231
232 with self._thread_patcher(thread) as thread_mock:
232 with self._thread_patcher(thread) as thread_mock:
233 daemon._run()
233 daemon._run()
234
234
235 thread_mock.assert_called_once_with(
235 thread_mock.assert_called_once_with(
236 target=tcp_server.serve_forever,
236 target=tcp_server.serve_forever,
237 kwargs={'poll_interval': daemon.POLL_INTERVAL})
237 kwargs={'poll_interval': daemon.POLL_INTERVAL})
238 assert thread.daemon is True
238 assert thread.daemon is True
239 thread.start.assert_called_once_with()
239 thread.start.assert_called_once_with()
240
240
241 def test_run_logs(self, tcp_server, caplog):
241 def test_run_logs(self, tcp_server, caplog):
242
242
243 with self._tcp_patcher(tcp_server):
243 with self._tcp_patcher(tcp_server):
244 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
244 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
245
245
246 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
246 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
247 daemon._run()
247 daemon._run()
248
248
249 assert_message_in_log(
249 assert_message_in_log(
250 caplog.records,
250 caplog.records,
251 'Running thread-based loop of callback daemon in background',
251 'Running thread-based loop of callback daemon in background',
252 levelno=logging.DEBUG, module='http_hooks_deamon')
252 levelno=logging.DEBUG, module='http_hooks_deamon')
253
253
254 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
254 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
255 thread = mock.Mock()
255 thread = mock.Mock()
256
256
257 with self._tcp_patcher(tcp_server):
257 with self._tcp_patcher(tcp_server):
258 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
258 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
259
259
260 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
260 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
261 with daemon:
261 with daemon:
262 assert daemon._daemon == tcp_server
262 assert daemon._daemon == tcp_server
263 assert daemon._callback_thread == thread
263 assert daemon._callback_thread == thread
264
264
265 assert daemon._daemon is None
265 assert daemon._daemon is None
266 assert daemon._callback_thread is None
266 assert daemon._callback_thread is None
267 tcp_server.shutdown.assert_called_with()
267 tcp_server.shutdown.assert_called_with()
268 thread.join.assert_called_once_with()
268 thread.join.assert_called_once_with()
269
269
270 assert_message_in_log(
270 assert_message_in_log(
271 caplog.records, 'Waiting for background thread to finish.',
271 caplog.records, 'Waiting for background thread to finish.',
272 levelno=logging.DEBUG, module='http_hooks_deamon')
272 levelno=logging.DEBUG, module='http_hooks_deamon')
273
273
274 def _tcp_patcher(self, tcp_server):
274 def _tcp_patcher(self, tcp_server):
275 return mock.patch.object(
275 return mock.patch.object(
276 http_hooks_deamon, 'TCPServer', return_value=tcp_server)
276 http_hooks_deamon, 'TCPServer', return_value=tcp_server)
277
277
278 def _thread_patcher(self, thread):
278 def _thread_patcher(self, thread):
279 return mock.patch.object(
279 return mock.patch.object(
280 http_hooks_deamon.threading, 'Thread', return_value=thread)
280 http_hooks_deamon.threading, 'Thread', return_value=thread)
281
281
282
282
283 class TestPrepareHooksDaemon(object):
283 class TestPrepareHooksDaemon(object):
284
284
285 @pytest.mark.parametrize('protocol', ('celery',))
285 @pytest.mark.parametrize('protocol', ('celery',))
286 def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(
286 def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(
287 self, protocol):
287 self, protocol):
288 with tempfile.NamedTemporaryFile(mode='w') as temp_file:
288 with tempfile.NamedTemporaryFile(mode='w') as temp_file:
289 temp_file.write("[app:main]\ncelery.broker_url = redis://redis/0\n"
289 temp_file.write("[app:main]\ncelery.broker_url = redis://redis/0\n"
290 "celery.result_backend = redis://redis/0")
290 "celery.result_backend = redis://redis/0")
291 temp_file.flush()
291 temp_file.flush()
292 expected_extras = {'config': temp_file.name}
292 expected_extras = {'config': temp_file.name}
293 callback, extras = hook_base.prepare_callback_daemon(
293 callback, extras = hook_base.prepare_callback_daemon(
294 expected_extras, protocol=protocol, host='')
294 expected_extras, protocol=protocol, host='')
295 assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon)
295 assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon)
296
296
297 @pytest.mark.parametrize('protocol, expected_class', (
297 @pytest.mark.parametrize('protocol, expected_class', (
298 ('http', http_hooks_deamon.HttpHooksCallbackDaemon),
298 ('http', http_hooks_deamon.HttpHooksCallbackDaemon),
299 ))
299 ))
300 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
300 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
301 self, protocol, expected_class):
301 self, protocol, expected_class):
302 expected_extras = {
302 expected_extras = {
303 'extra1': 'value1',
303 'extra1': 'value1',
304 'txn_id': 'txnid2',
304 'txn_id': 'txnid2',
305 'hooks_protocol': protocol.lower(),
305 'hooks_protocol': protocol.lower(),
306 'task_backend': '',
306 'task_backend': '',
307 'task_queue': ''
307 'task_queue': '',
308 'repo_store': '/var/opt/rhodecode_repo_store'
308 }
309 }
309 callback, extras = hook_base.prepare_callback_daemon(
310 callback, extras = hook_base.prepare_callback_daemon(
310 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
311 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
311 txn_id='txnid2')
312 txn_id='txnid2')
312 assert isinstance(callback, expected_class)
313 assert isinstance(callback, expected_class)
313 extras.pop('hooks_uri')
314 extras.pop('hooks_uri')
314 expected_extras['time'] = extras['time']
315 expected_extras['time'] = extras['time']
315 assert extras == expected_extras
316 assert extras == expected_extras
316
317
317 @pytest.mark.parametrize('protocol', (
318 @pytest.mark.parametrize('protocol', (
318 'invalid',
319 'invalid',
319 'Http',
320 'Http',
320 'HTTP',
321 'HTTP',
321 ))
322 ))
322 def test_raises_on_invalid_protocol(self, protocol):
323 def test_raises_on_invalid_protocol(self, protocol):
323 expected_extras = {
324 expected_extras = {
324 'extra1': 'value1',
325 'extra1': 'value1',
325 'hooks_protocol': protocol.lower()
326 'hooks_protocol': protocol.lower()
326 }
327 }
327 with pytest.raises(Exception):
328 with pytest.raises(Exception):
328 callback, extras = hook_base.prepare_callback_daemon(
329 callback, extras = hook_base.prepare_callback_daemon(
329 expected_extras.copy(),
330 expected_extras.copy(),
330 protocol=protocol, host='127.0.0.1')
331 protocol=protocol, host='127.0.0.1')
331
332
332
333
333 class MockRequest(object):
334 class MockRequest(object):
334
335
335 def __init__(self, request):
336 def __init__(self, request):
336 self.request = request
337 self.request = request
337 self.input_stream = io.BytesIO(safe_bytes(self.request))
338 self.input_stream = io.BytesIO(safe_bytes(self.request))
338 self.output_stream = io.BytesIO() # make it un-closable for testing invesitagion
339 self.output_stream = io.BytesIO() # make it un-closable for testing invesitagion
339 self.output_stream.close = lambda: None
340 self.output_stream.close = lambda: None
340
341
341 def makefile(self, mode, *args, **kwargs):
342 def makefile(self, mode, *args, **kwargs):
342 return self.output_stream if mode == 'wb' else self.input_stream
343 return self.output_stream if mode == 'wb' else self.input_stream
343
344
344
345
345 class MockServer(object):
346 class MockServer(object):
346
347
347 def __init__(self, handler_cls, request):
348 def __init__(self, handler_cls, request):
348 ip_port = ('0.0.0.0', 8888)
349 ip_port = ('0.0.0.0', 8888)
349 self.request = MockRequest(request)
350 self.request = MockRequest(request)
350 self.server_address = ip_port
351 self.server_address = ip_port
351 self.handler = handler_cls(self.request, ip_port, self)
352 self.handler = handler_cls(self.request, ip_port, self)
352
353
353
354
354 @pytest.fixture()
355 @pytest.fixture()
355 def tcp_server():
356 def tcp_server():
356 server = mock.Mock()
357 server = mock.Mock()
357 server.server_address = ('127.0.0.1', 8881)
358 server.server_address = ('127.0.0.1', 8881)
358 server.wbufsize = 1024
359 server.wbufsize = 1024
359 return server
360 return server
@@ -1,221 +1,229 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20
20
21 import os
21 import os
22 import time
22 import time
23 import tempfile
23 import tempfile
24 import pytest
24 import pytest
25 import subprocess
25 import subprocess
26 import logging
26 import logging
27 from urllib.request import urlopen
27 from urllib.request import urlopen
28 from urllib.error import URLError
28 from urllib.error import URLError
29 import configparser
29 import configparser
30
30
31
31
32 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
32 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
33 from rhodecode.tests.utils import is_url_reachable
33 from rhodecode.tests.utils import is_url_reachable
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37
37
38 def get_port(pyramid_config):
38 def get_port(pyramid_config):
39 config = configparser.ConfigParser()
39 config = configparser.ConfigParser()
40 config.read(pyramid_config)
40 config.read(pyramid_config)
41 return config.get('server:main', 'port')
41 return config.get('server:main', 'port')
42
42
43
43
44 def get_host_url(pyramid_config):
44 def get_host_url(pyramid_config):
45 """Construct the host url using the port in the test configuration."""
45 """Construct the host url using the port in the test configuration."""
46 port = get_port(pyramid_config)
46 port = get_port(pyramid_config)
47 return f'127.0.0.1:{port}'
47 return f'127.0.0.1:{port}'
48
48
49
49
50 def assert_no_running_instance(url):
50 def assert_no_running_instance(url):
51 if is_url_reachable(url):
51 if is_url_reachable(url):
52 print(f"Hint: Usually this means another instance of server "
52 print(f"Hint: Usually this means another instance of server "
53 f"is running in the background at {url}.")
53 f"is running in the background at {url}.")
54 pytest.fail(f"Port is not free at {url}, cannot start server at")
54 pytest.fail(f"Port is not free at {url}, cannot start server at")
55
55
56
56
57 class ServerBase(object):
57 class ServerBase(object):
58 _args = []
58 _args = []
59 log_file_name = 'NOT_DEFINED.log'
59 log_file_name = 'NOT_DEFINED.log'
60 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
60 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
61
61
62 def __init__(self, config_file, log_file):
62 def __init__(self, config_file, log_file):
63 self.config_file = config_file
63 self.config_file = config_file
64 config = configparser.ConfigParser()
64 config = configparser.ConfigParser()
65 config.read(config_file)
65 config.read(config_file)
66
66
67 self._config = {k: v for k, v in config['server:main'].items()}
67 self._config = {k: v for k, v in config['server:main'].items()}
68
68
69 self._args = []
69 self._args = []
70 self.log_file = log_file or os.path.join(
70 self.log_file = log_file or os.path.join(
71 tempfile.gettempdir(), self.log_file_name)
71 tempfile.gettempdir(), self.log_file_name)
72 self.process = None
72 self.process = None
73 self.server_out = None
73 self.server_out = None
74 log.info("Using the {} configuration:{}".format(
74 log.info("Using the {} configuration:{}".format(
75 self.__class__.__name__, config_file))
75 self.__class__.__name__, config_file))
76
76
77 if not os.path.isfile(config_file):
77 if not os.path.isfile(config_file):
78 raise RuntimeError(f'Failed to get config at {config_file}')
78 raise RuntimeError(f'Failed to get config at {config_file}')
79
79
80 @property
80 @property
81 def command(self):
81 def command(self):
82 return ' '.join(self._args)
82 return ' '.join(self._args)
83
83
84 @property
84 @property
85 def bind_addr(self):
85 def bind_addr(self):
86 return '{host}:{port}'.format(**self._config)
86 return '{host}:{port}'.format(**self._config)
87
87
88 @property
88 @property
89 def http_url(self):
89 def http_url(self):
90 template = 'http://{host}:{port}/'
90 template = 'http://{host}:{port}/'
91 return template.format(**self._config)
91 return template.format(**self._config)
92
92
93 def host_url(self):
93 def host_url(self):
94 host = get_host_url(self.config_file)
94 host = get_host_url(self.config_file)
95 return f'http://{host}'
95 return f'http://{host}'
96
96
97 def get_rc_log(self):
97 def get_rc_log(self):
98 with open(self.log_file) as f:
98 with open(self.log_file) as f:
99 return f.read()
99 return f.read()
100
100
101 def assert_message_in_server_logs(self, message):
101 def assert_message_in_server_logs(self, message):
102 server_logs = self.get_rc_log()
102 server_logs = self.get_rc_log()
103 assert message in server_logs
103 assert message in server_logs
104
104
105 def wait_until_ready(self, timeout=30):
105 def wait_until_ready(self, timeout=30):
106 host = self._config['host']
106 host = self._config['host']
107 port = self._config['port']
107 port = self._config['port']
108 status_url = self.status_url_tmpl.format(host=host, port=port)
108 status_url = self.status_url_tmpl.format(host=host, port=port)
109 start = time.time()
109 start = time.time()
110
110
111 while time.time() - start < timeout:
111 while time.time() - start < timeout:
112 try:
112 try:
113 urlopen(status_url)
113 urlopen(status_url)
114 break
114 break
115 except URLError:
115 except URLError:
116 time.sleep(0.2)
116 time.sleep(0.2)
117 else:
117 else:
118 pytest.fail(
118 pytest.fail(
119 "Starting the {} failed or took more than {} "
119 "Starting the {} failed or took more than {} "
120 "seconds. cmd: `{}`".format(
120 "seconds. cmd: `{}`".format(
121 self.__class__.__name__, timeout, self.command))
121 self.__class__.__name__, timeout, self.command))
122
122
123 log.info('Server of {} ready at url {}'.format(
123 log.info('Server of {} ready at url {}'.format(
124 self.__class__.__name__, status_url))
124 self.__class__.__name__, status_url))
125
125
126 def shutdown(self):
126 def shutdown(self):
127 self.process.kill()
127 self.process.kill()
128 self.server_out.flush()
128 self.server_out.flush()
129 self.server_out.close()
129 self.server_out.close()
130
130
131 def get_log_file_with_port(self):
131 def get_log_file_with_port(self):
132 log_file = list(self.log_file.partition('.log'))
132 log_file = list(self.log_file.partition('.log'))
133 log_file.insert(1, get_port(self.config_file))
133 log_file.insert(1, get_port(self.config_file))
134 log_file = ''.join(log_file)
134 log_file = ''.join(log_file)
135 return log_file
135 return log_file
136
136
137
137
138 class RcVCSServer(ServerBase):
138 class RcVCSServer(ServerBase):
139 """
139 """
140 Represents a running VCSServer instance.
140 Represents a running VCSServer instance.
141 """
141 """
142
142
143 log_file_name = 'rc-vcsserver.log'
143 log_file_name = 'rc-vcsserver.log'
144 status_url_tmpl = 'http://{host}:{port}/status'
144 status_url_tmpl = 'http://{host}:{port}/status'
145
145
146 def __init__(self, config_file, log_file=None, workers='2'):
146 def __init__(self, config_file, log_file=None, workers='2'):
147 super(RcVCSServer, self).__init__(config_file, log_file)
147 super(RcVCSServer, self).__init__(config_file, log_file)
148 self._args = [
148 self._args = [
149 'gunicorn',
149 'gunicorn',
150 '--bind', self.bind_addr,
150 '--bind', self.bind_addr,
151 '--worker-class', 'gevent',
151 '--worker-class', 'gthread',
152 '--backlog', '16',
152 '--backlog', '16',
153 '--timeout', '300',
153 '--timeout', '300',
154 '--workers', workers,
154 '--workers', workers,
155 '--paste', self.config_file]
155 '--paste', self.config_file]
156
156
157 def start(self):
157 def start(self):
158 env = os.environ.copy()
158 env = os.environ.copy()
159
159
160 self.log_file = self.get_log_file_with_port()
160 self.log_file = self.get_log_file_with_port()
161 self.server_out = open(self.log_file, 'w')
161 self.server_out = open(self.log_file, 'w')
162
162
163 host_url = self.host_url()
163 host_url = self.host_url()
164 assert_no_running_instance(host_url)
164 assert_no_running_instance(host_url)
165
165
166 print(f'rhodecode-vcsserver starting at: {host_url}')
166 print(f'rhodecode-vcsserver starting at: {host_url}')
167 print(f'rhodecode-vcsserver command: {self.command}')
167 print(f'rhodecode-vcsserver command: {self.command}')
168 print(f'rhodecode-vcsserver logfile: {self.log_file}')
168 print(f'rhodecode-vcsserver logfile: {self.log_file}')
169
169
170 self.process = subprocess.Popen(
170 self.process = subprocess.Popen(
171 self._args, bufsize=0, env=env,
171 self._args, bufsize=0, env=env,
172 stdout=self.server_out, stderr=self.server_out)
172 stdout=self.server_out, stderr=self.server_out)
173
173
174
174
175 class RcWebServer(ServerBase):
175 class RcWebServer(ServerBase):
176 """
176 """
177 Represents a running RCE web server used as a test fixture.
177 Represents a running RCE web server used as a test fixture.
178 """
178 """
179
179
180 log_file_name = 'rc-web.log'
180 log_file_name = 'rc-web.log'
181 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
181 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
182
182
183 def __init__(self, config_file, log_file=None, workers='1'):
183 def __init__(self, config_file, log_file=None, workers='1'):
184 super(RcWebServer, self).__init__(config_file, log_file)
184 super(RcWebServer, self).__init__(config_file, log_file)
185 self._args = [
185 self._args = [
186 'gunicorn',
186 'gunicorn',
187 '--bind', self.bind_addr,
187 '--bind', self.bind_addr,
188 '--worker-class', 'gevent',
188 '--worker-class', 'gthread',
189 '--backlog', '16',
189 '--backlog', '16',
190 '--timeout', '300',
190 '--timeout', '300',
191 '--workers', workers,
191 '--workers', workers,
192 '--paste', self.config_file]
192 '--paste', self.config_file]
193
193
194 def start(self):
194 def start(self):
195 env = os.environ.copy()
195 env = os.environ.copy()
196 env['RC_NO_TMP_PATH'] = '1'
196 env['RC_NO_TMP_PATH'] = '1'
197
197
198 self.log_file = self.get_log_file_with_port()
198 self.log_file = self.get_log_file_with_port()
199 self.server_out = open(self.log_file, 'w')
199 self.server_out = open(self.log_file, 'w')
200
200
201 host_url = self.host_url()
201 host_url = self.host_url()
202 assert_no_running_instance(host_url)
202 assert_no_running_instance(host_url)
203
203
204 print(f'rhodecode-web starting at: {host_url}')
204 print(f'rhodecode-web starting at: {host_url}')
205 print(f'rhodecode-web command: {self.command}')
205 print(f'rhodecode-web command: {self.command}')
206 print(f'rhodecode-web logfile: {self.log_file}')
206 print(f'rhodecode-web logfile: {self.log_file}')
207
207
208 self.process = subprocess.Popen(
208 self.process = subprocess.Popen(
209 self._args, bufsize=0, env=env,
209 self._args, bufsize=0, env=env,
210 stdout=self.server_out, stderr=self.server_out)
210 stdout=self.server_out, stderr=self.server_out)
211
211
212 def repo_clone_url(self, repo_name, **kwargs):
212 def repo_clone_url(self, repo_name, **kwargs):
213 params = {
213 params = {
214 'user': TEST_USER_ADMIN_LOGIN,
214 'user': TEST_USER_ADMIN_LOGIN,
215 'passwd': TEST_USER_ADMIN_PASS,
215 'passwd': TEST_USER_ADMIN_PASS,
216 'host': get_host_url(self.config_file),
216 'host': get_host_url(self.config_file),
217 'cloned_repo': repo_name,
217 'cloned_repo': repo_name,
218 }
218 }
219 params.update(**kwargs)
219 params.update(**kwargs)
220 _url = f"http://{params['user']}:{params['passwd']}@{params['host']}/{params['cloned_repo']}"
220 _url = f"http://{params['user']}:{params['passwd']}@{params['host']}/{params['cloned_repo']}"
221 return _url
221 return _url
222
223 def repo_clone_credentials(self, **kwargs):
224 params = {
225 'user': TEST_USER_ADMIN_LOGIN,
226 'passwd': TEST_USER_ADMIN_PASS,
227 }
228 params.update(**kwargs)
229 return params['user'], params['passwd']
@@ -1,195 +1,214 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 Base for test suite for making push/pull operations.
21 Base for test suite for making push/pull operations.
22
22
23 .. important::
23 .. important::
24
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
26 to redirect things to stderr instead of stdout.
27 """
27 """
28
28
29 from os.path import join as jn
29
30 from subprocess import Popen, PIPE
31 import logging
30 import logging
32 import os
31 import os
33 import tempfile
32 import tempfile
33 import subprocess
34
34
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.tests import GIT_REPO, HG_REPO
36 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
37
37
38 DEBUG = True
38 DEBUG = True
39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 REPO_GROUP = 'a_repo_group'
40 REPO_GROUP = 'a_repo_group'
41 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
41 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
42 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
42 GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}'
43 SVN_REPO_WITH_GROUP = f'{REPO_GROUP}/{SVN_REPO}'
43
44
44 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
45
46
46
47
47 class Command(object):
48 class Command(object):
48
49
49 def __init__(self, cwd):
50 def __init__(self, cwd):
50 self.cwd = cwd
51 self.cwd = cwd
51 self.process = None
52 self.process = None
52
53
53 def execute(self, cmd, *args):
54 def execute(self, cmd, *args):
54 """
55 """
55 Runs command on the system with given ``args``.
56 Runs command on the system with given ``args``.
56 """
57 """
57
58
58 command = cmd + ' ' + ' '.join(args)
59 command = cmd + ' ' + ' '.join(args)
59 if DEBUG:
60 if DEBUG:
60 log.debug('*** CMD %s ***', command)
61 log.debug('*** CMD %s ***', command)
61
62
62 env = dict(os.environ)
63 env = dict(os.environ)
63 # Delete coverage variables, as they make the test fail for Mercurial
64 # Delete coverage variables, as they make the test fail for Mercurial
64 for key in env.keys():
65 for key in env.keys():
65 if key.startswith('COV_CORE_'):
66 if key.startswith('COV_CORE_'):
66 del env[key]
67 del env[key]
67
68
68 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
69 self.process = subprocess.Popen(
70 command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
69 cwd=self.cwd, env=env)
71 cwd=self.cwd, env=env)
70 stdout, stderr = self.process.communicate()
72 stdout, stderr = self.process.communicate()
71
73
72 stdout = safe_str(stdout)
74 stdout = safe_str(stdout)
73 stderr = safe_str(stderr)
75 stderr = safe_str(stderr)
74
76
75 if DEBUG:
77 if DEBUG:
76 log.debug('STDOUT:%s', stdout)
78 log.debug('STDOUT:%s', stdout)
77 log.debug('STDERR:%s', stderr)
79 log.debug('STDERR:%s', stderr)
78 return stdout, stderr
80 return stdout, stderr
79
81
80 def assert_returncode_success(self):
82 def assert_returncode_success(self):
81 assert self.process.returncode == 0
83 assert self.process.returncode == 0
82
84
83
85
84 def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
86 def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
85 full_name = 'Marcin KuΕΊminski'
87 full_name = 'Marcin KuΕΊminski'
86 email = 'me@email.com'
88 email = 'me@email.com'
87 git_ident = f"git config user.name {full_name} && git config user.email {email}"
89 git_ident = f"git config user.name {full_name} && git config user.email {email}"
88 cwd = path = jn(dest)
90 cwd = path = os.path.join(dest)
89
91
90 tags = tags or []
92 tags = tags or []
91 added_file = jn(path, '{}_setup.py'.format(next(tempfile._RandomNameSequence())))
93 name_sequence = next(tempfile._RandomNameSequence())
92 Command(cwd).execute('touch %s' % added_file)
94 added_file = os.path.join(path, f'{name_sequence}_setup.py')
93 Command(cwd).execute('%s add %s' % (vcs, added_file))
95
96 Command(cwd).execute(f'touch {added_file}')
97 Command(cwd).execute(f'{vcs} add {added_file}')
94 author_str = 'Marcin KuΕΊminski <me@email.com>'
98 author_str = 'Marcin KuΕΊminski <me@email.com>'
95
99
96 for i in range(kwargs.get('files_no', 3)):
100 for i in range(kwargs.get('files_no', 3)):
97 cmd = f"""echo 'added_line{i}' >> {added_file}"""
101 cmd = f"""echo 'added_line{i}' >> {added_file}"""
98 Command(cwd).execute(cmd)
102 Command(cwd).execute(cmd)
99
103
100 if vcs == 'hg':
104 if vcs == 'hg':
101 cmd = f"""hg commit -m 'committed new {i}' -u '{author_str}' {added_file} """
105 cmd = f"""hg commit -m 'committed new {i}' -u '{author_str}' {added_file} """
102 elif vcs == 'git':
106 elif vcs == 'git':
103 cmd = f"""{git_ident} && git commit -m 'committed new {i}' {added_file}"""
107 cmd = f"""{git_ident} && git commit -m 'committed new {i}' {added_file}"""
104 Command(cwd).execute(cmd)
108 Command(cwd).execute(cmd)
105
109
106 for tag in tags:
110 for tag in tags:
107 if vcs == 'hg':
111 if vcs == 'hg':
108 Command(cwd).execute(
112 Command(cwd).execute(
109 f"""hg tag -m "{tag['commit']}" -u "{author_str}" """,
113 f"""hg tag -m "{tag['commit']}" -u "{author_str}" """,
110 tag['name'])
114 tag['name'])
111 elif vcs == 'git':
115 elif vcs == 'git':
112 if tag['commit']:
116 if tag['commit']:
113 # annotated tag
117 # annotated tag
114 _stdout, _stderr = Command(cwd).execute(
118 _stdout, _stderr = Command(cwd).execute(
115 f"""{git_ident} && git tag -a {tag['name']} -m "{tag['commit']}" """
119 f"""{git_ident} && git tag -a {tag['name']} -m "{tag['commit']}" """
116 )
120 )
117 else:
121 else:
118 # lightweight tag
122 # lightweight tag
119 _stdout, _stderr = Command(cwd).execute(
123 _stdout, _stderr = Command(cwd).execute(
120 f"""{git_ident} && git tag {tag['name']}"""
124 f"""{git_ident} && git tag {tag['name']}"""
121 )
125 )
122
126
123
127
124 def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
128 def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
125 new_branch=False, **kwargs):
129 new_branch=False, **kwargs):
126 """
130 """
127 Generate some files, add it to DEST repo and push back
131 Generate some files, add it to DEST repo and push back
128 vcs is git or hg and defines what VCS we want to make those files for
132 vcs is git or hg and defines what VCS we want to make those files for
129 """
133 """
130 git_ident = "git config user.name Marcin KuΕΊminski && git config user.email me@email.com"
134 git_ident = "git config user.name Marcin KuΕΊminski && git config user.email me@email.com"
131 cwd = jn(dest)
135 cwd = os.path.join(dest)
132
136
133 # commit some stuff into this repo
137 # commit some stuff into this repo
134 _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)
138 _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)
135
139
136 default_target_branch = {
140 default_target_branch = {
137 'git': 'master',
141 'git': 'master',
138 'hg': 'default'
142 'hg': 'default'
139 }.get(vcs)
143 }.get(vcs)
140
144
141 target_branch = target_branch or default_target_branch
145 target_branch = target_branch or default_target_branch
142
146
143 # PUSH it back
147 # PUSH it back
144 stdout = stderr = None
148 stdout = stderr = None
145 if vcs == 'hg':
149 if vcs == 'hg':
146 maybe_new_branch = ''
150 maybe_new_branch = ''
147 if new_branch:
151 if new_branch:
148 maybe_new_branch = '--new-branch'
152 maybe_new_branch = '--new-branch'
149 stdout, stderr = Command(cwd).execute(
153 stdout, stderr = Command(cwd).execute(
150 'hg push --traceback --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
154 f'hg push --traceback --verbose {maybe_new_branch} -r {target_branch} {clone_url}'
151 )
155 )
152 elif vcs == 'git':
156 elif vcs == 'git':
153 stdout, stderr = Command(cwd).execute(
157 stdout, stderr = Command(cwd).execute(
154 """{} &&
158 f'{git_ident} && git push --verbose --tags {clone_url} {target_branch}'
155 git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
159 )
160 elif vcs == 'svn':
161 stdout, stderr = Command(cwd).execute(
162 f'svn ci -m "pushing to {target_branch}"'
156 )
163 )
157
164
158 return stdout, stderr
165 return stdout, stderr
159
166
160
167
161 def _check_proper_git_push(
168 def _check_proper_git_push(
162 stdout, stderr, branch='master', should_set_default_branch=False):
169 stdout, stderr, branch='master', should_set_default_branch=False):
163 # Note: Git is writing most information to stderr intentionally
170 # Note: Git is writing most information to stderr intentionally
164 assert 'fatal' not in stderr
171 assert 'fatal' not in stderr
165 assert 'rejected' not in stderr
172 assert 'rejected' not in stderr
166 assert 'Pushing to' in stderr
173 assert 'Pushing to' in stderr
167 assert '%s -> %s' % (branch, branch) in stderr
174 assert '%s -> %s' % (branch, branch) in stderr
168
175
169 if should_set_default_branch:
176 if should_set_default_branch:
170 assert "Setting default branch to %s" % branch in stderr
177 assert "Setting default branch to %s" % branch in stderr
171 else:
178 else:
172 assert "Setting default branch" not in stderr
179 assert "Setting default branch" not in stderr
173
180
174
181
175 def _check_proper_hg_push(stdout, stderr, branch='default'):
182 def _check_proper_hg_push(stdout, stderr, branch='default'):
176 assert 'pushing to' in stdout
183 assert 'pushing to' in stdout
177 assert 'searching for changes' in stdout
184 assert 'searching for changes' in stdout
178
185
179 assert 'abort:' not in stderr
186 assert 'abort:' not in stderr
180
187
181
188
189 def _check_proper_svn_push(stdout, stderr):
190 assert 'pushing to' in stdout
191 assert 'searching for changes' in stdout
192
193 assert 'abort:' not in stderr
194
195
182 def _check_proper_clone(stdout, stderr, vcs):
196 def _check_proper_clone(stdout, stderr, vcs):
183 if vcs == 'hg':
197 if vcs == 'hg':
184 assert 'requesting all changes' in stdout
198 assert 'requesting all changes' in stdout
185 assert 'adding changesets' in stdout
199 assert 'adding changesets' in stdout
186 assert 'adding manifests' in stdout
200 assert 'adding manifests' in stdout
187 assert 'adding file changes' in stdout
201 assert 'adding file changes' in stdout
188
202
189 assert stderr == ''
203 assert stderr == ''
190
204
191 if vcs == 'git':
205 if vcs == 'git':
192 assert '' == stdout
206 assert '' == stdout
193 assert 'Cloning into' in stderr
207 assert 'Cloning into' in stderr
194 assert 'abort:' not in stderr
208 assert 'abort:' not in stderr
195 assert 'fatal:' not in stderr
209 assert 'fatal:' not in stderr
210
211 if vcs == 'svn':
212 assert 'dupa' in stdout
213
214
@@ -1,307 +1,311 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 py.test config for test suite for making push/pull operations.
21 py.test config for test suite for making push/pull operations.
22
22
23 .. important::
23 .. important::
24
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
26 to redirect things to stderr instead of stdout.
27 """
27 """
28
28
29 import os
29 import os
30 import tempfile
30 import tempfile
31 import textwrap
31 import textwrap
32 import pytest
32 import pytest
33 import logging
33 import logging
34 import requests
34 import requests
35
35
36 from rhodecode import events
36 from rhodecode import events
37 from rhodecode.lib.str_utils import safe_bytes
37 from rhodecode.lib.str_utils import safe_bytes
38 from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \
38 from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \
39 UserToRepoBranchPermission, User
39 UserToRepoBranchPermission, User
40 from rhodecode.model.integration import IntegrationModel
40 from rhodecode.model.integration import IntegrationModel
41 from rhodecode.model.db import Repository
41 from rhodecode.model.db import Repository
42 from rhodecode.model.meta import Session
42 from rhodecode.model.meta import Session
43 from rhodecode.integrations.types.webhook import WebhookIntegrationType
43 from rhodecode.integrations.types.webhook import WebhookIntegrationType
44
44
45 from rhodecode.tests import GIT_REPO, HG_REPO
45 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
46 from rhodecode.tests.conftest import HTTPBIN_DOMAIN, HTTPBIN_POST
46 from rhodecode.tests.conftest import HTTPBIN_DOMAIN, HTTPBIN_POST
47 from rhodecode.tests.fixture import Fixture
47 from rhodecode.tests.fixture import Fixture
48 from rhodecode.tests.server_utils import RcWebServer
48 from rhodecode.tests.server_utils import RcWebServer
49
49
50
50
51 REPO_GROUP = 'a_repo_group'
51 REPO_GROUP = 'a_repo_group'
52 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
52 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
53 GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}'
53 GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}'
54 SVN_REPO_WITH_GROUP = f'{REPO_GROUP}/{SVN_REPO}'
54
55
55 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
56
57
57
58
58 def check_httpbin_connection():
59 def check_httpbin_connection():
60 log.debug('Checking if HTTPBIN_DOMAIN: %s is available', HTTPBIN_DOMAIN)
59 try:
61 try:
60 response = requests.get(HTTPBIN_DOMAIN)
62 response = requests.get(HTTPBIN_DOMAIN, timeout=5)
61 return response.status_code == 200
63 return response.status_code == 200
62 except Exception as e:
64 except Exception as e:
63 print(e)
65 print(e)
64
66
65 return False
67 return False
66
68
67
69
68 @pytest.fixture(scope="module")
70 @pytest.fixture(scope="module")
69 def rcextensions(request, db_connection, tmpdir_factory):
71 def rcextensions(request, db_connection, tmpdir_factory):
70 """
72 """
71 Installs a testing rcextensions pack to ensure they work as expected.
73 Installs a testing rcextensions pack to ensure they work as expected.
72 """
74 """
73 init_content = textwrap.dedent("""
75 init_content = textwrap.dedent("""
74 # Forward import the example rcextensions to make it
76 # Forward import the example rcextensions to make it
75 # active for our tests.
77 # active for our tests.
76 from rhodecode.tests.other.example_rcextensions import *
78 from rhodecode.tests.other.example_rcextensions import *
77 """)
79 """)
78
80
79 # Note: rcextensions are looked up based on the path of the ini file
81 # Note: rcextensions are looked up based on the path of the ini file
80 root_path = tmpdir_factory.getbasetemp()
82 root_path = tmpdir_factory.getbasetemp()
81 rcextensions_path = root_path.join('rcextensions')
83 rcextensions_path = root_path.join('rcextensions')
82 init_path = rcextensions_path.join('__init__.py')
84 init_path = rcextensions_path.join('__init__.py')
83
85
84 if rcextensions_path.check():
86 if rcextensions_path.check():
85 pytest.fail(
87 pytest.fail(
86 "Path for rcextensions already exists, please clean up before "
88 "Path for rcextensions already exists, please clean up before "
87 "test run this path: %s" % (rcextensions_path, ))
89 "test run this path: %s" % (rcextensions_path, ))
88 else:
90 else:
89 request.addfinalizer(rcextensions_path.remove)
91 request.addfinalizer(rcextensions_path.remove)
90 init_path.write_binary(safe_bytes(init_content), ensure=True)
92 init_path.write_binary(safe_bytes(init_content), ensure=True)
91
93
92
94
93 @pytest.fixture(scope="module")
95 @pytest.fixture(scope="module")
94 def repos(request, db_connection):
96 def repos(request, db_connection):
95 """Create a copy of each test repo in a repo group."""
97 """Create a copy of each test repo in a repo group."""
96 fixture = Fixture()
98 fixture = Fixture()
97 repo_group = fixture.create_repo_group(REPO_GROUP)
99 repo_group = fixture.create_repo_group(REPO_GROUP)
98 repo_group_id = repo_group.group_id
100 repo_group_id = repo_group.group_id
99 fixture.create_fork(HG_REPO, HG_REPO,
101 fixture.create_fork(HG_REPO, HG_REPO,
100 repo_name_full=HG_REPO_WITH_GROUP,
102 repo_name_full=HG_REPO_WITH_GROUP,
101 repo_group=repo_group_id)
103 repo_group=repo_group_id)
102 fixture.create_fork(GIT_REPO, GIT_REPO,
104 fixture.create_fork(GIT_REPO, GIT_REPO,
103 repo_name_full=GIT_REPO_WITH_GROUP,
105 repo_name_full=GIT_REPO_WITH_GROUP,
104 repo_group=repo_group_id)
106 repo_group=repo_group_id)
107 fixture.create_fork(SVN_REPO, SVN_REPO,
108 repo_name_full=SVN_REPO_WITH_GROUP,
109 repo_group=repo_group_id)
105
110
106 @request.addfinalizer
111 @request.addfinalizer
107 def cleanup():
112 def cleanup():
108 fixture.destroy_repo(HG_REPO_WITH_GROUP)
113 fixture.destroy_repo(HG_REPO_WITH_GROUP)
109 fixture.destroy_repo(GIT_REPO_WITH_GROUP)
114 fixture.destroy_repo(GIT_REPO_WITH_GROUP)
115 fixture.destroy_repo(SVN_REPO_WITH_GROUP)
110 fixture.destroy_repo_group(repo_group_id)
116 fixture.destroy_repo_group(repo_group_id)
111
117
112
118
113 @pytest.fixture(scope="module")
119 @pytest.fixture(scope="module")
114 def rc_web_server_config_modification():
120 def rc_web_server_config_modification():
115 return []
121 return []
116
122
117
123
118 @pytest.fixture(scope="module")
124 @pytest.fixture(scope="module")
119 def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification):
125 def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification):
120 """
126 """
121 Configuration file used for the fixture `rc_web_server`.
127 Configuration file used for the fixture `rc_web_server`.
122 """
128 """
123
129
124 def factory(rcweb_port, vcsserver_port):
130 def factory(rcweb_port, vcsserver_port):
125 custom_params = [
131 custom_params = [
126 {'handler_console': {'level': 'DEBUG'}},
132 {'handler_console': {'level': 'DEBUG'}},
127 {'server:main': {'port': rcweb_port}},
133 {'server:main': {'port': rcweb_port}},
128 {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}}
134 {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}}
129 ]
135 ]
130 custom_params.extend(rc_web_server_config_modification)
136 custom_params.extend(rc_web_server_config_modification)
131 return testini_factory(custom_params)
137 return testini_factory(custom_params)
132 return factory
138 return factory
133
139
134
140
135 @pytest.fixture(scope="module")
141 @pytest.fixture(scope="module")
136 def rc_web_server(
142 def rc_web_server(
137 request, vcsserver_factory, available_port_factory,
143 request, vcsserver_factory, available_port_factory,
138 rc_web_server_config_factory, repos, rcextensions):
144 rc_web_server_config_factory, repos, rcextensions):
139 """
145 """
140 Run the web server as a subprocess. with its own instance of vcsserver
146 Run the web server as a subprocess. with its own instance of vcsserver
141 """
147 """
142 rcweb_port = available_port_factory()
148 rcweb_port: int = available_port_factory()
143 log.info('Using rcweb ops test port {}'.format(rcweb_port))
149 log.info('Using rcweb ops test port %s', rcweb_port)
144
150
145 vcsserver_port = available_port_factory()
151 vcsserver_port: int = available_port_factory()
146 log.info('Using vcsserver ops test port {}'.format(vcsserver_port))
152 log.info('Using vcsserver ops test port %s', vcsserver_port)
147
153
148 vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log')
154 vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log')
149 vcsserver_factory(
155 vcsserver_factory(
150 request, vcsserver_port=vcsserver_port,
156 request, vcsserver_port=vcsserver_port,
151 log_file=vcs_log,
157 log_file=vcs_log,
152 overrides=(
158 overrides=(
153 {'server:main': {'workers': 2}},
159 {'server:main': {'workers': 2}},
154 {'server:main': {'graceful_timeout': 10}},
160 {'server:main': {'graceful_timeout': 10}},
155 ))
161 ))
156
162
157 rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log')
163 rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log')
158 rc_web_server_config = rc_web_server_config_factory(
164 rc_web_server_config = rc_web_server_config_factory(
159 rcweb_port=rcweb_port,
165 rcweb_port=rcweb_port,
160 vcsserver_port=vcsserver_port)
166 vcsserver_port=vcsserver_port)
161 server = RcWebServer(rc_web_server_config, log_file=rc_log)
167 server = RcWebServer(rc_web_server_config, log_file=rc_log)
162 server.start()
168 server.start()
163
169
164 @request.addfinalizer
170 @request.addfinalizer
165 def cleanup():
171 def cleanup():
166 server.shutdown()
172 server.shutdown()
167
173
168 server.wait_until_ready()
174 server.wait_until_ready()
169 return server
175 return server
170
176
171
177
172 @pytest.fixture()
178 @pytest.fixture()
173 def disable_locking(baseapp):
179 def disable_locking(baseapp):
174 r = Repository.get_by_repo_name(GIT_REPO)
180 r = Repository.get_by_repo_name(GIT_REPO)
175 Repository.unlock(r)
181 Repository.unlock(r)
176 r.enable_locking = False
182 r.enable_locking = False
177 Session().add(r)
183 Session().add(r)
178 Session().commit()
184 Session().commit()
179
185
180 r = Repository.get_by_repo_name(HG_REPO)
186 r = Repository.get_by_repo_name(HG_REPO)
181 Repository.unlock(r)
187 Repository.unlock(r)
182 r.enable_locking = False
188 r.enable_locking = False
183 Session().add(r)
189 Session().add(r)
184 Session().commit()
190 Session().commit()
185
191
186
192
187 @pytest.fixture()
193 @pytest.fixture()
188 def fs_repo_only(request, rhodecode_fixtures):
194 def fs_repo_only(request, rhodecode_fixtures):
189 def fs_repo_fabric(repo_name, repo_type):
195 def fs_repo_fabric(repo_name, repo_type):
190 rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
196 rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
191 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False)
197 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False)
192
198
193 def cleanup():
199 def cleanup():
194 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True)
200 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True)
195 rhodecode_fixtures.destroy_repo_on_filesystem(repo_name)
201 rhodecode_fixtures.destroy_repo_on_filesystem(repo_name)
196
202
197 request.addfinalizer(cleanup)
203 request.addfinalizer(cleanup)
198
204
199 return fs_repo_fabric
205 return fs_repo_fabric
200
206
201
207
202 @pytest.fixture()
208 @pytest.fixture()
203 def enable_webhook_push_integration(request):
209 def enable_webhook_push_integration(request):
204 integration = Integration()
210 integration = Integration()
205 integration.integration_type = WebhookIntegrationType.key
211 integration.integration_type = WebhookIntegrationType.key
206 Session().add(integration)
212 Session().add(integration)
207
213
208 settings = dict(
214 settings = dict(
209 url=HTTPBIN_POST,
215 url=HTTPBIN_POST,
210 secret_token='secret',
216 secret_token='secret',
211 username=None,
217 username=None,
212 password=None,
218 password=None,
213 custom_header_key=None,
219 custom_header_key=None,
214 custom_header_val=None,
220 custom_header_val=None,
215 method_type='post',
221 method_type='post',
216 events=[events.RepoPushEvent.name],
222 events=[events.RepoPushEvent.name],
217 log_data=True
223 log_data=True
218 )
224 )
219
225
220 IntegrationModel().update_integration(
226 IntegrationModel().update_integration(
221 integration,
227 integration,
222 name='IntegrationWebhookTest',
228 name='IntegrationWebhookTest',
223 enabled=True,
229 enabled=True,
224 settings=settings,
230 settings=settings,
225 repo=None,
231 repo=None,
226 repo_group=None,
232 repo_group=None,
227 child_repos_only=False,
233 child_repos_only=False,
228 )
234 )
229 Session().commit()
235 Session().commit()
230 integration_id = integration.integration_id
236 integration_id = integration.integration_id
231
237
232 @request.addfinalizer
238 @request.addfinalizer
233 def cleanup():
239 def cleanup():
234 integration = Integration.get(integration_id)
240 integration = Integration.get(integration_id)
235 Session().delete(integration)
241 Session().delete(integration)
236 Session().commit()
242 Session().commit()
237
243
238
244
239 @pytest.fixture()
245 @pytest.fixture()
240 def branch_permission_setter(request):
246 def branch_permission_setter(request):
241 """
247 """
242
248
243 def my_test(branch_permission_setter)
249 def my_test(branch_permission_setter)
244 branch_permission_setter(repo_name, username, pattern='*', permission='branch.push')
250 branch_permission_setter(repo_name, username, pattern='*', permission='branch.push')
245
251
246 """
252 """
247
253
248 rule_id = None
254 rule_id = None
249 write_perm_id = None
255 write_perm_id = None
250 write_perm = None
256 write_perm = None
251 rule = None
257 rule = None
252
258
253 def _branch_permissions_setter(
259 def _branch_permissions_setter(
254 repo_name, username, pattern='*', permission='branch.push_force'):
260 repo_name, username, pattern='*', permission='branch.push_force'):
255 global rule_id, write_perm_id
261 global rule_id, write_perm_id
256 global rule, write_perm
262 global rule, write_perm
257
263
258 repo = Repository.get_by_repo_name(repo_name)
264 repo = Repository.get_by_repo_name(repo_name)
259 repo_id = repo.repo_id
265 repo_id = repo.repo_id
260
266
261 user = User.get_by_username(username)
267 user = User.get_by_username(username)
262 user_id = user.user_id
268 user_id = user.user_id
263
269
264 rule_perm_obj = Permission.get_by_key(permission)
270 rule_perm_obj = Permission.get_by_key(permission)
265
271
266 # add new entry, based on existing perm entry
272 # add new entry, based on existing perm entry
267 perm = UserRepoToPerm.query() \
273 perm = UserRepoToPerm.query() \
268 .filter(UserRepoToPerm.repository_id == repo_id) \
274 .filter(UserRepoToPerm.repository_id == repo_id) \
269 .filter(UserRepoToPerm.user_id == user_id) \
275 .filter(UserRepoToPerm.user_id == user_id) \
270 .first()
276 .first()
271
277
272 if not perm:
278 if not perm:
273 # such user isn't defined in Permissions for repository
279 # such user isn't defined in Permissions for repository
274 # we now on-the-fly add new permission
280 # we now on-the-fly add new permission
275
281
276 write_perm = UserRepoToPerm()
282 write_perm = UserRepoToPerm()
277 write_perm.permission = Permission.get_by_key('repository.write')
283 write_perm.permission = Permission.get_by_key('repository.write')
278 write_perm.repository_id = repo_id
284 write_perm.repository_id = repo_id
279 write_perm.user_id = user_id
285 write_perm.user_id = user_id
280 Session().add(write_perm)
286 Session().add(write_perm)
281 Session().flush()
287 Session().flush()
282
288
283 perm = write_perm
289 perm = write_perm
284
290
285 rule = UserToRepoBranchPermission()
291 rule = UserToRepoBranchPermission()
286 rule.rule_to_perm_id = perm.repo_to_perm_id
292 rule.rule_to_perm_id = perm.repo_to_perm_id
287 rule.branch_pattern = pattern
293 rule.branch_pattern = pattern
288 rule.rule_order = 10
294 rule.rule_order = 10
289 rule.permission = rule_perm_obj
295 rule.permission = rule_perm_obj
290 rule.repository_id = repo_id
296 rule.repository_id = repo_id
291 Session().add(rule)
297 Session().add(rule)
292 Session().commit()
298 Session().commit()
293
299
294 return rule
300 return rule
295
301
296 @request.addfinalizer
302 @request.addfinalizer
297 def cleanup():
303 def cleanup():
298 if rule:
304 if rule:
299 Session().delete(rule)
305 Session().delete(rule)
300 Session().commit()
306 Session().commit()
301 if write_perm:
307 if write_perm:
302 Session().delete(write_perm)
308 Session().delete(write_perm)
303 Session().commit()
309 Session().commit()
304
310
305 return _branch_permissions_setter
311 return _branch_permissions_setter
306
307
@@ -1,65 +1,65 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 Test suite for making push/pull operations, on specially modified INI files
21 Test suite for making push/pull operations, on specially modified INI files
22
22
23 .. important::
23 .. important::
24
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
26 to redirect things to stderr instead of stdout.
27 """
27 """
28
28
29 import os
29 import os
30
30
31 from rhodecode.lib.vcs.backends.git.repository import GitRepository
31 from rhodecode.lib.vcs.backends.git.repository import GitRepository
32 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.lib.vcs.nodes import FileNode
33 from rhodecode.tests import GIT_REPO
33 from rhodecode.tests import GIT_REPO
34 from rhodecode.tests.vcs_operations import Command
34 from rhodecode.tests.vcs_operations import Command
35 from .test_vcs_operations import _check_proper_clone, _check_proper_git_push
35 from .test_vcs_operations_git import _check_proper_clone, _check_proper_git_push
36
36
37
37
38 def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
38 def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
39 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
39 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
40 cmd = Command('/tmp')
40 cmd = Command('/tmp')
41 stdout, stderr = cmd.execute(
41 stdout, stderr = cmd.execute(
42 'git -c http.postBuffer=1024 clone', clone_url, tmpdir.strpath)
42 'git -c http.postBuffer=1024 clone', clone_url, tmpdir.strpath)
43 _check_proper_clone(stdout, stderr, 'git')
43 _check_proper_clone(stdout, stderr, 'git')
44 cmd.assert_returncode_success()
44 cmd.assert_returncode_success()
45
45
46
46
47 def test_git_push_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
47 def test_git_push_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
48 empty_repo = backend_git.create_repo()
48 empty_repo = backend_git.create_repo()
49
49
50 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
50 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
51
51
52 cmd = Command(tmpdir.strpath)
52 cmd = Command(tmpdir.strpath)
53 cmd.execute('git clone', clone_url)
53 cmd.execute('git clone', clone_url)
54
54
55 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
55 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
56 repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello'))
56 repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello'))
57 repo.in_memory_commit.commit(
57 repo.in_memory_commit.commit(
58 message='Commit on branch Master',
58 message='Commit on branch Master',
59 author='Automatic test <automatic@rhodecode.com>',
59 author='Automatic test <automatic@rhodecode.com>',
60 branch='master')
60 branch='master')
61
61
62 repo_cmd = Command(repo.path)
62 repo_cmd = Command(repo.path)
63 stdout, stderr = repo_cmd.execute(
63 stdout, stderr = repo_cmd.execute(
64 f'git -c http.postBuffer=1024 push --verbose {clone_url} master')
64 f'git -c http.postBuffer=1024 push --verbose {clone_url} master')
65 _check_proper_git_push(stdout, stderr, branch='master')
65 _check_proper_git_push(stdout, stderr, branch='master')
@@ -1,376 +1,199 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 Test suite for making push/pull operations, on specially modified INI files
21 Test suite for making push/pull operations, on specially modified INI files
22
22
23 .. important::
23 .. important::
24
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
26 to redirect things to stderr instead of stdout.
27 """
27 """
28
28
29
29
30 import time
30 import time
31 import logging
32
33 import pytest
31 import pytest
34
32
35 from rhodecode.lib import rc_cache
33 from rhodecode.model.db import Repository, UserIpMap
36 from rhodecode.model.auth_token import AuthTokenModel
37 from rhodecode.model.db import Repository, UserIpMap, CacheKey
38 from rhodecode.model.meta import Session
34 from rhodecode.model.meta import Session
39 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo import RepoModel
40 from rhodecode.model.user import UserModel
36 from rhodecode.model.user import UserModel
41 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
37 from rhodecode.tests import (GIT_REPO, TEST_USER_ADMIN_LOGIN)
42 from rhodecode.tests.utils import assert_message_in_log
38
43
39
44 from rhodecode.tests.vcs_operations import (
40 from rhodecode.tests.vcs_operations import (
45 Command, _check_proper_clone, _check_proper_git_push,
41 Command, _check_proper_clone, _check_proper_git_push,
46 _add_files_and_push, HG_REPO_WITH_GROUP, GIT_REPO_WITH_GROUP)
42 _add_files_and_push, GIT_REPO_WITH_GROUP)
47
43
48
44
49 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
45 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
50 class TestVCSOperations(object):
46 class TestVCSOperations(object):
51
47
52 def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
53 clone_url = rc_web_server.repo_clone_url(HG_REPO)
54 stdout, stderr = Command('/tmp').execute(
55 'hg clone', clone_url, tmpdir.strpath)
56 _check_proper_clone(stdout, stderr, 'hg')
57
58 def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
59 clone_url = rc_web_server.repo_clone_url(HG_REPO)
60 stdout, stderr = Command('/tmp').execute(
61 'hg clone --pull', clone_url, tmpdir.strpath)
62 _check_proper_clone(stdout, stderr, 'hg')
63
64 def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
65 clone_url = rc_web_server.repo_clone_url(HG_REPO)
66 stdout, stderr = Command('/tmp').execute(
67 'hg clone --pull --stream', clone_url, tmpdir.strpath)
68 assert 'files to transfer,' in stdout
69 assert 'transferred 1.' in stdout
70 assert '114 files updated,' in stdout
71
72 def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
48 def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
73 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
49 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
74 cmd = Command('/tmp')
50 cmd = Command('/tmp')
75 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
51 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
76 _check_proper_clone(stdout, stderr, 'git')
52 _check_proper_clone(stdout, stderr, 'git')
77 cmd.assert_returncode_success()
53 cmd.assert_returncode_success()
78
54
79 def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir):
55 def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir):
80 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
56 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
81 cmd = Command('/tmp')
57 cmd = Command('/tmp')
82 stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
58 stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
83 _check_proper_clone(stdout, stderr, 'git')
59 _check_proper_clone(stdout, stderr, 'git')
84 cmd.assert_returncode_success()
60 cmd.assert_returncode_success()
85
61
86 def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
87 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
88 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
89 stdout, stderr = Command('/tmp').execute(
90 'hg clone', clone_url, tmpdir.strpath)
91 _check_proper_clone(stdout, stderr, 'hg')
92
93 def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
62 def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
94 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
63 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
95 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
64 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
96 cmd = Command('/tmp')
65 cmd = Command('/tmp')
97 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
66 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
98 _check_proper_clone(stdout, stderr, 'git')
67 _check_proper_clone(stdout, stderr, 'git')
99 cmd.assert_returncode_success()
68 cmd.assert_returncode_success()
100
69
101 def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
102 clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
103 stdout, stderr = Command('/tmp').execute(
104 'hg clone', clone_url, tmpdir.strpath)
105 _check_proper_clone(stdout, stderr, 'hg')
106
107 def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
70 def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
108 clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
71 clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
109 cmd = Command('/tmp')
72 cmd = Command('/tmp')
110 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
73 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
111 _check_proper_clone(stdout, stderr, 'git')
74 _check_proper_clone(stdout, stderr, 'git')
112 cmd.assert_returncode_success()
75 cmd.assert_returncode_success()
113
76
114 def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir):
77 def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir):
115 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
78 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
116 cmd = Command('/tmp')
79 cmd = Command('/tmp')
117 stdout, stderr = cmd.execute(
80 stdout, stderr = cmd.execute(
118 'git clone --depth=1', clone_url, tmpdir.strpath)
81 'git clone --depth=1', clone_url, tmpdir.strpath)
119
82
120 assert '' == stdout
83 assert '' == stdout
121 assert 'Cloning into' in stderr
84 assert 'Cloning into' in stderr
122 cmd.assert_returncode_success()
85 cmd.assert_returncode_success()
123
86
124 def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
125 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
126 stdout, stderr = Command('/tmp').execute(
127 'hg clone', clone_url, tmpdir.strpath)
128 assert 'abort: authorization failed' in stderr
129
87
130 def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
88 def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
131 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
89 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
132 stdout, stderr = Command('/tmp').execute(
90 stdout, stderr = Command('/tmp').execute(
133 'git clone', clone_url, tmpdir.strpath)
91 'git clone', clone_url, tmpdir.strpath)
134 assert 'fatal: Authentication failed' in stderr
92 assert 'fatal: Authentication failed' in stderr
135
93
136 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
94 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
137 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
95 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
138 stdout, stderr = Command('/tmp').execute(
96 stdout, stderr = Command('/tmp').execute(
139 'hg clone', clone_url, tmpdir.strpath)
97 'hg clone', clone_url, tmpdir.strpath)
140 assert 'HTTP Error 404: Not Found' in stderr
98 assert 'HTTP Error 404: Not Found' in stderr
141
99
142 def test_clone_hg_repo_as_git(self, rc_web_server, tmpdir):
143 clone_url = rc_web_server.repo_clone_url(HG_REPO)
144 stdout, stderr = Command('/tmp').execute(
145 'git clone', clone_url, tmpdir.strpath)
146 assert 'not found' in stderr
147
148 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
100 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
149 clone_url = rc_web_server.repo_clone_url('trololo')
101 clone_url = rc_web_server.repo_clone_url('trololo')
150 stdout, stderr = Command('/tmp').execute(
102 stdout, stderr = Command('/tmp').execute(
151 'hg clone', clone_url, tmpdir.strpath)
103 'hg clone', clone_url, tmpdir.strpath)
152 assert 'HTTP Error 404: Not Found' in stderr
104 assert 'HTTP Error 404: Not Found' in stderr
153
105
154 def test_clone_non_existing_path_git(self, rc_web_server, tmpdir):
106 def test_clone_non_existing_path_git(self, rc_web_server, tmpdir):
155 clone_url = rc_web_server.repo_clone_url('trololo')
107 clone_url = rc_web_server.repo_clone_url('trololo')
156 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
108 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
157 assert 'not found' in stderr
109 assert 'not found' in stderr
158
110
159 def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
160 clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
161 stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
162 assert 'HTTP Error 404: Not Found' in stderr
163
164 def test_clone_git_with_slashes(self, rc_web_server, tmpdir):
111 def test_clone_git_with_slashes(self, rc_web_server, tmpdir):
165 clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO)
112 clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO)
166 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
113 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
167 assert 'not found' in stderr
114 assert 'not found' in stderr
168
115
169 def test_clone_existing_path_hg_not_in_database(
170 self, rc_web_server, tmpdir, fs_repo_only):
171
172 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
173 clone_url = rc_web_server.repo_clone_url(db_name)
174 stdout, stderr = Command('/tmp').execute(
175 'hg clone', clone_url, tmpdir.strpath)
176 assert 'HTTP Error 404: Not Found' in stderr
177
178 def test_clone_existing_path_git_not_in_database(
116 def test_clone_existing_path_git_not_in_database(
179 self, rc_web_server, tmpdir, fs_repo_only):
117 self, rc_web_server, tmpdir, fs_repo_only):
180 db_name = fs_repo_only('not-in-db-git', repo_type='git')
118 db_name = fs_repo_only('not-in-db-git', repo_type='git')
181 clone_url = rc_web_server.repo_clone_url(db_name)
119 clone_url = rc_web_server.repo_clone_url(db_name)
182 stdout, stderr = Command('/tmp').execute(
120 stdout, stderr = Command('/tmp').execute(
183 'git clone', clone_url, tmpdir.strpath)
121 'git clone', clone_url, tmpdir.strpath)
184 assert 'not found' in stderr
122 assert 'not found' in stderr
185
123
186 def test_clone_existing_path_hg_not_in_database_different_scm(
187 self, rc_web_server, tmpdir, fs_repo_only):
188 db_name = fs_repo_only('not-in-db-git', repo_type='git')
189 clone_url = rc_web_server.repo_clone_url(db_name)
190 stdout, stderr = Command('/tmp').execute(
191 'hg clone', clone_url, tmpdir.strpath)
192 assert 'HTTP Error 404: Not Found' in stderr
193
194 def test_clone_existing_path_git_not_in_database_different_scm(
124 def test_clone_existing_path_git_not_in_database_different_scm(
195 self, rc_web_server, tmpdir, fs_repo_only):
125 self, rc_web_server, tmpdir, fs_repo_only):
196 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
126 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
197 clone_url = rc_web_server.repo_clone_url(db_name)
127 clone_url = rc_web_server.repo_clone_url(db_name)
198 stdout, stderr = Command('/tmp').execute(
128 stdout, stderr = Command('/tmp').execute(
199 'git clone', clone_url, tmpdir.strpath)
129 'git clone', clone_url, tmpdir.strpath)
200 assert 'not found' in stderr
130 assert 'not found' in stderr
201
131
202 def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
203 repo = user_util.create_repo()
204 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
205
206 # Damage repo by removing it's folder
207 RepoModel()._delete_filesystem_repo(repo)
208
209 stdout, stderr = Command('/tmp').execute(
210 'hg clone', clone_url, tmpdir.strpath)
211 assert 'HTTP Error 404: Not Found' in stderr
212
213 def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util):
132 def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util):
214 repo = user_util.create_repo(repo_type='git')
133 repo = user_util.create_repo(repo_type='git')
215 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
134 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
216
135
217 # Damage repo by removing it's folder
136 # Damage repo by removing it's folder
218 RepoModel()._delete_filesystem_repo(repo)
137 RepoModel()._delete_filesystem_repo(repo)
219
138
220 stdout, stderr = Command('/tmp').execute(
139 stdout, stderr = Command('/tmp').execute(
221 'git clone', clone_url, tmpdir.strpath)
140 'git clone', clone_url, tmpdir.strpath)
222 assert 'not found' in stderr
141 assert 'not found' in stderr
223
142
224 def test_push_new_file_hg(self, rc_web_server, tmpdir):
225 clone_url = rc_web_server.repo_clone_url(HG_REPO)
226 stdout, stderr = Command('/tmp').execute(
227 'hg clone', clone_url, tmpdir.strpath)
228
229 stdout, stderr = _add_files_and_push(
230 'hg', tmpdir.strpath, clone_url=clone_url)
231
232 assert 'pushing to' in stdout
233 assert 'size summary' in stdout
234
235 def test_push_new_file_git(self, rc_web_server, tmpdir):
143 def test_push_new_file_git(self, rc_web_server, tmpdir):
236 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
144 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
237 stdout, stderr = Command('/tmp').execute(
145 stdout, stderr = Command('/tmp').execute(
238 'git clone', clone_url, tmpdir.strpath)
146 'git clone', clone_url, tmpdir.strpath)
239
147
240 # commit some stuff into this repo
148 # commit some stuff into this repo
241 stdout, stderr = _add_files_and_push(
149 stdout, stderr = _add_files_and_push(
242 'git', tmpdir.strpath, clone_url=clone_url)
150 'git', tmpdir.strpath, clone_url=clone_url)
243
151
244 _check_proper_git_push(stdout, stderr)
152 _check_proper_git_push(stdout, stderr)
245
153
246 def test_push_invalidates_cache(self, rc_web_server, tmpdir):
247 hg_repo = Repository.get_by_repo_name(HG_REPO)
248
249 # init cache objects
250 CacheKey.delete_all_cache()
251
252 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=hg_repo.repo_id)
253
254 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)
255
256 with inv_context_manager as invalidation_context:
257 # __enter__ will create and register cache objects
258 pass
259
260 cache_keys = hg_repo.cache_keys
261 assert cache_keys != []
262 old_ids = [x.cache_state_uid for x in cache_keys]
263
264 # clone to init cache
265 clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
266 stdout, stderr = Command('/tmp').execute(
267 'hg clone', clone_url, tmpdir.strpath)
268
269 cache_keys = hg_repo.cache_keys
270 assert cache_keys != []
271 for key in cache_keys:
272 assert key.cache_active is True
273
274 # PUSH that should trigger invalidation cache
275 stdout, stderr = _add_files_and_push(
276 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
277
278 # flush...
279 Session().commit()
280 hg_repo = Repository.get_by_repo_name(HG_REPO)
281 cache_keys = hg_repo.cache_keys
282 assert cache_keys != []
283 new_ids = [x.cache_state_uid for x in cache_keys]
284 assert new_ids != old_ids
285
286 def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
287 clone_url = rc_web_server.repo_clone_url(HG_REPO)
288 stdout, stderr = Command('/tmp').execute(
289 'hg clone', clone_url, tmpdir.strpath)
290
291 push_url = rc_web_server.repo_clone_url(
292 HG_REPO, user='bad', passwd='name')
293 stdout, stderr = _add_files_and_push(
294 'hg', tmpdir.strpath, clone_url=push_url)
295
296 assert 'abort: authorization failed' in stderr
297
298 def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
154 def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
299 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
155 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
300 stdout, stderr = Command('/tmp').execute(
156 stdout, stderr = Command('/tmp').execute(
301 'git clone', clone_url, tmpdir.strpath)
157 'git clone', clone_url, tmpdir.strpath)
302
158
303 push_url = rc_web_server.repo_clone_url(
159 push_url = rc_web_server.repo_clone_url(
304 GIT_REPO, user='bad', passwd='name')
160 GIT_REPO, user='bad', passwd='name')
305 stdout, stderr = _add_files_and_push(
161 stdout, stderr = _add_files_and_push(
306 'git', tmpdir.strpath, clone_url=push_url)
162 'git', tmpdir.strpath, clone_url=push_url)
307
163
308 assert 'fatal: Authentication failed' in stderr
164 assert 'fatal: Authentication failed' in stderr
309
165
310 def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
311 clone_url = rc_web_server.repo_clone_url(HG_REPO)
312 stdout, stderr = Command('/tmp').execute(
313 'hg clone', clone_url, tmpdir.strpath)
314
315 stdout, stderr = _add_files_and_push(
316 'hg', tmpdir.strpath,
317 clone_url=rc_web_server.repo_clone_url('not-existing'))
318
319 assert 'HTTP Error 404: Not Found' in stderr
320
321 def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
166 def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
322 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
167 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
323 stdout, stderr = Command('/tmp').execute(
168 stdout, stderr = Command('/tmp').execute(
324 'git clone', clone_url, tmpdir.strpath)
169 'git clone', clone_url, tmpdir.strpath)
325
170
326 stdout, stderr = _add_files_and_push(
171 stdout, stderr = _add_files_and_push(
327 'git', tmpdir.strpath,
172 'git', tmpdir.strpath,
328 clone_url=rc_web_server.repo_clone_url('not-existing'))
173 clone_url=rc_web_server.repo_clone_url('not-existing'))
329
174
330 assert 'not found' in stderr
175 assert 'not found' in stderr
331
176
332 def test_ip_restriction_hg(self, rc_web_server, tmpdir):
333 user_model = UserModel()
334 try:
335 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
336 Session().commit()
337 time.sleep(2)
338 clone_url = rc_web_server.repo_clone_url(HG_REPO)
339 stdout, stderr = Command('/tmp').execute(
340 'hg clone', clone_url, tmpdir.strpath)
341 assert 'abort: HTTP Error 403: Forbidden' in stderr
342 finally:
343 # release IP restrictions
344 for ip in UserIpMap.getAll():
345 UserIpMap.delete(ip.ip_id)
346 Session().commit()
347
348 time.sleep(2)
349
350 stdout, stderr = Command('/tmp').execute(
351 'hg clone', clone_url, tmpdir.strpath)
352 _check_proper_clone(stdout, stderr, 'hg')
353
354 def test_ip_restriction_git(self, rc_web_server, tmpdir):
177 def test_ip_restriction_git(self, rc_web_server, tmpdir):
355 user_model = UserModel()
178 user_model = UserModel()
356 try:
179 try:
357 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
180 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
358 Session().commit()
181 Session().commit()
359 time.sleep(2)
182 time.sleep(2)
360 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
183 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
361 stdout, stderr = Command('/tmp').execute(
184 stdout, stderr = Command('/tmp').execute(
362 'git clone', clone_url, tmpdir.strpath)
185 'git clone', clone_url, tmpdir.strpath)
363 msg = "The requested URL returned error: 403"
186 msg = "The requested URL returned error: 403"
364 assert msg in stderr
187 assert msg in stderr
365 finally:
188 finally:
366 # release IP restrictions
189 # release IP restrictions
367 for ip in UserIpMap.getAll():
190 for ip in UserIpMap.getAll():
368 UserIpMap.delete(ip.ip_id)
191 UserIpMap.delete(ip.ip_id)
369 Session().commit()
192 Session().commit()
370
193
371 time.sleep(2)
194 time.sleep(2)
372
195
373 cmd = Command('/tmp')
196 cmd = Command('/tmp')
374 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
197 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
375 cmd.assert_returncode_success()
198 cmd.assert_returncode_success()
376 _check_proper_clone(stdout, stderr, 'git')
199 _check_proper_clone(stdout, stderr, 'git')
@@ -1,87 +1,90 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 Test suite for making push/pull operations, on specially modified INI files
21 Test suite for making push/pull operations, on specially modified INI files
22
22
23 .. important::
23 .. important::
24
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
26 to redirect things to stderr instead of stdout.
27 """
27 """
28
28
29 import pytest
29 import pytest
30
30
31 from rhodecode.tests import GIT_REPO, HG_REPO
31 from rhodecode.tests import GIT_REPO, HG_REPO
32 from rhodecode.tests.vcs_operations import Command, _add_files_and_push
32 from rhodecode.tests.vcs_operations import Command, _add_files_and_push
33 from rhodecode.tests.vcs_operations.conftest import check_httpbin_connection
33 from rhodecode.tests.vcs_operations.conftest import check_httpbin_connection
34
34
35
35
36 connection_available = pytest.mark.skipif(
36 connection_available = pytest.mark.skipif(
37 not check_httpbin_connection(), reason="No outside internet connection available")
37 not check_httpbin_connection(), reason="No outside internet connection available")
38
38
39
39
40 @pytest.mark.usefixtures(
40 @pytest.mark.usefixtures(
41 "disable_locking", "disable_anonymous_user",
41 "disable_locking", "disable_anonymous_user",
42 "enable_webhook_push_integration")
42 "enable_webhook_push_integration")
43 class TestVCSOperationsOnCustomIniConfig(object):
43 class TestVCSOperationsOnCustomIniConfig(object):
44
44
45 @connection_available
45 def test_push_tag_with_commit_hg(self, rc_web_server, tmpdir):
46 def test_push_tag_with_commit_hg(self, rc_web_server, tmpdir):
46 clone_url = rc_web_server.repo_clone_url(HG_REPO)
47 clone_url = rc_web_server.repo_clone_url(HG_REPO)
47 stdout, stderr = Command('/tmp').execute(
48 stdout, stderr = Command('/tmp').execute(
48 'hg clone', clone_url, tmpdir.strpath)
49 'hg clone', clone_url, tmpdir.strpath)
49
50
50 push_url = rc_web_server.repo_clone_url(HG_REPO)
51 push_url = rc_web_server.repo_clone_url(HG_REPO)
51 _add_files_and_push(
52 _add_files_and_push(
52 'hg', tmpdir.strpath, clone_url=push_url,
53 'hg', tmpdir.strpath, clone_url=push_url,
53 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
54 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
54
55
55 rc_log = rc_web_server.get_rc_log()
56 rc_log = rc_web_server.get_rc_log()
56 assert 'ERROR' not in rc_log
57 assert 'ERROR' not in rc_log
57 assert "{'name': 'v1.0.0'," in rc_log
58 assert "{'name': 'v1.0.0'," in rc_log
58
59
60 @connection_available
59 def test_push_tag_with_commit_git(
61 def test_push_tag_with_commit_git(
60 self, rc_web_server, tmpdir):
62 self, rc_web_server, tmpdir):
61 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
63 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
62 stdout, stderr = Command('/tmp').execute(
64 stdout, stderr = Command('/tmp').execute(
63 'git clone', clone_url, tmpdir.strpath)
65 'git clone', clone_url, tmpdir.strpath)
64
66
65 push_url = rc_web_server.repo_clone_url(GIT_REPO)
67 push_url = rc_web_server.repo_clone_url(GIT_REPO)
66 _add_files_and_push(
68 _add_files_and_push(
67 'git', tmpdir.strpath, clone_url=push_url,
69 'git', tmpdir.strpath, clone_url=push_url,
68 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
70 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
69
71
70 rc_log = rc_web_server.get_rc_log()
72 rc_log = rc_web_server.get_rc_log()
71 assert 'ERROR' not in rc_log
73 assert 'ERROR' not in rc_log
72 assert "{'name': 'v1.0.0'," in rc_log
74 assert "{'name': 'v1.0.0'," in rc_log
73
75
76 @connection_available
74 def test_push_tag_with_no_commit_git(
77 def test_push_tag_with_no_commit_git(
75 self, rc_web_server, tmpdir):
78 self, rc_web_server, tmpdir):
76 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
79 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
77 stdout, stderr = Command('/tmp').execute(
80 stdout, stderr = Command('/tmp').execute(
78 'git clone', clone_url, tmpdir.strpath)
81 'git clone', clone_url, tmpdir.strpath)
79
82
80 push_url = rc_web_server.repo_clone_url(GIT_REPO)
83 push_url = rc_web_server.repo_clone_url(GIT_REPO)
81 _add_files_and_push(
84 _add_files_and_push(
82 'git', tmpdir.strpath, clone_url=push_url,
85 'git', tmpdir.strpath, clone_url=push_url,
83 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
86 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
84
87
85 rc_log = rc_web_server.get_rc_log()
88 rc_log = rc_web_server.get_rc_log()
86 assert 'ERROR' not in rc_log
89 assert 'ERROR' not in rc_log
87 assert "{'name': 'v1.0.0'," in rc_log
90 assert "{'name': 'v1.0.0'," in rc_log
@@ -1,138 +1,171 b''
1 #
1 #
2
2
3 ; #################################
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
4 ; RHODECODE VCSSERVER CONFIGURATION
5 ; #################################
5 ; #################################
6
6
7 [server:main]
7 [server:main]
8 ; COMMON HOST/IP CONFIG
8 ; COMMON HOST/IP CONFIG
9 host = 127.0.0.1
9 host = 127.0.0.1
10 port = 9900
10 port = 10010
11
11
12
12
13 ; ###########################
13 ; ###########################
14 ; GUNICORN APPLICATION SERVER
14 ; GUNICORN APPLICATION SERVER
15 ; ###########################
15 ; ###########################
16
16
17 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
17 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
18
18
19 ; Module to use, this setting shouldn't be changed
19 ; Module to use, this setting shouldn't be changed
20 use = egg:gunicorn#main
20 use = egg:gunicorn#main
21
21
22 [app:main]
22 [app:main]
23 ; The %(here)s variable will be replaced with the absolute path of parent directory
23 ; The %(here)s variable will be replaced with the absolute path of parent directory
24 ; of this file
24 ; of this file
25 ; Each option in the app:main can be override by an environmental variable
26 ;
27 ;To override an option:
28 ;
29 ;RC_<KeyName>
30 ;Everything should be uppercase, . and - should be replaced by _.
31 ;For example, if you have these configuration settings:
32 ;rc_cache.repo_object.backend = foo
33 ;can be overridden by
34 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
35
25 use = egg:rhodecode-vcsserver
36 use = egg:rhodecode-vcsserver
26
37
27 ; Pyramid default locales, we need this to be set
38 ; Pyramid default locales, we need this to be set
28 pyramid.default_locale_name = en
39 pyramid.default_locale_name = en
29
40
30 ; default locale used by VCS systems
41 ; default locale used by VCS systems
31 locale = en_US.UTF-8
42 locale = en_US.UTF-8
32
43
33 ; path to binaries for vcsserver, it should be set by the installer
44 ; path to binaries (hg,git,svn) for vcsserver, it should be set by the installer
34 ; at installation time, e.g /home/user/vcsserver-1/profile/bin
45 ; at installation time, e.g /home/user/.rccontrol/vcsserver-1/profile/bin
35 ; it can also be a path to nix-build output in case of development
46 ; or /usr/local/bin/rhodecode_bin/vcs_bin
36 core.binary_dir =
47 core.binary_dir =
37
48
49 ; Redis connection settings for svn integrations logic
50 ; This connection string needs to be the same on ce and vcsserver
51 vcs.svn.redis_conn = redis://redis:6379/0
52
38 ; Custom exception store path, defaults to TMPDIR
53 ; Custom exception store path, defaults to TMPDIR
39 ; This is used to store exception from RhodeCode in shared directory
54 ; This is used to store exception from RhodeCode in shared directory
40 #exception_tracker.store_path =
55 #exception_tracker.store_path =
41
56
42 ; #############
57 ; #############
43 ; DOGPILE CACHE
58 ; DOGPILE CACHE
44 ; #############
59 ; #############
45
60
46 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
61 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
47 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
62 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
48 cache_dir = %(here)s/data
63 cache_dir = %(here)s/data
49
64
50 ; ***************************************
65 ; ***************************************
51 ; `repo_object` cache, default file based
66 ; `repo_object` cache, default file based
52 ; ***************************************
67 ; ***************************************
53
68
54 ; `repo_object` cache settings for vcs methods for repositories
69 ; `repo_object` cache settings for vcs methods for repositories
55 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
70 #rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
56
71
57 ; cache auto-expires after N seconds
72 ; cache auto-expires after N seconds
58 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
73 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
59 rc_cache.repo_object.expiration_time = 2592000
74 #rc_cache.repo_object.expiration_time = 2592000
60
75
61 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
76 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
62 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db
77 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db
63
78
64 ; ***********************************************************
79 ; ***********************************************************
65 ; `repo_object` cache with redis backend
80 ; `repo_object` cache with redis backend
66 ; recommended for larger instance, and for better performance
81 ; recommended for larger instance, and for better performance
67 ; ***********************************************************
82 ; ***********************************************************
68
83
69 ; `repo_object` cache settings for vcs methods for repositories
84 ; `repo_object` cache settings for vcs methods for repositories
70 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
85 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
71
86
72 ; cache auto-expires after N seconds
87 ; cache auto-expires after N seconds
73 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
88 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
74 #rc_cache.repo_object.expiration_time = 2592000
89 #rc_cache.repo_object.expiration_time = 2592000
75
90
76 ; redis_expiration_time needs to be greater then expiration_time
91 ; redis_expiration_time needs to be greater then expiration_time
77 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
92 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
78
93
79 #rc_cache.repo_object.arguments.host = localhost
94 #rc_cache.repo_object.arguments.host = localhost
80 #rc_cache.repo_object.arguments.port = 6379
95 #rc_cache.repo_object.arguments.port = 6379
81 #rc_cache.repo_object.arguments.db = 5
96 #rc_cache.repo_object.arguments.db = 5
82 #rc_cache.repo_object.arguments.socket_timeout = 30
97 #rc_cache.repo_object.arguments.socket_timeout = 30
83 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
98 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
84 #rc_cache.repo_object.arguments.distributed_lock = true
99 #rc_cache.repo_object.arguments.distributed_lock = true
85
100
86 # legacy cache regions, please don't change
101 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
87 beaker.cache.regions = repo_object
102 #rc_cache.repo_object.arguments.lock_auto_renewal = true
88 beaker.cache.repo_object.type = memorylru
103
89 beaker.cache.repo_object.max_items = 100
104 ; Statsd client config, this is used to send metrics to statsd
90 # cache auto-expires after N seconds
105 ; We recommend setting statsd_exported and scrape them using Promethues
91 beaker.cache.repo_object.expire = 300
106 #statsd.enabled = false
92 beaker.cache.repo_object.enabled = true
107 #statsd.statsd_host = 0.0.0.0
108 #statsd.statsd_port = 8125
109 #statsd.statsd_prefix =
110 #statsd.statsd_ipv6 = false
93
111
112 ; configure logging automatically at server startup set to false
113 ; to use the below custom logging config.
114 ; RC_LOGGING_FORMATTER
115 ; RC_LOGGING_LEVEL
116 ; env variables can control the settings for logging in case of autoconfigure
94
117
118 #logging.autoconfigure = true
119
120 ; specify your own custom logging config file to configure logging
121 #logging.logging_conf_file = /path/to/custom_logging.ini
95
122
96 ; #####################
123 ; #####################
97 ; LOGGING CONFIGURATION
124 ; LOGGING CONFIGURATION
98 ; #####################
125 ; #####################
126
99 [loggers]
127 [loggers]
100 keys = root, vcsserver
128 keys = root, vcsserver
101
129
102 [handlers]
130 [handlers]
103 keys = console
131 keys = console
104
132
105 [formatters]
133 [formatters]
106 keys = generic
134 keys = generic, json
107
135
108 ; #######
136 ; #######
109 ; LOGGERS
137 ; LOGGERS
110 ; #######
138 ; #######
111 [logger_root]
139 [logger_root]
112 level = NOTSET
140 level = NOTSET
113 handlers = console
141 handlers = console
114
142
115 [logger_vcsserver]
143 [logger_vcsserver]
116 level = DEBUG
144 level = INFO
117 handlers =
145 handlers =
118 qualname = vcsserver
146 qualname = vcsserver
119 propagate = 1
147 propagate = 1
120
148
121
122 ; ########
149 ; ########
123 ; HANDLERS
150 ; HANDLERS
124 ; ########
151 ; ########
125
152
126 [handler_console]
153 [handler_console]
127 class = StreamHandler
154 class = StreamHandler
128 args = (sys.stderr, )
155 args = (sys.stderr, )
129 level = DEBUG
156 level = DEBUG
157 ; To enable JSON formatted logs replace 'generic' with 'json'
158 ; This allows sending properly formatted logs to grafana loki or elasticsearch
130 formatter = generic
159 formatter = generic
131
160
132 ; ##########
161 ; ##########
133 ; FORMATTERS
162 ; FORMATTERS
134 ; ##########
163 ; ##########
135
164
136 [formatter_generic]
165 [formatter_generic]
137 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
166 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
138 datefmt = %Y-%m-%d %H:%M:%S
167 datefmt = %Y-%m-%d %H:%M:%S
168
169 [formatter_json]
170 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
171 class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
General Comments 0
You need to be logged in to leave comments. Login now