feat(archive-cache): implemented S3-based backend for file caches
super-admin
r5433:d96689c8 default
@@ -0,0 +1,17 b''
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,348 b''
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import os
20 import functools
21 import logging
22 import typing
23 import time
24 import zlib
25
26 from ...ext_json import json
27 from ..utils import StatsDB, NOT_GIVEN, ShardFileReader, EVICTION_POLICY, format_size
28 from ..lock import GenerationLock
29
30 log = logging.getLogger(__name__)
31
32
33 class BaseShard:
34 storage_type: str = ''
35 fs = None
36
37 @classmethod
38 def hash(cls, key):
39 """Compute portable hash for `key`.
40
41 :param key: key to hash
42 :return: hash value
43
44 """
45 mask = 0xFFFFFFFF
46 return zlib.adler32(key.encode('utf-8')) & mask # noqa
47
48 def _write_file(self, full_path, read_iterator, mode):
49 raise NotImplementedError
50
51 def _get_keyfile(self, key):
52 raise NotImplementedError
53
54 def random_filename(self):
55 raise NotImplementedError
56
57 def _store(self, key, value_reader, metadata, mode):
58 (filename, # hash-name
59 full_path # full-path/hash-name
60 ) = self.random_filename()
61
62 key_file, key_file_path = self._get_keyfile(key)
63
64 # STORE METADATA
65 _metadata = {
66 "version": "v1",
67
68 "key_file": key_file, # this is the .key.json file storing meta
69 "key_file_path": key_file_path, # full path to key_file
70 "archive_key": key, # original name we stored archive under, e.g my-archive.zip
71 "archive_filename": filename, # the actual filename we stored that file under
72 "archive_full_path": full_path,
73
74 "store_time": time.time(),
75 "access_count": 0,
76 "access_time": 0,
77
78 "size": 0
79 }
80 if metadata:
81 _metadata.update(metadata)
82
83 read_iterator = iter(functools.partial(value_reader.read, 2**22), b'')
84 size, sha256 = self._write_file(full_path, read_iterator, mode)
85 _metadata['size'] = size
86 _metadata['sha256'] = sha256
87
88 # after the archive is written, we create a key file to record the presence of the binary file
89 with self.fs.open(key_file_path, 'wb') as f:
90 f.write(json.dumps(_metadata))
91
92 return key, filename, size, _metadata
93
94 def _fetch(self, key, retry, retry_attempts, retry_backoff):
95 if retry is NOT_GIVEN:
96 retry = False
97 if retry_attempts is NOT_GIVEN:
98 retry_attempts = 0
99
100 if retry and retry_attempts > 0:
101 for attempt in range(1, retry_attempts + 1):
102 if key in self:
103 break
104 # we didn't find the key; wait retry_backoff seconds and re-check
105 time.sleep(retry_backoff)
106
107 if key not in self:
108 log.error(f'requested key={key} not found in {self} retry={retry}, attempts={retry_attempts}')
109 raise KeyError(key)
110
111 key_file, key_file_path = self._get_keyfile(key)
112 with self.fs.open(key_file_path, 'rb') as f:
113 metadata = json.loads(f.read())
114
115 archive_path = metadata['archive_full_path']
116
117 try:
118 return ShardFileReader(self.fs.open(archive_path, 'rb')), metadata
119 finally:
120 # update usage stats, count and accessed
121 metadata["access_count"] = metadata.get("access_count", 0) + 1
122 metadata["access_time"] = time.time()
123 log.debug('Updated %s with access snapshot, access_count=%s access_time=%s',
124 key_file, metadata['access_count'], metadata['access_time'])
125 with self.fs.open(key_file_path, 'wb') as f:
126 f.write(json.dumps(metadata))
127
128 def _remove(self, key):
129 if key not in self:
130 log.error(f'requested key={key} not found in {self}')
131 raise KeyError(key)
132
133 key_file, key_file_path = self._get_keyfile(key)
134 with self.fs.open(key_file_path, 'rb') as f:
135 metadata = json.loads(f.read())
136
137 archive_path = metadata['archive_full_path']
138 self.fs.rm(archive_path)
139 self.fs.rm(key_file_path)
140 return 1
141
142 @property
143 def storage_medium(self):
144 return getattr(self, self.storage_type)
145
146 @property
147 def key_suffix(self):
148 return 'key.json'
149
150 def __contains__(self, key):
151 """Return `True` if `key` matching item is found in cache.
152
153 :param key: key matching item
154 :return: True if a matching item is found
155
156 """
157 key_file, key_file_path = self._get_keyfile(key)
158 return self.fs.exists(key_file_path)
159
160
161 class BaseCache:
162 _locking_url: str = ''
163 _storage_path: str = ''
164 _config = {}
165 retry = False
166 retry_attempts = 0
167 retry_backoff = 1
168 _shards = tuple()
169
170 def __contains__(self, key):
171 """Return `True` if `key` matching item is found in cache.
172
173 :param key: key matching item
174 :return: True if a matching item is found
175
176 """
177 return self.has_key(key)
178
179 def __repr__(self):
180 return f'<{self.__class__.__name__}(storage={self._storage_path})>'
181
182 @classmethod
183 def gb_to_bytes(cls, gb):
184 return gb * (1024 ** 3)
185
186 @property
187 def storage_path(self):
188 return self._storage_path
189
190 @classmethod
191 def get_stats_db(cls):
192 return StatsDB()
193
194 def get_conf(self, key, pop=False):
195 if key not in self._config:
196 raise ValueError(f"No configuration key '{key}', please make sure it exists in archive_cache config")
197 val = self._config[key]
198 if pop:
199 del self._config[key]
200 return val
201
202 def _get_shard(self, key):
203 raise NotImplementedError
204
205 def _get_size(self, shard, archive_path):
206 raise NotImplementedError
207
208 def store(self, key, value_reader, metadata=None):
209 shard = self._get_shard(key)
210 return shard.store(key, value_reader, metadata)
211
212 def fetch(self, key, retry=NOT_GIVEN, retry_attempts=NOT_GIVEN) -> tuple[typing.BinaryIO, dict]:
213 """
214 Return file handle corresponding to `key` from specific shard cache.
215 """
216 if retry is NOT_GIVEN:
217 retry = self.retry
218 if retry_attempts is NOT_GIVEN:
219 retry_attempts = self.retry_attempts
220 retry_backoff = self.retry_backoff
221
222 shard = self._get_shard(key)
223 return shard.fetch(key, retry=retry, retry_attempts=retry_attempts, retry_backoff=retry_backoff)
224
225 def remove(self, key):
226 shard = self._get_shard(key)
227 return shard.remove(key)
228
229 def has_key(self, archive_key):
230 """Return `True` if `key` matching item is found in cache.
231
232 :param archive_key: key for the item; a unique archive name we want to store data under, e.g. my-archive-svn.zip
233 :return: True if key is found
234
235 """
236 shard = self._get_shard(archive_key)
237 return archive_key in shard
238
239 def iter_keys(self):
240 for shard in self._shards:
241 if shard.fs.exists(shard.storage_medium):
242 for path, _dirs, _files in shard.fs.walk(shard.storage_medium):
243 for key_file_path in _files:
244 if key_file_path.endswith(shard.key_suffix):
245 yield shard, key_file_path
246
247 def get_lock(self, lock_key):
248 return GenerationLock(lock_key, self._locking_url)
249
250 def evict(self, policy=None, size_limit=None) -> int:
251 """
252 Remove old items based on the given conditions.
253
254
255 Explanation of this algorithm:
256 iterate over each shard, then for each shard iterate over its .key files and
257 read the metadata stored in each. This gives us a full list of keys and cached
258 archives, with their size, creation time, access time, and access counts.
259
260 Store that in an in-memory DB so we can easily run different sorting strategies.
261 Summing the sizes is then a single SQL sum query.
262
263 Then we run a sorting strategy based on the eviction policy.
264 We iterate over the sorted keys, removing each until we are back under the overall size limit.
265 """
266
267 policy = policy or self._eviction_policy
268 size_limit = size_limit or self._cache_size_limit
269
270 select_policy = EVICTION_POLICY[policy]['evict']
271
272 log.debug('Running eviction policy \'%s\', and checking for size limit: %s',
273 policy, format_size(size_limit))
274
275 if select_policy is None:
276 return 0
277
278 db = self.get_stats_db()
279
280 data = []
281 cnt = 1
282
283 for shard, key_file in self.iter_keys():
284 with shard.fs.open(os.path.join(shard.storage_medium, key_file), 'rb') as f:
285 metadata = json.loads(f.read())
286
287 key_file_path = os.path.join(shard.storage_medium, key_file)
288
289 archive_key = metadata['archive_key']
290 archive_path = metadata['archive_full_path']
291
292 size = metadata.get('size')
293 if not size:
294 # in case we don't have the size stored, re-calculate it...
295 size = self._get_size(shard, archive_path)
296
297 data.append([
298 cnt,
299 key_file,
300 key_file_path,
301 archive_key,
302 archive_path,
303 metadata.get('store_time', 0),
304 metadata.get('access_time', 0),
305 metadata.get('access_count', 0),
306 size,
307 ])
308 cnt += 1
309
310 # Insert bulk data using executemany
311 db.bulk_insert(data)
312
313 total_size = db.get_total_size()
314 log.debug('Analyzed %s keys, occupying: %s, running eviction to match %s',
315 len(data), format_size(total_size), format_size(size_limit))
316
317 removed_items = 0
318 removed_size = 0
319 for key_file, archive_key, size in db.get_sorted_keys(select_policy):
320 # simulate removal impact BEFORE removal
321 total_size -= size
322
323 if total_size <= size_limit:
324 # we obtained what we wanted...
325 break
326
327 self.remove(archive_key)
328 removed_items += 1
329 removed_size += size
330
331 log.debug('Removed %s cache archives, and reduced size by: %s',
332 removed_items, format_size(removed_size))
333 return removed_items
334
335 def get_statistics(self):
336 total_files = 0
337 total_size = 0
338 meta = {}
339
340 for shard, key_file in self.iter_keys():
341 json_key = f"{shard.storage_medium}/{key_file}"
342 with shard.fs.open(json_key, 'rb') as f:
343 total_files += 1
344 metadata = json.loads(f.read())
345 total_size += metadata['size']
346
347 return total_files, total_size, meta
348
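For orientation, a minimal sketch of how the API above is meant to be used, assuming an already-initialized backend instance; `cache` and the literal values here are illustrative only, not part of this commit:

import io

def cache_archive_example(cache):
    key = 'my-archive.zip'
    if key not in cache:  # BaseCache.__contains__ delegates to has_key()
        # store() consumes any binary file-like object, reading it in 4MB (2**22 byte) chunks
        cache.store(key, io.BytesIO(b'zip-bytes-here'), metadata={'repo': 'example'})

    # fetch() returns a file-like reader plus the stored metadata dict; as a side
    # effect it bumps access_count/access_time in the .key.json file
    reader, metadata = cache.fetch(key)
    data = reader.read()
    assert metadata['size'] == len(data)

    # once the configured size limit is exceeded, evict() trims entries
    # according to the configured eviction policy
    cache.evict()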
@@ -0,0 +1,150 b''
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import codecs
20 import hashlib
21 import logging
22 import os
23
24 import fsspec
25
26 from .base import BaseCache, BaseShard
27 from ..utils import ShardFileReader, NOT_GIVEN
28 from ...type_utils import str2bool
29
30 log = logging.getLogger(__name__)
31
32
33 class S3Shard(BaseShard):
34
35 def __init__(self, index, bucket, **settings):
36 self._index = index
37 self._bucket = bucket
38 self.storage_type = 'bucket'
39
40 endpoint_url = settings.pop('archive_cache.objectstore.url')
41 key = settings.pop('archive_cache.objectstore.key')
42 secret = settings.pop('archive_cache.objectstore.secret')
43
44 self.fs = fsspec.filesystem('s3', anon=False, endpoint_url=endpoint_url, key=key, secret=secret)
45
46 @property
47 def bucket(self):
48 """Cache bucket."""
49 return self._bucket
50
51 def _get_keyfile(self, archive_key) -> tuple[str, str]:
52 key_file = f'{archive_key}-{self.key_suffix}'
53 return key_file, os.path.join(self.bucket, key_file)
54
55 def _get_writer(self, path, mode):
56 return self.fs.open(path, 'wb')
57
58 def _write_file(self, full_path, iterator, mode):
59 # ensure bucket exists
60 destination = self.bucket
61 if not self.fs.exists(destination):
62 self.fs.mkdir(destination, s3_additional_kwargs={})
63
64 writer = self._get_writer(full_path, mode)
65
66 digest = hashlib.sha256()
67 with writer:
68 size = 0
69 for chunk in iterator:
70 size += len(chunk)
71 digest.update(chunk)
72 writer.write(chunk)
73
74 sha256 = digest.hexdigest()
75 log.debug('written new archive cache under %s, sha256: %s', full_path, sha256)
76 return size, sha256
77
78 def store(self, key, value_reader, metadata: dict | None = None):
79 return self._store(key, value_reader, metadata, mode='wb')
80
81 def fetch(self, key, retry=NOT_GIVEN, retry_attempts=NOT_GIVEN, retry_backoff=1) -> tuple[ShardFileReader, dict]:
82 return self._fetch(key, retry, retry_attempts, retry_backoff)
83
84 def remove(self, key):
85 return self._remove(key)
86
87 def random_filename(self):
88 """Return filename and full-path tuple for file storage.
89
90 Filename will be a randomly generated 28 character hexadecimal string
91 suffixed with ".archive_cache". A two-level prefix built from the first
92 four hex characters is used to spread keys; on older filesystems, lookups
93 in directories with many files may be slow.
94 """
95
96 hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8')
97
98 archive_name = hex_name[4:] + '.archive_cache'
99 filename = f"{hex_name[:2]}-{hex_name[2:4]}-{archive_name}"
100
101 full_path = os.path.join(self.bucket, filename)
102 return archive_name, full_path
103
104 def __repr__(self):
105 return f'{self.__class__.__name__}(index={self._index}, bucket={self.bucket})'
106
107
108 class ObjectStoreCache(BaseCache):
109
110 def __init__(self, locking_url, **settings):
111 """
112 Initialize objectstore cache instance.
113
114 :param str locking_url: redis url for a lock
115 :param settings: settings dict
116
117 """
118 self._locking_url = locking_url
119 self._config = settings
120
121 objectstore_url = self.get_conf('archive_cache.objectstore.url')
122 self._storage_path = objectstore_url
123
124 self._count = int(self.get_conf('archive_cache.objectstore.bucket_shards', pop=True))
125
126 self._eviction_policy = self.get_conf('archive_cache.objectstore.eviction_policy', pop=True)
127 self._cache_size_limit = self.gb_to_bytes(int(self.get_conf('archive_cache.objectstore.cache_size_gb')))
128
129 self.retry = str2bool(self.get_conf('archive_cache.objectstore.retry', pop=True))
130 self.retry_attempts = int(self.get_conf('archive_cache.objectstore.retry_attempts', pop=True))
131 self.retry_backoff = int(self.get_conf('archive_cache.objectstore.retry_backoff', pop=True))
132
133 log.debug('Initializing archival cache instance under %s', objectstore_url)
134 self._shards = tuple(
135 S3Shard(
136 index=num,
137 bucket='rhodecode-archivecache-%03d' % num,
138 **settings,
139 )
140 for num in range(self._count)
141 )
142 self._hash = self._shards[0].hash
143
144 def _get_shard(self, key) -> S3Shard:
145 index = self._hash(key) % self._count
146 shard = self._shards[index]
147 return shard
148
149 def _get_size(self, shard, archive_path):
150 return shard.fs.info(archive_path)['size']
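Because every key is routed through the portable adler32 hash, the same archive name always lands in the same bucket, so a lookup never has to scan all shards. A standalone sketch of the routing implemented by BaseShard.hash() and ObjectStoreCache._get_shard() above (shard_count mirrors the default of 8 bucket shards):

import zlib

def bucket_for_key(key: str, shard_count: int = 8) -> str:
    # portable hash, as in BaseShard.hash(): adler32 masked to 32 bits
    portable_hash = zlib.adler32(key.encode('utf-8')) & 0xFFFFFFFF
    # shard routing, as in ObjectStoreCache._get_shard(): hash modulo shard count
    index = portable_hash % shard_count
    # bucket naming follows the 'rhodecode-archivecache-%03d' scheme from __init__
    return 'rhodecode-archivecache-%03d' % index

print(bucket_for_key('my-archive.zip'))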
@@ -0,0 +1,105 b''
1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import time
20 import pytest
21 import rhodecode
22 import os
23 import shutil
24 from tempfile import mkdtemp
25
26 from rhodecode.lib import archive_cache
27
28
29 def file_reader(temp_store):
30 with open(temp_store, 'w') as f:
31 for cnt in range(10000):
32 f.write(str(cnt))
33 return open(temp_store, 'rb')
34
35
36 @pytest.fixture()
37 def d_cache_instance(ini_settings):
38 config = ini_settings
39 d_cache = archive_cache.get_archival_cache_store(config=config, always_init=True)
40 return d_cache
41
42
43 @pytest.mark.usefixtures('app')
44 class TestArchiveCaches(object):
45
46 def test_archivecache_empty_stats(self, d_cache_instance):
47 d_cache = d_cache_instance
48 shutil.rmtree(d_cache._directory)
49
50 stats = d_cache.get_statistics()
51 assert (0, 0, {}) == stats
52
53 def test_archivecache_store_keys(self, d_cache_instance, tmp_path):
54 d_cache = d_cache_instance
55 shutil.rmtree(d_cache._directory)
56
57 for n in range(100):
58
59 archive_name = f'my-archive-abc-{n}.zip'
60 temp_archive_path = os.path.join(tmp_path, archive_name)
61 d_cache.store(archive_name, file_reader(temp_archive_path), {'foo': 'bar'})
62 reader, meta = d_cache.fetch(archive_name)
63 content = reader.read()
64 assert content == open(temp_archive_path, 'rb').read()
65
66 stats = d_cache.get_statistics()
67 assert (100, 3889000, {}) == stats
68
69 def test_archivecache_remove_keys(self, d_cache_instance, tmp_path):
70 d_cache = d_cache_instance
71 shutil.rmtree(d_cache._directory)
72
73 n = 1
74 archive_name = f'my-archive-abc-{n}.zip'
75 temp_archive_path = os.path.join(tmp_path, archive_name)
76
77 d_cache.store(archive_name, file_reader(temp_archive_path), {'foo': 'bar'})
78 stats = d_cache.get_statistics()
79 assert (1, 38890, {}) == stats
80
81 assert 1 == d_cache.remove(archive_name)
82
83 stats = d_cache.get_statistics()
84 assert (0, 0, {}) == stats
85
86 def test_archivecache_evict_keys(self, d_cache_instance, tmp_path):
87 d_cache = d_cache_instance
88 shutil.rmtree(d_cache._directory)
89 tries = 500
90 for n in range(tries):
91
92 archive_name = f'my-archive-abc-{n}.zip'
93 temp_archive_path = os.path.join(tmp_path, archive_name)
94 d_cache.store(archive_name, file_reader(temp_archive_path), {'foo': 'bar'})
95
96 stats = d_cache.get_statistics()
97 assert (tries, 19445000, {}) == stats
98 evict_to = 0.005  # around 5 MB
99 evicted_items = d_cache.evict(size_limit=d_cache.gb_to_bytes(evict_to))
100 evicted = 361
101 assert evicted == evicted_items
102
103 stats = d_cache.get_statistics()
104 assert (tries - evicted, 5405710, {}) == stats
105
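The byte counts asserted in these tests follow directly from the file_reader() helper, which writes str(cnt) for every cnt in range(10000), i.e. 38,890 bytes per generated archive; a quick sanity check:

one_file = sum(len(str(cnt)) for cnt in range(10000))
assert one_file == 38890                   # test_archivecache_remove_keys: 1 archive
assert 100 * one_file == 3889000           # test_archivecache_store_keys: 100 archives
assert 500 * one_file == 19445000          # test_archivecache_evict_keys: 500 archives
assert (500 - 361) * one_file == 5405710   # size left after evicting 361 archives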
@@ -1,813 +1,845 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = true
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Prefix middleware for RhodeCode.
45 45 ; recommended when using proxy setup.
46 46 ; allows to set RhodeCode under a prefix in server.
47 47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 48 ; And set your prefix like: `prefix = /custom_prefix`
49 49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 50 ; to make your cookies only work on prefix url
51 51 [filter:proxy-prefix]
52 52 use = egg:PasteDeploy#prefix
53 53 prefix = /
54 54
55 55 [app:main]
56 56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 57 ; of this file
58 58 ; Each option in the app:main can be overridden by an environment variable
59 59 ;
60 60 ;To override an option:
61 61 ;
62 62 ;RC_<KeyName>
63 63 ;Everything should be uppercase, . and - should be replaced by _.
64 64 ;For example, if you have these configuration settings:
65 65 ;rc_cache.repo_object.backend = foo
66 66 ;can be overridden by
67 67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68 68
69 69 use = egg:rhodecode-enterprise-ce
70 70
71 71 ; enable proxy prefix middleware, defined above
72 72 #filter-with = proxy-prefix
73 73
74 74 ; #############
75 75 ; DEBUG OPTIONS
76 76 ; #############
77 77
78 78 pyramid.reload_templates = true
79 79
80 80 # During development we want to have the debug toolbar enabled
81 81 pyramid.includes =
82 82 pyramid_debugtoolbar
83 83
84 84 debugtoolbar.hosts = 0.0.0.0/0
85 85 debugtoolbar.exclude_prefixes =
86 86 /css
87 87 /fonts
88 88 /images
89 89 /js
90 90
91 91 ## RHODECODE PLUGINS ##
92 92 rhodecode.includes =
93 93 rhodecode.api
94 94
95 95
96 96 # api prefix url
97 97 rhodecode.api.url = /_admin/api
98 98
99 99 ; enable debug style page
100 100 debug_style = true
101 101
102 102 ; #################
103 103 ; END DEBUG OPTIONS
104 104 ; #################
105 105
106 106 ; encryption key used to encrypt social plugin tokens,
107 107 ; remote_urls with credentials etc, if not set it defaults to
108 108 ; `beaker.session.secret`
109 109 #rhodecode.encrypted_values.secret =
110 110
111 111 ; decryption strict mode (enabled by default). It controls if decryption raises
112 112 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 113 #rhodecode.encrypted_values.strict = false
114 114
115 115 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
116 116 ; fernet is safer, and we strongly recommend switching to it.
117 117 ; Due to backward compatibility aes is used as default.
118 118 #rhodecode.encrypted_values.algorithm = fernet
119 119
120 120 ; Return gzipped responses from RhodeCode (static files/application)
121 121 gzip_responses = false
122 122
123 123 ; Auto-generate javascript routes file on startup
124 124 generate_js_files = false
125 125
126 126 ; System global default language.
127 127 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 128 lang = en
129 129
130 130 ; Perform a full repository scan and import on each server start.
131 131 ; Setting this to true could lead to a very long startup time.
132 132 startup.import_repos = false
133 133
134 134 ; URL at which the application is running. This is used for Bootstrapping
135 135 ; requests in context when no web request is available. Used in ishell, or
136 136 ; SSH calls. Set this for events to receive proper url for SSH calls.
137 137 app.base_url = http://rhodecode.local
138 138
139 139 ; Host at which the Service API is running.
140 140 app.service_api.host = http://rhodecode.local:10020
141 141
142 142 ; Secret for Service API authentication.
143 143 app.service_api.token =
144 144
145 145 ; Unique application ID. Should be a random unique string for security.
146 146 app_instance_uuid = rc-production
147 147
148 148 ; Cut off limit for large diffs (size in bytes). If overall diff size on
149 149 ; commit, or pull request exceeds this limit this diff will be displayed
150 150 ; partially. E.g 512000 == 512Kb
151 151 cut_off_limit_diff = 512000
152 152
153 153 ; Cut off limit for large files inside diffs (size in bytes). Each individual
154 154 ; file inside diff which exceeds this limit will be displayed partially.
155 155 ; E.g 128000 == 128Kb
156 156 cut_off_limit_file = 128000
157 157
158 158 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
159 159 vcs_full_cache = true
160 160
161 161 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
162 162 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
163 163 force_https = false
164 164
165 165 ; use Strict-Transport-Security headers
166 166 use_htsts = false
167 167
168 168 ; Set to true if your repos are exposed using the dumb protocol
169 169 git_update_server_info = false
170 170
171 171 ; RSS/ATOM feed options
172 172 rss_cut_off_limit = 256000
173 173 rss_items_per_page = 10
174 174 rss_include_diff = false
175 175
176 176 ; gist URL alias, used to create nicer urls for gist. This should be an
177 177 ; url that does rewrites to _admin/gists/{gistid}.
178 178 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
179 179 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
180 180 gist_alias_url =
181 181
182 182 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
183 183 ; used for access.
184 184 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
185 185 ; came from the logged in user who owns this authentication token.
186 186 ; Additionally, the @TOKEN syntax can be used to bind the view to a specific
187 187 ; authentication token. Such a view would only be accessible when used together
188 188 ; with this authentication token
189 189 ; list of all views can be found under `/_admin/permissions/auth_token_access`
190 190 ; The list should be "," separated and on a single line.
191 191 ; Most common views to enable:
192 192
193 193 # RepoCommitsView:repo_commit_download
194 194 # RepoCommitsView:repo_commit_patch
195 195 # RepoCommitsView:repo_commit_raw
196 196 # RepoCommitsView:repo_commit_raw@TOKEN
197 197 # RepoFilesView:repo_files_diff
198 198 # RepoFilesView:repo_archivefile
199 199 # RepoFilesView:repo_file_raw
200 200 # GistView:*
201 201 api_access_controllers_whitelist =
202 202
203 203 ; Default encoding used to convert from and to unicode
204 204 ; can be also a comma separated list of encoding in case of mixed encodings
205 205 default_encoding = UTF-8
206 206
207 207 ; instance-id prefix
208 208 ; a prefix key for this instance used for cache invalidation when running
209 209 ; multiple instances of RhodeCode, make sure it's globally unique for
210 210 ; all running RhodeCode instances. Leave empty if you don't use it
211 211 instance_id =
212 212
213 213 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
214 214 ; of an authentication plugin even if it is disabled by its settings.
215 215 ; This could be useful if you are unable to log in to the system due to broken
216 216 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
217 217 ; module to log in again and fix the settings.
218 218 ; Available builtin plugin IDs (hash is part of the ID):
219 219 ; egg:rhodecode-enterprise-ce#rhodecode
220 220 ; egg:rhodecode-enterprise-ce#pam
221 221 ; egg:rhodecode-enterprise-ce#ldap
222 222 ; egg:rhodecode-enterprise-ce#jasig_cas
223 223 ; egg:rhodecode-enterprise-ce#headers
224 224 ; egg:rhodecode-enterprise-ce#crowd
225 225
226 226 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
227 227
228 228 ; Flag to control loading of legacy plugins in py:/path format
229 229 auth_plugin.import_legacy_plugins = true
230 230
231 231 ; alternative return HTTP header for failed authentication. Default HTTP
232 232 ; response is 401 HTTPUnauthorized. Currently HG clients have trouble
233 233 ; handling that, causing a series of failed authentication calls.
234 234 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
235 235 ; This will be served instead of default 401 on bad authentication
236 236 auth_ret_code =
237 237
238 238 ; use special detection method when serving auth_ret_code, instead of serving
239 239 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
240 240 ; and then serve auth_ret_code to clients
241 241 auth_ret_code_detection = false
242 242
243 243 ; locking return code. When repository is locked return this HTTP code. 2XX
244 244 ; codes don't break the transactions while 4XX codes do
245 245 lock_ret_code = 423
246 246
247 247 ; Filesystem location where repositories should be stored
248 248 repo_store.path = /var/opt/rhodecode_repo_store
249 249
250 250 ; allows to setup custom hooks in settings page
251 251 allow_custom_hooks_settings = true
252 252
253 253 ; Generated license token required for EE edition license.
254 254 ; New generated token value can be found in Admin > settings > license page.
255 255 license_token =
256 256
257 257 ; This flag hides sensitive information on the license page such as token, and license data
258 258 license.hide_license_info = false
259 259
260 260 ; supervisor connection uri, for managing supervisor and logs.
261 261 supervisor.uri =
262 262
263 263 ; supervisord group name/id we only want this RC instance to handle
264 264 supervisor.group_id = dev
265 265
266 266 ; Display extended labs settings
267 267 labs_settings_active = true
268 268
269 269 ; Custom exception store path, defaults to TMPDIR
270 270 ; This is used to store exception from RhodeCode in shared directory
271 271 #exception_tracker.store_path =
272 272
273 273 ; Send email with exception details when it happens
274 274 #exception_tracker.send_email = false
275 275
276 276 ; Comma separated list of recipients for exception emails,
277 277 ; e.g admin@rhodecode.com,devops@rhodecode.com
278 278 ; Can be left empty, then emails will be sent to ALL super-admins
279 279 #exception_tracker.send_email_recipients =
280 280
281 281 ; optional prefix to Add to email Subject
282 282 #exception_tracker.email_prefix = [RHODECODE ERROR]
283 283
284 284 ; File store configuration. This is used to store and serve uploaded files
285 285 file_store.enabled = true
286 286
287 287 ; Storage backend, available options are: local
288 288 file_store.backend = local
289 289
290 290 ; path to store the uploaded binaries and artifacts
291 291 file_store.storage_path = /var/opt/rhodecode_data/file_store
292 292
293 ; Uncomment and set this path to control settings for archive download cache.
293
294 ; Redis url to acquire/check generation of archives locks
295 archive_cache.locking.url = redis://redis:6379/1
296
297 ; Storage backend, only 'filesystem' and 'objectstore' are available now
298 archive_cache.backend.type = filesystem
299
300 ; url for s3 compatible storage that allows to upload artifacts
301 ; e.g http://minio:9000
302 archive_cache.objectstore.url = http://s3-minio:9000
303
304 ; key for s3 auth
305 archive_cache.objectstore.key = key
306
307 ; secret for s3 auth
308 archive_cache.objectstore.secret = secret
309
310 ; number of sharded buckets to create to distribute archives across
311 ; default is 8 shards
312 archive_cache.objectstore.bucket_shards = 8
313
314 ; if true, failed fetches from this cache are retried up to retry_attempts times, waiting retry_backoff seconds between tries
315 archive_cache.objectstore.retry = false
316
317 ; number of seconds to wait before the next retry attempt
318 archive_cache.objectstore.retry_backoff = 1
319
320 ; how many times to retry a fetch from this backend
321 archive_cache.objectstore.retry_attempts = 10
322
323 ; Default is $cache_dir/archive_cache if not set
294 324 ; Generated repo archives will be cached at this location
295 325 ; and served from the cache during subsequent requests for the same archive of
296 326 ; the repository. It is important that this path is shared across filesystems and
297 327 ; between RhodeCode and vcsserver
298
299 ; Redis url to acquire/check generation of archives locks
300 archive_cache.locking.url = redis://redis:6379/1
301
302 ; Storage backend, only 'filesystem' is available now
303 archive_cache.backend.type = filesystem
304
305 ; Default is $cache_dir/archive_cache if not set
306 328 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
307 329
308 330 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
309 331 archive_cache.filesystem.cache_size_gb = 1
310 332
311 333 ; Eviction policy used to clear out after cache_size_gb limit is reached
312 334 archive_cache.filesystem.eviction_policy = least-recently-stored
313 335
314 336 ; By default cache uses sharding technique, this specifies how many shards are there
337 ; default is 8 shards
315 338 archive_cache.filesystem.cache_shards = 8
316 339
340 ; if true, failed fetches from this cache are retried up to retry_attempts times, waiting retry_backoff seconds between tries
341 archive_cache.filesystem.retry = false
342
343 ; number of seconds to wait before the next retry attempt
344 archive_cache.filesystem.retry_backoff = 1
345
346 ; how many times to retry a fetch from this backend
347 archive_cache.filesystem.retry_attempts = 10
348
317 349
318 350 ; #############
319 351 ; CELERY CONFIG
320 352 ; #############
321 353
322 354 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
323 355
324 356 use_celery = true
325 357
326 358 ; path to store schedule database
327 359 #celerybeat-schedule.path =
328 360
329 361 ; connection url to the message broker (default redis)
330 362 celery.broker_url = redis://redis:6379/8
331 363
332 364 ; results backend to get results for (default redis)
333 365 celery.result_backend = redis://redis:6379/8
334 366
335 367 ; rabbitmq example
336 368 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
337 369
338 370 ; maximum tasks to execute before worker restart
339 371 celery.max_tasks_per_child = 20
340 372
341 373 ; tasks will never be sent to the queue, but executed locally instead.
342 374 celery.task_always_eager = false
343 375
344 376 ; #############
345 377 ; DOGPILE CACHE
346 378 ; #############
347 379
348 380 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
349 381 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
350 382 cache_dir = /var/opt/rhodecode_data
351 383
352 384 ; *********************************************
353 385 ; `sql_cache_short` cache for heavy SQL queries
354 386 ; Only supported backend is `memory_lru`
355 387 ; *********************************************
356 388 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
357 389 rc_cache.sql_cache_short.expiration_time = 30
358 390
359 391
360 392 ; *****************************************************
361 393 ; `cache_repo_longterm` cache for repo object instances
362 394 ; Only supported backend is `memory_lru`
363 395 ; *****************************************************
364 396 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
365 397 ; by default we use 30 Days, cache is still invalidated on push
366 398 rc_cache.cache_repo_longterm.expiration_time = 2592000
367 399 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
368 400 rc_cache.cache_repo_longterm.max_size = 10000
369 401
370 402
371 403 ; *********************************************
372 404 ; `cache_general` cache for general purpose use
373 405 ; for simplicity use rc.file_namespace backend,
374 406 ; for performance and scale use rc.redis
375 407 ; *********************************************
376 408 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
377 409 rc_cache.cache_general.expiration_time = 43200
378 410 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
379 411 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
380 412
381 413 ; alternative `cache_general` redis backend with distributed lock
382 414 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
383 415 #rc_cache.cache_general.expiration_time = 300
384 416
385 417 ; redis_expiration_time needs to be greater than expiration_time
386 418 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
387 419
388 420 #rc_cache.cache_general.arguments.host = localhost
389 421 #rc_cache.cache_general.arguments.port = 6379
390 422 #rc_cache.cache_general.arguments.db = 0
391 423 #rc_cache.cache_general.arguments.socket_timeout = 30
392 424 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
393 425 #rc_cache.cache_general.arguments.distributed_lock = true
394 426
395 427 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
396 428 #rc_cache.cache_general.arguments.lock_auto_renewal = true
397 429
398 430 ; *************************************************
399 431 ; `cache_perms` cache for permission tree, auth TTL
400 432 ; for simplicity use rc.file_namespace backend,
401 433 ; for performance and scale use rc.redis
402 434 ; *************************************************
403 435 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
404 436 rc_cache.cache_perms.expiration_time = 3600
405 437 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
406 438 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
407 439
408 440 ; alternative `cache_perms` redis backend with distributed lock
409 441 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
410 442 #rc_cache.cache_perms.expiration_time = 300
411 443
412 444 ; redis_expiration_time needs to be greater than expiration_time
413 445 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
414 446
415 447 #rc_cache.cache_perms.arguments.host = localhost
416 448 #rc_cache.cache_perms.arguments.port = 6379
417 449 #rc_cache.cache_perms.arguments.db = 0
418 450 #rc_cache.cache_perms.arguments.socket_timeout = 30
419 451 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
420 452 #rc_cache.cache_perms.arguments.distributed_lock = true
421 453
422 454 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
423 455 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
424 456
425 457 ; ***************************************************
426 458 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
427 459 ; for simplicity use rc.file_namespace backend,
428 460 ; for performance and scale use rc.redis
429 461 ; ***************************************************
430 462 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
431 463 rc_cache.cache_repo.expiration_time = 2592000
432 464 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
433 465 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
434 466
435 467 ; alternative `cache_repo` redis backend with distributed lock
436 468 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
437 469 #rc_cache.cache_repo.expiration_time = 2592000
438 470
439 471 ; redis_expiration_time needs to be greater than expiration_time
440 472 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
441 473
442 474 #rc_cache.cache_repo.arguments.host = localhost
443 475 #rc_cache.cache_repo.arguments.port = 6379
444 476 #rc_cache.cache_repo.arguments.db = 1
445 477 #rc_cache.cache_repo.arguments.socket_timeout = 30
446 478 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
447 479 #rc_cache.cache_repo.arguments.distributed_lock = true
448 480
449 481 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
450 482 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
451 483
452 484 ; ##############
453 485 ; BEAKER SESSION
454 486 ; ##############
455 487
456 488 ; beaker.session.type is the storage type for logged-in users' sessions. Currently allowed
457 489 ; types are file, ext:redis, ext:database, ext:memcached
458 490 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
459 491 #beaker.session.type = file
460 492 #beaker.session.data_dir = %(here)s/data/sessions
461 493
462 494 ; Redis based sessions
463 495 beaker.session.type = ext:redis
464 496 beaker.session.url = redis://redis:6379/2
465 497
466 498 ; DB based session, fast, and allows easy management over logged in users
467 499 #beaker.session.type = ext:database
468 500 #beaker.session.table_name = db_session
469 501 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
470 502 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
471 503 #beaker.session.sa.pool_recycle = 3600
472 504 #beaker.session.sa.echo = false
473 505
474 506 beaker.session.key = rhodecode
475 507 beaker.session.secret = develop-rc-uytcxaz
476 508 beaker.session.lock_dir = /data_ramdisk/lock
477 509
478 510 ; Secure encrypted cookie. Requires AES and AES python libraries
479 511 ; you must disable beaker.session.secret to use this
480 512 #beaker.session.encrypt_key = key_for_encryption
481 513 #beaker.session.validate_key = validation_key
482 514
483 515 ; Sets the session as invalid (also logging out the user) if it has not been
484 516 ; accessed for the given amount of time, in seconds
485 517 beaker.session.timeout = 2592000
486 518 beaker.session.httponly = true
487 519
488 520 ; Path to use for the cookie. Set to prefix if you use prefix middleware
489 521 #beaker.session.cookie_path = /custom_prefix
490 522
491 523 ; Set https secure cookie
492 524 beaker.session.secure = false
493 525
494 526 ; default cookie expiration time in seconds, set to `true` to set expire
495 527 ; at browser close
496 528 #beaker.session.cookie_expires = 3600
497 529
498 530 ; #############################
499 531 ; SEARCH INDEXING CONFIGURATION
500 532 ; #############################
501 533
502 534 ; Full text search indexer is available in rhodecode-tools under
503 535 ; `rhodecode-tools index` command
504 536
505 537 ; WHOOSH Backend, doesn't require additional services to run
506 538 ; it works well with a few dozen repos
507 539 search.module = rhodecode.lib.index.whoosh
508 540 search.location = %(here)s/data/index
509 541
510 542 ; ####################
511 543 ; CHANNELSTREAM CONFIG
512 544 ; ####################
513 545
514 546 ; channelstream enables persistent connections and live notification
515 547 ; in the system. It's also used by the chat system
516 548
517 549 channelstream.enabled = true
518 550
519 551 ; server address for channelstream server on the backend
520 552 channelstream.server = channelstream:9800
521 553
522 554 ; location of the channelstream server from outside world
523 555 ; use ws:// for http or wss:// for https. This address needs to be handled
524 556 ; by external HTTP server such as Nginx or Apache
525 557 ; see Nginx/Apache configuration examples in our docs
526 558 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
527 559 channelstream.secret = ENV_GENERATED
528 560 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
529 561
530 562 ; Internal application path that Javascript uses to connect into.
531 563 ; If you use proxy-prefix the prefix should be added before /_channelstream
532 564 channelstream.proxy_path = /_channelstream
533 565
534 566
535 567 ; ##############################
536 568 ; MAIN RHODECODE DATABASE CONFIG
537 569 ; ##############################
538 570
539 571 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
540 572 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
541 573 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
542 574 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
543 575 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
544 576
545 577 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
546 578
547 579 ; see sqlalchemy docs for other advanced settings
548 580 ; print the sql statements to output
549 581 sqlalchemy.db1.echo = false
550 582
551 583 ; recycle the connections after this amount of seconds
552 584 sqlalchemy.db1.pool_recycle = 3600
553 585
554 586 ; the number of connections to keep open inside the connection pool.
555 587 ; 0 indicates no limit
556 588 ; the general calculation with gevent is:
557 589 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
558 590 ; then increase pool size + max overflow so that they add up to 500.
559 591 #sqlalchemy.db1.pool_size = 5
560 592
561 593 ; The number of connections to allow in connection pool "overflow", that is
562 594 ; connections that can be opened above and beyond the pool_size setting,
563 595 ; which defaults to five.
564 596 #sqlalchemy.db1.max_overflow = 10
565 597
566 598 ; Connection check ping, used to detect broken database connections
567 599 ; could be enabled to better handle cases if MySQL has gone away errors
568 600 #sqlalchemy.db1.ping_connection = true
569 601
570 602 ; ##########
571 603 ; VCS CONFIG
572 604 ; ##########
573 605 vcs.server.enable = true
574 606 vcs.server = vcsserver:10010
575 607
576 608 ; Web server connectivity protocol, responsible for web based VCS operations
577 609 ; Available protocols are:
578 610 ; `http` - use http-rpc backend (default)
579 611 vcs.server.protocol = http
580 612
581 613 ; Push/Pull operations protocol, available options are:
582 614 ; `http` - use http-rpc backend (default)
583 615 vcs.scm_app_implementation = http
584 616
585 617 ; Push/Pull operations hooks protocol, available options are:
586 618 ; `http` - use http-rpc backend (default)
587 619 ; `celery` - use celery based hooks
588 620 vcs.hooks.protocol = http
589 621
590 622 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
591 623 ; accessible via network.
592 624 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
593 625 vcs.hooks.host = *
594 626
595 627 ; Start VCSServer with this instance as a subprocess, useful for development
596 628 vcs.start_server = false
597 629
598 630 ; List of enabled VCS backends, available options are:
599 631 ; `hg` - mercurial
600 632 ; `git` - git
601 633 ; `svn` - subversion
602 634 vcs.backends = hg, git, svn
603 635
604 636 ; Wait this number of seconds before killing connection to the vcsserver
605 637 vcs.connection_timeout = 3600
606 638
607 639 ; Cache flag to cache vcsserver remote calls locally
608 640 ; It uses cache_region `cache_repo`
609 641 vcs.methods.cache = true
610 642
611 643 ; ####################################################
612 644 ; Subversion proxy support (mod_dav_svn)
613 645 ; Maps RhodeCode repo groups into SVN paths for Apache
614 646 ; ####################################################
615 647
616 648 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
617 649 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
618 650 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
619 651 #vcs.svn.compatible_version = 1.8
620 652
621 653 ; Enable SVN proxy of requests over HTTP
622 654 vcs.svn.proxy.enabled = true
623 655
624 656 ; host to connect to running SVN subsystem
625 657 vcs.svn.proxy.host = http://svn:8090
626 658
627 659 ; Enable or disable the config file generation.
628 660 svn.proxy.generate_config = true
629 661
630 662 ; Generate config file with `SVNListParentPath` set to `On`.
631 663 svn.proxy.list_parent_path = true
632 664
633 665 ; Set location and file name of generated config file.
634 666 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
635 667
636 668 ; alternative mod_dav config template. This needs to be a valid mako template
637 669 ; Example template can be found in the source code:
638 670 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
639 671 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
640 672
641 673 ; Used as a prefix to the `Location` block in the generated config file.
642 674 ; In most cases it should be set to `/`.
643 675 svn.proxy.location_root = /
644 676
645 677 ; Command to reload the mod dav svn configuration on change.
646 678 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
647 679 ; Make sure user who runs RhodeCode process is allowed to reload Apache
648 680 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
649 681
650 682 ; If the timeout expires before the reload command finishes, the command will
651 683 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
652 684 #svn.proxy.reload_timeout = 10
653 685
654 686 ; ####################
655 687 ; SSH Support Settings
656 688 ; ####################
657 689
658 690 ; Defines if a custom authorized_keys file should be created and written on
659 691 ; any change of user SSH keys. Setting this to false also disables the ability
660 692 ; of users to add SSH keys from the web interface. Super admins can still
661 693 ; manage SSH Keys.
662 694 ssh.generate_authorized_keyfile = true
663 695
664 696 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
665 697 # ssh.authorized_keys_ssh_opts =
666 698
667 699 ; Path to the authorized_keys file where the generate entries are placed.
668 700 ; It is possible to have multiple key files specified in `sshd_config` e.g.
669 701 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
670 702 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
671 703
672 704 ; Command to execute the SSH wrapper. The binary is available in the
673 705 ; RhodeCode installation directory.
674 706 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
675 707 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
676 708 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
677 709
678 710 ; Allow shell when executing the ssh-wrapper command
679 711 ssh.wrapper_cmd_allow_shell = false
680 712
681 713 ; Enables logging and detailed output sent back to the client during SSH
682 714 ; operations. Useful for debugging, shouldn't be used in production.
683 715 ssh.enable_debug_logging = true
684 716
685 717 ; Paths to binary executable, by default they are the names, but we can
686 718 ; override them if we want to use a custom one
687 719 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
688 720 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
689 721 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
690 722
691 723 ; Enables SSH key generator web interface. Disabling this still allows users
692 724 ; to add their own keys.
693 725 ssh.enable_ui_key_generator = true
694 726
695 727 ; Statsd client config, this is used to send metrics to statsd
696 728 ; We recommend setting statsd_exported and scrape them using Prometheus
697 729 #statsd.enabled = false
698 730 #statsd.statsd_host = 0.0.0.0
699 731 #statsd.statsd_port = 8125
700 732 #statsd.statsd_prefix =
701 733 #statsd.statsd_ipv6 = false
702 734
703 735 ; configure logging automatically at server startup set to false
704 736 ; to use the below custom logging config.
705 737 ; RC_LOGGING_FORMATTER
706 738 ; RC_LOGGING_LEVEL
707 739 ; env variables can control the settings for logging in case of autoconfigure
708 740
709 741 #logging.autoconfigure = true
710 742
711 743 ; specify your own custom logging config file to configure logging
712 744 #logging.logging_conf_file = /path/to/custom_logging.ini
713 745
714 746 ; Dummy marker to add new entries after.
715 747 ; Add any custom entries below. Please don't remove this marker.
716 748 custom.conf = 1
717 749
718 750
719 751 ; #####################
720 752 ; LOGGING CONFIGURATION
721 753 ; #####################
722 754
723 755 [loggers]
724 756 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
725 757
726 758 [handlers]
727 759 keys = console, console_sql
728 760
729 761 [formatters]
730 762 keys = generic, json, color_formatter, color_formatter_sql
731 763
732 764 ; #######
733 765 ; LOGGERS
734 766 ; #######
735 767 [logger_root]
736 768 level = NOTSET
737 769 handlers = console
738 770
739 771 [logger_sqlalchemy]
740 772 level = INFO
741 773 handlers = console_sql
742 774 qualname = sqlalchemy.engine
743 775 propagate = 0
744 776
745 777 [logger_beaker]
746 778 level = DEBUG
747 779 handlers =
748 780 qualname = beaker.container
749 781 propagate = 1
750 782
751 783 [logger_rhodecode]
752 784 level = DEBUG
753 785 handlers =
754 786 qualname = rhodecode
755 787 propagate = 1
756 788
757 789 [logger_ssh_wrapper]
758 790 level = DEBUG
759 791 handlers =
760 792 qualname = ssh_wrapper
761 793 propagate = 1
762 794
763 795 [logger_celery]
764 796 level = DEBUG
765 797 handlers =
766 798 qualname = celery
767 799
768 800
769 801 ; ########
770 802 ; HANDLERS
771 803 ; ########
772 804
773 805 [handler_console]
774 806 class = StreamHandler
775 807 args = (sys.stderr, )
776 808 level = DEBUG
777 809 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
778 810 ; This allows sending properly formatted logs to grafana loki or elasticsearch
779 811 formatter = color_formatter
780 812
781 813 [handler_console_sql]
782 814 ; "level = DEBUG" logs SQL queries and results.
783 815 ; "level = INFO" logs SQL queries.
784 816 ; "level = WARN" logs neither. (Recommended for production systems.)
785 817 class = StreamHandler
786 818 args = (sys.stderr, )
787 819 level = WARN
788 820 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
789 821 ; This allows sending properly formatted logs to grafana loki or elasticsearch
790 822 formatter = color_formatter_sql
791 823
792 824 ; ##########
793 825 ; FORMATTERS
794 826 ; ##########
795 827
796 828 [formatter_generic]
797 829 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
798 830 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
799 831 datefmt = %Y-%m-%d %H:%M:%S
800 832
801 833 [formatter_color_formatter]
802 834 class = rhodecode.lib.logging_formatter.ColorFormatter
803 835 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
804 836 datefmt = %Y-%m-%d %H:%M:%S
805 837
806 838 [formatter_color_formatter_sql]
807 839 class = rhodecode.lib.logging_formatter.ColorFormatterSql
808 840 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
809 841 datefmt = %Y-%m-%d %H:%M:%S
810 842
811 843 [formatter_json]
812 844 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
813 845 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,781 +1,813 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = false
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all email subjects with the given prefix; helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address from which all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG. This applies mostly to a develop setup,
30 30 ; Host and port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Prefix middleware for RhodeCode.
45 45 ; recommended when using a proxy setup.
46 46 ; allows setting RhodeCode under a prefix on the server.
47 47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 48 ; And set your prefix like: `prefix = /custom_prefix`
49 49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 50 ; to make your cookies only work on prefix url
51 51 [filter:proxy-prefix]
52 52 use = egg:PasteDeploy#prefix
53 53 prefix = /
54 54
55 55 [app:main]
56 56 ; The %(here)s variable will be replaced with the absolute path of the parent directory
57 57 ; of this file
58 58 ; Each option in app:main can be overridden by an environment variable
59 59 ;
60 60 ;To override an option:
61 61 ;
62 62 ;RC_<KeyName>
63 63 ;Everything should be uppercase, . and - should be replaced by _.
64 64 ;For example, if you have these configuration settings:
65 65 ;rc_cache.repo_object.backend = foo
66 66 ;can be overridden by
67 67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
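
A small sketch of the override rule above (illustrative only, not RhodeCode's actual resolver):

    def ini_key_to_env_var(key: str) -> str:
        # uppercase the key, replace '.' and '-' with '_', and ensure the RC_ prefix
        name = key.upper().replace('.', '_').replace('-', '_')
        return name if name.startswith('RC_') else 'RC_' + name

    assert ini_key_to_env_var('rc_cache.repo_object.backend') == 'RC_CACHE_REPO_OBJECT_BACKEND'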
68 68
69 69 use = egg:rhodecode-enterprise-ce
70 70
71 71 ; enable proxy prefix middleware, defined above
72 72 #filter-with = proxy-prefix
73 73
74 74 ; encryption key used to encrypt social plugin tokens,
75 75 ; remote_urls with credentials etc, if not set it defaults to
76 76 ; `beaker.session.secret`
77 77 #rhodecode.encrypted_values.secret =
78 78
79 79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 81 #rhodecode.encrypted_values.strict = false
82 82
83 83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 84 ; fernet is safer, and we strongly recommend switching to it.
85 85 ; Due to backward compatibility aes is used as default.
86 86 #rhodecode.encrypted_values.algorithm = fernet
87 87
88 88 ; Return gzipped responses from RhodeCode (static files/application)
89 89 gzip_responses = false
90 90
91 91 ; Auto-generate javascript routes file on startup
92 92 generate_js_files = false
93 93
94 94 ; System global default language.
95 95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 96 lang = en
97 97
98 98 ; Perform a full repository scan and import on each server start.
99 99 ; Setting this to true could lead to a very long startup time.
100 100 startup.import_repos = false
101 101
102 102 ; URL at which the application is running. This is used for bootstrapping
103 103 ; requests in contexts where no web request is available. Used in ishell or
104 104 ; SSH calls. Set this so events receive a proper url for SSH calls.
105 105 app.base_url = http://rhodecode.local
106 106
107 107 ; Host at which the Service API is running.
108 108 app.service_api.host = http://rhodecode.local:10020
109 109
110 110 ; Secret for Service API authentication.
111 111 app.service_api.token =
112 112
113 113 ; Unique application ID. Should be a random unique string for security.
114 114 app_instance_uuid = rc-production
115 115
116 116 ; Cut off limit for large diffs (size in bytes). If the overall diff size of a
117 117 ; commit or pull request exceeds this limit, the diff will be displayed
118 118 ; partially. E.g 512000 == 512Kb
119 119 cut_off_limit_diff = 512000
120 120
121 121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
122 122 ; file inside diff which exceeds this limit will be displayed partially.
123 123 ; E.g 128000 == 128Kb
124 124 cut_off_limit_file = 128000
125 125
126 126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
127 127 vcs_full_cache = true
128 128
129 129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
130 130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
131 131 force_https = false
132 132
133 133 ; use Strict-Transport-Security headers
134 134 use_htsts = false
135 135
136 136 ; Set to true if your repos are exposed using the dumb protocol
137 137 git_update_server_info = false
138 138
139 139 ; RSS/ATOM feed options
140 140 rss_cut_off_limit = 256000
141 141 rss_items_per_page = 10
142 142 rss_include_diff = false
143 143
144 144 ; gist URL alias, used to create nicer urls for gists. This should be a
145 145 ; url that does rewrites to _admin/gists/{gistid}.
146 146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
147 147 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
148 148 gist_alias_url =
149 149
150 150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
151 151 ; used for access.
152 152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
153 153 ; came from the logged-in user who owns this authentication token.
154 154 ; Additionally, the @TOKEN syntax can be used to bind the view to a specific
155 155 ; authentication token. Such a view is only accessible when used together
156 156 ; with this authentication token.
157 157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
158 158 ; The list should be "," separated and on a single line.
159 159 ; Most common views to enable:
160 160
161 161 # RepoCommitsView:repo_commit_download
162 162 # RepoCommitsView:repo_commit_patch
163 163 # RepoCommitsView:repo_commit_raw
164 164 # RepoCommitsView:repo_commit_raw@TOKEN
165 165 # RepoFilesView:repo_files_diff
166 166 # RepoFilesView:repo_archivefile
167 167 # RepoFilesView:repo_file_raw
168 168 # GistView:*
169 169 api_access_controllers_whitelist =
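
As an illustration of the auth-token access described above (hypothetical host, repo, url and token; `RepoFilesView:repo_file_raw` would need to be whitelisted):

    import requests

    # fetch a raw file using an auth token instead of a login session
    resp = requests.get(
        'https://rhodecode.local/my-repo/raw/tip/README.rst',  # hypothetical url
        params={'auth_token': 'TOKEN_HASH'},
    )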
170 170
171 171 ; Default encoding used to convert from and to unicode
172 172 ; can also be a comma separated list of encodings in case of mixed encodings
173 173 default_encoding = UTF-8
174 174
175 175 ; instance-id prefix
176 176 ; a prefix key for this instance used for cache invalidation when running
177 177 ; multiple instances of RhodeCode, make sure it's globally unique for
178 178 ; all running RhodeCode instances. Leave empty if you don't use it
179 179 instance_id =
180 180
181 181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
182 182 ; of an authentication plugin even if it is disabled by its settings.
183 183 ; This could be useful if you are unable to log in to the system due to broken
184 184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
185 185 ; module to log in again and fix the settings.
186 186 ; Available builtin plugin IDs (hash is part of the ID):
187 187 ; egg:rhodecode-enterprise-ce#rhodecode
188 188 ; egg:rhodecode-enterprise-ce#pam
189 189 ; egg:rhodecode-enterprise-ce#ldap
190 190 ; egg:rhodecode-enterprise-ce#jasig_cas
191 191 ; egg:rhodecode-enterprise-ce#headers
192 192 ; egg:rhodecode-enterprise-ce#crowd
193 193
194 194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
195 195
196 196 ; Flag to control loading of legacy plugins in py:/path format
197 197 auth_plugin.import_legacy_plugins = true
198 198
199 199 ; alternative return HTTP header for failed authentication. Default HTTP
200 200 ; response is 401 HTTPUnauthorized. Currently HG clients have trouble
201 201 ; handling that, causing a series of failed authentication calls.
202 202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
203 203 ; This will be served instead of default 401 on bad authentication
204 204 auth_ret_code =
205 205
206 206 ; use special detection method when serving auth_ret_code, instead of serving
207 207 ; ret_code directly, use 401 initially (which triggers a credentials prompt)
208 208 ; and then serve auth_ret_code to clients
209 209 auth_ret_code_detection = false
210 210
211 211 ; locking return code. When repository is locked return this HTTP code. 2XX
212 212 ; codes don't break the transactions while 4XX codes do
213 213 lock_ret_code = 423
214 214
215 215 ; Filesystem location where repositories should be stored
216 216 repo_store.path = /var/opt/rhodecode_repo_store
217 217
218 218 ; allows setting up custom hooks in the settings page
219 219 allow_custom_hooks_settings = true
220 220
221 221 ; Generated license token required for EE edition license.
222 222 ; New generated token value can be found in Admin > settings > license page.
223 223 license_token =
224 224
225 225 ; This flag hides sensitive information on the license page such as token, and license data
226 226 license.hide_license_info = false
227 227
228 228 ; supervisor connection uri, for managing supervisor and logs.
229 229 supervisor.uri =
230 230
231 231 ; supervisord group name/id we only want this RC instance to handle
232 232 supervisor.group_id = prod
233 233
234 234 ; Display extended labs settings
235 235 labs_settings_active = true
236 236
237 237 ; Custom exception store path, defaults to TMPDIR
238 238 ; This is used to store exceptions from RhodeCode in a shared directory
239 239 #exception_tracker.store_path =
240 240
241 241 ; Send email with exception details when it happens
242 242 #exception_tracker.send_email = false
243 243
244 244 ; Comma separated list of recipients for exception emails,
245 245 ; e.g admin@rhodecode.com,devops@rhodecode.com
246 246 ; Can be left empty, then emails will be sent to ALL super-admins
247 247 #exception_tracker.send_email_recipients =
248 248
249 249 ; optional prefix to add to the email subject
250 250 #exception_tracker.email_prefix = [RHODECODE ERROR]
251 251
252 252 ; File store configuration. This is used to store and serve uploaded files
253 253 file_store.enabled = true
254 254
255 255 ; Storage backend, available options are: local
256 256 file_store.backend = local
257 257
258 258 ; path to store the uploaded binaries and artifacts
259 259 file_store.storage_path = /var/opt/rhodecode_data/file_store
260 260
261 ; Uncomment and set this path to control settings for archive download cache.
261
262 ; Redis url to acquire/check generation of archives locks
263 archive_cache.locking.url = redis://redis:6379/1
264
265 ; Storage backend, only 'filesystem' and 'objectstore' are available now
266 archive_cache.backend.type = filesystem
267
268 ; url for s3 compatible storage that allows uploading artifacts
269 ; e.g http://minio:9000
270 archive_cache.objectstore.url = http://s3-minio:9000
271
272 ; key for s3 auth
273 archive_cache.objectstore.key = key
274
275 ; secret for s3 auth
276 archive_cache.objectstore.secret = secret
277
278 ; number of sharded buckets to create to distribute archives across
279 ; default is 8 shards
280 archive_cache.objectstore.bucket_shards = 8
281
282 ; if true, this cache will retry fetches up to retry_attempts=N times, waiting retry_backoff seconds between tries
283 archive_cache.objectstore.retry = false
284
285 ; number of seconds to wait before the next try when retrying
286 archive_cache.objectstore.retry_backoff = 1
287
288 ; how many times to retry a fetch from this backend
289 archive_cache.objectstore.retry_attempts = 10
290
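A minimal sketch of what the objectstore settings above amount to, using `s3fs` (an assumed client; the bucket naming and wiring here are hypothetical, not RhodeCode's actual code):

    import s3fs

    # connect to the s3-compatible objectstore configured above
    fs = s3fs.S3FileSystem(
        key='key',
        secret='secret',
        client_kwargs={'endpoint_url': 'http://s3-minio:9000'},
    )

    # with bucket_shards = 8 an archive key maps to one of 8 shard buckets,
    # e.g. shard = stable_hash(archive_key) % 8 (naming scheme hypothetical)
    with fs.open('archive-cache-shard-7/my-archive.zip', 'wb') as f:
        f.write(b'...')
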
291 ; Default is $cache_dir/archive_cache if not set
262 292 ; Generated repo archives will be cached at this location
263 293 ; and served from the cache during subsequent requests for the same archive of
264 294 ; the repository. It is important that this path is shared across filesystems and
265 295 ; reachable by both RhodeCode and vcsserver
266
267 ; Redis url to acquire/check generation of archives locks
268 archive_cache.locking.url = redis://redis:6379/1
269
270 ; Storage backend, only 'filesystem' is available now
271 archive_cache.backend.type = filesystem
272
273 ; Default is $cache_dir/archive_cache if not set
274 296 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
275 297
276 298 ; The limit in GB sets how much data we cache before recycling least recently used entries, defaults to 10 GB
277 299 archive_cache.filesystem.cache_size_gb = 40
278 300
279 301 ; Eviction policy used to clear out entries after the cache_size_gb limit is reached
280 302 archive_cache.filesystem.eviction_policy = least-recently-stored
281 303
282 304 ; By default the cache uses a sharding technique; this specifies how many shards there are
305 ; default is 8 shards
283 306 archive_cache.filesystem.cache_shards = 8
284 307
308 ; if true, this cache will retry fetches up to retry_attempts=N times, waiting retry_backoff seconds between tries
309 archive_cache.filesystem.retry = false
310
311 ; number of seconds to wait before the next try when retrying
312 archive_cache.filesystem.retry_backoff = 1
313
314 ; how many times to retry a fetch from this backend
315 archive_cache.filesystem.retry_attempts = 10
316
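The retry settings above describe a simple bounded retry loop; a sketch (names hypothetical, not RhodeCode's code):

    import time

    def fetch_with_retry(fetch, attempts=10, backoff=1.0):
        # try up to `attempts` times, sleeping `backoff` seconds between tries
        last_exc = None
        for _ in range(attempts):
            try:
                return fetch()
            except OSError as exc:  # e.g. a transient backend error
                last_exc = exc
                time.sleep(backoff)
        raise last_exc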
285 317
286 318 ; #############
287 319 ; CELERY CONFIG
288 320 ; #############
289 321
290 322 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
291 323
292 324 use_celery = true
293 325
294 326 ; path to store schedule database
295 327 #celerybeat-schedule.path =
296 328
297 329 ; connection url to the message broker (default redis)
298 330 celery.broker_url = redis://redis:6379/8
299 331
300 332 ; results backend to get results for (default redis)
301 333 celery.result_backend = redis://redis:6379/8
302 334
303 335 ; rabbitmq example
304 336 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
305 337
306 338 ; maximum tasks to execute before worker restart
307 339 celery.max_tasks_per_child = 20
308 340
309 341 ; tasks will never be sent to the queue, but executed locally instead.
310 342 celery.task_always_eager = false
311 343
312 344 ; #############
313 345 ; DOGPILE CACHE
314 346 ; #############
315 347
316 348 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
317 349 ; eg. /tmpfs/data_ramdisk, however this directory might require a large amount of space
318 350 cache_dir = /var/opt/rhodecode_data
319 351
320 352 ; *********************************************
321 353 ; `sql_cache_short` cache for heavy SQL queries
322 354 ; Only supported backend is `memory_lru`
323 355 ; *********************************************
324 356 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
325 357 rc_cache.sql_cache_short.expiration_time = 30
326 358
327 359
328 360 ; *****************************************************
329 361 ; `cache_repo_longterm` cache for repo object instances
330 362 ; Only supported backend is `memory_lru`
331 363 ; *****************************************************
332 364 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
333 365 ; by default we use 30 Days, cache is still invalidated on push
334 366 rc_cache.cache_repo_longterm.expiration_time = 2592000
335 367 ; max items in the LRU cache; set to a smaller number to save memory and expire least recently used entries
336 368 rc_cache.cache_repo_longterm.max_size = 10000
337 369
338 370
339 371 ; *********************************************
340 372 ; `cache_general` cache for general purpose use
341 373 ; for simplicity use rc.file_namespace backend,
342 374 ; for performance and scale use rc.redis
343 375 ; *********************************************
344 376 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
345 377 rc_cache.cache_general.expiration_time = 43200
346 378 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
347 379 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
348 380
349 381 ; alternative `cache_general` redis backend with distributed lock
350 382 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
351 383 #rc_cache.cache_general.expiration_time = 300
352 384
353 385 ; redis_expiration_time needs to be greater than expiration_time
354 386 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
355 387
356 388 #rc_cache.cache_general.arguments.host = localhost
357 389 #rc_cache.cache_general.arguments.port = 6379
358 390 #rc_cache.cache_general.arguments.db = 0
359 391 #rc_cache.cache_general.arguments.socket_timeout = 30
360 392 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
361 393 #rc_cache.cache_general.arguments.distributed_lock = true
362 394
363 395 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
364 396 #rc_cache.cache_general.arguments.lock_auto_renewal = true
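
The rc_cache.* regions map onto dogpile.cache regions; roughly (a sketch using plain dogpile.cache rather than RhodeCode's rc-specific backends):

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.redis',
        expiration_time=300,
        arguments={'host': 'localhost', 'port': 6379, 'db': 0, 'distributed_lock': True},
    )

    @region.cache_on_arguments()
    def heavy_query(key):
        ...  # result cached per argument until expiration_time elapses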
365 397
366 398 ; *************************************************
367 399 ; `cache_perms` cache for permission tree, auth TTL
368 400 ; for simplicity use rc.file_namespace backend,
369 401 ; for performance and scale use rc.redis
370 402 ; *************************************************
371 403 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
372 404 rc_cache.cache_perms.expiration_time = 3600
373 405 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
374 406 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
375 407
376 408 ; alternative `cache_perms` redis backend with distributed lock
377 409 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
378 410 #rc_cache.cache_perms.expiration_time = 300
379 411
380 412 ; redis_expiration_time needs to be greater than expiration_time
381 413 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
382 414
383 415 #rc_cache.cache_perms.arguments.host = localhost
384 416 #rc_cache.cache_perms.arguments.port = 6379
385 417 #rc_cache.cache_perms.arguments.db = 0
386 418 #rc_cache.cache_perms.arguments.socket_timeout = 30
387 419 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
388 420 #rc_cache.cache_perms.arguments.distributed_lock = true
389 421
390 422 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
391 423 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
392 424
393 425 ; ***************************************************
394 426 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
395 427 ; for simplicity use rc.file_namespace backend,
396 428 ; for performance and scale use rc.redis
397 429 ; ***************************************************
398 430 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
399 431 rc_cache.cache_repo.expiration_time = 2592000
400 432 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
401 433 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
402 434
403 435 ; alternative `cache_repo` redis backend with distributed lock
404 436 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
405 437 #rc_cache.cache_repo.expiration_time = 2592000
406 438
407 439 ; redis_expiration_time needs to be greater than expiration_time
408 440 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
409 441
410 442 #rc_cache.cache_repo.arguments.host = localhost
411 443 #rc_cache.cache_repo.arguments.port = 6379
412 444 #rc_cache.cache_repo.arguments.db = 1
413 445 #rc_cache.cache_repo.arguments.socket_timeout = 30
414 446 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
415 447 #rc_cache.cache_repo.arguments.distributed_lock = true
416 448
417 449 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
418 450 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
419 451
420 452 ; ##############
421 453 ; BEAKER SESSION
422 454 ; ##############
423 455
424 456 ; beaker.session.type is the type of storage used for the logged-in users' sessions. Current allowed
425 457 ; types are file, ext:redis, ext:database, ext:memcached
426 458 ; Fastest ones are ext:redis and ext:database; DO NOT use the memory type for sessions
427 459 #beaker.session.type = file
428 460 #beaker.session.data_dir = %(here)s/data/sessions
429 461
430 462 ; Redis based sessions
431 463 beaker.session.type = ext:redis
432 464 beaker.session.url = redis://redis:6379/2
433 465
434 466 ; DB based session, fast, and allows easy management over logged in users
435 467 #beaker.session.type = ext:database
436 468 #beaker.session.table_name = db_session
437 469 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
438 470 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
439 471 #beaker.session.sa.pool_recycle = 3600
440 472 #beaker.session.sa.echo = false
441 473
442 474 beaker.session.key = rhodecode
443 475 beaker.session.secret = production-rc-uytcxaz
444 476 beaker.session.lock_dir = /data_ramdisk/lock
445 477
446 478 ; Secure encrypted cookie. Requires AES and AES python libraries
447 479 ; you must disable beaker.session.secret to use this
448 480 #beaker.session.encrypt_key = key_for_encryption
449 481 #beaker.session.validate_key = validation_key
450 482
451 483 ; Sets the session as invalid (also logging out the user) if it has not been
452 484 ; accessed for the given amount of time in seconds
453 485 beaker.session.timeout = 2592000
454 486 beaker.session.httponly = true
455 487
456 488 ; Path to use for the cookie. Set to prefix if you use prefix middleware
457 489 #beaker.session.cookie_path = /custom_prefix
458 490
459 491 ; Set https secure cookie
460 492 beaker.session.secure = false
461 493
462 494 ; default cookie expiration time in seconds, set to `true` to set expire
463 495 ; at browser close
464 496 #beaker.session.cookie_expires = 3600
465 497
466 498 ; #############################
467 499 ; SEARCH INDEXING CONFIGURATION
468 500 ; #############################
469 501
470 502 ; Full text search indexer is available in rhodecode-tools under
471 503 ; `rhodecode-tools index` command
472 504
473 505 ; WHOOSH Backend, doesn't require additional services to run
474 506 ; it works well with a few dozen repos
475 507 search.module = rhodecode.lib.index.whoosh
476 508 search.location = %(here)s/data/index
477 509
478 510 ; ####################
479 511 ; CHANNELSTREAM CONFIG
480 512 ; ####################
481 513
482 514 ; channelstream enables persistent connections and live notification
483 515 ; in the system. It's also used by the chat system
484 516
485 517 channelstream.enabled = true
486 518
487 519 ; server address for channelstream server on the backend
488 520 channelstream.server = channelstream:9800
489 521
490 522 ; location of the channelstream server from outside world
491 523 ; use ws:// for http or wss:// for https. This address needs to be handled
492 524 ; by external HTTP server such as Nginx or Apache
493 525 ; see Nginx/Apache configuration examples in our docs
494 526 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
495 527 channelstream.secret = ENV_GENERATED
496 528 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
497 529
498 530 ; Internal application path that Javascript uses to connect into.
499 531 ; If you use proxy-prefix the prefix should be added before /_channelstream
500 532 channelstream.proxy_path = /_channelstream
501 533
502 534
503 535 ; ##############################
504 536 ; MAIN RHODECODE DATABASE CONFIG
505 537 ; ##############################
506 538
507 539 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
508 540 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
509 541 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
510 542 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
511 543 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
512 544
513 545 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
514 546
515 547 ; see sqlalchemy docs for other advanced settings
516 548 ; print the sql statements to output
517 549 sqlalchemy.db1.echo = false
518 550
519 551 ; recycle the connections after this amount of seconds
520 552 sqlalchemy.db1.pool_recycle = 3600
521 553
522 554 ; the number of connections to keep open inside the connection pool.
523 555 ; 0 indicates no limit
524 556 ; the general rule of thumb with gevent is:
525 557 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
526 558 ; then increase pool_size + max_overflow so that they add up to 500, e.g. pool_size = 400 and max_overflow = 100.
527 559 #sqlalchemy.db1.pool_size = 5
528 560
529 561 ; The number of connections to allow in connection pool "overflow", that is
530 562 ; connections that can be opened above and beyond the pool_size setting,
531 563 ; which defaults to five.
532 564 #sqlalchemy.db1.max_overflow = 10
533 565
534 566 ; Connection check ping, used to detect broken database connections
535 567 ; could be enabled to better handle cases if MySQL has gone away errors
536 568 #sqlalchemy.db1.ping_connection = true
537 569
538 570 ; ##########
539 571 ; VCS CONFIG
540 572 ; ##########
541 573 vcs.server.enable = true
542 574 vcs.server = vcsserver:10010
543 575
544 576 ; Web server connectivity protocol, responsible for web based VCS operations
545 577 ; Available protocols are:
546 578 ; `http` - use http-rpc backend (default)
547 579 vcs.server.protocol = http
548 580
549 581 ; Push/Pull operations protocol, available options are:
550 582 ; `http` - use http-rpc backend (default)
551 583 vcs.scm_app_implementation = http
552 584
553 585 ; Push/Pull operations hooks protocol, available options are:
554 586 ; `http` - use http-rpc backend (default)
555 587 ; `celery` - use celery based hooks
556 588 vcs.hooks.protocol = http
557 589
558 590 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
559 591 ; accessible via network.
560 592 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
561 593 vcs.hooks.host = *
562 594
563 595 ; Start VCSServer with this instance as a subprocess, useful for development
564 596 vcs.start_server = false
565 597
566 598 ; List of enabled VCS backends, available options are:
567 599 ; `hg` - mercurial
568 600 ; `git` - git
569 601 ; `svn` - subversion
570 602 vcs.backends = hg, git, svn
571 603
572 604 ; Wait this number of seconds before killing connection to the vcsserver
573 605 vcs.connection_timeout = 3600
574 606
575 607 ; Cache flag to cache vcsserver remote calls locally
576 608 ; It uses cache_region `cache_repo`
577 609 vcs.methods.cache = true
578 610
579 611 ; ####################################################
580 612 ; Subversion proxy support (mod_dav_svn)
581 613 ; Maps RhodeCode repo groups into SVN paths for Apache
582 614 ; ####################################################
583 615
584 616 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
585 617 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
586 618 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
587 619 #vcs.svn.compatible_version = 1.8
588 620
589 621 ; Enable SVN proxy of requests over HTTP
590 622 vcs.svn.proxy.enabled = true
591 623
592 624 ; host to connect to running SVN subsystem
593 625 vcs.svn.proxy.host = http://svn:8090
594 626
595 627 ; Enable or disable the config file generation.
596 628 svn.proxy.generate_config = true
597 629
598 630 ; Generate config file with `SVNListParentPath` set to `On`.
599 631 svn.proxy.list_parent_path = true
600 632
601 633 ; Set location and file name of generated config file.
602 634 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
603 635
604 636 ; alternative mod_dav config template. This needs to be a valid mako template
605 637 ; Example template can be found in the source code:
606 638 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
607 639 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
608 640
609 641 ; Used as a prefix to the `Location` block in the generated config file.
610 642 ; In most cases it should be set to `/`.
611 643 svn.proxy.location_root = /
612 644
613 645 ; Command to reload the mod dav svn configuration on change.
614 646 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
615 647 ; Make sure user who runs RhodeCode process is allowed to reload Apache
616 648 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
617 649
618 650 ; If the timeout expires before the reload command finishes, the command will
619 651 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
620 652 #svn.proxy.reload_timeout = 10
621 653
622 654 ; ####################
623 655 ; SSH Support Settings
624 656 ; ####################
625 657
626 658 ; Defines if a custom authorized_keys file should be created and written on
627 659 ; any change of user ssh keys. Setting this to false also disables the possibility
628 660 ; of users adding SSH keys from the web interface. Super admins can still
629 661 ; manage SSH Keys.
630 662 ssh.generate_authorized_keyfile = true
631 663
632 664 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
633 665 # ssh.authorized_keys_ssh_opts =
634 666
635 667 ; Path to the authorized_keys file where the generated entries are placed.
636 668 ; It is possible to have multiple key files specified in `sshd_config` e.g.
637 669 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
638 670 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
639 671
640 672 ; Command to execute the SSH wrapper. The binary is available in the
641 673 ; RhodeCode installation directory.
642 674 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
643 675 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
644 676 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
645 677
646 678 ; Allow shell when executing the ssh-wrapper command
647 679 ssh.wrapper_cmd_allow_shell = false
648 680
649 681 ; Enables logging, and detailed output sent back to the client during SSH
650 682 ; operations. Useful for debugging, shouldn't be used in production.
651 683 ssh.enable_debug_logging = false
652 684
653 685 ; Paths to binary executables; by default they are just the names, but we can
654 686 ; override them if we want to use custom ones
655 687 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
656 688 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
657 689 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
658 690
659 691 ; Enables the SSH key generator web interface. Disabling this still allows users
660 692 ; to add their own keys.
661 693 ssh.enable_ui_key_generator = true
662 694
663 695 ; Statsd client config, this is used to send metrics to statsd
664 696 ; We recommend setting up statsd_exporter and scraping the metrics with Prometheus
665 697 #statsd.enabled = false
666 698 #statsd.statsd_host = 0.0.0.0
667 699 #statsd.statsd_port = 8125
668 700 #statsd.statsd_prefix =
669 701 #statsd.statsd_ipv6 = false
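
Client-side these settings amount to a plain statsd client; a sketch using the generic `statsd` package (hypothetical metric name, not RhodeCode's internal client):

    import statsd

    # host/port mirror the statsd.* settings above
    client = statsd.StatsClient('0.0.0.0', 8125, prefix=None)
    client.incr('rhodecode.requests')  # hypothetical counter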
670 702
671 703 ; configure logging automatically at server startup; set to false
672 704 ; to use the below custom logging config.
673 705 ; RC_LOGGING_FORMATTER
674 706 ; RC_LOGGING_LEVEL
675 707 ; env variables can control the settings for logging in case of autoconfigure
676 708
677 709 #logging.autoconfigure = true
678 710
679 711 ; specify your own custom logging config file to configure logging
680 712 #logging.logging_conf_file = /path/to/custom_logging.ini
681 713
682 714 ; Dummy marker to add new entries after.
683 715 ; Add any custom entries below. Please don't remove this marker.
684 716 custom.conf = 1
685 717
686 718
687 719 ; #####################
688 720 ; LOGGING CONFIGURATION
689 721 ; #####################
690 722
691 723 [loggers]
692 724 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
693 725
694 726 [handlers]
695 727 keys = console, console_sql
696 728
697 729 [formatters]
698 730 keys = generic, json, color_formatter, color_formatter_sql
699 731
700 732 ; #######
701 733 ; LOGGERS
702 734 ; #######
703 735 [logger_root]
704 736 level = NOTSET
705 737 handlers = console
706 738
707 739 [logger_sqlalchemy]
708 740 level = INFO
709 741 handlers = console_sql
710 742 qualname = sqlalchemy.engine
711 743 propagate = 0
712 744
713 745 [logger_beaker]
714 746 level = DEBUG
715 747 handlers =
716 748 qualname = beaker.container
717 749 propagate = 1
718 750
719 751 [logger_rhodecode]
720 752 level = DEBUG
721 753 handlers =
722 754 qualname = rhodecode
723 755 propagate = 1
724 756
725 757 [logger_ssh_wrapper]
726 758 level = DEBUG
727 759 handlers =
728 760 qualname = ssh_wrapper
729 761 propagate = 1
730 762
731 763 [logger_celery]
732 764 level = DEBUG
733 765 handlers =
734 766 qualname = celery
735 767
736 768
737 769 ; ########
738 770 ; HANDLERS
739 771 ; ########
740 772
741 773 [handler_console]
742 774 class = StreamHandler
743 775 args = (sys.stderr, )
744 776 level = INFO
745 777 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
746 778 ; This allows sending properly formatted logs to grafana loki or elasticsearch
747 779 formatter = generic
748 780
749 781 [handler_console_sql]
750 782 ; "level = DEBUG" logs SQL queries and results.
751 783 ; "level = INFO" logs SQL queries.
752 784 ; "level = WARN" logs neither. (Recommended for production systems.)
753 785 class = StreamHandler
754 786 args = (sys.stderr, )
755 787 level = WARN
756 788 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
757 789 ; This allows sending properly formatted logs to grafana loki or elasticsearch
758 790 formatter = generic
759 791
760 792 ; ##########
761 793 ; FORMATTERS
762 794 ; ##########
763 795
764 796 [formatter_generic]
765 797 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
766 798 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
767 799 datefmt = %Y-%m-%d %H:%M:%S
768 800
769 801 [formatter_color_formatter]
770 802 class = rhodecode.lib.logging_formatter.ColorFormatter
771 803 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
772 804 datefmt = %Y-%m-%d %H:%M:%S
773 805
774 806 [formatter_color_formatter_sql]
775 807 class = rhodecode.lib.logging_formatter.ColorFormatterSql
776 808 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
777 809 datefmt = %Y-%m-%d %H:%M:%S
778 810
779 811 [formatter_json]
780 812 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
781 813 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,286 +1,315 b''
1 1 # deps, generated via pipdeptree --exclude setuptools,wheel,pipdeptree,pip -f | tr '[:upper:]' '[:lower:]'
2 2
3 3 alembic==1.13.1
4 4 mako==1.2.4
5 5 markupsafe==2.1.2
6 6 sqlalchemy==1.4.52
7 7 greenlet==3.0.3
8 8 typing_extensions==4.9.0
9 9 async-timeout==4.0.3
10 10 babel==2.12.1
11 11 beaker==1.12.1
12 12 celery==5.3.6
13 13 billiard==4.2.0
14 14 click==8.1.3
15 15 click-didyoumean==0.3.0
16 16 click==8.1.3
17 17 click-plugins==1.1.1
18 18 click==8.1.3
19 19 click-repl==0.2.0
20 20 click==8.1.3
21 21 prompt-toolkit==3.0.38
22 22 wcwidth==0.2.6
23 23 six==1.16.0
24 24 kombu==5.3.5
25 25 amqp==5.2.0
26 26 vine==5.1.0
27 27 vine==5.1.0
28 28 python-dateutil==2.8.2
29 29 six==1.16.0
30 30 tzdata==2024.1
31 31 vine==5.1.0
32 32 channelstream==0.7.1
33 33 gevent==24.2.1
34 34 greenlet==3.0.3
35 35 zope.event==5.0.0
36 36 zope.interface==6.3.0
37 37 itsdangerous==1.1.0
38 38 marshmallow==2.18.0
39 39 pyramid==2.0.2
40 40 hupper==1.12
41 41 plaster==1.1.2
42 42 plaster-pastedeploy==1.0.1
43 43 pastedeploy==3.1.0
44 44 plaster==1.1.2
45 45 translationstring==1.4
46 46 venusian==3.0.0
47 47 webob==1.8.7
48 48 zope.deprecation==5.0.0
49 49 zope.interface==6.3.0
50 50 pyramid-apispec==0.3.3
51 51 apispec==1.3.3
52 52 pyramid-jinja2==2.10
53 53 jinja2==3.1.2
54 54 markupsafe==2.1.2
55 55 markupsafe==2.1.2
56 56 pyramid==2.0.2
57 57 hupper==1.12
58 58 plaster==1.1.2
59 59 plaster-pastedeploy==1.0.1
60 60 pastedeploy==3.1.0
61 61 plaster==1.1.2
62 62 translationstring==1.4
63 63 venusian==3.0.0
64 64 webob==1.8.7
65 65 zope.deprecation==5.0.0
66 66 zope.interface==6.3.0
67 67 zope.deprecation==5.0.0
68 68 python-dateutil==2.8.2
69 69 six==1.16.0
70 70 requests==2.28.2
71 71 certifi==2022.12.7
72 72 charset-normalizer==3.1.0
73 73 idna==3.4
74 74 urllib3==1.26.14
75 75 ws4py==0.5.1
76 76 deform==2.0.15
77 77 chameleon==3.10.2
78 78 colander==2.0
79 79 iso8601==1.1.0
80 80 translationstring==1.4
81 81 iso8601==1.1.0
82 82 peppercorn==0.6
83 83 translationstring==1.4
84 84 zope.deprecation==5.0.0
85 85 docutils==0.19
86 86 dogpile.cache==1.3.3
87 87 decorator==5.1.1
88 88 stevedore==5.1.0
89 89 pbr==5.11.1
90 90 formencode==2.1.0
91 91 six==1.16.0
92 fsspec==2024.6.0
92 93 gunicorn==21.2.0
93 94 packaging==24.0
94 95 gevent==24.2.1
95 96 greenlet==3.0.3
96 97 zope.event==5.0.0
97 98 zope.interface==6.3.0
98 99 ipython==8.14.0
99 100 backcall==0.2.0
100 101 decorator==5.1.1
101 102 jedi==0.19.0
102 103 parso==0.8.3
103 104 matplotlib-inline==0.1.6
104 105 traitlets==5.9.0
105 106 pexpect==4.8.0
106 107 ptyprocess==0.7.0
107 108 pickleshare==0.7.5
108 109 prompt-toolkit==3.0.38
109 110 wcwidth==0.2.6
110 111 pygments==2.15.1
111 112 stack-data==0.6.2
112 113 asttokens==2.2.1
113 114 six==1.16.0
114 115 executing==1.2.0
115 116 pure-eval==0.2.2
116 117 traitlets==5.9.0
117 118 markdown==3.4.3
118 119 msgpack==1.0.8
119 120 mysqlclient==2.1.1
120 121 nbconvert==7.7.3
121 122 beautifulsoup4==4.12.3
122 123 soupsieve==2.5
123 124 bleach==6.1.0
124 125 six==1.16.0
125 126 webencodings==0.5.1
126 127 defusedxml==0.7.1
127 128 jinja2==3.1.2
128 129 markupsafe==2.1.2
129 130 jupyter_core==5.3.1
130 131 platformdirs==3.10.0
131 132 traitlets==5.9.0
132 133 jupyterlab-pygments==0.2.2
133 134 markupsafe==2.1.2
134 135 mistune==2.0.5
135 136 nbclient==0.8.0
136 137 jupyter_client==8.3.0
137 138 jupyter_core==5.3.1
138 139 platformdirs==3.10.0
139 140 traitlets==5.9.0
140 141 python-dateutil==2.8.2
141 142 six==1.16.0
142 143 pyzmq==25.0.0
143 144 tornado==6.2
144 145 traitlets==5.9.0
145 146 jupyter_core==5.3.1
146 147 platformdirs==3.10.0
147 148 traitlets==5.9.0
148 149 nbformat==5.9.2
149 150 fastjsonschema==2.18.0
150 151 jsonschema==4.18.6
151 152 attrs==22.2.0
152 153 pyrsistent==0.19.3
153 154 jupyter_core==5.3.1
154 155 platformdirs==3.10.0
155 156 traitlets==5.9.0
156 157 traitlets==5.9.0
157 158 traitlets==5.9.0
158 159 nbformat==5.9.2
159 160 fastjsonschema==2.18.0
160 161 jsonschema==4.18.6
161 162 attrs==22.2.0
162 163 pyrsistent==0.19.3
163 164 jupyter_core==5.3.1
164 165 platformdirs==3.10.0
165 166 traitlets==5.9.0
166 167 traitlets==5.9.0
167 168 pandocfilters==1.5.0
168 169 pygments==2.15.1
169 170 tinycss2==1.2.1
170 171 webencodings==0.5.1
171 172 traitlets==5.9.0
172 173 orjson==3.10.3
173 174 paste==3.10.1
174 175 premailer==3.10.0
175 176 cachetools==5.3.3
176 177 cssselect==1.2.0
177 178 cssutils==2.6.0
178 179 lxml==4.9.3
179 180 requests==2.28.2
180 181 certifi==2022.12.7
181 182 charset-normalizer==3.1.0
182 183 idna==3.4
183 184 urllib3==1.26.14
184 185 psutil==5.9.8
185 186 psycopg2==2.9.9
186 187 py-bcrypt==0.4
187 188 pycmarkgfm==1.2.0
188 189 cffi==1.16.0
189 190 pycparser==2.21
190 191 pycryptodome==3.17
191 192 pycurl==7.45.3
192 193 pymysql==1.0.3
193 194 pyotp==2.8.0
194 195 pyparsing==3.1.1
195 196 pyramid-debugtoolbar==4.12.1
196 197 pygments==2.15.1
197 198 pyramid==2.0.2
198 199 hupper==1.12
199 200 plaster==1.1.2
200 201 plaster-pastedeploy==1.0.1
201 202 pastedeploy==3.1.0
202 203 plaster==1.1.2
203 204 translationstring==1.4
204 205 venusian==3.0.0
205 206 webob==1.8.7
206 207 zope.deprecation==5.0.0
207 208 zope.interface==6.3.0
208 209 pyramid-mako==1.1.0
209 210 mako==1.2.4
210 211 markupsafe==2.1.2
211 212 pyramid==2.0.2
212 213 hupper==1.12
213 214 plaster==1.1.2
214 215 plaster-pastedeploy==1.0.1
215 216 pastedeploy==3.1.0
216 217 plaster==1.1.2
217 218 translationstring==1.4
218 219 venusian==3.0.0
219 220 webob==1.8.7
220 221 zope.deprecation==5.0.0
221 222 zope.interface==6.3.0
222 223 pyramid-mailer==0.15.1
223 224 pyramid==2.0.2
224 225 hupper==1.12
225 226 plaster==1.1.2
226 227 plaster-pastedeploy==1.0.1
227 228 pastedeploy==3.1.0
228 229 plaster==1.1.2
229 230 translationstring==1.4
230 231 venusian==3.0.0
231 232 webob==1.8.7
232 233 zope.deprecation==5.0.0
233 234 zope.interface==6.3.0
234 235 repoze.sendmail==4.4.1
235 236 transaction==3.1.0
236 237 zope.interface==6.3.0
237 238 zope.interface==6.3.0
238 239 transaction==3.1.0
239 240 zope.interface==6.3.0
240 241 python-ldap==3.4.3
241 242 pyasn1==0.4.8
242 243 pyasn1-modules==0.2.8
243 244 pyasn1==0.4.8
244 245 python-memcached==1.59
245 246 six==1.16.0
246 247 python-pam==2.0.2
247 248 python3-saml==1.15.0
248 249 isodate==0.6.1
249 250 six==1.16.0
250 251 lxml==4.9.3
251 252 xmlsec==1.3.13
252 253 lxml==4.9.3
253 254 pyyaml==6.0.1
254 255 redis==5.0.4
255 256 async-timeout==4.0.3
256 257 regex==2022.10.31
257 258 routes==2.5.1
258 259 repoze.lru==0.7
259 260 six==1.16.0
261 s3fs==2024.6.0
262 aiobotocore==2.13.0
263 aiohttp==3.9.5
264 aiosignal==1.3.1
265 frozenlist==1.4.1
266 attrs==22.2.0
267 frozenlist==1.4.1
268 multidict==6.0.5
269 yarl==1.9.4
270 idna==3.4
271 multidict==6.0.5
272 aioitertools==0.11.0
273 botocore==1.34.106
274 jmespath==1.0.1
275 python-dateutil==2.8.2
276 six==1.16.0
277 urllib3==1.26.14
278 wrapt==1.16.0
279 aiohttp==3.9.5
280 aiosignal==1.3.1
281 frozenlist==1.4.1
282 attrs==22.2.0
283 frozenlist==1.4.1
284 multidict==6.0.5
285 yarl==1.9.4
286 idna==3.4
287 multidict==6.0.5
288 fsspec==2024.6.0
260 289 simplejson==3.19.2
261 290 sshpubkeys==3.3.1
262 291 cryptography==40.0.2
263 292 cffi==1.16.0
264 293 pycparser==2.21
265 294 ecdsa==0.18.0
266 295 six==1.16.0
267 296 sqlalchemy==1.4.52
268 297 greenlet==3.0.3
269 298 typing_extensions==4.9.0
270 299 supervisor==4.2.5
271 300 tzlocal==4.3
272 301 pytz-deprecation-shim==0.1.0.post0
273 302 tzdata==2024.1
274 303 tempita==0.5.2
275 304 unidecode==1.3.6
276 305 urlobject==2.4.3
277 306 waitress==3.0.0
278 307 webhelpers2==2.1
279 308 markupsafe==2.1.2
280 309 six==1.16.0
281 310 whoosh==2.7.4
282 311 zope.cachedescriptors==5.0.0
283 312 qrcode==7.4.2
284 313
285 314 ## uncomment to add the debug libraries
286 315 #-r requirements_debug.txt
@@ -1,1716 +1,1716 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import itertools
20 20 import logging
21 21 import os
22 22 import collections
23 23 import urllib.request
24 24 import urllib.parse
25 25 import urllib.error
26 26 import pathlib
27 27 import time
28 28 import random
29 29
30 30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
31 31
32 32 from pyramid.renderers import render
33 33 from pyramid.response import Response
34 34
35 35 import rhodecode
36 36 from rhodecode.apps._base import RepoAppView
37 37
38 38
39 39 from rhodecode.lib import diffs, helpers as h, rc_cache
40 40 from rhodecode.lib import audit_logger
41 41 from rhodecode.lib.hash_utils import sha1_safe
42 from rhodecode.lib.rc_cache.archive_cache import (
42 from rhodecode.lib.archive_cache import (
43 43 get_archival_cache_store, get_archival_config, ArchiveCacheGenerationLock, archive_iterator)
44 44 from rhodecode.lib.str_utils import safe_bytes, convert_special_chars
45 45 from rhodecode.lib.view_utils import parse_path_ref
46 46 from rhodecode.lib.exceptions import NonRelativePathError
47 47 from rhodecode.lib.codeblocks import (
48 48 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
49 49 from rhodecode.lib.utils2 import convert_line_endings, detect_mode
50 50 from rhodecode.lib.type_utils import str2bool
51 51 from rhodecode.lib.str_utils import safe_str, safe_int
52 52 from rhodecode.lib.auth import (
53 53 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
54 54 from rhodecode.lib.vcs import path as vcspath
55 55 from rhodecode.lib.vcs.backends.base import EmptyCommit
56 56 from rhodecode.lib.vcs.conf import settings
57 57 from rhodecode.lib.vcs.nodes import FileNode
58 58 from rhodecode.lib.vcs.exceptions import (
59 59 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
60 60 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
61 61 NodeDoesNotExistError, CommitError, NodeError)
62 62
63 63 from rhodecode.model.scm import ScmModel
64 64 from rhodecode.model.db import Repository
65 65
66 66 log = logging.getLogger(__name__)
67 67
68 68
69 69 def get_archive_name(db_repo_id, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
70 70 # original backward compat name of archive
71 71 clean_name = safe_str(convert_special_chars(db_repo_name).replace('/', '_'))
72 72
73 73 # e.g vcsserver-id-abcd-sub-1-abcfdef-archive-all.zip
74 74 # vcsserver-id-abcd-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip
75 75 id_sha = sha1_safe(str(db_repo_id))[:4]
76 76 sub_repo = 'sub-1' if subrepos else 'sub-0'
77 77 commit = commit_sha if with_hash else 'archive'
78 78 path_marker = (path_sha if with_hash else '') or 'all'
79 79 archive_name = f'{clean_name}-id-{id_sha}-{sub_repo}-{commit}-{path_marker}{ext}'
80 80
81 81 return archive_name
82 82
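# A quick illustration of the naming scheme above (assuming sha1_safe is a plain
# hex sha1; values otherwise made up):
#   get_archive_name(1, 'my-repo', commit_sha='abcdef12', ext='.zip')
#   -> 'my-repo-id-356a-sub-0-abcdef12-all.zip'
#   (id_sha '356a' is sha1('1')[:4]; path_marker falls back to 'all')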
83 83
84 84 def get_path_sha(at_path):
85 85 return safe_str(sha1_safe(at_path)[:8])
86 86
87 87
88 88 def _get_archive_spec(fname):
89 89 log.debug('Detecting archive spec for: `%s`', fname)
90 90
91 91 fileformat = None
92 92 ext = None
93 93 content_type = None
94 94 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
95 95
96 96 if fname.endswith(extension):
97 97 fileformat = a_type
98 98 log.debug('archive is of type: %s', fileformat)
99 99 ext = extension
100 100 break
101 101
102 102 if not fileformat:
103 103 raise ValueError()
104 104
105 105 # the leftover part of the whole fname is the commit id
106 106 commit_id = fname[:-len(ext)]
107 107
108 108 return commit_id, ext, fileformat, content_type
109 109
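# Usage-wise (hypothetical fname): _get_archive_spec('deadbeef.tar.gz') returns
# commit_id='deadbeef' and ext='.tar.gz'; the exact fileformat/content_type
# values come from the matching settings.ARCHIVE_SPECS entry.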
110 110
111 111 class RepoFilesView(RepoAppView):
112 112
113 113 @staticmethod
114 114 def adjust_file_path_for_svn(f_path, repo):
115 115 """
116 116 Computes the relative path of `f_path`.
117 117
118 118 This is mainly based on prefix matching of the recognized tags and
119 119 branches in the underlying repository.
120 120 """
121 121 tags_and_branches = itertools.chain(
122 122 repo.branches.keys(),
123 123 repo.tags.keys())
124 124 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
125 125
126 126 for name in tags_and_branches:
127 127 if f_path.startswith(f'{name}/'):
128 128 f_path = vcspath.relpath(f_path, name)
129 129 break
130 130 return f_path
131 131
132 132 def load_default_context(self):
133 133 c = self._get_local_tmpl_context(include_app_defaults=True)
134 134 c.rhodecode_repo = self.rhodecode_vcs_repo
135 135 c.enable_downloads = self.db_repo.enable_downloads
136 136 return c
137 137
138 138 def _ensure_not_locked(self, commit_id='tip'):
139 139 _ = self.request.translate
140 140
141 141 repo = self.db_repo
142 142 if repo.enable_locking and repo.locked[0]:
143 143 h.flash(_('This repository has been locked by %s on %s')
144 144 % (h.person_by_id(repo.locked[0]),
145 145 h.format_date(h.time_to_datetime(repo.locked[1]))),
146 146 'warning')
147 147 files_url = h.route_path(
148 148 'repo_files:default_path',
149 149 repo_name=self.db_repo_name, commit_id=commit_id)
150 150 raise HTTPFound(files_url)
151 151
152 152 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
153 153 _ = self.request.translate
154 154
155 155 if not is_head:
156 156 message = _('Cannot modify file. '
157 157 'Given commit `{}` is not head of a branch.').format(commit_id)
158 158 h.flash(message, category='warning')
159 159
160 160 if json_mode:
161 161 return message
162 162
163 163 files_url = h.route_path(
164 164 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
165 165 f_path=f_path)
166 166 raise HTTPFound(files_url)
167 167
168 168 def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
169 169 _ = self.request.translate
170 170
171 171 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
172 172 self.db_repo_name, branch_name)
173 173 if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
174 174 message = _('Branch `{}` changes forbidden by rule {}.').format(
175 175 h.escape(branch_name), h.escape(rule))
176 176 h.flash(message, 'warning')
177 177
178 178 if json_mode:
179 179 return message
180 180
181 181 files_url = h.route_path(
182 182 'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)
183 183
184 184 raise HTTPFound(files_url)
185 185
186 186 def _get_commit_and_path(self):
187 187 default_commit_id = self.db_repo.landing_ref_name
188 188 default_f_path = '/'
189 189
190 190 commit_id = self.request.matchdict.get(
191 191 'commit_id', default_commit_id)
192 192 f_path = self._get_f_path(self.request.matchdict, default_f_path)
193 193 return commit_id, f_path
194 194
195 195 def _get_default_encoding(self, c):
196 196 enc_list = getattr(c, 'default_encodings', [])
197 197 return enc_list[0] if enc_list else 'UTF-8'
198 198
199 199 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
200 200 """
201 201 This is a safe way to get a commit. If an error occurs, it redirects to
202 202 tip with a proper message
203 203
204 204 :param commit_id: id of commit to fetch
205 205 :param redirect_after: toggle redirection
206 206 """
207 207 _ = self.request.translate
208 208
209 209 try:
210 210 return self.rhodecode_vcs_repo.get_commit(commit_id)
211 211 except EmptyRepositoryError:
212 212 if not redirect_after:
213 213 return None
214 214
215 215 add_new = upload_new = ""
216 216 if h.HasRepoPermissionAny(
217 217 'repository.write', 'repository.admin')(self.db_repo_name):
218 218 _url = h.route_path(
219 219 'repo_files_add_file',
220 220 repo_name=self.db_repo_name, commit_id=0, f_path='')
221 221 add_new = h.link_to(
222 222 _('add a new file'), _url, class_="alert-link")
223 223
224 224 _url_upld = h.route_path(
225 225 'repo_files_upload_file',
226 226 repo_name=self.db_repo_name, commit_id=0, f_path='')
227 227 upload_new = h.link_to(
228 228 _('upload a new file'), _url_upld, class_="alert-link")
229 229
230 230 h.flash(h.literal(
231 231 _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning')
232 232 raise HTTPFound(
233 233 h.route_path('repo_summary', repo_name=self.db_repo_name))
234 234
235 235 except (CommitDoesNotExistError, LookupError) as e:
236 236 msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
237 237 h.flash(msg, category='error')
238 238 raise HTTPNotFound()
239 239 except RepositoryError as e:
240 240 h.flash(h.escape(safe_str(e)), category='error')
241 241 raise HTTPNotFound()
242 242
243 243 def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
244 244 """
245 245 Returns file_node; if an error occurs or the given path is a directory,
246 246 it'll redirect to the top level path
247 247 """
248 248 _ = self.request.translate
249 249
250 250 try:
251 251 file_node = commit_obj.get_node(path, pre_load=pre_load)
252 252 if file_node.is_dir():
253 253 raise RepositoryError('The given path is a directory')
254 254 except CommitDoesNotExistError:
255 255 log.exception('No such commit exists for this repository')
256 256 h.flash(_('No such commit exists for this repository'), category='error')
257 257 raise HTTPNotFound()
258 258 except RepositoryError as e:
259 259 log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
260 260 h.flash(h.escape(safe_str(e)), category='error')
261 261 raise HTTPNotFound()
262 262
263 263 return file_node
264 264
265 265 def _is_valid_head(self, commit_id, repo, landing_ref):
266 266 branch_name = sha_commit_id = ''
267 267 is_head = False
268 268 log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)
269 269
270 270 for _branch_name, branch_commit_id in repo.branches.items():
271 271 # simple case we pass in branch name, it's a HEAD
272 272 if commit_id == _branch_name:
273 273 is_head = True
274 274 branch_name = _branch_name
275 275 sha_commit_id = branch_commit_id
276 276 break
277 277 # case when we pass in full sha commit_id, which is a head
278 278 elif commit_id == branch_commit_id:
279 279 is_head = True
280 280 branch_name = _branch_name
281 281 sha_commit_id = branch_commit_id
282 282 break
283 283
284 284 if h.is_svn(repo) and not repo.is_empty():
285 285 # Note: Subversion only has one head.
286 286 if commit_id == repo.get_commit(commit_idx=-1).raw_id:
287 287 is_head = True
288 288 return branch_name, sha_commit_id, is_head
289 289
290 290 # checked branches, means we only need to try to get the branch/commit_sha
291 291 if repo.is_empty():
292 292 is_head = True
293 293 branch_name = landing_ref
294 294 sha_commit_id = EmptyCommit().raw_id
295 295 else:
296 296 commit = repo.get_commit(commit_id=commit_id)
297 297 if commit:
298 298 branch_name = commit.branch
299 299 sha_commit_id = commit.raw_id
300 300
301 301 return branch_name, sha_commit_id, is_head
302 302
303 303 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
304 304
305 305 repo_id = self.db_repo.repo_id
306 306 force_recache = self.get_recache_flag()
307 307
308 308 cache_seconds = safe_int(
309 309 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
310 310 cache_on = not force_recache and cache_seconds > 0
311 311 log.debug(
312 312 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
313 313 'with caching: %s[TTL: %ss]' % (
314 314 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
315 315
316 316 cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
317 317 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
318 318
319 319 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
320 320 def compute_file_tree(_name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
321 321 log.debug('Generating cached file tree at for repo_id: %s, %s, %s',
322 322 _repo_id, _commit_id, _f_path)
323 323
324 324 c.full_load = _full_load
325 325 return render(
326 326 'rhodecode:templates/files/files_browser_tree.mako',
327 327 self._get_template_context(c), self.request, _at_rev)
328 328
329 329 return compute_file_tree(
330 330 self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
331 331
332 332 def create_pure_path(self, *parts):
333 333 # Split paths and sanitize them, removing any ../ etc
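# e.g. ('/docs', '../secret') has PurePath parts ('/', 'docs', '..', 'secret')
# and sanitizes to '/docs/secret' (illustrative input)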
334 334 sanitized_path = [
335 335 x for x in pathlib.PurePath(*parts).parts
336 336 if x not in ['.', '..']]
337 337
338 338 pure_path = pathlib.PurePath(*sanitized_path)
339 339 return pure_path
340 340
341 341 def _is_lf_enabled(self, target_repo):
342 342 lf_enabled = False
343 343
344 344 lf_key_for_vcs_map = {
345 345 'hg': 'extensions_largefiles',
346 346 'git': 'vcs_git_lfs_enabled'
347 347 }
348 348
349 349 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
350 350
351 351 if lf_key_for_vcs:
352 352 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
353 353
354 354 return lf_enabled
355 355
356 356 @LoginRequired()
357 357 @HasRepoPermissionAnyDecorator(
358 358 'repository.read', 'repository.write', 'repository.admin')
359 359 def repo_archivefile(self):
360 360 # archive cache config
361 361 from rhodecode import CONFIG
362 362 _ = self.request.translate
363 363 self.load_default_context()
364 364 default_at_path = '/'
365 365 fname = self.request.matchdict['fname']
366 366 subrepos = self.request.GET.get('subrepos') == 'true'
367 367 with_hash = str2bool(self.request.GET.get('with_hash', '1'))
368 368 at_path = self.request.GET.get('at_path') or default_at_path
369 369
370 370 if not self.db_repo.enable_downloads:
371 371 return Response(_('Downloads disabled'))
372 372
373 373 try:
374 374 commit_id, ext, fileformat, content_type = \
375 375 _get_archive_spec(fname)
376 376 except ValueError:
377 377 return Response(_('Unknown archive type for: `{}`').format(
378 378 h.escape(fname)))
379 379
380 380 try:
381 381 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
382 382 except CommitDoesNotExistError:
383 383 return Response(_('Unknown commit_id {}').format(
384 384 h.escape(commit_id)))
385 385 except EmptyRepositoryError:
386 386 return Response(_('Empty repository'))
387 387
388 388 # we used a ref or a shortened commit id; let's redirect the client to use the explicit hash
389 389 if commit_id != commit.raw_id:
390 390 fname = f'{commit.raw_id}{ext}'
391 391 raise HTTPFound(self.request.current_route_path(fname=fname))
392 392
393 393 try:
394 394 at_path = commit.get_node(at_path).path or default_at_path
395 395 except Exception:
396 396 return Response(_('No node at path {} for this repository').format(h.escape(at_path)))
397 397
398 398 path_sha = get_path_sha(at_path)
399 399
400 400 # used for cache etc, consistent unique archive name
401 401 archive_name_key = get_archive_name(
402 402 self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
403 403 path_sha=path_sha, with_hash=True)
404 404
405 405 if not with_hash:
406 406 path_sha = ''
407 407
408 408 # what end client gets served
409 409 response_archive_name = get_archive_name(
410 410 self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
411 411 path_sha=path_sha, with_hash=with_hash)
412 412
413 413 # remove extension from our archive directory name
414 414 archive_dir_name = response_archive_name[:-len(ext)]
415 415
416 416 archive_cache_disable = self.request.GET.get('no_cache')
417 417
418 418 d_cache = get_archival_cache_store(config=CONFIG)
419 419
420 420 # NOTE: we fetch the config to pass along, so the vcsserver can lazy-init the SAME type of cache
421 421 d_cache_conf = get_archival_config(config=CONFIG)
422 422
423 423 # This is also a cache key, and lock key
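# concurrent requests generating the same archive serialize on this lock;
# a request that finds generation already in progress replies below with
# 307 + Retry-After so the client retries the same URL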
424 424 reentrant_lock_key = archive_name_key + '.lock'
425 425
426 426 use_cached_archive = False
427 427 if not archive_cache_disable and archive_name_key in d_cache:
428 428 reader, metadata = d_cache.fetch(archive_name_key)
429 429
430 430 use_cached_archive = True
431 431 log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
432 432 archive_name_key, metadata, reader.name)
433 433 else:
434 434 reader = None
435 435 log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)
436 436
437 437 if not reader:
438 438 # generate new archive, as previous was not found in the cache
439 439 try:
440 440 with d_cache.get_lock(reentrant_lock_key):
441 441 try:
442 442 commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
443 443 kind=fileformat, subrepos=subrepos,
444 444 archive_at_path=at_path, cache_config=d_cache_conf)
445 445 except ImproperArchiveTypeError:
446 446 return _('Unknown archive type')
447 447
448 448 except ArchiveCacheGenerationLock:
449 449 retry_after = round(random.uniform(0.3, 3.0), 1)
450 450 time.sleep(retry_after)
451 451
452 452 location = self.request.url
453 453 response = Response(
454 454 f"archive {archive_name_key} generation in progress, Retry-After={retry_after}, Location={location}"
455 455 )
456 456 response.headers["Retry-After"] = str(retry_after)
457 457 response.status_code = 307 # temporary redirect
458 458
459 459 response.location = location
460 460 return response
461 461
462 462 reader, metadata = d_cache.fetch(archive_name_key, retry=True, retry_attempts=30)
463 463
464 464 response = Response(app_iter=archive_iterator(reader))
465 465 response.content_disposition = f'attachment; filename={response_archive_name}'
466 466 response.content_type = str(content_type)
467 467
468 468 try:
469 469 return response
470 470 finally:
471 471 # store download action
472 472 audit_logger.store_web(
473 473 'repo.archive.download', action_data={
474 474 'user_agent': self.request.user_agent,
475 475 'archive_name': archive_name_key,
476 476 'archive_spec': fname,
477 477 'archive_cached': use_cached_archive},
478 478 user=self._rhodecode_user,
479 479 repo=self.db_repo,
480 480 commit=True
481 481 )
482 482
483 483 def _get_file_node(self, commit_id, f_path):
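# resolve `f_path` at `commit_id`, falling back to an empty FileNode bound
# to an EmptyCommit when the commit is unset or the node does not exist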
484 484 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
485 485 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
486 486 try:
487 487 node = commit.get_node(f_path)
488 488 if node.is_dir():
489 489 raise NodeError(f'{node} path is a {type(node)} not a file')
490 490 except NodeDoesNotExistError:
491 491 commit = EmptyCommit(
492 492 commit_id=commit_id,
493 493 idx=commit.idx,
494 494 repo=commit.repository,
495 495 alias=commit.repository.alias,
496 496 message=commit.message,
497 497 author=commit.author,
498 498 date=commit.date)
499 499 node = FileNode(safe_bytes(f_path), b'', commit=commit)
500 500 else:
501 501 commit = EmptyCommit(
502 502 repo=self.rhodecode_vcs_repo,
503 503 alias=self.rhodecode_vcs_repo.alias)
504 504 node = FileNode(safe_bytes(f_path), b'', commit=commit)
505 505 return node
506 506
507 507 @LoginRequired()
508 508 @HasRepoPermissionAnyDecorator(
509 509 'repository.read', 'repository.write', 'repository.admin')
510 510 def repo_files_diff(self):
511 511 c = self.load_default_context()
512 512 f_path = self._get_f_path(self.request.matchdict)
513 513 diff1 = self.request.GET.get('diff1', '')
514 514 diff2 = self.request.GET.get('diff2', '')
515 515
516 516 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
517 517
518 518 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
519 519 line_context = self.request.GET.get('context', 3)
520 520
521 521 if not any((diff1, diff2)):
522 522 h.flash(
523 523 'Need query parameter "diff1" or "diff2" to generate a diff.',
524 524 category='error')
525 525 raise HTTPBadRequest()
526 526
527 527 c.action = self.request.GET.get('diff')
528 528 if c.action not in ['download', 'raw']:
529 529 compare_url = h.route_path(
530 530 'repo_compare',
531 531 repo_name=self.db_repo_name,
532 532 source_ref_type='rev',
533 533 source_ref=diff1,
534 534 target_repo=self.db_repo_name,
535 535 target_ref_type='rev',
536 536 target_ref=diff2,
537 537 _query=dict(f_path=f_path))
538 538 # redirect to new view if we render diff
539 539 raise HTTPFound(compare_url)
540 540
541 541 try:
542 542 node1 = self._get_file_node(diff1, path1)
543 543 node2 = self._get_file_node(diff2, f_path)
544 544 except (RepositoryError, NodeError):
545 545 log.exception("Exception while trying to get node from repository")
546 546 raise HTTPFound(
547 547 h.route_path('repo_files', repo_name=self.db_repo_name,
548 548 commit_id='tip', f_path=f_path))
549 549
550 550 if all(isinstance(node.commit, EmptyCommit)
551 551 for node in (node1, node2)):
552 552 raise HTTPNotFound()
553 553
554 554 c.commit_1 = node1.commit
555 555 c.commit_2 = node2.commit
556 556
557 557 if c.action == 'download':
558 558 _diff = diffs.get_gitdiff(node1, node2,
559 559 ignore_whitespace=ignore_whitespace,
560 560 context=line_context)
561 561 # NOTE: this was using diff_format='gitdiff'
562 562 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
563 563
564 564 response = Response(self.path_filter.get_raw_patch(diff))
565 565 response.content_type = 'text/plain'
566 566 response.content_disposition = (
567 567 f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff'
568 568 )
569 569 charset = self._get_default_encoding(c)
570 570 if charset:
571 571 response.charset = charset
572 572 return response
573 573
574 574 elif c.action == 'raw':
575 575 _diff = diffs.get_gitdiff(node1, node2,
576 576 ignore_whitespace=ignore_whitespace,
577 577 context=line_context)
578 578 # NOTE: this was using diff_format='gitdiff'
579 579 diff = diffs.DiffProcessor(_diff, diff_format='newdiff')
580 580
581 581 response = Response(self.path_filter.get_raw_patch(diff))
582 582 response.content_type = 'text/plain'
583 583 charset = self._get_default_encoding(c)
584 584 if charset:
585 585 response.charset = charset
586 586 return response
587 587
588 588 # in case we ever end up here
589 589 raise HTTPNotFound()
590 590
591 591 @LoginRequired()
592 592 @HasRepoPermissionAnyDecorator(
593 593 'repository.read', 'repository.write', 'repository.admin')
594 594 def repo_files_diff_2way_redirect(self):
595 595 """
596 596 Kept only to make OLD links work
597 597 """
598 598 f_path = self._get_f_path_unchecked(self.request.matchdict)
599 599 diff1 = self.request.GET.get('diff1', '')
600 600 diff2 = self.request.GET.get('diff2', '')
601 601
602 602 if not any((diff1, diff2)):
603 603 h.flash(
604 604 'Need query parameter "diff1" or "diff2" to generate a diff.',
605 605 category='error')
606 606 raise HTTPBadRequest()
607 607
608 608 compare_url = h.route_path(
609 609 'repo_compare',
610 610 repo_name=self.db_repo_name,
611 611 source_ref_type='rev',
612 612 source_ref=diff1,
613 613 target_ref_type='rev',
614 614 target_ref=diff2,
615 615 _query=dict(f_path=f_path, diffmode='sideside',
616 616 target_repo=self.db_repo_name,))
617 617 raise HTTPFound(compare_url)
618 618
619 619 @LoginRequired()
620 620 def repo_files_default_commit_redirect(self):
621 621 """
622 622 Special page that redirects to the landing page of files based on the default
623 623 commit for repository
624 624 """
625 625 c = self.load_default_context()
626 626 ref_name = c.rhodecode_db_repo.landing_ref_name
627 627 landing_url = h.repo_files_by_ref_url(
628 628 c.rhodecode_db_repo.repo_name,
629 629 c.rhodecode_db_repo.repo_type,
630 630 f_path='',
631 631 ref_name=ref_name,
632 632 commit_id='tip',
633 633 query=dict(at=ref_name)
634 634 )
635 635
636 636 raise HTTPFound(landing_url)
637 637
638 638 @LoginRequired()
639 639 @HasRepoPermissionAnyDecorator(
640 640 'repository.read', 'repository.write', 'repository.admin')
641 641 def repo_files(self):
642 642 c = self.load_default_context()
643 643
644 644 view_name = getattr(self.request.matched_route, 'name', None)
645 645
646 646 c.annotate = view_name == 'repo_files:annotated'
647 647 # default is false, but .rst/.md files are auto-rendered later; we can
648 648 # override auto rendering by setting this GET flag
649 649 c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False)
650 650
651 651 commit_id, f_path = self._get_commit_and_path()
652 652
653 653 c.commit = self._get_commit_or_redirect(commit_id)
654 654 c.branch = self.request.GET.get('branch', None)
655 655 c.f_path = f_path
656 656 at_rev = self.request.GET.get('at')
657 657
658 658 # files or dirs
659 659 try:
660 660 c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data'])
661 661
662 662 c.file_author = True
663 663 c.file_tree = ''
664 664
665 665 # prev link
666 666 try:
667 667 prev_commit = c.commit.prev(c.branch)
668 668 c.prev_commit = prev_commit
669 669 c.url_prev = h.route_path(
670 670 'repo_files', repo_name=self.db_repo_name,
671 671 commit_id=prev_commit.raw_id, f_path=f_path)
672 672 if c.branch:
673 673 c.url_prev += '?branch=%s' % c.branch
674 674 except (CommitDoesNotExistError, VCSError):
675 675 c.url_prev = '#'
676 676 c.prev_commit = EmptyCommit()
677 677
678 678 # next link
679 679 try:
680 680 next_commit = c.commit.next(c.branch)
681 681 c.next_commit = next_commit
682 682 c.url_next = h.route_path(
683 683 'repo_files', repo_name=self.db_repo_name,
684 684 commit_id=next_commit.raw_id, f_path=f_path)
685 685 if c.branch:
686 686 c.url_next += '?branch=%s' % c.branch
687 687 except (CommitDoesNotExistError, VCSError):
688 688 c.url_next = '#'
689 689 c.next_commit = EmptyCommit()
690 690
691 691 # load file content
692 692 if c.file.is_file():
693 693
694 694 c.lf_node = {}
695 695
696 696 has_lf_enabled = self._is_lf_enabled(self.db_repo)
697 697 if has_lf_enabled:
698 698 c.lf_node = c.file.get_largefile_node()
699 699
700 700 c.file_source_page = 'true'
701 701 c.file_last_commit = c.file.last_commit
702 702
703 703 c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file
704 704
705 705 if not (c.file_size_too_big or c.file.is_binary):
706 706 if c.annotate: # annotation has precedence over renderer
707 707 c.annotated_lines = filenode_as_annotated_lines_tokens(
708 708 c.file
709 709 )
710 710 else:
711 711 c.renderer = (
712 712 c.renderer and h.renderer_from_filename(c.file.path)
713 713 )
714 714 if not c.renderer:
715 715 c.lines = filenode_as_lines_tokens(c.file)
716 716
717 717 _branch_name, _sha_commit_id, is_head = \
718 718 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
719 719 landing_ref=self.db_repo.landing_ref_name)
720 720 c.on_branch_head = is_head
721 721
722 722 branch = c.commit.branch if (
723 723 c.commit.branch and '/' not in c.commit.branch) else None
724 724 c.branch_or_raw_id = branch or c.commit.raw_id
725 725 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
726 726
727 727 author = c.file_last_commit.author
728 728 c.authors = [[
729 729 h.email(author),
730 730 h.person(author, 'username_or_name_or_email'),
731 731 1
732 732 ]]
733 733
734 734 else: # load tree content at path
735 735 c.file_source_page = 'false'
736 736 c.authors = []
737 737 # this loads a simple tree without metadata to speed things up
738 738 # later via ajax we call repo_nodetree_full and fetch the whole tree
739 739 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)
740 740
741 741 c.readme_data, c.readme_file = \
742 742 self._get_readme_data(self.db_repo, c.visual.default_renderer,
743 743 c.commit.raw_id, f_path)
744 744
745 745 except RepositoryError as e:
746 746 h.flash(h.escape(safe_str(e)), category='error')
747 747 raise HTTPNotFound()
748 748
749 749 if self.request.environ.get('HTTP_X_PJAX'):
750 750 html = render('rhodecode:templates/files/files_pjax.mako',
751 751 self._get_template_context(c), self.request)
752 752 else:
753 753 html = render('rhodecode:templates/files/files.mako',
754 754 self._get_template_context(c), self.request)
755 755 return Response(html)
756 756
757 757 @HasRepoPermissionAnyDecorator(
758 758 'repository.read', 'repository.write', 'repository.admin')
759 759 def repo_files_annotated_previous(self):
760 760 self.load_default_context()
761 761
762 762 commit_id, f_path = self._get_commit_and_path()
763 763 commit = self._get_commit_or_redirect(commit_id)
764 764 prev_commit_id = commit.raw_id
765 765 line_anchor = self.request.GET.get('line_anchor')
766 766 is_file = False
767 767 try:
768 768 _file = commit.get_node(f_path)
769 769 is_file = _file.is_file()
770 770 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
771 771 pass
772 772
773 773 if is_file:
774 774 history = commit.get_path_history(f_path)
775 775 prev_commit_id = history[1].raw_id \
776 776 if len(history) > 1 else prev_commit_id
777 777 prev_url = h.route_path(
778 778 'repo_files:annotated', repo_name=self.db_repo_name,
779 779 commit_id=prev_commit_id, f_path=f_path,
780 780 _anchor=f'L{line_anchor}')
781 781
782 782 raise HTTPFound(prev_url)
783 783
784 784 @LoginRequired()
785 785 @HasRepoPermissionAnyDecorator(
786 786 'repository.read', 'repository.write', 'repository.admin')
787 787 def repo_nodetree_full(self):
788 788 """
789 789 Returns rendered html of file tree that contains commit date,
790 790 author, commit_id for the specified combination of
791 791 repo, commit_id and file path
792 792 """
793 793 c = self.load_default_context()
794 794
795 795 commit_id, f_path = self._get_commit_and_path()
796 796 commit = self._get_commit_or_redirect(commit_id)
797 797 try:
798 798 dir_node = commit.get_node(f_path)
799 799 except RepositoryError as e:
800 800 return Response(f'error: {h.escape(safe_str(e))}')
801 801
802 802 if dir_node.is_file():
803 803 return Response('')
804 804
805 805 c.file = dir_node
806 806 c.commit = commit
807 807 at_rev = self.request.GET.get('at')
808 808
809 809 html = self._get_tree_at_commit(
810 810 c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
811 811
812 812 return Response(html)
813 813
814 814 def _get_attachement_headers(self, f_path):
815 815 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
816 816 safe_path = f_name.replace('"', '\\"')
817 817 encoded_path = urllib.parse.quote(f_name)
818 818
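# build a dual-filename Content-Disposition value (RFC 6266/5987): a plain
# `filename="..."` fallback plus `filename*=UTF-8''...` carrying the
# percent-encoded name; WSGI header values must be latin-1 safe, hence the
# re-decode at the end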
819 819 headers = "attachment; " \
820 820 "filename=\"{}\"; " \
821 821 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
822 822
823 823 return safe_bytes(headers).decode('latin-1', errors='replace')
824 824
825 825 @LoginRequired()
826 826 @HasRepoPermissionAnyDecorator(
827 827 'repository.read', 'repository.write', 'repository.admin')
828 828 def repo_file_raw(self):
829 829 """
830 830 Action for show as raw, some mimetypes are "rendered",
831 831 those include images, icons.
832 832 """
833 833 c = self.load_default_context()
834 834
835 835 commit_id, f_path = self._get_commit_and_path()
836 836 commit = self._get_commit_or_redirect(commit_id)
837 837 file_node = self._get_filenode_or_redirect(commit, f_path)
838 838
839 839 raw_mimetype_mapping = {
840 840 # map original mimetype to a mimetype used for "show as raw"
841 841 # you can also provide a content-disposition to override the
842 842 # default "attachment" disposition.
843 843 # orig_type: (new_type, new_dispo)
844 844
845 845 # show images inline:
846 846 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
847 847 # for example render an SVG with javascript inside or even render
848 848 # HTML.
849 849 'image/x-icon': ('image/x-icon', 'inline'),
850 850 'image/png': ('image/png', 'inline'),
851 851 'image/gif': ('image/gif', 'inline'),
852 852 'image/jpeg': ('image/jpeg', 'inline'),
853 853 'application/pdf': ('application/pdf', 'inline'),
854 854 }
855 855
856 856 mimetype = file_node.mimetype
857 857 try:
858 858 mimetype, disposition = raw_mimetype_mapping[mimetype]
859 859 except KeyError:
860 860 # we don't know anything special about this, handle it safely
861 861 if file_node.is_binary:
862 862 # do same as download raw for binary files
863 863 mimetype, disposition = 'application/octet-stream', 'attachment'
864 864 else:
865 865 # do not just use the original mimetype, but force text/plain,
866 866 # otherwise it would serve text/html and that might be unsafe.
867 867 # Note: underlying vcs library fakes text/plain mimetype if the
868 868 # mimetype cannot be determined and it thinks it is not
869 869 # binary. This might lead to erroneous text display in some
870 870 # cases, but helps in other cases, like with text files
871 871 # without extension.
872 872 mimetype, disposition = 'text/plain', 'inline'
873 873
874 874 if disposition == 'attachment':
875 875 disposition = self._get_attachement_headers(f_path)
876 876
877 877 stream_content = file_node.stream_bytes()
878 878
879 879 response = Response(app_iter=stream_content)
880 880 response.content_disposition = disposition
881 881 response.content_type = mimetype
882 882
883 883 charset = self._get_default_encoding(c)
884 884 if charset:
885 885 response.charset = charset
886 886
887 887 return response
888 888
889 889 @LoginRequired()
890 890 @HasRepoPermissionAnyDecorator(
891 891 'repository.read', 'repository.write', 'repository.admin')
892 892 def repo_file_download(self):
893 893 c = self.load_default_context()
894 894
895 895 commit_id, f_path = self._get_commit_and_path()
896 896 commit = self._get_commit_or_redirect(commit_id)
897 897 file_node = self._get_filenode_or_redirect(commit, f_path)
898 898
899 899 if self.request.GET.get('lf'):
900 900 # only if lf get flag is passed, we download this file
901 901 # as LFS/Largefile
902 902 lf_node = file_node.get_largefile_node()
903 903 if lf_node:
904 904 # overwrite our pointer with the REAL large-file
905 905 file_node = lf_node
906 906
907 907 disposition = self._get_attachement_headers(f_path)
908 908
909 909 stream_content = file_node.stream_bytes()
910 910
911 911 response = Response(app_iter=stream_content)
912 912 response.content_disposition = disposition
913 913 response.content_type = file_node.mimetype
914 914
915 915 charset = self._get_default_encoding(c)
916 916 if charset:
917 917 response.charset = charset
918 918
919 919 return response
920 920
921 921 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
922 922
923 923 cache_seconds = safe_int(
924 924 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
925 925 cache_on = cache_seconds > 0
926 926 log.debug(
927 927 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s` '
928 928 'with caching: %s [TTL: %ss]' % (
929 929 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
930 930
931 931 cache_namespace_uid = f'repo.{repo_id}'
932 932 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
933 933
934 934 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
935 935 def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
936 936 log.debug('Generating cached nodelist for repo_id: %s, %s, %s',
937 937 _repo_id, _commit_id, _f_path)
938 938 try:
939 939 _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
940 940 except Exception as e:  # includes RepositoryError, CommitDoesNotExistError
941 941 log.exception(safe_str(e))
942 942 h.flash(h.escape(safe_str(e)), category='error')
943 943 raise HTTPFound(h.route_path(
944 944 'repo_files', repo_name=self.db_repo_name,
945 945 commit_id='tip', f_path='/'))
946 946
947 947 return _d + _f
948 948
949 949 result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
950 950 commit_id, f_path)
951 951 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
952 952
953 953 @LoginRequired()
954 954 @HasRepoPermissionAnyDecorator(
955 955 'repository.read', 'repository.write', 'repository.admin')
956 956 def repo_nodelist(self):
957 957 self.load_default_context()
958 958
959 959 commit_id, f_path = self._get_commit_and_path()
960 960 commit = self._get_commit_or_redirect(commit_id)
961 961
962 962 metadata = self._get_nodelist_at_commit(
963 963 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
964 964 return {'nodes': [x for x in metadata]}
965 965
966 966 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
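# map {name: commit_id} into (symbolic_ref, name, ref_type) tuples used by
# _get_node_history to build the branches/tags groups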
967 967 items = []
968 968 for name, commit_id in branches_or_tags.items():
969 969 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
970 970 items.append((sym_ref, name, ref_type))
971 971 return items
972 972
973 973 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
974 974 return commit_id
975 975
976 976 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
977 977 return commit_id
978 978
979 979 # NOTE(dan): old code we used in "diff" mode compare
980 980 # new_f_path = vcspath.join(name, f_path)
981 981 # return f'{new_f_path}@{commit_id}'
982 982
983 983 def _get_node_history(self, commit_obj, f_path, commits=None):
984 984 """
985 985 get commit history for given node
986 986
987 987 :param commit_obj: commit to calculate history
988 988 :param f_path: path for node to calculate history for
989 989 :param commits: if passed don't calculate history and take
990 990 commits defined in this list
991 991 """
992 992 _ = self.request.translate
993 993
994 994 # calculate history based on tip
995 995 tip = self.rhodecode_vcs_repo.get_commit()
996 996 if commits is None:
997 997 pre_load = ["author", "branch"]
998 998 try:
999 999 commits = tip.get_path_history(f_path, pre_load=pre_load)
1000 1000 except (NodeDoesNotExistError, CommitError):
1001 1001 # this node is not present at tip!
1002 1002 commits = commit_obj.get_path_history(f_path, pre_load=pre_load)
1003 1003
1004 1004 history = []
1005 1005 commits_group = ([], _("Changesets"))
1006 1006 for commit in commits:
1007 1007 branch = ' (%s)' % commit.branch if commit.branch else ''
1008 1008 n_desc = f'r{commit.idx}:{commit.short_id}{branch}'
1009 1009 commits_group[0].append((commit.raw_id, n_desc, 'sha'))
1010 1010 history.append(commits_group)
1011 1011
1012 1012 symbolic_reference = self._symbolic_reference
1013 1013
1014 1014 if self.rhodecode_vcs_repo.alias == 'svn':
1015 1015 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
1016 1016 f_path, self.rhodecode_vcs_repo)
1017 1017 if adjusted_f_path != f_path:
1018 1018 log.debug(
1019 1019 'Recognized svn tag or branch in file "%s", using svn '
1020 1020 'specific symbolic references', f_path)
1021 1021 f_path = adjusted_f_path
1022 1022 symbolic_reference = self._symbolic_reference_svn
1023 1023
1024 1024 branches = self._create_references(
1025 1025 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
1026 1026 branches_group = (branches, _("Branches"))
1027 1027
1028 1028 tags = self._create_references(
1029 1029 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
1030 1030 tags_group = (tags, _("Tags"))
1031 1031
1032 1032 history.append(branches_group)
1033 1033 history.append(tags_group)
1034 1034
1035 1035 return history, commits
1036 1036
1037 1037 @LoginRequired()
1038 1038 @HasRepoPermissionAnyDecorator(
1039 1039 'repository.read', 'repository.write', 'repository.admin')
1040 1040 def repo_file_history(self):
1041 1041 self.load_default_context()
1042 1042
1043 1043 commit_id, f_path = self._get_commit_and_path()
1044 1044 commit = self._get_commit_or_redirect(commit_id)
1045 1045 file_node = self._get_filenode_or_redirect(commit, f_path)
1046 1046
1047 1047 if file_node.is_file():
1048 1048 file_history, _hist = self._get_node_history(commit, f_path)
1049 1049
1050 1050 res = []
1051 1051 for section_items, section in file_history:
1052 1052 items = []
1053 1053 for obj_id, obj_text, obj_type in section_items:
1054 1054 at_rev = ''
1055 1055 if obj_type in ['branch', 'bookmark', 'tag']:
1056 1056 at_rev = obj_text
1057 1057 entry = {
1058 1058 'id': obj_id,
1059 1059 'text': obj_text,
1060 1060 'type': obj_type,
1061 1061 'at_rev': at_rev
1062 1062 }
1063 1063
1064 1064 items.append(entry)
1065 1065
1066 1066 res.append({
1067 1067 'text': section,
1068 1068 'children': items
1069 1069 })
1070 1070
1071 1071 data = {
1072 1072 'more': False,
1073 1073 'results': res
1074 1074 }
1075 1075 return data
1076 1076
1077 1077 log.warning('Cannot fetch history for directory')
1078 1078 raise HTTPBadRequest()
1079 1079
1080 1080 @LoginRequired()
1081 1081 @HasRepoPermissionAnyDecorator(
1082 1082 'repository.read', 'repository.write', 'repository.admin')
1083 1083 def repo_file_authors(self):
1084 1084 c = self.load_default_context()
1085 1085
1086 1086 commit_id, f_path = self._get_commit_and_path()
1087 1087 commit = self._get_commit_or_redirect(commit_id)
1088 1088 file_node = self._get_filenode_or_redirect(commit, f_path)
1089 1089
1090 1090 if not file_node.is_file():
1091 1091 raise HTTPBadRequest()
1092 1092
1093 1093 c.file_last_commit = file_node.last_commit
1094 1094 if self.request.GET.get('annotate') == '1':
1095 1095 # use _hist from annotation if annotation mode is on
1096 1096 commit_ids = {x[1] for x in file_node.annotate}
1097 1097 _hist = (
1098 1098 self.rhodecode_vcs_repo.get_commit(commit_id)
1099 1099 for commit_id in commit_ids)
1100 1100 else:
1101 1101 _f_history, _hist = self._get_node_history(commit, f_path)
1102 1102 c.file_author = False
1103 1103
1104 1104 unique = collections.OrderedDict()
1105 1105 for commit in _hist:
1106 1106 author = commit.author
1107 1107 if author not in unique:
1108 1108 unique[commit.author] = [
1109 1109 h.email(author),
1110 1110 h.person(author, 'username_or_name_or_email'),
1111 1111 1 # counter
1112 1112 ]
1113 1113
1114 1114 else:
1115 1115 # increase counter
1116 1116 unique[commit.author][2] += 1
1117 1117
1118 1118 c.authors = [val for val in unique.values()]
1119 1119
1120 1120 return self._get_template_context(c)
1121 1121
1122 1122 @LoginRequired()
1123 1123 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1124 1124 def repo_files_check_head(self):
1125 1125 self.load_default_context()
1126 1126
1127 1127 commit_id, f_path = self._get_commit_and_path()
1128 1128 _branch_name, _sha_commit_id, is_head = \
1129 1129 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1130 1130 landing_ref=self.db_repo.landing_ref_name)
1131 1131
1132 1132 new_path = self.request.POST.get('path')
1133 1133 operation = self.request.POST.get('operation')
1134 1134 path_exist = ''
1135 1135
1136 1136 if new_path and operation in ['create', 'upload']:
1137 1137 new_f_path = os.path.join(f_path.lstrip('/'), new_path)
1138 1138 try:
1139 1139 commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
1140 1140 # NOTE(dan): construct whole path without leading /
1141 1141 file_node = commit_obj.get_node(new_f_path)
1142 1142 if file_node is not None:
1143 1143 path_exist = new_f_path
1144 1144 except EmptyRepositoryError:
1145 1145 pass
1146 1146 except Exception:
1147 1147 pass
1148 1148
1149 1149 return {
1150 1150 'branch': _branch_name,
1151 1151 'sha': _sha_commit_id,
1152 1152 'is_head': is_head,
1153 1153 'path_exists': path_exist
1154 1154 }
1155 1155
1156 1156 @LoginRequired()
1157 1157 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1158 1158 def repo_files_remove_file(self):
1159 1159 _ = self.request.translate
1160 1160 c = self.load_default_context()
1161 1161 commit_id, f_path = self._get_commit_and_path()
1162 1162
1163 1163 self._ensure_not_locked()
1164 1164 _branch_name, _sha_commit_id, is_head = \
1165 1165 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1166 1166 landing_ref=self.db_repo.landing_ref_name)
1167 1167
1168 1168 self.forbid_non_head(is_head, f_path)
1169 1169 self.check_branch_permission(_branch_name)
1170 1170
1171 1171 c.commit = self._get_commit_or_redirect(commit_id)
1172 1172 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1173 1173
1174 1174 c.default_message = _(
1175 1175 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1176 1176 c.f_path = f_path
1177 1177
1178 1178 return self._get_template_context(c)
1179 1179
1180 1180 @LoginRequired()
1181 1181 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1182 1182 @CSRFRequired()
1183 1183 def repo_files_delete_file(self):
1184 1184 _ = self.request.translate
1185 1185
1186 1186 c = self.load_default_context()
1187 1187 commit_id, f_path = self._get_commit_and_path()
1188 1188
1189 1189 self._ensure_not_locked()
1190 1190 _branch_name, _sha_commit_id, is_head = \
1191 1191 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1192 1192 landing_ref=self.db_repo.landing_ref_name)
1193 1193
1194 1194 self.forbid_non_head(is_head, f_path)
1195 1195 self.check_branch_permission(_branch_name)
1196 1196
1197 1197 c.commit = self._get_commit_or_redirect(commit_id)
1198 1198 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1199 1199
1200 1200 c.default_message = _(
1201 1201 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1202 1202 c.f_path = f_path
1203 1203 node_path = f_path
1204 1204 author = self._rhodecode_db_user.full_contact
1205 1205 message = self.request.POST.get('message') or c.default_message
1206 1206 try:
1207 1207 nodes = {
1208 1208 safe_bytes(node_path): {
1209 1209 'content': b''
1210 1210 }
1211 1211 }
1212 1212 ScmModel().delete_nodes(
1213 1213 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1214 1214 message=message,
1215 1215 nodes=nodes,
1216 1216 parent_commit=c.commit,
1217 1217 author=author,
1218 1218 )
1219 1219
1220 1220 h.flash(
1221 1221 _('Successfully deleted file `{}`').format(
1222 1222 h.escape(f_path)), category='success')
1223 1223 except Exception:
1224 1224 log.exception('Error during commit operation')
1225 1225 h.flash(_('Error occurred during commit'), category='error')
1226 1226 raise HTTPFound(
1227 1227 h.route_path('repo_commit', repo_name=self.db_repo_name,
1228 1228 commit_id='tip'))
1229 1229
1230 1230 @LoginRequired()
1231 1231 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1232 1232 def repo_files_edit_file(self):
1233 1233 _ = self.request.translate
1234 1234 c = self.load_default_context()
1235 1235 commit_id, f_path = self._get_commit_and_path()
1236 1236
1237 1237 self._ensure_not_locked()
1238 1238 _branch_name, _sha_commit_id, is_head = \
1239 1239 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1240 1240 landing_ref=self.db_repo.landing_ref_name)
1241 1241
1242 1242 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1243 1243 self.check_branch_permission(_branch_name, commit_id=commit_id)
1244 1244
1245 1245 c.commit = self._get_commit_or_redirect(commit_id)
1246 1246 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1247 1247
1248 1248 if c.file.is_binary:
1249 1249 files_url = h.route_path(
1250 1250 'repo_files',
1251 1251 repo_name=self.db_repo_name,
1252 1252 commit_id=c.commit.raw_id, f_path=f_path)
1253 1253 raise HTTPFound(files_url)
1254 1254
1255 1255 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1256 1256 c.f_path = f_path
1257 1257
1258 1258 return self._get_template_context(c)
1259 1259
1260 1260 @LoginRequired()
1261 1261 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1262 1262 @CSRFRequired()
1263 1263 def repo_files_update_file(self):
1264 1264 _ = self.request.translate
1265 1265 c = self.load_default_context()
1266 1266 commit_id, f_path = self._get_commit_and_path()
1267 1267
1268 1268 self._ensure_not_locked()
1269 1269
1270 1270 c.commit = self._get_commit_or_redirect(commit_id)
1271 1271 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1272 1272
1273 1273 if c.file.is_binary:
1274 1274 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1275 1275 commit_id=c.commit.raw_id, f_path=f_path))
1276 1276
1277 1277 _branch_name, _sha_commit_id, is_head = \
1278 1278 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1279 1279 landing_ref=self.db_repo.landing_ref_name)
1280 1280
1281 1281 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1282 1282 self.check_branch_permission(_branch_name, commit_id=commit_id)
1283 1283
1284 1284 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1285 1285 c.f_path = f_path
1286 1286
1287 1287 old_content = c.file.str_content
1288 1288 sl = old_content.splitlines(True)  # keep line endings
1289 1289 first_line = sl[0] if sl else ''
1290 1290
1291 1291 r_post = self.request.POST
1292 1292 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
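# detect_mode() infers the mode from the first line's terminator and falls
# back to the given default (0, Unix) for empty content, so the edited file
# keeps its original line-ending style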
1293 1293 line_ending_mode = detect_mode(first_line, 0)
1294 1294 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1295 1295
1296 1296 message = r_post.get('message') or c.default_message
1297 1297
1298 1298 org_node_path = c.file.str_path
1299 1299 filename = r_post['filename']
1300 1300
1301 1301 root_path = c.file.dir_path
1302 1302 pure_path = self.create_pure_path(root_path, filename)
1303 1303 node_path = pure_path.as_posix()
1304 1304
1305 1305 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1306 1306 commit_id=commit_id)
1307 1307 if content == old_content and node_path == org_node_path:
1308 1308 h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
1309 1309 category='warning')
1310 1310 raise HTTPFound(default_redirect_url)
1311 1311
1312 1312 try:
1313 1313 mapping = {
1314 1314 c.file.bytes_path: {
1315 1315 'org_filename': org_node_path,
1316 1316 'filename': safe_bytes(node_path),
1317 1317 'content': safe_bytes(content),
1318 1318 'lexer': '',
1319 1319 'op': 'mod',
1320 1320 'mode': c.file.mode
1321 1321 }
1322 1322 }
1323 1323
1324 1324 commit = ScmModel().update_nodes(
1325 1325 user=self._rhodecode_db_user.user_id,
1326 1326 repo=self.db_repo,
1327 1327 message=message,
1328 1328 nodes=mapping,
1329 1329 parent_commit=c.commit,
1330 1330 )
1331 1331
1332 1332 h.flash(_('Successfully committed changes to file `{}`').format(
1333 1333 h.escape(f_path)), category='success')
1334 1334 default_redirect_url = h.route_path(
1335 1335 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1336 1336
1337 1337 except Exception:
1338 1338 log.exception('Error occurred during commit')
1339 1339 h.flash(_('Error occurred during commit'), category='error')
1340 1340
1341 1341 raise HTTPFound(default_redirect_url)
1342 1342
1343 1343 @LoginRequired()
1344 1344 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1345 1345 def repo_files_add_file(self):
1346 1346 _ = self.request.translate
1347 1347 c = self.load_default_context()
1348 1348 commit_id, f_path = self._get_commit_and_path()
1349 1349
1350 1350 self._ensure_not_locked()
1351 1351
1352 1352 # Check if we need to use this page to upload binary
1353 1353 upload_binary = str2bool(self.request.params.get('upload_binary', False))
1354 1354
1355 1355 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1356 1356 if c.commit is None:
1357 1357 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1358 1358
1359 1359 if self.rhodecode_vcs_repo.is_empty():
1360 1360 # for empty repository we cannot check for current branch, we rely on
1361 1361 # c.commit.branch instead
1362 1362 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1363 1363 else:
1364 1364 _branch_name, _sha_commit_id, is_head = \
1365 1365 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1366 1366 landing_ref=self.db_repo.landing_ref_name)
1367 1367
1368 1368 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1369 1369 self.check_branch_permission(_branch_name, commit_id=commit_id)
1370 1370
1371 1371 c.default_message = (_('Added file via RhodeCode Enterprise')) \
1372 1372 if not upload_binary else (_('Edited file {} via RhodeCode Enterprise').format(f_path))
1373 1373 c.f_path = f_path.lstrip('/') # ensure not relative path
1374 1374 c.replace_binary = upload_binary
1375 1375
1376 1376 return self._get_template_context(c)
1377 1377
1378 1378 @LoginRequired()
1379 1379 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1380 1380 @CSRFRequired()
1381 1381 def repo_files_create_file(self):
1382 1382 _ = self.request.translate
1383 1383 c = self.load_default_context()
1384 1384 commit_id, f_path = self._get_commit_and_path()
1385 1385
1386 1386 self._ensure_not_locked()
1387 1387
1388 1388 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1389 1389 if c.commit is None:
1390 1390 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1391 1391
1392 1392 # calculate redirect URL
1393 1393 if self.rhodecode_vcs_repo.is_empty():
1394 1394 default_redirect_url = h.route_path(
1395 1395 'repo_summary', repo_name=self.db_repo_name)
1396 1396 else:
1397 1397 default_redirect_url = h.route_path(
1398 1398 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1399 1399
1400 1400 if self.rhodecode_vcs_repo.is_empty():
1401 1401 # for empty repository we cannot check for current branch, we rely on
1402 1402 # c.commit.branch instead
1403 1403 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1404 1404 else:
1405 1405 _branch_name, _sha_commit_id, is_head = \
1406 1406 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1407 1407 landing_ref=self.db_repo.landing_ref_name)
1408 1408
1409 1409 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1410 1410 self.check_branch_permission(_branch_name, commit_id=commit_id)
1411 1411
1412 1412 c.default_message = (_('Added file via RhodeCode Enterprise'))
1413 1413 c.f_path = f_path
1414 1414
1415 1415 r_post = self.request.POST
1416 1416 message = r_post.get('message') or c.default_message
1417 1417 filename = r_post.get('filename')
1418 1418 unix_mode = 0
1419 1419
1420 1420 if not filename:
1421 1421 # If there's no commit, redirect to repo summary
1422 1422 if type(c.commit) is EmptyCommit:
1423 1423 redirect_url = h.route_path(
1424 1424 'repo_summary', repo_name=self.db_repo_name)
1425 1425 else:
1426 1426 redirect_url = default_redirect_url
1427 1427 h.flash(_('No filename specified'), category='warning')
1428 1428 raise HTTPFound(redirect_url)
1429 1429
1430 1430 root_path = f_path
1431 1431 pure_path = self.create_pure_path(root_path, filename)
1432 1432 node_path = pure_path.as_posix().lstrip('/')
1433 1433
1434 1434 author = self._rhodecode_db_user.full_contact
1435 1435 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1436 1436 nodes = {
1437 1437 safe_bytes(node_path): {
1438 1438 'content': safe_bytes(content)
1439 1439 }
1440 1440 }
1441 1441
1442 1442 try:
1443 1443
1444 1444 commit = ScmModel().create_nodes(
1445 1445 user=self._rhodecode_db_user.user_id,
1446 1446 repo=self.db_repo,
1447 1447 message=message,
1448 1448 nodes=nodes,
1449 1449 parent_commit=c.commit,
1450 1450 author=author,
1451 1451 )
1452 1452
1453 1453 h.flash(_('Successfully committed new file `{}`').format(
1454 1454 h.escape(node_path)), category='success')
1455 1455
1456 1456 default_redirect_url = h.route_path(
1457 1457 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1458 1458
1459 1459 except NonRelativePathError:
1460 1460 log.exception('Non Relative path found')
1461 1461 h.flash(_('The location specified must be a relative path and must not '
1462 1462 'contain .. in the path'), category='warning')
1463 1463 raise HTTPFound(default_redirect_url)
1464 1464 except (NodeError, NodeAlreadyExistsError) as e:
1465 1465 h.flash(h.escape(safe_str(e)), category='error')
1466 1466 except Exception:
1467 1467 log.exception('Error occurred during commit')
1468 1468 h.flash(_('Error occurred during commit'), category='error')
1469 1469
1470 1470 raise HTTPFound(default_redirect_url)
1471 1471
1472 1472 @LoginRequired()
1473 1473 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1474 1474 @CSRFRequired()
1475 1475 def repo_files_upload_file(self):
1476 1476 _ = self.request.translate
1477 1477 c = self.load_default_context()
1478 1478 commit_id, f_path = self._get_commit_and_path()
1479 1479
1480 1480 self._ensure_not_locked()
1481 1481
1482 1482 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1483 1483 if c.commit is None:
1484 1484 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1485 1485
1486 1486 # calculate redirect URL
1487 1487 if self.rhodecode_vcs_repo.is_empty():
1488 1488 default_redirect_url = h.route_path(
1489 1489 'repo_summary', repo_name=self.db_repo_name)
1490 1490 else:
1491 1491 default_redirect_url = h.route_path(
1492 1492 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1493 1493
1494 1494 if self.rhodecode_vcs_repo.is_empty():
1495 1495 # for empty repository we cannot check for current branch, we rely on
1496 1496 # c.commit.branch instead
1497 1497 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1498 1498 else:
1499 1499 _branch_name, _sha_commit_id, is_head = \
1500 1500 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1501 1501 landing_ref=self.db_repo.landing_ref_name)
1502 1502
1503 1503 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1504 1504 if error:
1505 1505 return {
1506 1506 'error': error,
1507 1507 'redirect_url': default_redirect_url
1508 1508 }
1509 1509 error = self.check_branch_permission(_branch_name, json_mode=True)
1510 1510 if error:
1511 1511 return {
1512 1512 'error': error,
1513 1513 'redirect_url': default_redirect_url
1514 1514 }
1515 1515
1516 1516 c.default_message = (_('Added file via RhodeCode Enterprise'))
1517 1517 c.f_path = f_path
1518 1518
1519 1519 r_post = self.request.POST
1520 1520
1521 1521 message = c.default_message
1522 1522 user_message = r_post.getall('message')
1523 1523 if isinstance(user_message, list) and user_message:
1524 1524 # we take the first from duplicated results if it's not empty
1525 1525 message = user_message[0] if user_message[0] else message
1526 1526
1527 1527 nodes = {}
1528 1528
1529 1529 for file_obj in r_post.getall('files_upload') or []:
1530 1530 content = file_obj.file
1531 1531 filename = file_obj.filename
1532 1532
1533 1533 root_path = f_path
1534 1534 pure_path = self.create_pure_path(root_path, filename)
1535 1535 node_path = pure_path.as_posix().lstrip('/')
1536 1536
1537 1537 nodes[safe_bytes(node_path)] = {
1538 1538 'content': content
1539 1539 }
1540 1540
1541 1541 if not nodes:
1542 1542 error = 'missing files'
1543 1543 return {
1544 1544 'error': error,
1545 1545 'redirect_url': default_redirect_url
1546 1546 }
1547 1547
1548 1548 author = self._rhodecode_db_user.full_contact
1549 1549
1550 1550 try:
1551 1551 commit = ScmModel().create_nodes(
1552 1552 user=self._rhodecode_db_user.user_id,
1553 1553 repo=self.db_repo,
1554 1554 message=message,
1555 1555 nodes=nodes,
1556 1556 parent_commit=c.commit,
1557 1557 author=author,
1558 1558 )
1559 1559 if len(nodes) == 1:
1560 1560 flash_message = _('Successfully committed 1 new file')
1561 1561 else:
1562 1562 flash_message = _('Successfully committed {} new files').format(len(nodes))
1563 1563
1564 1564 h.flash(flash_message, category='success')
1565 1565
1566 1566 default_redirect_url = h.route_path(
1567 1567 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1568 1568
1569 1569 except NonRelativePathError:
1570 1570 log.exception('Non Relative path found')
1571 1571 error = _('The location specified must be a relative path and must not '
1572 1572 'contain .. in the path')
1573 1573 h.flash(error, category='warning')
1574 1574
1575 1575 return {
1576 1576 'error': error,
1577 1577 'redirect_url': default_redirect_url
1578 1578 }
1579 1579 except (NodeError, NodeAlreadyExistsError) as e:
1580 1580 error = h.escape(e)
1581 1581 h.flash(error, category='error')
1582 1582
1583 1583 return {
1584 1584 'error': error,
1585 1585 'redirect_url': default_redirect_url
1586 1586 }
1587 1587 except Exception:
1588 1588 log.exception('Error occurred during commit')
1589 1589 error = _('Error occurred during commit')
1590 1590 h.flash(error, category='error')
1591 1591 return {
1592 1592 'error': error,
1593 1593 'redirect_url': default_redirect_url
1594 1594 }
1595 1595
1596 1596 return {
1597 1597 'error': None,
1598 1598 'redirect_url': default_redirect_url
1599 1599 }
1600 1600
1601 1601 @LoginRequired()
1602 1602 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1603 1603 @CSRFRequired()
1604 1604 def repo_files_replace_file(self):
1605 1605 _ = self.request.translate
1606 1606 c = self.load_default_context()
1607 1607 commit_id, f_path = self._get_commit_and_path()
1608 1608
1609 1609 self._ensure_not_locked()
1610 1610
1611 1611 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1612 1612 if c.commit is None:
1613 1613 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1614 1614
1615 1615 if self.rhodecode_vcs_repo.is_empty():
1616 1616 default_redirect_url = h.route_path(
1617 1617 'repo_summary', repo_name=self.db_repo_name)
1618 1618 else:
1619 1619 default_redirect_url = h.route_path(
1620 1620 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1621 1621
1622 1622 if self.rhodecode_vcs_repo.is_empty():
1623 1623 # for empty repository we cannot check for current branch, we rely on
1624 1624 # c.commit.branch instead
1625 1625 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1626 1626 else:
1627 1627 _branch_name, _sha_commit_id, is_head = \
1628 1628 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1629 1629 landing_ref=self.db_repo.landing_ref_name)
1630 1630
1631 1631 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1632 1632 if error:
1633 1633 return {
1634 1634 'error': error,
1635 1635 'redirect_url': default_redirect_url
1636 1636 }
1637 1637 error = self.check_branch_permission(_branch_name, json_mode=True)
1638 1638 if error:
1639 1639 return {
1640 1640 'error': error,
1641 1641 'redirect_url': default_redirect_url
1642 1642 }
1643 1643
1644 1644 c.default_message = (_('Edited file {} via RhodeCode Enterprise').format(f_path))
1645 1645 c.f_path = f_path
1646 1646
1647 1647 r_post = self.request.POST
1648 1648
1649 1649 message = c.default_message
1650 1650 user_message = r_post.getall('message')
1651 1651 if isinstance(user_message, list) and user_message:
1652 1652 # we take the first from duplicated results if it's not empty
1653 1653 message = user_message[0] if user_message[0] else message
1654 1654
1655 1655 data_for_replacement = r_post.getall('files_upload') or []
1656 1656 if (objects_count := len(data_for_replacement)) > 1:
1657 1657 return {
1658 1658 'error': 'too many files for replacement',
1659 1659 'redirect_url': default_redirect_url
1660 1660 }
1661 1661 elif not objects_count:
1662 1662 return {
1663 1663 'error': 'missing files',
1664 1664 'redirect_url': default_redirect_url
1665 1665 }
1666 1666
1667 1667 content = data_for_replacement[0].file
1668 1668 retrieved_filename = data_for_replacement[0].filename
1669 1669
1670 1670 if retrieved_filename.split('.')[-1] != f_path.split('.')[-1]:
1671 1671 return {
1672 1672 'error': 'file extension of the uploaded file doesn\'t match the original file\'s extension',
1673 1673 'redirect_url': default_redirect_url
1674 1674 }
1675 1675
1676 1676 author = self._rhodecode_db_user.full_contact
1677 1677
1678 1678 try:
1679 1679 commit = ScmModel().update_binary_node(
1680 1680 user=self._rhodecode_db_user.user_id,
1681 1681 repo=self.db_repo,
1682 1682 message=message,
1683 1683 node={
1684 1684 'content': content,
1685 1685 'file_path': f_path.encode(),
1686 1686 },
1687 1687 parent_commit=c.commit,
1688 1688 author=author,
1689 1689 )
1690 1690
1691 1691 h.flash(_('Successfully committed 1 new file'), category='success')
1692 1692
1693 1693 default_redirect_url = h.route_path(
1694 1694 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1695 1695
1696 1696 except (NodeError, NodeAlreadyExistsError) as e:
1697 1697 error = h.escape(e)
1698 1698 h.flash(error, category='error')
1699 1699
1700 1700 return {
1701 1701 'error': error,
1702 1702 'redirect_url': default_redirect_url
1703 1703 }
1704 1704 except Exception:
1705 1705 log.exception('Error occurred during commit')
1706 1706 error = _('Error occurred during commit')
1707 1707 h.flash(error, category='error')
1708 1708 return {
1709 1709 'error': error,
1710 1710 'redirect_url': default_redirect_url
1711 1711 }
1712 1712
1713 1713 return {
1714 1714 'error': None,
1715 1715 'redirect_url': default_redirect_url
1716 1716 }
@@ -1,205 +1,221 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import tempfile
21 21 import logging
22 22
23 23 from pyramid.settings import asbool
24 24
25 25 from rhodecode.config.settings_maker import SettingsMaker
26 26 from rhodecode.config import utils as config_utils
27 27
28 28 log = logging.getLogger(__name__)
29 29
30 30
31 31 def sanitize_settings_and_apply_defaults(global_config, settings):
32 32 """
33 33 Applies settings defaults and does all type conversion.
34 34
35 35 The goal is to move all settings parsing and preparation into this place,
36 36 so that we have only one place left that deals with this part. The remaining
37 37 parts of the application would then rely fully on well-prepared settings.
38 38
39 39 This piece would later be split up per topic to avoid a big fat monster
40 40 function.
41 41 """
42 42 jn = os.path.join
43 43
44 44 global_settings_maker = SettingsMaker(global_config)
45 45 global_settings_maker.make_setting('debug', default=False, parser='bool')
46 46 debug_enabled = asbool(global_config.get('debug'))
47 47
48 48 settings_maker = SettingsMaker(settings)
49 49
50 50 settings_maker.make_setting(
51 51 'logging.autoconfigure',
52 52 default=False,
53 53 parser='bool')
54 54
55 55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
56 56 settings_maker.enable_logging(logging_conf, level='DEBUG' if debug_enabled else 'INFO')
57 57
58 58 # Default includes, possible to change as a user
59 59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
60 60 log.debug(
61 61 "Using the following pyramid.includes: %s",
62 62 pyramid_includes)
63 63
64 64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
65 65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
66 66
67 67 if 'mako.default_filters' not in settings:
68 68 # set custom default filters if we don't have it defined
69 69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
70 70 settings['mako.default_filters'] = 'h_filter'
71 71
72 72 if 'mako.directories' not in settings:
73 73 mako_directories = settings.setdefault('mako.directories', [
74 74 # Base templates of the original application
75 75 'rhodecode:templates',
76 76 ])
77 77 log.debug(
78 78 "Using the following Mako template directories: %s",
79 79 mako_directories)
80 80
81 81 # NOTE(marcink): fix redis requirement for connection URL scheme since 3.X
82 82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
83 83 raw_url = settings['beaker.session.url']
84 84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
85 85 settings['beaker.session.url'] = 'redis://' + raw_url
86 86
87 87 settings_maker.make_setting('__file__', global_config.get('__file__'))
88 88
89 89 # TODO: johbo: Re-think this, usually the call to config.include
90 90 # should allow to pass in a prefix.
91 91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
92 92
93 93 # Sanitize generic settings.
94 94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
95 95 settings_maker.make_setting('gzip_responses', False, parser='bool')
96 96 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
97 97
98 98 # statsd
99 99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
100 100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
101 101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
102 102 settings_maker.make_setting('statsd.statsd_prefix', '')
103 103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
104 104
105 105 settings_maker.make_setting('vcs.svn.compatible_version', '')
106 106 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
107 107 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
108 108 settings_maker.make_setting('vcs.hooks.protocol', 'http')
109 109 settings_maker.make_setting('vcs.hooks.host', '*')
110 110 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
111 111 settings_maker.make_setting('vcs.server', '')
112 112 settings_maker.make_setting('vcs.server.protocol', 'http')
113 113 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
114 114 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
115 115 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
116 116 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
117 117 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
118 118
119 119 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
120 120
121 121 # repo_store path
122 122 settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
123 123 # Support legacy values of vcs.scm_app_implementation. Legacy
124 124 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http' or
125 125 # 'vcsserver.scm_app' (disabled since 4.13); both are now mapped to 'http'.
126 126 scm_app_impl = settings['vcs.scm_app_implementation']
127 127 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
128 128 settings['vcs.scm_app_implementation'] = 'http'
129 129
130 130 settings_maker.make_setting('appenlight', False, parser='bool')
131 131
132 132 temp_store = tempfile.gettempdir()
133 133 tmp_cache_dir = jn(temp_store, 'rc_cache')
134 134
135 135 # save the default cache dir, and use it for all backends later.
136 136 default_cache_dir = settings_maker.make_setting(
137 137 'cache_dir',
138 138 default=tmp_cache_dir, default_when_empty=True,
139 139 parser='dir:ensured')
140 140
141 141 # exception store cache
142 142 settings_maker.make_setting(
143 143 'exception_tracker.store_path',
144 144 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
145 145 parser='dir:ensured'
146 146 )
147 147
148 148 settings_maker.make_setting(
149 149 'celerybeat-schedule.path',
150 150 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
151 151 parser='file:ensured'
152 152 )
153 153
154 154 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
155 155 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
156 156
157 157 # sessions, ensure the file backend since the no-value default is memory
158 158 settings_maker.make_setting('beaker.session.type', 'file')
159 159 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
160 160
161 161 # cache_general
162 162 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
163 163 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
164 164 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
165 165
166 166 # cache_perms
167 167 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
168 168 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
169 169 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
170 170
171 171 # cache_repo
172 172 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
173 173 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
174 174 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
175 175
176 176 # cache_license
177 177 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
178 178 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
179 179 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
180 180
181 181 # cache_repo_longterm memory, 96H
182 182 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
183 183 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
184 184 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
185 185
186 186 # sql_cache_short
187 187 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
188 188 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
189 189 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
190 190
191 191 # archive_cache
192 192 settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1')
193 193 settings_maker.make_setting('archive_cache.backend.type', 'filesystem')
194 194
195 195 settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
196 settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int')
196 197 settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
197 settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int')
198 198 settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored')
199 199
200 settings_maker.make_setting('archive_cache.filesystem.retry', False, parser='bool')
201 settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int')
202 settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int')
203
204 settings_maker.make_setting('archive_cache.objectstore.url', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
205 settings_maker.make_setting('archive_cache.objectstore.key', '')
206 settings_maker.make_setting('archive_cache.objectstore.secret', '')
207 settings_maker.make_setting('archive_cache.objectstore.bucket_shards', 8, parser='int')
208
209 settings_maker.make_setting('archive_cache.objectstore.cache_size_gb', 10, parser='float')
210 settings_maker.make_setting('archive_cache.objectstore.eviction_policy', 'least-recently-stored')
211
212 settings_maker.make_setting('archive_cache.objectstore.retry', False, parser='bool')
213 settings_maker.make_setting('archive_cache.objectstore.retry_backoff', 1, parser='int')
214 settings_maker.make_setting('archive_cache.objectstore.retry_attempts', 10, parser='int')
215
200 216 settings_maker.env_expand()
201 217
202 218 # configure instance id
203 219 config_utils.set_instance_id(settings)
204 220
205 221 return settings
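
For orientation, a minimal sketch of selecting the new objectstore backend through these settings before they are handed to sanitize_settings_and_apply_defaults(); the endpoint URL and credentials are illustrative placeholders, not defaults shipped with RhodeCode:

    settings = {
        'archive_cache.backend.type': 'objectstore',
        'archive_cache.locking.url': 'redis://redis:6379/1',
        # hypothetical S3-compatible endpoint and placeholder credentials
        'archive_cache.objectstore.url': 'http://s3-endpoint:9000',
        'archive_cache.objectstore.key': 's3-access-key',
        'archive_cache.objectstore.secret': 's3-secret',
    }
    # all other archive_cache.objectstore.* keys fall back to the defaults
    # applied above, e.g. 8 bucket shards and a 10 GB size limit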
@@ -1,466 +1,466 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import sys
21 21 import collections
22 22
23 23 import time
24 24 import logging.config
25 25
26 26 from paste.gzipper import make_gzip_middleware
27 27 import pyramid.events
28 28 from pyramid.wsgi import wsgiapp
29 29 from pyramid.config import Configurator
30 30 from pyramid.settings import asbool, aslist
31 31 from pyramid.httpexceptions import (
32 32 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
33 33 from pyramid.renderers import render_to_response
34 34
35 35 from rhodecode.model import meta
36 36 from rhodecode.config import patches
37 37
38 38 from rhodecode.config.environment import load_pyramid_environment
39 39
40 40 import rhodecode.events
41 41 from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
42 42 from rhodecode.lib.middleware.vcs import VCSMiddleware
43 43 from rhodecode.lib.request import Request
44 44 from rhodecode.lib.vcs import VCSCommunicationError
45 45 from rhodecode.lib.exceptions import VCSServerUnavailable
46 46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
47 47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 48 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
49 49 from rhodecode.lib.utils2 import AttributeDict
50 50 from rhodecode.lib.exc_tracking import store_exception, format_exc
51 51 from rhodecode.subscribers import (
52 52 scan_repositories_if_enabled, write_js_routes_if_enabled,
53 53 write_metadata_if_needed, write_usage_data)
54 54 from rhodecode.lib.statsd_client import StatsdClient
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 def is_http_error(response):
60 60 # error which should have traceback
61 61 return response.status_code > 499
62 62
63 63
64 64 def should_load_all():
65 65 """
66 66 Returns whether all application components should be loaded. In some cases
67 67 it is desirable to skip app loading for faster shell script execution.
68 68 """
69 69 ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER')
70 70 if ssh_cmd:
71 71 return False
72 72
73 73 return True
74 74
75 75
76 76 def make_pyramid_app(global_config, **settings):
77 77 """
78 78 Constructs the WSGI application based on Pyramid.
79 79
80 80 Specials:
81 81
82 82 * The application can also be integrated like a plugin via the call to
83 83 `includeme`. This is accompanied with the other utility functions which
84 84 are called. Changing this should be done with great care to not break
85 85 cases when these fragments are assembled from another place.
86 86
87 87 """
88 88 start_time = time.time()
89 89 log.info('Pyramid app config starting')
90 90
91 91 sanitize_settings_and_apply_defaults(global_config, settings)
92 92
93 93 # init and bootstrap StatsdClient
94 94 StatsdClient.setup(settings)
95 95
96 96 config = Configurator(settings=settings)
97 97 # Init our statsd at very start
98 98 config.registry.statsd = StatsdClient.statsd
99 99
100 100 # Apply compatibility patches
101 101 patches.inspect_getargspec()
102 102
103 103 load_pyramid_environment(global_config, settings)
104 104
105 105 # Static file view comes first
106 106 includeme_first(config)
107 107
108 108 includeme(config)
109 109
110 110 pyramid_app = config.make_wsgi_app()
111 111 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
112 112 pyramid_app.config = config
113 113
114 114 celery_settings = get_celery_config(settings)
115 115 config.configure_celery(celery_settings)
116 116
117 117 # creating the app uses a connection - return it after we are done
118 118 meta.Session.remove()
119 119
120 120 total_time = time.time() - start_time
121 121 log.info('Pyramid app created and configured in %.2fs', total_time)
122 122 return pyramid_app
123 123
124 124
125 125 def get_celery_config(settings):
126 126 """
127 127 Converts basic ini configuration into celery 4.X options
128 128 """
129 129
130 130 def key_converter(key_name):
131 131 pref = 'celery.'
132 132 if key_name.startswith(pref):
133 133 return key_name[len(pref):].replace('.', '_').lower()
134 134
135 135 def type_converter(parsed_key, value):
136 136 # cast to int
137 137 if value.isdigit():
138 138 return int(value)
139 139
140 140 # cast to bool
141 141 if value.lower() in ['true', 'false']:
142 142 return value.lower() == 'true'
143 143 return value
144 144
145 145 celery_config = {}
146 146 for k, v in settings.items():
147 147 pref = 'celery.'
148 148 if k.startswith(pref):
149 149 celery_config[key_converter(k)] = type_converter(key_converter(k), v)
150 150
151 151 # TODO:rethink if we want to support celerybeat based file config, probably NOT
152 152 # beat_config = {}
153 153 # for section in parser.sections():
154 154 # if section.startswith('celerybeat:'):
155 155 # name = section.split(':', 1)[1]
156 156 # beat_config[name] = get_beat_config(parser, section)
157 157
158 158 # final compose of settings
159 159 celery_settings = {}
160 160
161 161 if celery_config:
162 162 celery_settings.update(celery_config)
163 163 # if beat_config:
164 164 # celery_settings.update({'beat_schedule': beat_config})
165 165
166 166 return celery_settings
167 167
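
To illustrate the two converters above, here is a small sketch of input and output (the broker URL is a placeholder):

    settings = {
        'celery.broker_url': 'redis://redis:6379/8',  # placeholder broker
        'celery.task_always_eager': 'false',
        'celery.worker_concurrency': '4',
    }
    # get_celery_config(settings) strips the 'celery.' prefix and casts types:
    # {'broker_url': 'redis://redis:6379/8',
    #  'task_always_eager': False,
    #  'worker_concurrency': 4}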
168 168
169 169 def not_found_view(request):
170 170 """
171 171 This creates the view which should be registered as not-found-view to
172 172 pyramid.
173 173 """
174 174
175 175 if not getattr(request, 'vcs_call', None):
176 176 # handle like regular case with our error_handler
177 177 return error_handler(HTTPNotFound(), request)
178 178
179 179 # handle not found view as a vcs call
180 180 settings = request.registry.settings
181 181 ae_client = getattr(request, 'ae_client', None)
182 182 vcs_app = VCSMiddleware(
183 183 HTTPNotFound(), request.registry, settings,
184 184 appenlight_client=ae_client)
185 185
186 186 return wsgiapp(vcs_app)(None, request)
187 187
188 188
189 189 def error_handler(exception, request):
190 190 import rhodecode
191 191 from rhodecode.lib import helpers
192 192
193 193 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
194 194
195 195 base_response = HTTPInternalServerError()
196 196 # prefer original exception for the response since it may have headers set
197 197 if isinstance(exception, HTTPException):
198 198 base_response = exception
199 199 elif isinstance(exception, VCSCommunicationError):
200 200 base_response = VCSServerUnavailable()
201 201
202 202 if is_http_error(base_response):
203 203 traceback_info = format_exc(request.exc_info)
204 204 log.error(
205 205 'error occurred handling this request for path: %s, \n%s',
206 206 request.path, traceback_info)
207 207
208 208 error_explanation = base_response.explanation or str(base_response)
209 209 if base_response.status_code == 404:
210 210 error_explanation += " Alternatively, you may not have permission to access this page."
211 211 c = AttributeDict()
212 212 c.error_message = base_response.status
213 213 c.error_explanation = error_explanation
214 214 c.visual = AttributeDict()
215 215
216 216 c.visual.rhodecode_support_url = (
217 217 request.registry.settings.get('rhodecode_support_url') or
218 218 request.route_url('rhodecode_support')
219 219 )
220 220 c.redirect_time = 0
221 221 c.rhodecode_name = rhodecode_title
222 222 if not c.rhodecode_name:
223 223 c.rhodecode_name = 'Rhodecode'
224 224
225 225 c.causes = []
226 226 if is_http_error(base_response):
227 227 c.causes.append('Server is overloaded.')
228 228 c.causes.append('Server database connection is lost.')
229 229 c.causes.append('Server encountered an unhandled error.')
230 230
231 231 if hasattr(base_response, 'causes'):
232 232 c.causes = base_response.causes
233 233
234 234 c.messages = helpers.flash.pop_messages(request=request)
235 235 exc_info = sys.exc_info()
236 236 c.exception_id = id(exc_info)
237 237 c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
238 238 or base_response.status_code > 499
239 239 c.exception_id_url = request.route_url(
240 240 'admin_settings_exception_tracker_show', exception_id=c.exception_id)
241 241
242 242 debug_mode = rhodecode.ConfigGet().get_bool('debug')
243 243 if c.show_exception_id:
244 244 store_exception(c.exception_id, exc_info)
245 245 c.exception_debug = debug_mode
246 246 c.exception_config_ini = rhodecode.CONFIG.get('__file__')
247 247
248 248 if debug_mode:
249 249 try:
250 250 from rich.traceback import install
251 251 install(show_locals=True)
252 252 log.debug('Installing rich tracebacks...')
253 253 except ImportError:
254 254 pass
255 255
256 256 response = render_to_response(
257 257 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
258 258 response=base_response)
259 259
260 260 response.headers["X-RC-Exception-Id"] = str(c.exception_id)
261 261
262 262 statsd = request.registry.statsd
263 263 if statsd and base_response.status_code > 499:
264 264 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
265 265 statsd.incr('rhodecode_exception_total',
266 266 tags=["exc_source:web",
267 267 f"http_code:{base_response.status_code}",
268 268 f"type:{exc_type}"])
269 269
270 270 return response
271 271
272 272
273 273 def includeme_first(config):
274 274 # redirect automatic browser favicon.ico requests to correct place
275 275 def favicon_redirect(context, request):
276 276 return HTTPFound(
277 277 request.static_path('rhodecode:public/images/favicon.ico'))
278 278
279 279 config.add_view(favicon_redirect, route_name='favicon')
280 280 config.add_route('favicon', '/favicon.ico')
281 281
282 282 def robots_redirect(context, request):
283 283 return HTTPFound(
284 284 request.static_path('rhodecode:public/robots.txt'))
285 285
286 286 config.add_view(robots_redirect, route_name='robots')
287 287 config.add_route('robots', '/robots.txt')
288 288
289 289 config.add_static_view(
290 290 '_static/deform', 'deform:static')
291 291 config.add_static_view(
292 292 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
293 293
294 294
295 295 ce_auth_resources = [
296 296 'rhodecode.authentication.plugins.auth_crowd',
297 297 'rhodecode.authentication.plugins.auth_headers',
298 298 'rhodecode.authentication.plugins.auth_jasig_cas',
299 299 'rhodecode.authentication.plugins.auth_ldap',
300 300 'rhodecode.authentication.plugins.auth_pam',
301 301 'rhodecode.authentication.plugins.auth_rhodecode',
302 302 'rhodecode.authentication.plugins.auth_token',
303 303 ]
304 304
305 305
306 306 def includeme(config, auth_resources=None):
307 307 from rhodecode.lib.celerylib.loader import configure_celery
308 308 log.debug('Initializing main includeme from %s', os.path.basename(__file__))
309 309 settings = config.registry.settings
310 310 config.set_request_factory(Request)
311 311
312 312 # plugin information
313 313 config.registry.rhodecode_plugins = collections.OrderedDict()
314 314
315 315 config.add_directive(
316 316 'register_rhodecode_plugin', register_rhodecode_plugin)
317 317
318 318 config.add_directive('configure_celery', configure_celery)
319 319
320 320 if settings.get('appenlight', False):
321 321 config.include('appenlight_client.ext.pyramid_tween')
322 322
323 323 load_all = should_load_all()
324 324
325 325 # Includes which are required. The application would fail without them.
326 326 config.include('pyramid_mako')
327 327 config.include('rhodecode.lib.rc_beaker')
328 328 config.include('rhodecode.lib.rc_cache')
329 config.include('rhodecode.lib.rc_cache.archive_cache')
329 config.include('rhodecode.lib.archive_cache')
330 330
331 331 config.include('rhodecode.apps._base.navigation')
332 332 config.include('rhodecode.apps._base.subscribers')
333 333 config.include('rhodecode.tweens')
334 334 config.include('rhodecode.authentication')
335 335
336 336 if load_all:
337 337
338 338 # load CE authentication plugins
339 339
340 340 if auth_resources:
341 341 ce_auth_resources.extend(auth_resources)
342 342
343 343 for resource in ce_auth_resources:
344 344 config.include(resource)
345 345
346 346 # Auto discover authentication plugins and include their configuration.
347 347 if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')):
348 348 from rhodecode.authentication import discover_legacy_plugins
349 349 discover_legacy_plugins(config)
350 350
351 351 # apps
352 352 if load_all:
353 353 log.debug('Starting config.include() calls')
354 354 config.include('rhodecode.api.includeme')
355 355 config.include('rhodecode.apps._base.includeme')
356 356 config.include('rhodecode.apps._base.navigation.includeme')
357 357 config.include('rhodecode.apps._base.subscribers.includeme')
358 358 config.include('rhodecode.apps.hovercards.includeme')
359 359 config.include('rhodecode.apps.ops.includeme')
360 360 config.include('rhodecode.apps.channelstream.includeme')
361 361 config.include('rhodecode.apps.file_store.includeme')
362 362 config.include('rhodecode.apps.admin.includeme')
363 363 config.include('rhodecode.apps.login.includeme')
364 364 config.include('rhodecode.apps.home.includeme')
365 365 config.include('rhodecode.apps.journal.includeme')
366 366
367 367 config.include('rhodecode.apps.repository.includeme')
368 368 config.include('rhodecode.apps.repo_group.includeme')
369 369 config.include('rhodecode.apps.user_group.includeme')
370 370 config.include('rhodecode.apps.search.includeme')
371 371 config.include('rhodecode.apps.user_profile.includeme')
372 372 config.include('rhodecode.apps.user_group_profile.includeme')
373 373 config.include('rhodecode.apps.my_account.includeme')
374 374 config.include('rhodecode.apps.gist.includeme')
375 375
376 376 config.include('rhodecode.apps.svn_support.includeme')
377 377 config.include('rhodecode.apps.ssh_support.includeme')
378 378 config.include('rhodecode.apps.debug_style')
379 379
380 380 if load_all:
381 381 config.include('rhodecode.integrations.includeme')
382 382 config.include('rhodecode.integrations.routes.includeme')
383 383
384 384 config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
385 385 settings['default_locale_name'] = settings.get('lang', 'en')
386 386 config.add_translation_dirs('rhodecode:i18n/')
387 387
388 388 # Add subscribers.
389 389 if load_all:
390 390 log.debug('Adding subscribers...')
391 391 config.add_subscriber(scan_repositories_if_enabled,
392 392 pyramid.events.ApplicationCreated)
393 393 config.add_subscriber(write_metadata_if_needed,
394 394 pyramid.events.ApplicationCreated)
395 395 config.add_subscriber(write_usage_data,
396 396 pyramid.events.ApplicationCreated)
397 397 config.add_subscriber(write_js_routes_if_enabled,
398 398 pyramid.events.ApplicationCreated)
399 399
400 400
401 401 # Set the default renderer for HTML templates to mako.
402 402 config.add_mako_renderer('.html')
403 403
404 404 config.add_renderer(
405 405 name='json_ext',
406 406 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
407 407
408 408 config.add_renderer(
409 409 name='string_html',
410 410 factory='rhodecode.lib.string_renderer.html')
411 411
412 412 # include RhodeCode plugins
413 413 includes = aslist(settings.get('rhodecode.includes', []))
414 414 log.debug('processing rhodecode.includes data...')
415 415 for inc in includes:
416 416 config.include(inc)
417 417
418 418 # custom not-found view; if our pyramid app doesn't know how to handle
419 419 # the request, pass it to the potential VCS handling app
420 420 config.add_notfound_view(not_found_view)
421 421 if not settings.get('debugtoolbar.enabled', False):
422 422 # with debugtoolbar disabled, handle all exceptions via the error_handler
423 423 config.add_view(error_handler, context=Exception)
424 424
425 425 # all errors including 403/404/50X
426 426 config.add_view(error_handler, context=HTTPError)
427 427
428 428
429 429 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
430 430 """
431 431 Apply outer WSGI middlewares around the application.
432 432 """
433 433 registry = config.registry
434 434 settings = registry.settings
435 435
436 436 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
437 437 pyramid_app = HttpsFixup(pyramid_app, settings)
438 438
439 439 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
440 440 pyramid_app, settings)
441 441 registry.ae_client = _ae_client
442 442
443 443 if settings['gzip_responses']:
444 444 pyramid_app = make_gzip_middleware(
445 445 pyramid_app, settings, compress_level=1)
446 446
447 447 # this should be the outermost middleware in the wsgi stack, since
448 448 # middleware like Routes makes database calls
449 449 def pyramid_app_with_cleanup(environ, start_response):
450 450 start = time.time()
451 451 try:
452 452 return pyramid_app(environ, start_response)
453 453 finally:
454 454 # Dispose current database session and rollback uncommitted
455 455 # transactions.
456 456 meta.Session.remove()
457 457
458 458 # On a single-threaded server with a non-sqlite db, we should see
459 459 # '0 Current Checked out connections' at the end of a request;
460 460 # if not, then something, somewhere is leaving a connection open
461 461 pool = meta.get_engine().pool
462 462 log.debug('sa pool status: %s', pool.status())
463 463 total = time.time() - start
464 464 log.debug('Request processing finalized: %.4fs', total)
465 465
466 466 return pyramid_app_with_cleanup
@@ -1,29 +1,78 b''
1 1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 from .fanout_cache import get_archival_cache_store
20 from .fanout_cache import get_archival_config
19 import logging
20
21 from .backends.fanout_cache import FileSystemFanoutCache
22 from .backends.objectstore_cache import ObjectStoreCache
21 23
22 from .utils import archive_iterator
23 from .utils import ArchiveCacheGenerationLock
24 from .utils import archive_iterator # noqa
25 from .lock import ArchiveCacheGenerationLock # noqa
26
27 log = logging.getLogger(__name__)
28
29
30 cache_meta = None
24 31
25 32
26 33 def includeme(config):
27 34 # init our cache at start
28 35 settings = config.get_settings()
29 36 get_archival_cache_store(settings)
37
38
39 def get_archival_config(config):
40
41 final_config = {
42
43 }
44
45 for k, v in config.items():
46 if k.startswith('archive_cache'):
47 final_config[k] = v
48
49 return final_config
50
51
52 def get_archival_cache_store(config, always_init=False):
53
54 global cache_meta
55 if cache_meta is not None and not always_init:
56 return cache_meta
57
58 config = get_archival_config(config)
59 backend = config['archive_cache.backend.type']
60
61 archive_cache_locking_url = config['archive_cache.locking.url']
62
63 match backend:
64 case 'filesystem':
65 d_cache = FileSystemFanoutCache(
66 locking_url=archive_cache_locking_url,
67 **config
68 )
69 case 'objectstore':
70 d_cache = ObjectStoreCache(
71 locking_url=archive_cache_locking_url,
72 **config
73 )
74 case _:
75 raise ValueError(f'archive_cache.backend.type only supports "filesystem" or "objectstore", got: {backend}')
76
77 cache_meta = d_cache
78 return cache_meta
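
A rough usage sketch of this accessor outside of includeme(); the archive key is illustrative, and store/fetch/__contains__ are assumed to be provided by the backend classes, mirroring the shard API shown further below:

    from rhodecode.lib.archive_cache import get_archival_cache_store

    d_cache = get_archival_cache_store(settings)
    if 'my-archive.zip' in d_cache:
        reader, metadata = d_cache.fetch('my-archive.zip')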
@@ -1,456 +1,166 b''
1 1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import codecs
20 import contextlib
21 import functools
22 import os
20 import hashlib
23 21 import logging
24 import time
25 import typing
26 import zlib
27 import sqlite3
22 import os
23
24 import fsspec
28 25
29 from ...ext_json import json
30 from .lock import GenerationLock
31 from .utils import format_size
26 from .base import BaseCache, BaseShard
27 from ..utils import ShardFileReader, NOT_GIVEN
28 from ...type_utils import str2bool
32 29
33 30 log = logging.getLogger(__name__)
34 31
35 cache_meta = None
36 32
37 UNKNOWN = -241
38 NO_VAL = -917
39
40 MODE_BINARY = 'BINARY'
41
42
43 EVICTION_POLICY = {
44 'none': {
45 'evict': None,
46 },
47 'least-recently-stored': {
48 'evict': 'SELECT {fields} FROM archive_cache ORDER BY store_time',
49 },
50 'least-recently-used': {
51 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_time',
52 },
53 'least-frequently-used': {
54 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_count',
55 },
56 }
57
58
59 class DB:
60
61 def __init__(self):
62 self.connection = sqlite3.connect(':memory:')
63 self._init_db()
64
65 def _init_db(self):
66 qry = '''
67 CREATE TABLE IF NOT EXISTS archive_cache (
68 rowid INTEGER PRIMARY KEY,
69 key_file TEXT,
70 key_file_path TEXT,
71 filename TEXT,
72 full_path TEXT,
73 store_time REAL,
74 access_time REAL,
75 access_count INTEGER DEFAULT 0,
76 size INTEGER DEFAULT 0
77 )
78 '''
79
80 self.sql(qry)
81 self.connection.commit()
82
83 @property
84 def sql(self):
85 return self.connection.execute
86
87 def bulk_insert(self, rows):
88 qry = '''
89 INSERT INTO archive_cache (
90 rowid,
91 key_file,
92 key_file_path,
93 filename,
94 full_path,
95 store_time,
96 access_time,
97 access_count,
98 size
99 )
100 VALUES (
101 ?, ?, ?, ?, ?, ?, ?, ?, ?
102 )
103 '''
104 cursor = self.connection.cursor()
105 cursor.executemany(qry, rows)
106 self.connection.commit()
107
108
109 class FileSystemCache:
33 class FileSystemShard(BaseShard):
110 34
111 35 def __init__(self, index, directory, **settings):
112 36 self._index = index
113 37 self._directory = directory
38 self.storage_type = 'directory'
39 self.fs = fsspec.filesystem('file')
114 40
115 41 @property
116 42 def directory(self):
117 43 """Cache directory."""
118 44 return self._directory
119 45
120 def _write_file(self, full_path, iterator, mode, encoding=None):
121 full_dir, _ = os.path.split(full_path)
46 def _get_keyfile(self, archive_key) -> tuple[str, str]:
47 key_file = f'{archive_key}.{self.key_suffix}'
48 return key_file, os.path.join(self.directory, key_file)
122 49
50 def _get_writer(self, path, mode):
123 51 for count in range(1, 11):
124 with contextlib.suppress(OSError):
125 os.makedirs(full_dir)
126
127 52 try:
128 53 # Another cache may have deleted the directory before
129 54 # the file could be opened.
130 writer = open(full_path, mode, encoding=encoding)
55 return self.fs.open(path, mode)
131 56 except OSError:
132 57 if count == 10:
133 58 # Give up after 10 tries to open the file.
134 59 raise
135 60 continue
136 61
137 with writer:
138 size = 0
139 for chunk in iterator:
140 size += len(chunk)
141 writer.write(chunk)
142 writer.flush()
143 # Get the file descriptor
144 fd = writer.fileno()
145
146 # Sync the file descriptor to disk, helps with NFS cases...
147 os.fsync(fd)
148 log.debug('written new archive cache under %s', full_path)
149 return size
150
151 def _get_keyfile(self, key):
152 return os.path.join(self._directory, f'{key}.key')
62 def _write_file(self, full_path, iterator, mode):
63 # ensure dir exists
64 destination, _ = os.path.split(full_path)
65 if not self.fs.exists(destination):
66 self.fs.makedirs(destination)
153 67
154 def store(self, key, value_reader, metadata):
155 filename, full_path = self.random_filename()
156 key_file = self._get_keyfile(key)
157
158 # STORE METADATA
159 _metadata = {
160 "version": "v1",
161 "filename": filename,
162 "full_path": full_path,
163 "key_file": key_file,
164 "store_time": time.time(),
165 "access_count": 1,
166 "access_time": 0,
167 "size": 0
168 }
169 if metadata:
170 _metadata.update(metadata)
171
172 reader = functools.partial(value_reader.read, 2**22)
68 writer = self._get_writer(full_path, mode)
173 69
174 iterator = iter(reader, b'')
175 size = self._write_file(full_path, iterator, 'xb')
176 metadata['size'] = size
177
178 # after archive is finished, we create a key to save the presence of the binary file
179 with open(key_file, 'wb') as f:
180 f.write(json.dumps(_metadata))
181
182 return key, size, MODE_BINARY, filename, _metadata
183
184 def fetch(self, key, retry=False, retry_attempts=10) -> tuple[typing.BinaryIO, dict]:
185
186 if retry:
187 for attempt in range(retry_attempts):
188 if key in self:
189 break
190 # we didn't find the key, wait 1s, and re-check
191 time.sleep(1)
70 digest = hashlib.sha256()
71 with writer:
72 size = 0
73 for chunk in iterator:
74 size += len(chunk)
75 digest.update(chunk)
76 writer.write(chunk)
77 writer.flush()
78 # Get the file descriptor
79 fd = writer.fileno()
192 80
193 if key not in self:
194 log.exception('requested %s not found in %s', key, self)
195 raise KeyError(key)
196
197 key_file = self._get_keyfile(key)
198 with open(key_file, 'rb') as f:
199 metadata = json.loads(f.read())
200
201 filename = metadata['filename']
81 # Sync the file descriptor to disk, helps with NFS cases...
82 os.fsync(fd)
83 sha256 = digest.hexdigest()
84 log.debug('written new archive cache under %s, sha256: %s', full_path, sha256)
85 return size, sha256
202 86
203 try:
204 return open(os.path.join(self.directory, filename), 'rb'), metadata
205 finally:
206 # update usage stats, count and accessed
207 metadata["access_count"] = metadata.get("access_count", 0) + 1
208 metadata["access_time"] = time.time()
87 def store(self, key, value_reader, metadata: dict | None = None):
88 return self._store(key, value_reader, metadata, mode='xb')
209 89
210 with open(key_file, 'wb') as f:
211 f.write(json.dumps(metadata))
90 def fetch(self, key, retry=NOT_GIVEN, retry_attempts=NOT_GIVEN, retry_backoff=1) -> tuple[ShardFileReader, dict]:
91 return self._fetch(key, retry, retry_attempts, retry_backoff)
92
93 def remove(self, key):
94 return self._remove(key)
212 95
213 96 def random_filename(self):
214 97 """Return filename and full-path tuple for file storage.
215 98
216 99 Filename will be a randomly generated 28 character hexadecimal string
217 100 with ".archive_cache" suffixed. Two levels of sub-directories will be used to
218 101 reduce the size of directories. On older filesystems, lookups in
219 102 directories with many files may be slow.
220 103 """
221 104
222 105 hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8')
223 sub_dir = os.path.join(hex_name[:2], hex_name[2:4])
224 name = hex_name[4:] + '.archive_cache'
225 filename = os.path.join(sub_dir, name)
226 full_path = os.path.join(self.directory, filename)
227 return filename, full_path
228
229 def hash(self, key):
230 """Compute portable hash for `key`.
231
232 :param key: key to hash
233 :return: hash value
234 106
235 """
236 mask = 0xFFFFFFFF
237 return zlib.adler32(key.encode('utf-8')) & mask # noqa
238
239 def __contains__(self, key):
240 """Return `True` if `key` matching item is found in cache.
107 archive_name = hex_name[4:] + '.archive_cache'
108 filename = f"{hex_name[:2]}/{hex_name[2:4]}/{archive_name}"
241 109
242 :param key: key matching item
243 :return: True if key matching item
244
245 """
246 key_file = self._get_keyfile(key)
247 return os.path.exists(key_file)
110 full_path = os.path.join(self.directory, filename)
111 return archive_name, full_path
248 112
249 113 def __repr__(self):
250 return f'FileSystemCache(index={self._index}, dir={self.directory})'
114 return f'{self.__class__.__name__}(index={self._index}, dir={self.directory})'
251 115
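
A sketch of the resulting on-disk layout for an illustrative 32-character hex name:

    # hex_name  = 'deadbeefcafebabe0123456789abcdef'   (illustrative)
    # filename  -> 'de/ad/beefcafebabe0123456789abcdef.archive_cache'
    # full_path -> <shard directory>/de/ad/beefcafebabe0123456789abcdef.archive_cache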
252 116
253 class FanoutCache:
254 """Cache that shards keys and values."""
117 class FileSystemFanoutCache(BaseCache):
255 118
256 def __init__(
257 self, directory=None, **settings
258 ):
259 """Initialize cache instance.
119 def __init__(self, locking_url, **settings):
120 """
121 Initialize file system cache instance.
260 122
261 :param str directory: cache directory
123 :param str locking_url: redis url for a lock
262 124 :param settings: settings dict
263 125
264 126 """
265 if directory is None:
266 raise ValueError('directory cannot be None')
267
268 directory = str(directory)
127 self._locking_url = locking_url
128 self._config = settings
129 cache_dir = self.get_conf('archive_cache.filesystem.store_dir')
130 directory = str(cache_dir)
269 131 directory = os.path.expanduser(directory)
270 132 directory = os.path.expandvars(directory)
271 133 self._directory = directory
134 self._storage_path = directory
272 135
273 self._count = settings.pop('cache_shards')
274 self._locking_url = settings.pop('locking_url')
136 # check if it's ok to write, and re-create the archive cache
137 if not os.path.isdir(self._directory):
138 os.makedirs(self._directory, exist_ok=True)
139
140 self._count = int(self.get_conf('archive_cache.filesystem.cache_shards', pop=True))
275 141
276 self._eviction_policy = settings['cache_eviction_policy']
277 self._cache_size_limit = settings['cache_size_limit']
142 self._eviction_policy = self.get_conf('archive_cache.filesystem.eviction_policy', pop=True)
143 self._cache_size_limit = self.gb_to_bytes(int(self.get_conf('archive_cache.filesystem.cache_size_gb')))
278 144
145 self.retry = str2bool(self.get_conf('archive_cache.filesystem.retry', pop=True))
146 self.retry_attempts = int(self.get_conf('archive_cache.filesystem.retry_attempts', pop=True))
147 self.retry_backoff = int(self.get_conf('archive_cache.filesystem.retry_backoff', pop=True))
148
149 log.debug('Initializing archival cache instance under %s', self._directory)
279 150 self._shards = tuple(
280 FileSystemCache(
151 FileSystemShard(
281 152 index=num,
282 153 directory=os.path.join(directory, 'shard_%03d' % num),
283 154 **settings,
284 155 )
285 156 for num in range(self._count)
286 157 )
287 158 self._hash = self._shards[0].hash
288 159
289 @property
290 def directory(self):
291 """Cache directory."""
292 return self._directory
293
294 def get_lock(self, lock_key):
295 return GenerationLock(lock_key, self._locking_url)
296
297 def _get_shard(self, key) -> FileSystemCache:
160 def _get_shard(self, key) -> FileSystemShard:
298 161 index = self._hash(key) % self._count
299 162 shard = self._shards[index]
300 163 return shard
301 164
302 def store(self, key, value_reader, metadata=None):
303 shard = self._get_shard(key)
304 return shard.store(key, value_reader, metadata)
305
306 def fetch(self, key, retry=False, retry_attempts=10):
307 """Return file handle corresponding to `key` from cache.
308 """
309 shard = self._get_shard(key)
310 return shard.fetch(key, retry=retry, retry_attempts=retry_attempts)
311
312 def has_key(self, key):
313 """Return `True` if `key` matching item is found in cache.
314
315 :param key: key for item
316 :return: True if key is found
317
318 """
319 shard = self._get_shard(key)
320 return key in shard
321
322 def __contains__(self, item):
323 return self.has_key(item)
324
325 def evict(self, policy=None, size_limit=None):
326 """
327 Remove old items based on the conditions
328
329
330 explanation of this algo:
331 iterate over each shard, then for each shard iterate over the .key files and
332 read the stored metadata. This gives us a full list of keys, cached archives, their
333 sizes and access data: creation time and access counts.
334
335 Store that in an in-memory DB so we can run different sorting strategies easily.
336 Summing the sizes is a single SQL query.
337
338 Then we run a sorting strategy based on the eviction policy.
339 We iterate over the sorted keys and remove each one until we are back under the overall limit.
340 """
341
342 policy = policy or self._eviction_policy
343 size_limit = size_limit or self._cache_size_limit
344
345 select_policy = EVICTION_POLICY[policy]['evict']
346
347 log.debug('Running eviction policy \'%s\', and checking for size limit: %s',
348 policy, format_size(size_limit))
349
350 if select_policy is None:
351 return 0
352
353 db = DB()
354
355 data = []
356 cnt = 1
357 for shard in self._shards:
358 for key_file in os.listdir(shard.directory):
359 if key_file.endswith('.key'):
360 key_file_path = os.path.join(shard.directory, key_file)
361 with open(key_file_path, 'rb') as f:
362 metadata = json.loads(f.read())
363
364 size = metadata.get('size')
365 filename = metadata.get('filename')
366 full_path = metadata.get('full_path')
367
368 if not size:
369 # in case we don't have size re-calc it...
370 size = os.stat(full_path).st_size
371
372 data.append([
373 cnt,
374 key_file,
375 key_file_path,
376 filename,
377 full_path,
378 metadata.get('store_time', 0),
379 metadata.get('access_time', 0),
380 metadata.get('access_count', 0),
381 size,
382 ])
383 cnt += 1
384
385 # Insert bulk data using executemany
386 db.bulk_insert(data)
387
388 ((total_size,),) = db.sql('SELECT COALESCE(SUM(size), 0) FROM archive_cache').fetchall()
389 log.debug('Analyzed %s keys, occupied: %s', len(data), format_size(total_size))
390 select_policy_qry = select_policy.format(fields='key_file_path, full_path, size')
391 sorted_keys = db.sql(select_policy_qry).fetchall()
392
393 removed_items = 0
394 removed_size = 0
395 for key, cached_file, size in sorted_keys:
396 # simulate removal impact BEFORE removal
397 total_size -= size
398
399 if total_size <= size_limit:
400 # we obtained what we wanted...
401 break
402
403 os.remove(cached_file)
404 os.remove(key)
405 removed_items += 1
406 removed_size += size
407
408 log.debug('Removed %s cache archives, and reduced size: %s', removed_items, format_size(removed_size))
409 return removed_items
410
411
412 def get_archival_config(config):
413
414 final_config = {
415
416 }
417
418 for k, v in config.items():
419 if k.startswith('archive_cache'):
420 final_config[k] = v
421
422 return final_config
423
424
425 def get_archival_cache_store(config):
426
427 global cache_meta
428 if cache_meta is not None:
429 return cache_meta
430
431 config = get_archival_config(config)
432 backend = config['archive_cache.backend.type']
433 if backend != 'filesystem':
434 raise ValueError('archive_cache.backend.type only supports "filesystem"')
435
436 archive_cache_locking_url = config['archive_cache.locking.url']
437 archive_cache_dir = config['archive_cache.filesystem.store_dir']
438 archive_cache_size_gb = config['archive_cache.filesystem.cache_size_gb']
439 archive_cache_shards = config['archive_cache.filesystem.cache_shards']
440 archive_cache_eviction_policy = config['archive_cache.filesystem.eviction_policy']
441
442 log.debug('Initializing archival cache instance under %s', archive_cache_dir)
443
444 # check if it's ok to write, and re-create the archive cache
445 if not os.path.isdir(archive_cache_dir):
446 os.makedirs(archive_cache_dir, exist_ok=True)
447
448 d_cache = FanoutCache(
449 archive_cache_dir,
450 locking_url=archive_cache_locking_url,
451 cache_shards=archive_cache_shards,
452 cache_size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,
453 cache_eviction_policy=archive_cache_eviction_policy
454 )
455 cache_meta = d_cache
456 return cache_meta
165 def _get_size(self, shard, archive_path):
166 return os.stat(archive_path).st_size
@@ -1,59 +1,62 b''
1 1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import redis
20 from ..._vendor import redis_lock
21 from .utils import ArchiveCacheGenerationLock
20 from .._vendor import redis_lock
21
22
23 class ArchiveCacheGenerationLock(Exception):
24 pass
22 25
23 26
24 27 class GenerationLock:
25 28 """
26 29 Locking mechanism that detects if a lock is acquired
27 30
28 31 with GenerationLock(lock_key):
29 32 compute_archive()
30 33 """
31 34 lock_timeout = 7200
32 35
33 36 def __init__(self, lock_key, url):
34 37 self.lock_key = lock_key
35 38 self._create_client(url)
36 39 self.lock = self.get_lock()
37 40
38 41 def _create_client(self, url):
39 42 connection_pool = redis.ConnectionPool.from_url(url)
40 43 self.writer_client = redis.StrictRedis(
41 44 connection_pool=connection_pool
42 45 )
43 46 self.reader_client = self.writer_client
44 47
45 48 def get_lock(self):
46 49 return redis_lock.Lock(
47 50 redis_client=self.writer_client,
48 51 name=self.lock_key,
49 52 expire=self.lock_timeout,
50 53 strict=True
51 54 )
52 55
53 56 def __enter__(self):
54 57 acquired = self.lock.acquire(blocking=False)
55 58 if not acquired:
56 59 raise ArchiveCacheGenerationLock('Failed to create a lock')
57 60
58 61 def __exit__(self, exc_type, exc_val, exc_tb):
59 62 self.lock.release()
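
A short usage sketch: a held lock means another worker is already generating the same archive, so callers should treat ArchiveCacheGenerationLock as "retry later" rather than a hard failure (the lock key, URL and generation function are placeholders):

    try:
        with GenerationLock(lock_key='archive_my-repo.zip', url='redis://redis:6379/1'):
            compute_archive()  # hypothetical archive generation
    except ArchiveCacheGenerationLock:
        # someone else holds the lock; poll the cache until the archive appears
        pass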
@@ -1,72 +1,134 b''
1 1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 import os
19 import sqlite3
20 import s3fs.core
21
22 NOT_GIVEN = -917
20 23
21 24
22 class ArchiveCacheGenerationLock(Exception):
23 pass
25 EVICTION_POLICY = {
26 'none': {
27 'evict': None,
28 },
29 'least-recently-stored': {
30 'evict': 'SELECT {fields} FROM archive_cache ORDER BY store_time',
31 },
32 'least-recently-used': {
33 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_time',
34 },
35 'least-frequently-used': {
36 'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_count',
37 },
38 }
24 39
25 40
26 41 def archive_iterator(_reader, block_size: int = 4096 * 512):
27 42 # 4096 * 512 = 2MB
28 43 while 1:
29 44 data = _reader.read(block_size)
30 45 if not data:
31 46 break
32 47 yield data
33 48
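
A minimal sketch of streaming a cached archive with this iterator; d_cache and the WSGI response object are assumed to exist in the calling code:

    reader, metadata = d_cache.fetch('my-archive.zip')
    response.app_iter = archive_iterator(reader)  # yields 2MB blocks by default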
34 49
35 def get_directory_statistics(start_path):
36 """
37 total_files, total_size, directory_stats = get_directory_statistics(start_path)
38
39 print(f"Directory statistics for: {start_path}\n")
40 print(f"Total files: {total_files}")
41 print(f"Total size: {format_size(total_size)}\n")
42
43 :param start_path:
44 :return:
45 """
46
47 total_files = 0
48 total_size = 0
49 directory_stats = {}
50
51 for dir_path, dir_names, file_names in os.walk(start_path):
52 dir_size = 0
53 file_count = len(file_names)
54
55 for file in file_names:
56 filepath = os.path.join(dir_path, file)
57 file_size = os.path.getsize(filepath)
58 dir_size += file_size
59
60 directory_stats[dir_path] = {'file_count': file_count, 'size': dir_size}
61 total_files += file_count
62 total_size += dir_size
63
64 return total_files, total_size, directory_stats
65
66
67 50 def format_size(size):
68 51 # Convert size in bytes to a human-readable format (e.g., KB, MB, GB)
69 52 for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
70 53 if size < 1024:
71 54 return f"{size:.2f} {unit}"
72 55 size /= 1024
56
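
For example:

    format_size(2048)          # '2.00 KB'
    format_size(10 * 1024**3)  # '10.00 GB'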
57
58 class StatsDB:
59
60 def __init__(self):
61 self.connection = sqlite3.connect(':memory:')
62 self._init_db()
63
64 def _init_db(self):
65 qry = '''
66 CREATE TABLE IF NOT EXISTS archive_cache (
67 rowid INTEGER PRIMARY KEY,
68 key_file TEXT,
69 key_file_path TEXT,
70 archive_key TEXT,
71 archive_path TEXT,
72 store_time REAL,
73 access_time REAL,
74 access_count INTEGER DEFAULT 0,
75 size INTEGER DEFAULT 0
76 )
77 '''
78
79 self.sql(qry)
80 self.connection.commit()
81
82 @property
83 def sql(self):
84 return self.connection.execute
85
86 def bulk_insert(self, rows):
87 qry = '''
88 INSERT INTO archive_cache (
89 rowid,
90 key_file,
91 key_file_path,
92 archive_key,
93 archive_path,
94 store_time,
95 access_time,
96 access_count,
97 size
98 )
99 VALUES (
100 ?, ?, ?, ?, ?, ?, ?, ?, ?
101 )
102 '''
103 cursor = self.connection.cursor()
104 cursor.executemany(qry, rows)
105 self.connection.commit()
106
107 def get_total_size(self):
108 qry = 'SELECT COALESCE(SUM(size), 0) FROM archive_cache'
109 ((total_size,),) = self.sql(qry).fetchall()
110 return total_size
111
112 def get_sorted_keys(self, select_policy):
113 select_policy_qry = select_policy.format(fields='key_file, archive_key, size')
114 return self.sql(select_policy_qry).fetchall()
115
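
A sketch of how StatsDB drives eviction, following the algorithm described in the removed FanoutCache.evict() above; rows and size_limit are assumed to be built by the caller from each shard's .key files:

    db = StatsDB()
    db.bulk_insert(rows)  # one row per cached archive, read from the .key files
    total_size = db.get_total_size()
    select_policy = EVICTION_POLICY['least-recently-stored']['evict']
    for key_file, archive_key, size in db.get_sorted_keys(select_policy):
        total_size -= size  # simulate removal impact BEFORE removing
        if total_size <= size_limit:
            break
        # remove the archive and its key file here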
116
117 class ShardFileReader:
118
119 def __init__(self, file_like_reader):
120 self._file_like_reader = file_like_reader
121
122 def __getattr__(self, item):
123 if isinstance(self._file_like_reader, s3fs.core.S3File):
124 match item:
125 case 'name':
126 # S3File doesn't expose a name attribute, but we rely on it
127 return self._file_like_reader.full_name
128 case _:
129 return getattr(self._file_like_reader, item)
130 else:
131 return getattr(self._file_like_reader, item)
132
133 def __repr__(self):
134 return f'<{self.__class__.__name__}={self._file_like_reader}>'
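
A short sketch of why this wrapper exists: call sites can read .name uniformly whether the underlying reader is a local file or an S3 object (fs is an assumed fsspec filesystem instance; the path is illustrative):

    reader = ShardFileReader(fs.open('shard_000/de/ad/beef.archive_cache', 'rb'))
    log.debug('serving archive from %s', reader.name)  # full_name for S3File objects
    data = reader.read(4096)  # all other attributes pass straight through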
@@ -1,607 +1,607 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 The base Controller API
21 21 Provides the BaseController class for subclassing. And usage in different
22 22 controllers
23 23 """
24 24
25 25 import logging
26 26 import socket
27 27 import base64
28 28
29 29 import markupsafe
30 30 import ipaddress
31 31
32 32 import paste.httpheaders
33 33 from paste.auth.basic import AuthBasicAuthenticator
34 34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 35
36 36 import rhodecode
37 37 from rhodecode.authentication.base import VCS_TYPE
38 38 from rhodecode.lib import auth, utils2
39 39 from rhodecode.lib import helpers as h
40 40 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
41 41 from rhodecode.lib.exceptions import UserCreationError
42 42 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.lib.str_utils import ascii_bytes, safe_int, safe_str
45 45 from rhodecode.lib.type_utils import aslist, str2bool
46 46 from rhodecode.lib.hash_utils import sha1
47 47 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
48 48 from rhodecode.model.notification import NotificationModel
49 49 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 def _filter_proxy(ip):
55 55 """
56 56 IP addresses passed in HEADERS can be a comma-separated list of multiple
57 57 IPs. Those are appended by the various proxies in the request processing
58 58 chain, the left-most being the original client.
59 59 We only care about the first IP, which came from the original client.
60 60
61 61 :param ip: ip string from headers
62 62 """
63 63 if ',' in ip:
64 64 _ips = ip.split(',')
65 65 _first_ip = _ips[0].strip()
66 66 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
67 67 return _first_ip
68 68 return ip
69 69
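
For example, with a typical X-Forwarded-For chain (documentation addresses):

    _filter_proxy('203.0.113.10, 10.0.0.2, 10.0.0.1')  # -> '203.0.113.10'
    _filter_proxy('203.0.113.10')                      # -> '203.0.113.10'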
70 70
71 71 def _filter_port(ip):
72 72 """
73 73 Removes a port from an IP; there are 4 main cases to handle here.
74 74 - ipv4 eg. 127.0.0.1
75 75 - ipv6 eg. ::1
76 76 - ipv4+port eg. 127.0.0.1:8080
77 77 - ipv6+port eg. [::1]:8080
78 78
79 79 :param ip:
80 80 """
81 81 def is_ipv6(ip_addr):
82 82 if hasattr(socket, 'inet_pton'):
83 83 try:
84 84 socket.inet_pton(socket.AF_INET6, ip_addr)
85 85 except socket.error:
86 86 return False
87 87 else:
88 88 # fallback to ipaddress
89 89 try:
90 90 ipaddress.IPv6Address(safe_str(ip_addr))
91 91 except Exception:
92 92 return False
93 93 return True
94 94
95 95 if ':' not in ip: # must be ipv4 pure ip
96 96 return ip
97 97
98 98 if '[' in ip and ']' in ip: # ipv6 with port
99 99 return ip.split(']')[0][1:].lower()
100 100
101 101 # must be ipv6 or ipv4 with port
102 102 if is_ipv6(ip):
103 103 return ip
104 104 else:
105 105 ip, _port = ip.split(':')[:2] # means ipv4+port
106 106 return ip
107 107
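
The four cases in practice:

    _filter_port('127.0.0.1')       # -> '127.0.0.1'
    _filter_port('127.0.0.1:8080')  # -> '127.0.0.1'
    _filter_port('::1')             # -> '::1'
    _filter_port('[::1]:8080')      # -> '::1'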
108 108
109 109 def get_ip_addr(environ):
110 110 proxy_key = 'HTTP_X_REAL_IP'
111 111 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
112 112 def_key = 'REMOTE_ADDR'
113 113
114 114 def ip_filters(ip_):
115 115 return _filter_port(_filter_proxy(ip_))
116 116
117 117 ip = environ.get(proxy_key)
118 118 if ip:
119 119 return ip_filters(ip)
120 120
121 121 ip = environ.get(proxy_key2)
122 122 if ip:
123 123 return ip_filters(ip)
124 124
125 125 ip = environ.get(def_key, '0.0.0.0')
126 126 return ip_filters(ip)
127 127
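
A combined sketch of the header precedence plus the filters above (illustrative WSGI environ):

    environ = {'HTTP_X_REAL_IP': '203.0.113.10:443',
               'REMOTE_ADDR': '10.0.0.1'}
    get_ip_addr(environ)  # -> '203.0.113.10' (proxy header wins, port stripped)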
128 128
129 129 def get_server_ip_addr(environ, log_errors=True):
130 130 hostname = environ.get('SERVER_NAME')
131 131 try:
132 132 return socket.gethostbyname(hostname)
133 133 except Exception as e:
134 134 if log_errors:
135 135 # in some cases this lookup is not possible, and we don't want to
136 136 # make it an exception in logs
137 137 log.exception('Could not retrieve server ip address: %s', e)
138 138 return hostname
139 139
140 140
141 141 def get_server_port(environ):
142 142 return environ.get('SERVER_PORT')
143 143
144 144
145 145
146 146 def get_user_agent(environ):
147 147 return environ.get('HTTP_USER_AGENT')
148 148
149 149
150 150 def vcs_operation_context(
151 151 environ, repo_name, username, action, scm, check_locking=True,
152 152 is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
153 153 """
154 154 Generate the context for a vcs operation, e.g. push or pull.
155 155
156 156 This context is passed over the layers so that hooks triggered by the
157 157 vcs operation know details like the user, the user's IP address etc.
158 158
159 159 :param check_locking: Allows switching off the computation of the locking
160 160 data. This mainly serves the need of the simplevcs middleware to be
161 161 able to disable this for certain operations.
162 162
163 163 """
164 164 # Tri-state value: False: unlock, None: nothing, True: lock
165 165 make_lock = None
166 166 locked_by = [None, None, None]
167 167 is_anonymous = username == User.DEFAULT_USER
168 168 user = User.get_by_username(username)
169 169 if not is_anonymous and check_locking:
170 170 log.debug('Checking locking on repository "%s"', repo_name)
171 171 repo = Repository.get_by_repo_name(repo_name)
172 172 make_lock, __, locked_by = repo.get_locking_state(
173 173 action, user.user_id)
174 174 user_id = user.user_id
175 175 settings_model = VcsSettingsModel(repo=repo_name)
176 176 ui_settings = settings_model.get_ui_settings()
177 177
178 178 # NOTE(marcink): This should be also in sync with
179 179 # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
180 180 store = [x for x in ui_settings if x.key == '/']
181 181 repo_store = ''
182 182 if store:
183 183 repo_store = store[0].value
184 184
185 185 scm_data = {
186 186 'ip': get_ip_addr(environ),
187 187 'username': username,
188 188 'user_id': user_id,
189 189 'action': action,
190 190 'repository': repo_name,
191 191 'scm': scm,
192 192 'config': rhodecode.CONFIG['__file__'],
193 193 'repo_store': repo_store,
194 194 'make_lock': make_lock,
195 195 'locked_by': locked_by,
196 196 'server_url': utils2.get_server_url(environ),
197 197 'user_agent': get_user_agent(environ),
198 198 'hooks': get_enabled_hook_classes(ui_settings),
199 199 'is_shadow_repo': is_shadow_repo,
200 200 'detect_force_push': detect_force_push,
201 201 'check_branch_perms': check_branch_perms,
202 202 }
203 203 return scm_data
204 204
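# A hypothetical call, e.g. from simplevcs-style middleware handling a pull
# (names and values below are illustrative, not from the original source):
#   scm_data = vcs_operation_context(
#       environ, repo_name='my-repo', username='bob', action='pull',
#       scm='git', check_locking=False)
#   scm_data['action'] -> 'pull'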
205 205
206 206 class BasicAuth(AuthBasicAuthenticator):
207 207
208 208 def __init__(self, realm, authfunc, registry, auth_http_code=None,
209 209 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
210 210 super().__init__(realm=realm, authfunc=authfunc)
211 211 self.realm = realm
212 212 self.rc_realm = rc_realm
213 213 self.initial_call = initial_call_detection
214 214 self.authfunc = authfunc
215 215 self.registry = registry
216 216 self.acl_repo_name = acl_repo_name
217 217 self._rc_auth_http_code = auth_http_code
218 218
219 219 def _get_response_from_code(self, http_code, fallback):
220 220 try:
221 221 return get_exception(safe_int(http_code))
222 222 except Exception:
223 223 log.exception('Failed to fetch response class for code %s, using fallback: %s', http_code, fallback)
224 224 return fallback
225 225
226 226 def get_rc_realm(self):
227 227 return safe_str(self.rc_realm)
228 228
229 229 def build_authentication(self):
230 230 header = [('WWW-Authenticate', f'Basic realm="{self.realm}"')]
231 231
232 232 # NOTE: the initial_call detection seems to be not working/not needed with latest Mercurial;
233 233 # investigate if we still need it.
234 234 if self._rc_auth_http_code and not self.initial_call:
235 235 # return alternative HTTP code if alternative http return code
236 236 # is specified in RhodeCode config, but ONLY if it's not the
237 237 # FIRST call
238 238 custom_response_klass = self._get_response_from_code(self._rc_auth_http_code, fallback=HTTPUnauthorized)
239 239 log.debug('Using custom response class: %s', custom_response_klass)
240 240 return custom_response_klass(headers=header)
241 241 return HTTPUnauthorized(headers=header)
242 242
243 243 def authenticate(self, environ):
244 244 authorization = paste.httpheaders.AUTHORIZATION(environ)
245 245 if not authorization:
246 246 return self.build_authentication()
247 247 (auth_meth, auth_creds_b64) = authorization.split(' ', 1)
248 248 if 'basic' != auth_meth.lower():
249 249 return self.build_authentication()
250 250
251 251 credentials = safe_str(base64.b64decode(auth_creds_b64.strip()))
252 252 _parts = credentials.split(':', 1)
253 253 if len(_parts) == 2:
254 254 username, password = _parts
255 255 auth_data = self.authfunc(
256 256 username, password, environ, VCS_TYPE,
257 257 registry=self.registry, acl_repo_name=self.acl_repo_name)
258 258 if auth_data:
259 259 return {'username': username, 'auth_data': auth_data}
260 260 if username and password:
261 261 # we mark that we actually executed authentication once; at
262 262 # that point we can use the alternative auth code
263 263 self.initial_call = False
264 264
265 265 return self.build_authentication()
266 266
267 267 __call__ = authenticate
268 268
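# The header shape expected by authenticate() (illustrative; 'bob:secret'
# is a hypothetical credential):
#   Authorization: Basic <base64("username:password")>
# e.g. building such an environ entry by hand:
#   environ['HTTP_AUTHORIZATION'] = 'Basic ' + safe_str(base64.b64encode(b'bob:secret'))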
269 269
270 270 def calculate_version_hash(config):
271 271 return sha1(
272 272 config.get(b'beaker.session.secret', b'') + ascii_bytes(rhodecode.__version__)
273 273 )[:8]
274 274
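# A minimal sketch, assuming a hypothetical config dict:
#   calculate_version_hash({b'beaker.session.secret': b's3cr3t'})
# yields the first 8 characters of the sha1 of secret + version, so the
# version itself is not leaked but the hash stays consistent per instance.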
275 275
276 276 def get_current_lang(request):
277 277 return getattr(request, '_LOCALE_', request.locale_name)
278 278
279 279
280 280 def attach_context_attributes(context, request, user_id=None, is_api=None):
281 281 """
282 282 Attach variables into template context called `c`.
283 283 """
284 284 config = request.registry.settings
285 285
286 286 rc_config = SettingsModel().get_all_settings(cache=True, from_request=False)
287 287 context.rc_config = rc_config
288 288 context.rhodecode_version = rhodecode.__version__
289 289 context.rhodecode_edition = config.get('rhodecode.edition')
290 290 context.rhodecode_edition_id = config.get('rhodecode.edition_id')
291 291 # unique secret + version does not leak the version but keeps consistency
292 292 context.rhodecode_version_hash = calculate_version_hash(config)
293 293
294 294 # Default language set for the incoming request
295 295 context.language = get_current_lang(request)
296 296
297 297 # Visual options
298 298 context.visual = AttributeDict({})
299 299
300 300 # DB stored Visual Items
301 301 context.visual.show_public_icon = str2bool(
302 302 rc_config.get('rhodecode_show_public_icon'))
303 303 context.visual.show_private_icon = str2bool(
304 304 rc_config.get('rhodecode_show_private_icon'))
305 305 context.visual.stylify_metatags = str2bool(
306 306 rc_config.get('rhodecode_stylify_metatags'))
307 307 context.visual.dashboard_items = safe_int(
308 308 rc_config.get('rhodecode_dashboard_items', 100))
309 309 context.visual.admin_grid_items = safe_int(
310 310 rc_config.get('rhodecode_admin_grid_items', 100))
311 311 context.visual.show_revision_number = str2bool(
312 312 rc_config.get('rhodecode_show_revision_number', True))
313 313 context.visual.show_sha_length = safe_int(
314 314 rc_config.get('rhodecode_show_sha_length', 100))
315 315 context.visual.repository_fields = str2bool(
316 316 rc_config.get('rhodecode_repository_fields'))
317 317 context.visual.show_version = str2bool(
318 318 rc_config.get('rhodecode_show_version'))
319 319 context.visual.use_gravatar = str2bool(
320 320 rc_config.get('rhodecode_use_gravatar'))
321 321 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
322 322 context.visual.default_renderer = rc_config.get(
323 323 'rhodecode_markup_renderer', 'rst')
324 324 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
325 325 context.visual.rhodecode_support_url = \
326 326 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
327 327
328 328 context.visual.affected_files_cut_off = 60
329 329
330 330 context.pre_code = rc_config.get('rhodecode_pre_code')
331 331 context.post_code = rc_config.get('rhodecode_post_code')
332 332 context.rhodecode_name = rc_config.get('rhodecode_title')
333 333 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
334 334 # if default_encoding is specified in the request, it takes
335 335 # priority
336 336 if request.GET.get('default_encoding'):
337 337 context.default_encodings.insert(0, request.GET.get('default_encoding'))
338 338 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
339 339 context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
340 340 context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')
341 341
342 342 # INI stored
343 343 context.labs_active = str2bool(
344 344 config.get('labs_settings_active', 'false'))
345 345 context.ssh_enabled = str2bool(
346 346 config.get('ssh.generate_authorized_keyfile', 'false'))
347 347 context.ssh_key_generator_enabled = str2bool(
348 348 config.get('ssh.enable_ui_key_generator', 'true'))
349 349
350 350 context.visual.allow_custom_hooks_settings = str2bool(
351 351 config.get('allow_custom_hooks_settings', True))
352 352 context.debug_style = str2bool(config.get('debug_style', False))
353 353
354 354 context.rhodecode_instanceid = config.get('instance_id')
355 355
356 356 context.visual.cut_off_limit_diff = safe_int(
357 357 config.get('cut_off_limit_diff'), default=0)
358 358 context.visual.cut_off_limit_file = safe_int(
359 359 config.get('cut_off_limit_file'), default=0)
360 360
361 361 context.license = AttributeDict({})
362 362 context.license.hide_license_info = str2bool(
363 363 config.get('license.hide_license_info', False))
364 364
365 365 # AppEnlight
366 366 context.appenlight_enabled = config.get('appenlight', False)
367 367 context.appenlight_api_public_key = config.get(
368 368 'appenlight.api_public_key', '')
369 369 context.appenlight_server_url = config.get('appenlight.server_url', '')
370 370
371 371 diffmode = {
372 372 "unified": "unified",
373 373 "sideside": "sideside"
374 374 }.get(request.GET.get('diffmode'))
375 375
376 376 if is_api is None: # not explicitly given, detect API calls via rpc_user
377 377 is_api = hasattr(request, 'rpc_user')
378 378 session_attrs = {
379 379 # defaults
380 380 "clone_url_format": "http",
381 381 "diffmode": "sideside",
382 382 "license_fingerprint": request.session.get('license_fingerprint')
383 383 }
384 384
385 385 if not is_api:
386 386 # don't access pyramid session for API calls
387 387 if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
388 388 request.session['rc_user_session_attr.diffmode'] = diffmode
389 389
390 390 # session settings per user
391 391
392 392 for k, v in list(request.session.items()):
393 393 pref = 'rc_user_session_attr.'
394 394 if k and k.startswith(pref):
395 395 k = k[len(pref):]
396 396 session_attrs[k] = v
397 397
398 398 context.user_session_attrs = session_attrs
399 399
400 400 # JS template context
401 401 context.template_context = {
402 402 'repo_name': None,
403 403 'repo_type': None,
404 404 'repo_landing_commit': None,
405 405 'rhodecode_user': {
406 406 'username': None,
407 407 'email': None,
408 408 'notification_status': False
409 409 },
410 410 'session_attrs': session_attrs,
411 411 'visual': {
412 412 'default_renderer': None
413 413 },
414 414 'commit_data': {
415 415 'commit_id': None
416 416 },
417 417 'pull_request_data': {'pull_request_id': None},
418 418 'timeago': {
419 419 'refresh_time': 120 * 1000,
420 420 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
421 421 },
422 422 'pyramid_dispatch': {
423 423
424 424 },
425 425 'extra': {'plugins': {}}
426 426 }
427 427 # END CONFIG VARS
428 428 if is_api:
429 429 csrf_token = None
430 430 else:
431 431 csrf_token = auth.get_csrf_token(session=request.session)
432 432
433 433 context.csrf_token = csrf_token
434 434 context.backends = list(rhodecode.BACKENDS.keys())
435 435
436 436 unread_count = 0
437 437 user_bookmark_list = []
438 438 if user_id:
439 439 unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
440 440 user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
441 441 context.unread_notifications = unread_count
442 442 context.bookmark_items = user_bookmark_list
443 443
444 444 # web case
445 445 if hasattr(request, 'user'):
446 446 context.auth_user = request.user
447 447 context.rhodecode_user = request.user
448 448
449 449 # api case
450 450 if hasattr(request, 'rpc_user'):
451 451 context.auth_user = request.rpc_user
452 452 context.rhodecode_user = request.rpc_user
453 453
454 454 # attach the whole call context to the request
455 455 request.set_call_context(context)
456 456
457 457
458 458 def get_auth_user(request):
459 459 environ = request.environ
460 460 session = request.session
461 461
462 462 ip_addr = get_ip_addr(environ)
463 463
464 464 # make sure that we update permissions each time we call a controller
465 465 _auth_token = (
466 466 # ?auth_token=XXX
467 467 request.GET.get('auth_token', '')
468 468 # ?api_key=XXX !LEGACY
469 469 or request.GET.get('api_key', '')
470 470 # or headers....
471 471 or request.headers.get('X-Rc-Auth-Token', '')
472 472 )
473 473 if not _auth_token and request.matchdict:
474 474 url_auth_token = request.matchdict.get('_auth_token')
475 475 _auth_token = url_auth_token
476 476 if _auth_token:
477 477 log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])
478 478
479 479 if _auth_token:
480 480 # when using an API KEY we assume the user exists, and
481 481 # doesn't need auth based on cookies.
482 482 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
483 483 authenticated = False
484 484 else:
485 485 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
486 486 try:
487 487 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
488 488 ip_addr=ip_addr)
489 489 except UserCreationError as e:
490 490 h.flash(e, 'error')
491 491 # container auth or other auth functions that create users
492 492 # on the fly can throw this exception, signaling that there's
493 493 # an issue with user creation; an explanation should be provided
494 494 # in the exception itself. We then create a simple blank
495 495 # AuthUser
496 496 auth_user = AuthUser(ip_addr=ip_addr)
497 497
498 498 # in case someone changes a user's password, it triggers a session
499 499 # flush and forces a re-login
500 500 if password_changed(auth_user, session):
501 501 session.invalidate()
502 502 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
503 503 auth_user = AuthUser(ip_addr=ip_addr)
504 504
505 505 authenticated = cookie_store.get('is_authenticated')
506 506
507 507 if not auth_user.is_authenticated and auth_user.is_user_object:
508 508 # user is not authenticated and not empty
509 509 auth_user.set_authenticated(authenticated)
510 510
511 511 return auth_user, _auth_token
512 512
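# Token lookup precedence, per the code above: the ?auth_token= query
# param, then the legacy ?api_key= param, then the X-Rc-Auth-Token header,
# and finally a `_auth_token` extracted from the URL route matchdict.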
513 513
514 514 def h_filter(s):
515 515 """
516 516 Custom filter for Mako templates. Mako uses `markupsafe.escape` by default;
517 517 we wrap this with additional functionality that converts None to an empty
518 518 string
519 519 """
520 520 if s is None:
521 521 return markupsafe.Markup()
522 522 return markupsafe.escape(s)
523 523
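# Illustrative:
#   h_filter(None)       -> Markup('')
#   h_filter('<b>x</b>') -> Markup('&lt;b&gt;x&lt;/b&gt;')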
524 524
525 525 def add_events_routes(config):
526 526 """
527 527 Adds routing that can be used in events. Because some events are triggered
528 528 outside of the pyramid context, we need to bootstrap the request with some
529 529 routing registered
530 530 """
531 531
532 532 from rhodecode.apps._base import ADMIN_PREFIX
533 533
534 534 config.add_route(name='home', pattern='/')
535 535 config.add_route(name='main_page_repos_data', pattern='/_home_repos')
536 536 config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')
537 537
538 538 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
539 539 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
540 540 config.add_route(name='repo_summary', pattern='/{repo_name}')
541 541 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
542 542 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
543 543
544 544 config.add_route(name='pullrequest_show',
545 545 pattern='/{repo_name}/pull-request/{pull_request_id}')
546 546 config.add_route(name='pull_requests_global',
547 547 pattern='/pull-request/{pull_request_id}')
548 548
549 549 config.add_route(name='repo_commit',
550 550 pattern='/{repo_name}/changeset/{commit_id}')
551 551 config.add_route(name='repo_files',
552 552 pattern='/{repo_name}/files/{commit_id}/{f_path}')
553 553
554 554 config.add_route(name='hovercard_user',
555 555 pattern='/_hovercard/user/{user_id}')
556 556
557 557 config.add_route(name='hovercard_user_group',
558 558 pattern='/_hovercard/user_group/{user_group_id}')
559 559
560 560 config.add_route(name='hovercard_pull_request',
561 561 pattern='/_hovercard/pull_request/{pull_request_id}')
562 562
563 563 config.add_route(name='hovercard_repo_commit',
564 564 pattern='/_hovercard/commit/{repo_name}/{commit_id}')
565 565
566 566
567 567 def bootstrap_config(request, registry_name='RcTestRegistry'):
568 568 from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
569 569 import pyramid.testing
570 570 registry = pyramid.testing.Registry(registry_name)
571 571
572 572 global_config = {'__file__': ''}
573 573
574 574 config = pyramid.testing.setUp(registry=registry, request=request)
575 575 sanitize_settings_and_apply_defaults(global_config, config.registry.settings)
576 576
577 577 # allow pyramid lookup in testing
578 578 config.include('pyramid_mako')
579 579 config.include('rhodecode.lib.rc_beaker')
580 580 config.include('rhodecode.lib.rc_cache')
581 config.include('rhodecode.lib.rc_cache.archive_cache')
581 config.include('rhodecode.lib.archive_cache')
582 582 add_events_routes(config)
583 583
584 584 return config
585 585
586 586
587 587 def bootstrap_request(**kwargs):
588 588 """
589 589 Returns a thin version of the Request object for use in non-web contexts like testing/celery
590 590 """
591 591
592 592 import pyramid.testing
593 593 from rhodecode.lib.request import ThinRequest as _ThinRequest
594 594
595 595 class ThinRequest(_ThinRequest):
596 596 application_url = kwargs.pop('application_url', 'http://example.com')
597 597 host = kwargs.pop('host', 'example.com:80')
598 598 domain = kwargs.pop('domain', 'example.com')
599 599
600 600 class ThinSession(pyramid.testing.DummySession):
601 601 def save(*arg, **kw):
602 602 pass
603 603
604 604 request = ThinRequest(**kwargs)
605 605 request.session = ThinSession()
606 606
607 607 return request
@@ -1,2198 +1,2230 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Helper functions
21 21
22 22 Consists of functions typically used within templates, but also
23 23 available to Controllers. This module is available to both as 'h'.
24 24 """
25 25 import base64
26 26 import collections
27 27
28 28 import os
29 29 import random
30 30 import hashlib
31 31 import io
32 32 import textwrap
33 33 import urllib.request
34 34 import urllib.parse
35 35 import urllib.error
36 36 import math
37 37 import logging
38 38 import re
39 39 import time
40 40 import string
41 41 import regex
42 42 from collections import OrderedDict
43 43
44 44 import pygments
45 45 import itertools
46 46 import fnmatch
47 47
48 48 from datetime import datetime
49 49 from functools import partial
50 50 from pygments.formatters.html import HtmlFormatter
51 51 from pygments.lexers import (
52 52 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
53 53
54 54 from pyramid.threadlocal import get_current_request
55 55 from tempita import looper
56 56 from webhelpers2.html import literal, HTML, escape
57 57 from webhelpers2.html._autolink import _auto_link_urls
58 58 from webhelpers2.html.tools import (
59 59 button_to, highlight, js_obfuscate, strip_links, strip_tags)
60 60
61 61 from webhelpers2.text import (
62 62 chop_at, collapse, convert_accented_entities,
63 63 convert_misc_entities, lchop, plural, rchop, remove_formatting,
64 64 replace_whitespace, urlify, truncate, wrap_paragraphs)
65 65 from webhelpers2.date import time_ago_in_words
66 66
67 67 from webhelpers2.html.tags import (
68 68 _input, NotGiven, _make_safe_id_component as safeid,
69 69 form as insecure_form,
70 70 auto_discovery_link, checkbox, end_form, file,
71 71 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
72 72 stylesheet_link, submit, text, password, textarea,
73 73 ul, radio, Options)
74 74
75 75 from webhelpers2.number import format_byte_size
76 76 # python3.11 backport fixes for webhelpers2
77 77 from rhodecode import ConfigGet
78 78 from rhodecode.lib._vendor.webhelpers_backports import raw_select
79 79
80 80 from rhodecode.lib.action_parser import action_parser
81 81 from rhodecode.lib.html_filters import sanitize_html
82 82 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
83 83 from rhodecode.lib import ext_json
84 84 from rhodecode.lib.ext_json import json
85 85 from rhodecode.lib.str_utils import safe_bytes, convert_special_chars, base64_to_str
86 86 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
87 87 from rhodecode.lib.str_utils import safe_str
88 88 from rhodecode.lib.utils2 import (
89 89 str2bool,
90 90 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
91 91 AttributeDict, safe_int, md5, md5_safe, get_host_info)
92 92 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
93 93 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
94 94 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
95 95 from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
96 96 from rhodecode.lib.index.search_utils import get_matching_line_offsets
97 97 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
98 98 from rhodecode.model.changeset_status import ChangesetStatusModel
99 99 from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
100 100 from rhodecode.model.repo_group import RepoGroupModel
101 101 from rhodecode.model.settings import IssueTrackerSettingsModel
102 102
103 103
104 104 log = logging.getLogger(__name__)
105 105
106 106
107 107 DEFAULT_USER = User.DEFAULT_USER
108 108 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
109 109
110 110
111 111 def asset(path, ver=None, **kwargs):
112 112 """
113 113 Helper to generate a static asset file path for rhodecode assets
114 114
115 115 eg. h.asset('images/image.png', ver='3923')
116 116
117 117 :param path: path of asset
118 118 :param ver: optional version query param to append as ?ver=
119 119 """
120 120 request = get_current_request()
121 121 query = {}
122 122 query.update(kwargs)
123 123 if ver:
124 124 query['ver'] = ver
125 125 return request.static_path(
126 126 f'rhodecode:public/{path}', _query=query)
127 127
128 128
129 129 default_html_escape_table = {
130 130 ord('&'): '&amp;',
131 131 ord('<'): '&lt;',
132 132 ord('>'): '&gt;',
133 133 ord('"'): '&quot;',
134 134 ord("'"): '&#39;',
135 135 }
136 136
137 137
138 138 def html_escape(text, html_escape_table=default_html_escape_table):
139 139 """Produce entities within text."""
140 140 return text.translate(html_escape_table)
141 141
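# Illustrative:
#   html_escape("<a href='x'>") -> '&lt;a href=&#39;x&#39;&gt;'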
142 142
143 143 def str_json(*args, **kwargs):
144 144 return ext_json.str_json(*args, **kwargs)
145 145
146 146
147 147 def formatted_str_json(*args, **kwargs):
148 148 return ext_json.formatted_str_json(*args, **kwargs)
149 149
150 150
151 151 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
152 152 """
153 153 Truncate string ``s`` at the first occurrence of ``sub``.
154 154
155 155 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
156 156 """
157 157 suffix_if_chopped = suffix_if_chopped or ''
158 158 pos = s.find(sub)
159 159 if pos == -1:
160 160 return s
161 161
162 162 if inclusive:
163 163 pos += len(sub)
164 164
165 165 chopped = s[:pos]
166 166 left = s[pos:].strip()
167 167
168 168 if left and suffix_if_chopped:
169 169 chopped += suffix_if_chopped
170 170
171 171 return chopped
172 172
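# Illustrative:
#   chop_at_smart('foo/bar/baz', '/')                          -> 'foo'
#   chop_at_smart('foo/bar/baz', '/', inclusive=True)          -> 'foo/'
#   chop_at_smart('foo/bar/baz', '/', suffix_if_chopped='...') -> 'foo...'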
173 173
174 174 def shorter(text, size=20, prefix=False):
175 175 postfix = '...'
176 176 if len(text) > size:
177 177 if prefix:
178 178 # shorten in front
179 179 return postfix + text[-(size - len(postfix)):]
180 180 else:
181 181 return text[:size - len(postfix)] + postfix
182 182 return text
183 183
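# Illustrative:
#   shorter('abcdefghijklmnop', size=10)              -> 'abcdefg...'
#   shorter('abcdefghijklmnop', size=10, prefix=True) -> '...jklmnop'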
184 184
185 185 def reset(name, value=None, id=NotGiven, type="reset", **attrs):
186 186 """
187 187 Reset button
188 188 """
189 189 return _input(type, name, value, id, attrs)
190 190
191 191
192 192 def select(name, selected_values, options, id=NotGiven, **attrs):
193 193
194 194 if isinstance(options, (list, tuple)):
195 195 options_iter = options
196 196 # Handle old value,label lists ... where value also can be value,label lists
197 197 options = Options()
198 198 for opt in options_iter:
199 199 if isinstance(opt, tuple) and len(opt) == 2:
200 200 value, label = opt
201 201 elif isinstance(opt, str):
202 202 value = label = opt
203 203 else:
204 204 raise ValueError('invalid select option type %r' % type(opt))
205 205
206 206 if isinstance(value, (list, tuple)):
207 207 option_group = options.add_optgroup(label)
208 208 for opt2 in value:
209 209 if isinstance(opt2, tuple) and len(opt2) == 2:
210 210 group_value, group_label = opt2
211 211 elif isinstance(opt2, str):
212 212 group_value = group_label = opt2
213 213 else:
214 214 raise ValueError('invalid select option type %r' % type(opt2))
215 215
216 216 option_group.add_option(group_label, group_value)
217 217 else:
218 218 options.add_option(label, value)
219 219
220 220 return raw_select(name, selected_values, options, id=id, **attrs)
221 221
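# Illustrative, with hypothetical options:
#   select('lang', 'py', [('py', 'Python'), ('go', 'Go')])
# renders a <select name="lang"> with the "Python" option selected.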
222 222
223 223 def branding(name, length=40):
224 224 return truncate(name, length, indicator="")
225 225
226 226
227 227 def FID(raw_id, path):
228 228 """
229 229 Creates a unique ID for a filenode based on a hash of its path and commit;
230 230 it's safe to use in URLs
231 231
232 232 :param raw_id:
233 233 :param path:
234 234 """
235 235
236 236 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
237 237
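# Illustrative shape of the result (hypothetical raw_id):
#   FID('deadbeefcafe0123456789ab', 'docs/index.rst')
#   -> 'c-deadbeefcafe-<first 12 hex chars of md5(path)>'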
238 238
239 239 class _GetError(object):
240 240 """Get error from form_errors, and represent it as span wrapped error
241 241 message
242 242
243 243 :param field_name: field to fetch errors for
244 244 :param form_errors: form errors dict
245 245 """
246 246
247 247 def __call__(self, field_name, form_errors):
248 248 tmpl = """<span class="error_msg">%s</span>"""
249 249 if form_errors and field_name in form_errors:
250 250 return literal(tmpl % form_errors.get(field_name))
251 251
252 252
253 253 get_error = _GetError()
254 254
255 255
256 256 class _ToolTip(object):
257 257
258 258 def __call__(self, tooltip_title, trim_at=50):
259 259 """
260 260 Special function just to wrap our text into nicely formatted,
261 261 auto-wrapped text
262 262
263 263 :param tooltip_title:
264 264 """
265 265 tooltip_title = escape(tooltip_title)
266 266 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
267 267 return tooltip_title
268 268
269 269
270 270 tooltip = _ToolTip()
271 271
272 272 files_icon = '<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>'
273 273
274 274
275 275 def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None,
276 276 limit_items=False, linkify_last_item=False, hide_last_item=False,
277 277 copy_path_icon=True):
278 278
279 279 if at_ref:
280 280 route_qry = {'at': at_ref}
281 281 default_landing_ref = at_ref or landing_ref_name or commit_id
282 282 else:
283 283 route_qry = None
284 284 default_landing_ref = commit_id
285 285
286 286 # first segment is a `HOME` link to repo files root location
287 287 root_name = literal('<i class="icon-home"></i>')
288 288
289 289 url_segments = [
290 290 link_to(
291 291 root_name,
292 292 repo_files_by_ref_url(
293 293 repo_name,
294 294 repo_type,
295 295 f_path=None, # None here is a special case for SVN repos,
296 296 # that won't prefix with a ref
297 297 ref_name=default_landing_ref,
298 298 commit_id=commit_id,
299 299 query=route_qry
300 300 )
301 301 )]
302 302
303 303 path_segments = file_path.split('/')
304 304 last_cnt = len(path_segments) - 1
305 305 for cnt, segment in enumerate(path_segments):
306 306 if not segment:
307 307 continue
308 308 segment_html = escape(segment)
309 309
310 310 last_item = cnt == last_cnt
311 311
312 312 if last_item and hide_last_item:
313 313 # skip the last element to hide it
314 314 continue
315 315
316 316 if last_item and linkify_last_item is False:
317 317 # plain version
318 318 url_segments.append(segment_html)
319 319 else:
320 320 url_segments.append(
321 321 link_to(
322 322 segment_html,
323 323 repo_files_by_ref_url(
324 324 repo_name,
325 325 repo_type,
326 326 f_path='/'.join(path_segments[:cnt + 1]),
327 327 ref_name=default_landing_ref,
328 328 commit_id=commit_id,
329 329 query=route_qry
330 330 ),
331 331 ))
332 332
333 333 limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
334 334 if limit_items and len(limited_url_segments) < len(url_segments):
335 335 url_segments = limited_url_segments
336 336
337 337 full_path = file_path
338 338 if copy_path_icon:
339 339 icon = files_icon.format(escape(full_path))
340 340 else:
341 341 icon = ''
342 342
343 343 if file_path == '':
344 344 return root_name
345 345 else:
346 346 return literal(' / '.join(url_segments) + icon)
347 347
348 348
349 349 def files_url_data(request):
350 350 matchdict = request.matchdict
351 351
352 352 if 'f_path' not in matchdict:
353 353 matchdict['f_path'] = ''
354 354 else:
355 355 matchdict['f_path'] = urllib.parse.quote(safe_str(matchdict['f_path']))
356 356 if 'commit_id' not in matchdict:
357 357 matchdict['commit_id'] = 'tip'
358 358
359 359 return ext_json.str_json(matchdict)
360 360
361 361
362 362 def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ):
363 363 _is_svn = is_svn(db_repo_type)
364 364 final_f_path = f_path
365 365
366 366 if _is_svn:
367 367 """
368 368 For SVN the ref_name cannot be used as a commit_id; it needs to be the
369 369 actual commit_id followed by the ref_name. This should be done only when
370 370 this is an initial landing url, without additional paths.
371 371
372 372 like: /1000/tags/1.0.0/?at=tags/1.0.0
373 373 """
374 374
375 375 if ref_name and ref_name != 'tip':
376 376 # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it
377 377 # for SVN we only do this magic prefix if it's the root, e.g. the landing revision
378 378 # of a files link. If we are in the tree we don't need this, since we traverse a url
379 379 # that has everything stored
380 380 if f_path in ['', '/']:
381 381 final_f_path = '/'.join([ref_name, f_path])
382 382
383 383 # SVN always needs a commit_id explicitly, without a named REF
384 384 default_commit_id = commit_id
385 385 else:
386 386 """
387 387 For git and mercurial we construct a new URL using the names instead of commit_id
388 388 like: /master/some_path?at=master
389 389 """
390 390 # We currently do not support branches with slashes
391 391 if '/' in ref_name:
392 392 default_commit_id = commit_id
393 393 else:
394 394 default_commit_id = ref_name
395 395
396 396 # sometimes we pass f_path as None to indicate an explicit no-prefix;
397 397 # we translate it to a string so as not to pass None
398 398 final_f_path = final_f_path or ''
399 399
400 400 files_url = route_path(
401 401 'repo_files',
402 402 repo_name=db_repo_name,
403 403 commit_id=default_commit_id,
404 404 f_path=final_f_path,
405 405 _query=query
406 406 )
407 407 return files_url
408 408
409 409
410 410 def code_highlight(code, lexer, formatter, use_hl_filter=False):
411 411 """
412 412 Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
413 413
414 414 If ``use_hl_filter`` is set, an additional search-highlight filter is
415 415 added to the lexer. The result is returned as a string.
417 417 """
418 418 if use_hl_filter:
419 419 # add HL filter
420 420 from rhodecode.lib.index import search_utils
421 421 lexer.add_filter(search_utils.ElasticSearchHLFilter())
422 422 return pygments.format(pygments.lex(code, lexer), formatter)
423 423
424 424
425 425 class CodeHtmlFormatter(HtmlFormatter):
426 426 """
427 427 My code Html Formatter for source code
428 428 """
429 429
430 430 def wrap(self, source):
431 431 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
432 432
433 433 def _wrap_code(self, source):
434 434 for cnt, it in enumerate(source):
435 435 i, t = it
436 436 t = f'<div id="L{cnt+1}">{t}</div>'
437 437 yield i, t
438 438
439 439 def _wrap_tablelinenos(self, inner):
440 440 dummyoutfile = io.StringIO()
441 441 lncount = 0
442 442 for t, line in inner:
443 443 if t:
444 444 lncount += 1
445 445 dummyoutfile.write(line)
446 446
447 447 fl = self.linenostart
448 448 mw = len(str(lncount + fl - 1))
449 449 sp = self.linenospecial
450 450 st = self.linenostep
451 451 la = self.lineanchors
452 452 aln = self.anchorlinenos
453 453 nocls = self.noclasses
454 454 if sp:
455 455 lines = []
456 456
457 457 for i in range(fl, fl + lncount):
458 458 if i % st == 0:
459 459 if i % sp == 0:
460 460 if aln:
461 461 lines.append('<a href="#%s%d" class="special">%*d</a>' %
462 462 (la, i, mw, i))
463 463 else:
464 464 lines.append('<span class="special">%*d</span>' % (mw, i))
465 465 else:
466 466 if aln:
467 467 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
468 468 else:
469 469 lines.append('%*d' % (mw, i))
470 470 else:
471 471 lines.append('')
472 472 ls = '\n'.join(lines)
473 473 else:
474 474 lines = []
475 475 for i in range(fl, fl + lncount):
476 476 if i % st == 0:
477 477 if aln:
478 478 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
479 479 else:
480 480 lines.append('%*d' % (mw, i))
481 481 else:
482 482 lines.append('')
483 483 ls = '\n'.join(lines)
484 484
485 485 # in case you wonder about the seemingly redundant <div> here: since the
486 486 # content in the other cell also is wrapped in a div, some browsers in
487 487 # some configurations seem to mess up the formatting...
488 488 if nocls:
489 489 yield 0, ('<table class="%stable">' % self.cssclass +
490 490 '<tr><td><div class="linenodiv" '
491 491 'style="background-color: #f0f0f0; padding-right: 10px">'
492 492 '<pre style="line-height: 125%">' +
493 493 ls + '</pre></div></td><td id="hlcode" class="code">')
494 494 else:
495 495 yield 0, ('<table class="%stable">' % self.cssclass +
496 496 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
497 497 ls + '</pre></div></td><td id="hlcode" class="code">')
498 498 yield 0, dummyoutfile.getvalue()
499 499 yield 0, '</td></tr></table>'
500 500
501 501
502 502 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
503 503 def __init__(self, **kw):
504 504 # only show these line numbers if set
505 505 self.only_lines = kw.pop('only_line_numbers', [])
506 506 self.query_terms = kw.pop('query_terms', [])
507 507 self.max_lines = kw.pop('max_lines', 5)
508 508 self.line_context = kw.pop('line_context', 3)
509 509 self.url = kw.pop('url', None)
510 510
511 511 super(CodeHtmlFormatter, self).__init__(**kw)
512 512
513 513 def _wrap_code(self, source):
514 514 for cnt, it in enumerate(source):
515 515 i, t = it
516 516 t = '<pre>%s</pre>' % t
517 517 yield i, t
518 518
519 519 def _wrap_tablelinenos(self, inner):
520 520 yield 0, '<table class="code-highlight %stable">' % self.cssclass
521 521
522 522 last_shown_line_number = 0
523 523 current_line_number = 1
524 524
525 525 for t, line in inner:
526 526 if not t:
527 527 yield t, line
528 528 continue
529 529
530 530 if current_line_number in self.only_lines:
531 531 if last_shown_line_number + 1 != current_line_number:
532 532 yield 0, '<tr>'
533 533 yield 0, '<td class="line">...</td>'
534 534 yield 0, '<td id="hlcode" class="code"></td>'
535 535 yield 0, '</tr>'
536 536
537 537 yield 0, '<tr>'
538 538 if self.url:
539 539 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
540 540 self.url, current_line_number, current_line_number)
541 541 else:
542 542 yield 0, '<td class="line"><a href="">%i</a></td>' % (
543 543 current_line_number)
544 544 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
545 545 yield 0, '</tr>'
546 546
547 547 last_shown_line_number = current_line_number
548 548
549 549 current_line_number += 1
550 550
551 551 yield 0, '</table>'
552 552
553 553
554 554 def hsv_to_rgb(h, s, v):
555 555 """ Convert hsv color values to rgb """
556 556
557 557 if s == 0.0:
558 558 return v, v, v
559 559 i = int(h * 6.0) # XXX assume int() truncates!
560 560 f = (h * 6.0) - i
561 561 p = v * (1.0 - s)
562 562 q = v * (1.0 - s * f)
563 563 t = v * (1.0 - s * (1.0 - f))
564 564 i = i % 6
565 565 if i == 0:
566 566 return v, t, p
567 567 if i == 1:
568 568 return q, v, p
569 569 if i == 2:
570 570 return p, v, t
571 571 if i == 3:
572 572 return p, q, v
573 573 if i == 4:
574 574 return t, p, v
575 575 if i == 5:
576 576 return v, p, q
577 577
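# Illustrative:
#   hsv_to_rgb(0.0, 1.0, 1.0) -> (1.0, 0.0, 0.0)   # pure red
#   hsv_to_rgb(0.0, 0.0, 0.5) -> (0.5, 0.5, 0.5)   # grey (s == 0 short-circuit)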
578 578
579 579 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
580 580 """
581 581 Generator for getting n evenly distributed colors using
582 582 hsv color and the golden ratio. It always returns the same order of colors
583 583
584 584 :param n: number of colors to generate
585 585 :param saturation: saturation of returned colors
586 586 :param lightness: lightness of returned colors
587 587 :returns: RGB tuple
588 588 """
589 589
590 590 golden_ratio = 0.618033988749895
591 591 h = 0.22717784590367374
592 592
593 593 for _ in range(n):
594 594 h += golden_ratio
595 595 h %= 1
596 596 HSV_tuple = [h, saturation, lightness]
597 597 RGB_tuple = hsv_to_rgb(*HSV_tuple)
598 598 yield [str(int(x * 256)) for x in RGB_tuple]
599 599
600 600
601 601 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
602 602 """
603 603 Returns a function which, when called with an argument, returns a unique
604 604 color for that argument, e.g.
605 605
606 606 :param n: number of colors to generate
607 607 :param saturation: saturation of returned colors
608 608 :param lightness: lightness of returned colors
609 609 :returns: css RGB string
610 610
611 611 >>> color_hash = color_hasher()
612 612 >>> color_hash('hello')
613 613 'rgb(34, 12, 59)'
614 614 >>> color_hash('hello')
615 615 'rgb(34, 12, 59)'
616 616 >>> color_hash('other')
617 617 'rgb(90, 224, 159)'
618 618 """
619 619
620 620 color_dict = {}
621 621 cgenerator = unique_color_generator(
622 622 saturation=saturation, lightness=lightness)
623 623
624 624 def get_color_string(thing):
625 625 if thing in color_dict:
626 626 col = color_dict[thing]
627 627 else:
628 628 col = color_dict[thing] = next(cgenerator)
629 629 return "rgb(%s)" % (', '.join(col))
630 630
631 631 return get_color_string
632 632
633 633
634 634 def get_lexer_safe(mimetype=None, filepath=None):
635 635 """
636 636 Tries to return a relevant pygments lexer using mimetype/filepath name,
637 637 defaulting to plain text if none could be found
638 638 """
639 639 lexer = None
640 640 try:
641 641 if mimetype:
642 642 lexer = get_lexer_for_mimetype(mimetype)
643 643 if not lexer:
644 644 lexer = get_lexer_for_filename(filepath)
645 645 except pygments.util.ClassNotFound:
646 646 pass
647 647
648 648 if not lexer:
649 649 lexer = get_lexer_by_name('text')
650 650
651 651 return lexer
652 652
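# Illustrative:
#   get_lexer_safe(mimetype='text/x-python')      -> a Python lexer
#   get_lexer_safe(filepath='nonexistent.xyz123') -> plain 'text' lexer (fallback)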
653 653
654 654 def get_lexer_for_filenode(filenode):
655 655 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
656 656 return lexer
657 657
658 658
659 659 def pygmentize(filenode, **kwargs):
660 660 """
661 661 pygmentize function using pygments
662 662
663 663 :param filenode:
664 664 """
665 665 lexer = get_lexer_for_filenode(filenode)
666 666 return literal(code_highlight(filenode.content, lexer,
667 667 CodeHtmlFormatter(**kwargs)))
668 668
669 669
670 670 def is_following_repo(repo_name, user_id):
671 671 from rhodecode.model.scm import ScmModel
672 672 return ScmModel().is_following_repo(repo_name, user_id)
673 673
674 674
675 675 class _Message(object):
676 676 """A message returned by ``Flash.pop_messages()``.
677 677
678 678 Converting the message to a string returns the message text. Instances
679 679 also have the following attributes:
680 680
681 681 * ``message``: the message text.
682 682 * ``category``: the category specified when the message was created.
683 683 """
684 684
685 685 def __init__(self, category, message, sub_data=None):
686 686 self.category = category
687 687 self.message = message
688 688 self.sub_data = sub_data or {}
689 689
690 690 def __str__(self):
691 691 return self.message
692 692
693 693 __unicode__ = __str__
694 694
695 695 def __html__(self):
696 696 return escape(safe_str(self.message))
697 697
698 698
699 699 class Flash(object):
700 700 # List of allowed categories. If None, allow any category.
701 701 categories = ["warning", "notice", "error", "success"]
702 702
703 703 # Default category if none is specified.
704 704 default_category = "notice"
705 705
706 706 def __init__(self, session_key="flash", categories=None,
707 707 default_category=None):
708 708 """
709 709 Instantiate a ``Flash`` object.
710 710
711 711 ``session_key`` is the key to save the messages under in the user's
712 712 session.
713 713
714 714 ``categories`` is an optional list which overrides the default list
715 715 of categories.
716 716
717 717 ``default_category`` overrides the default category used for messages
718 718 when none is specified.
719 719 """
720 720 self.session_key = session_key
721 721 if categories is not None:
722 722 self.categories = categories
723 723 if default_category is not None:
724 724 self.default_category = default_category
725 725 if self.categories and self.default_category not in self.categories:
726 726 raise ValueError(
727 727 "unrecognized default category %r" % (self.default_category,))
728 728
729 729 def pop_messages(self, session=None, request=None):
730 730 """
731 731 Return all accumulated messages and delete them from the session.
732 732
733 733 The return value is a list of ``Message`` objects.
734 734 """
735 735 messages = []
736 736
737 737 if not session:
738 738 if not request:
739 739 request = get_current_request()
740 740 session = request.session
741 741
742 742 # Pop the 'old' pylons flash messages. They are tuples of the form
743 743 # (category, message)
744 744 for cat, msg in session.pop(self.session_key, []):
745 745 messages.append(_Message(cat, msg))
746 746
747 747 # Pop the 'new' pyramid flash messages for each category as list
748 748 # of strings.
749 749 for cat in self.categories:
750 750 for msg in session.pop_flash(queue=cat):
751 751 sub_data = {}
752 752 if hasattr(msg, 'rsplit'):
753 753 flash_data = msg.rsplit('|DELIM|', 1)
754 754 org_message = flash_data[0]
755 755 if len(flash_data) > 1:
756 756 sub_data = json.loads(flash_data[1])
757 757 else:
758 758 org_message = msg
759 759
760 760 messages.append(_Message(cat, org_message, sub_data=sub_data))
761 761
762 762 # Map messages from the default queue to the 'notice' category.
763 763 for msg in session.pop_flash():
764 764 messages.append(_Message('notice', msg))
765 765
766 766 session.save()
767 767 return messages
768 768
769 769 def json_alerts(self, session=None, request=None):
770 770 payloads = []
771 771 messages = flash.pop_messages(session=session, request=request) or []
772 772 for message in messages:
773 773 payloads.append({
774 774 'message': {
775 775 'message': '{}'.format(message.message),
776 776 'level': message.category,
777 777 'force': True,
778 778 'subdata': message.sub_data
779 779 }
780 780 })
781 781 return safe_str(json.dumps(payloads))
782 782
783 783 def __call__(self, message, category=None, ignore_duplicate=True,
784 784 session=None, request=None):
785 785
786 786 if not session:
787 787 if not request:
788 788 request = get_current_request()
789 789 session = request.session
790 790
791 791 session.flash(
792 792 message, queue=category, allow_duplicate=not ignore_duplicate)
793 793
794 794
795 795 flash = Flash()
796 796
797 797 #==============================================================================
798 798 # SCM FILTERS available via h.
799 799 #==============================================================================
800 800 from rhodecode.lib.vcs.utils import author_name, author_email
801 801 from rhodecode.lib.utils2 import age, age_from_seconds
802 802 from rhodecode.model.db import User, ChangesetStatus
803 803
804 804
805 805 email = author_email
806 806
807 807
808 808 def capitalize(raw_text):
809 809 return raw_text.capitalize()
810 810
811 811
812 812 def short_id(long_id):
813 813 return long_id[:12]
814 814
815 815
816 816 def hide_credentials(url):
817 817 from rhodecode.lib.utils2 import credentials_filter
818 818 return credentials_filter(url)
819 819
820 820 import zoneinfo
821 821 import tzlocal
822 822 local_timezone = tzlocal.get_localzone()
823 823
824 824
825 825 def get_timezone(datetime_iso, time_is_local=False):
826 826 tzinfo = '+00:00'
827 827
828 828 # detect if we have timezone info; otherwise, add it
829 829 if time_is_local and isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
830 830 force_timezone = os.environ.get('RC_TIMEZONE', '')
831 831 if force_timezone:
832 832 force_timezone = zoneinfo.ZoneInfo(force_timezone)
833 833 timezone = force_timezone or local_timezone
834 834
835 835 offset = datetime_iso.replace(tzinfo=timezone).strftime('%z')
836 836 tzinfo = '{}:{}'.format(offset[:-2], offset[-2:])
837 837 return tzinfo
838 838
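# Illustrative: for a naive local datetime with RC_TIMEZONE unset, the
# local zone's offset is returned, e.g. '+02:00'; aware or non-local
# values fall through to the '+00:00' default.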
839 839
840 840 def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True):
841 841 title = value or format_date(datetime_iso)
842 842 tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local)
843 843
844 844 return literal(
845 845 '<time class="timeago {cls}" title="{tt_title}" datetime="{dt}{tzinfo}">{title}</time>'.format(
846 846 cls='tooltip' if tooltip else '',
847 847 tt_title=('{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)) if tooltip else '',
848 848 title=title, dt=datetime_iso, tzinfo=tzinfo
849 849 ))
850 850
851 851
852 852 def _shorten_commit_id(commit_id, commit_len=None):
853 853 if commit_len is None:
854 854 request = get_current_request()
855 855 commit_len = request.call_context.visual.show_sha_length
856 856 return commit_id[:commit_len]
857 857
858 858
859 859 def show_id(commit, show_idx=None, commit_len=None):
860 860 """
861 861 Configurable function that shows ID
862 862 by default it's r123:fffeeefffeee
863 863
864 864 :param commit: commit instance
865 865 """
866 866 if show_idx is None:
867 867 request = get_current_request()
868 868 show_idx = request.call_context.visual.show_revision_number
869 869
870 870 raw_id = _shorten_commit_id(commit.raw_id, commit_len=commit_len)
871 871 if show_idx:
872 872 return 'r%s:%s' % (commit.idx, raw_id)
873 873 else:
874 874 return '%s' % (raw_id, )
875 875
876 876
877 877 def format_date(date):
878 878 """
879 879 use a standardized formatting for dates used in RhodeCode
880 880
881 881 :param date: date/datetime object
882 882 :return: formatted date
883 883 """
884 884
885 885 if date:
886 886 _fmt = "%a, %d %b %Y %H:%M:%S"
887 887 return safe_str(date.strftime(_fmt))
888 888
889 889 return ""
890 890
891 891
892 892 class _RepoChecker(object):
893 893
894 894 def __init__(self, backend_alias):
895 895 self._backend_alias = backend_alias
896 896
897 897 def __call__(self, repository):
898 898 if hasattr(repository, 'alias'):
899 899 _type = repository.alias
900 900 elif hasattr(repository, 'repo_type'):
901 901 _type = repository.repo_type
902 902 else:
903 903 _type = repository
904 904 return _type == self._backend_alias
905 905
906 906
907 907 is_git = _RepoChecker('git')
908 908 is_hg = _RepoChecker('hg')
909 909 is_svn = _RepoChecker('svn')
910 910
911 911
912 912 def get_repo_type_by_name(repo_name):
913 913 repo = Repository.get_by_repo_name(repo_name)
914 914 if repo:
915 915 return repo.repo_type
916 916
917 917
918 918 def is_svn_without_proxy(repository):
919 919 if is_svn(repository):
920 920 return not ConfigGet().get_bool('vcs.svn.proxy.enabled')
921 921 return False
922 922
923 923
924 924 def discover_user(author):
925 925 """
926 926 Tries to discover RhodeCode User based on the author string. Author string
927 927 is typically `FirstName LastName <email@address.com>`
928 928 """
929 929
930 930 # if author is already an instance use it for extraction
931 931 if isinstance(author, User):
932 932 return author
933 933
934 934 # Valid email in the passed attribute; see if that user is in the system
935 935 _email = author_email(author)
936 936 if _email != '':
937 937 user = User.get_by_email(_email, case_insensitive=True, cache=True)
938 938 if user is not None:
939 939 return user
940 940
941 941 # Maybe it's a username? We try to extract it and fetch by username
942 942 _author = author_name(author)
943 943 user = User.get_by_username(_author, case_insensitive=True, cache=True)
944 944 if user is not None:
945 945 return user
946 946
947 947 return None
948 948
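# Lookup order, per the code above: User instance passthrough, then a
# case-insensitive email match, then a case-insensitive username match,
# else None.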
949 949
950 950 def email_or_none(author):
951 951 # extract email from the commit string
952 952 _email = author_email(author)
953 953
954 954 # If we have an email, use it, otherwise
955 955 # see if it contains a username we can get an email from
956 956 if _email != '':
957 957 return _email
958 958 else:
959 959 user = User.get_by_username(
960 960 author_name(author), case_insensitive=True, cache=True)
961 961
962 962 if user is not None:
963 963 return user.email
964 964
965 965 # No valid email, not a valid user in the system, none!
966 966 return None
967 967
968 968
969 969 def link_to_user(author, length=0, **kwargs):
970 970 user = discover_user(author)
971 971 # user can be None, but if we have it already it means we can re-use it
972 972 # in the person() function, so we save 1 intensive query
973 973 if user:
974 974 author = user
975 975
976 976 display_person = person(author, 'username_or_name_or_email')
977 977 if length:
978 978 display_person = shorter(display_person, length)
979 979
980 980 if user and user.username != user.DEFAULT_USER:
981 981 return link_to(
982 982 escape(display_person),
983 983 route_path('user_profile', username=user.username),
984 984 **kwargs)
985 985 else:
986 986 return escape(display_person)
987 987
988 988
989 989 def link_to_group(users_group_name, **kwargs):
990 990 return link_to(
991 991 escape(users_group_name),
992 992 route_path('user_group_profile', user_group_name=users_group_name),
993 993 **kwargs)
994 994
995 995
996 996 def person(author, show_attr="username_and_name"):
997 997 user = discover_user(author)
998 998 if user:
999 999 return getattr(user, show_attr)
1000 1000 else:
1001 1001 _author = author_name(author)
1002 1002 _email = email(author)
1003 1003 return _author or _email
1004 1004
1005 1005
1006 1006 def author_string(email):
1007 1007 if email:
1008 1008 user = User.get_by_email(email, case_insensitive=True, cache=True)
1009 1009 if user:
1010 1010 if user.first_name or user.last_name:
1011 1011 return '%s %s &lt;%s&gt;' % (
1012 1012 user.first_name, user.last_name, email)
1013 1013 else:
1014 1014 return email
1015 1015 else:
1016 1016 return email
1017 1017 else:
1018 1018 return None
1019 1019
1020 1020
1021 1021 def person_by_id(id_, show_attr="username_and_name"):
1022 1022 # attr to return from fetched user
1023 1023 def person_getter(usr):
1024 1024 return getattr(usr, show_attr)
1025 1025
1026 1026 # maybe it's an ID?
1027 1027 if str(id_).isdigit() or isinstance(id_, int):
1028 1028 id_ = int(id_)
1029 1029 user = User.get(id_)
1030 1030 if user is not None:
1031 1031 return person_getter(user)
1032 1032 return id_
1033 1033
1034 1034
1035 1035 def gravatar_with_user(request, author, show_disabled=False, tooltip=False):
1036 1036 _render = request.get_partial_renderer('rhodecode:templates/base/base.mako')
1037 1037 return _render('gravatar_with_user', author, show_disabled=show_disabled, tooltip=tooltip)
1038 1038
1039 1039
1040 1040 tags_patterns = OrderedDict(
1041 1041 (
1042 1042 (
1043 1043 "lang",
1044 1044 (
1045 1045 re.compile(r"\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]"),
1046 1046 '<div class="metatag" tag="lang">\\2</div>',
1047 1047 ),
1048 1048 ),
1049 1049 (
1050 1050 "see",
1051 1051 (
1052 1052 re.compile(r"\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]"),
1053 1053 '<div class="metatag" tag="see">see: \\1 </div>',
1054 1054 ),
1055 1055 ),
1056 1056 (
1057 1057 "url",
1058 1058 (
1059 1059 re.compile(
1060 1060 r"\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]"
1061 1061 ),
1062 1062 '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>',
1063 1063 ),
1064 1064 ),
1065 1065 (
1066 1066 "license",
1067 1067 (
1068 1068 re.compile(
1069 1069 r"\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]"
1070 1070 ),
1071 1071 # don't make it a raw string here...
1072 1072 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>',
1073 1073 ),
1074 1074 ),
1075 1075 (
1076 1076 "ref",
1077 1077 (
1078 1078 re.compile(
1079 1079 r"\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]"
1080 1080 ),
1081 1081 '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>',
1082 1082 ),
1083 1083 ),
1084 1084 (
1085 1085 "state",
1086 1086 (
1087 1087 re.compile(r"\[(stable|featured|stale|dead|dev|deprecated)\]"),
1088 1088 '<div class="metatag" tag="state \\1">\\1</div>',
1089 1089 ),
1090 1090 ),
1091 1091 # label in grey
1092 1092 (
1093 1093 "label",
1094 1094 (re.compile(r"\[([a-z]+)\]"), '<div class="metatag" tag="label">\\1</div>'),
1095 1095 ),
1096 1096 # generic catch all in grey
1097 1097 (
1098 1098 "generic",
1099 1099 (
1100 1100 re.compile(r"\[([a-zA-Z0-9\.\-\_]+)\]"),
1101 1101 '<div class="metatag" tag="generic">\\1</div>',
1102 1102 ),
1103 1103 ),
1104 1104 )
1105 1105 )
1106 1106
1107 1107
1108 1108 def extract_metatags(value):
1109 1109 """
1110 1110 Extract supported meta-tags from given text value
1111 1111 """
1112 1112 tags = []
1113 1113 if not value:
1114 1114 return tags, ''
1115 1115
1116 1116 for key, val in list(tags_patterns.items()):
1117 1117 pat, replace_html = val
1118 1118 tags.extend([(key, x.group()) for x in pat.finditer(value)])
1119 1119 value = pat.sub('', value)
1120 1120
1121 1121 return tags, value
1122 1122
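# Illustrative:
#   extract_metatags('[stable] my repo')
#   -> ([('state', '[stable]')], ' my repo')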
1123 1123
1124 1124 def style_metatag(tag_type, value):
1125 1125 """
1126 1126 converts tags from value into html equivalent
1127 1127 """
1128 1128 if not value:
1129 1129 return ''
1130 1130
1131 1131 html_value = value
1132 1132 tag_data = tags_patterns.get(tag_type)
1133 1133 if tag_data:
1134 1134 pat, replace_html = tag_data
1135 1135 # convert to plain `str` instead of a markup tag to be used in
1136 1136 # regex expressions. safe_str doesn't work here
1137 1137 html_value = pat.sub(replace_html, value)
1138 1138
1139 1139 return html_value
1140 1140
1141 1141
1142 1142 def bool2icon(value, show_at_false=True):
1143 1143 """
1144 1144 Returns the boolean of a given value, represented as an html element
1145 1145 with classes that will render as icons
1146 1146
1147 1147 :param value: given value to convert to html node
1148 1148 """
1149 1149
1150 1150 if value: # does bool conversion
1151 1151 return HTML.tag('i', class_="icon-true", title='True')
1152 1152 else: # not true as bool
1153 1153 if show_at_false:
1154 1154 return HTML.tag('i', class_="icon-false", title='False')
1155 1155 return HTML.tag('i')
1156 1156
1157 1157
1158 1158 def b64(inp):
1159 1159 return base64.b64encode(safe_bytes(inp))
1160 1160
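# Illustrative: b64('abc') -> b'YWJj'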
1161 1161 #==============================================================================
1162 1162 # PERMS
1163 1163 #==============================================================================
1164 1164 from rhodecode.lib.auth import (
1165 1165 HasPermissionAny, HasPermissionAll,
1166 1166 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1167 1167 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1168 1168 csrf_token_key, AuthUser)
1169 1169
1170 1170
1171 1171 #==============================================================================
1172 1172 # GRAVATAR URL
1173 1173 #==============================================================================
1174 1174 class InitialsGravatar(object):
1175 1175 def __init__(self, email_address, first_name, last_name, size=30,
1176 1176 background=None, text_color='#fff'):
1177 1177 self.size = size
1178 1178 self.first_name = first_name
1179 1179 self.last_name = last_name
1180 1180 self.email_address = email_address
1181 1181 self.background = background or self.str2color(email_address)
1182 1182 self.text_color = text_color
1183 1183
1184 1184 def get_color_bank(self):
1185 1185 """
1186 1186 returns a predefined list of colors that gravatars can use.
1187 1187 Those are randomized distinct colors that guarantee readability and
1188 1188 uniqueness.
1189 1189
1190 1190 generated with: http://phrogz.net/css/distinct-colors.html
1191 1191 """
1192 1192 return [
1193 1193 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1194 1194 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1195 1195 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1196 1196 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1197 1197 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1198 1198 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1199 1199 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1200 1200 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1201 1201 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1202 1202 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1203 1203 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1204 1204 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1205 1205 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1206 1206 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1207 1207 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1208 1208 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1209 1209 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1210 1210 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1211 1211 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1212 1212 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1213 1213 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1214 1214 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1215 1215 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1216 1216 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1217 1217 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1218 1218 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1219 1219 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1220 1220 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1221 1221 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1222 1222 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1223 1223 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1224 1224 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1225 1225 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1226 1226 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1227 1227 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1228 1228 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1229 1229 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1230 1230 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1231 1231 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1232 1232 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1233 1233 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1234 1234 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1235 1235 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1236 1236 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1237 1237 '#4f8c46', '#368dd9', '#5c0073'
1238 1238 ]
1239 1239
1240 1240 def rgb_to_hex_color(self, rgb_tuple):
1241 1241 """
1242 1242 Converts a passed rgb_tuple to a hex color.
1243 1243
1244 1244 :param rgb_tuple: tuple with 3 ints represents rgb color space
1245 1245 """
1246 1246 return '#%02x%02x%02x' % rgb_tuple  # two hex digits per channel (py3-safe)
1247 1247
1248 1248 def email_to_int_list(self, email_str):
1249 1249 """
1250 1250 Get every byte of the hex digest value of email and turn it into an integer.
1251 1251 Each value is always in the 0-255 range
1252 1252 """
1253 1253 digest = md5_safe(email_str.lower())
1254 1254 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1255 1255
1256 1256 def pick_color_bank_index(self, email_str, color_bank):
1257 1257 return self.email_to_int_list(email_str)[0] % len(color_bank)
1258 1258
1259 1259 def str2color(self, email_str):
1260 1260 """
1261 1261 Maps an email to a color using a stable algorithm
1262 1262
1263 1263 :param email_str:
1264 1264 """
1265 1265 color_bank = self.get_color_bank()
1266 1266 # pick position (modulo its length so we always find it in the
1267 1267 # bank even if it's smaller than 256 values)
1268 1268 pos = self.pick_color_bank_index(email_str, color_bank)
1269 1269 return color_bank[pos]
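    # Editor's note: the mapping is deterministic. pick_color_bank_index uses
    # the first byte of the email's md5 digest modulo the bank size, so
    #   self.str2color('john@doe.com')
    # resolves to the same hex color on every call.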
1270 1270
1271 1271 def normalize_email(self, email_address):
1272 1272 # default host used to fill in the fake/missing email
1273 1273 default_host = 'localhost'
1274 1274
1275 1275 if not email_address:
1276 1276 email_address = f'{User.DEFAULT_USER}@{default_host}'
1277 1277
1278 1278 email_address = safe_str(email_address)
1279 1279
1280 1280 if '@' not in email_address:
1281 1281 email_address = f'{email_address}@{default_host}'
1282 1282
1283 1283 if email_address.endswith('@'):
1284 1284 email_address = f'{email_address}{default_host}'
1285 1285
1286 1286 email_address = convert_special_chars(email_address)
1287 1287
1288 1288 return email_address
1289 1289
1290 1290 def get_initials(self):
1291 1291 """
1292 1292 Returns 2-letter initials calculated based on the input.
1293 1293 The algorithm picks the first given email address and takes the first
1294 1294 letter of the part before the @, then the first letter of the server name.
1295 1295 In case the part before the @ has the format `somestring.somestring2`,
1296 1296 it replaces the server letter with the first letter of somestring2.
1297 1297 
1298 1298 In case the function was initialized with both first and last name, they
1299 1299 override the extraction from email: we take the first letter of the first
1300 1300 and last name. Special logic applies to compound full names, like Guido
1301 1301 Von Rossum: we use the last part of the last name (Von Rossum),
1302 1302 picking `R`.
1303 1303 
1304 1304 The function also normalizes non-ascii characters to their ascii
1305 1305 representation, eg Ą => A
1306 1306 """
1307 1307 # replace non-ascii to ascii
1308 1308 first_name = convert_special_chars(self.first_name)
1309 1309 last_name = convert_special_chars(self.last_name)
1310 1310 # multi word last names, Guido Von Rossum, we take the last part only
1311 1311 last_name = last_name.split(' ', 1)[-1]
1312 1312
1313 1313 # do NFKD encoding, and also make sure email has proper format
1314 1314 email_address = self.normalize_email(self.email_address)
1315 1315
1316 1316 # first push the email initials
1317 1317 prefix, server = email_address.split('@', 1)
1318 1318
1319 1319 # check if prefix is maybe a 'first_name.last_name' syntax
1320 1320 _dot_split = prefix.rsplit('.', 1)
1321 1321 if len(_dot_split) == 2 and _dot_split[1]:
1322 1322 initials = [_dot_split[0][0], _dot_split[1][0]]
1323 1323 else:
1324 1324 initials = [prefix[0], server[0]]
1325 1325
1326 1326 # get first letter of first and last names to create initials
1327 1327 fn_letter = (first_name or " ")[0].strip()
1328 1328 ln_letter = (last_name or " ")[0].strip()
1329 1329
1330 1330 if fn_letter:
1331 1331 initials[0] = fn_letter
1332 1332
1333 1333 if ln_letter:
1334 1334 initials[1] = ln_letter
1335 1335
1336 1336 return ''.join(initials).upper()
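    # Illustrative results (editor's sketch, addresses made up):
    #   InitialsGravatar('marcin.kuzminski@example.com', '', '').get_initials()
    #   # -> 'MK' (the prefix splits on '.', picking a letter from each part)
    #   InitialsGravatar('foo@bar.com', 'Guido', 'Von Rossum').get_initials()
    #   # -> 'GR' (names override the email; the compound last name yields 'R')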
1337 1337
1338 1338 def get_img_data_by_type(self, font_family, img_type):
1339 1339 default_user = """
1340 1340 <svg xmlns="http://www.w3.org/2000/svg"
1341 1341 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1342 1342 viewBox="-15 -10 439.165 429.164"
1343 1343
1344 1344 xml:space="preserve"
1345 1345 font-family="{font_family}"
1346 1346 style="background:{background};" >
1347 1347
1348 1348 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1349 1349 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1350 1350 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1351 1351 168.596,153.916,216.671,
1352 1352 204.583,216.671z" fill="{text_color}"/>
1353 1353 <path d="M407.164,374.717L360.88,
1354 1354 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1355 1355 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1356 1356 15.366-44.203,23.488-69.076,23.488c-24.877,
1357 1357 0-48.762-8.122-69.078-23.488
1358 1358 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1359 1359 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1360 1360 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1361 1361 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1362 1362 19.402-10.527 C409.699,390.129,
1363 1363 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1364 1364 </svg>""".format(
1365 1365 size=self.size,
1366 1366 background='#979797', # @grey4
1367 1367 text_color=self.text_color,
1368 1368 font_family=font_family)
1369 1369
1370 1370 return {
1371 1371 "default_user": default_user
1372 1372 }[img_type]
1373 1373
1374 1374 def get_img_data(self, svg_type=None):
1375 1375 """
1376 1376 generates the svg markup for the avatar image
1377 1377 """
1378 1378 fonts = [
1379 1379 '-apple-system',
1380 1380 'BlinkMacSystemFont',
1381 1381 'Segoe UI',
1382 1382 'Roboto',
1383 1383 'Oxygen-Sans',
1384 1384 'Ubuntu',
1385 1385 'Cantarell',
1386 1386 'Helvetica Neue',
1387 1387 'sans-serif'
1388 1388 ]
1389 1389 font_family = ','.join(fonts)
1390 1390 if svg_type:
1391 1391 return self.get_img_data_by_type(font_family, svg_type)
1392 1392
1393 1393 initials = self.get_initials()
1394 1394 img_data = """
1395 1395 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1396 1396 width="{size}" height="{size}"
1397 1397 style="width: 100%; height: 100%; background-color: {background}"
1398 1398 viewBox="0 0 {size} {size}">
1399 1399 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1400 1400 pointer-events="auto" fill="{text_color}"
1401 1401 font-family="{font_family}"
1402 1402 style="font-weight: 400; font-size: {f_size}px;">{text}
1403 1403 </text>
1404 1404 </svg>""".format(
1405 1405 size=self.size,
1406 1406 f_size=self.size/2.05, # scale the text inside the box nicely
1407 1407 background=self.background,
1408 1408 text_color=self.text_color,
1409 1409 text=initials.upper(),
1410 1410 font_family=font_family)
1411 1411
1412 1412 return img_data
1413 1413
1414 1414 def generate_svg(self, svg_type=None):
1415 1415 img_data = base64_to_str(self.get_img_data(svg_type))
1416 1416 return "data:image/svg+xml;base64,{}".format(img_data)
1417 1417
1418 1418
1419 1419 def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False):
1420 1420
1421 1421 svg_type = None
1422 1422 if email_address == User.DEFAULT_USER_EMAIL:
1423 1423 svg_type = 'default_user'
1424 1424
1425 1425 klass = InitialsGravatar(email_address, first_name, last_name, size)
1426 1426
1427 1427 if store_on_disk:
1428 1428 from rhodecode.apps.file_store import utils as store_utils
1429 1429 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
1430 1430 FileOverSizeException
1431 1431 from rhodecode.model.db import Session
1432 1432
1433 1433 image_key = md5_safe(email_address.lower()
1434 1434 + first_name.lower() + last_name.lower())
1435 1435
1436 1436 storage = store_utils.get_file_storage(request.registry.settings)
1437 1437 filename = '{}.svg'.format(image_key)
1438 1438 subdir = 'gravatars'
1439 1439 # since the final name carries a counter suffix, we apply 0 as the initial value
1440 1440 uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False))
1441 1441 store_uid = os.path.join(subdir, uid)
1442 1442
1443 1443 db_entry = FileStore.get_by_store_uid(store_uid)
1444 1444 if db_entry:
1445 1445 return request.route_path('download_file', fid=store_uid)
1446 1446
1447 1447 img_data = klass.get_img_data(svg_type=svg_type)
1448 1448 img_file = store_utils.bytes_to_file_obj(img_data)
1449 1449
1450 1450 try:
1451 1451 store_uid, metadata = storage.save_file(
1452 1452 img_file, filename, directory=subdir,
1453 1453 extensions=['.svg'], randomized_name=False)
1454 1454 except (FileNotAllowedException, FileOverSizeException):
1455 1455 raise
1456 1456
1457 1457 try:
1458 1458 entry = FileStore.create(
1459 1459 file_uid=store_uid, filename=metadata["filename"],
1460 1460 file_hash=metadata["sha256"], file_size=metadata["size"],
1461 1461 file_display_name=filename,
1462 1462 file_description=f'user gravatar `{safe_str(filename)}`',
1463 1463 hidden=True, check_acl=False, user_id=1
1464 1464 )
1465 1465 Session().add(entry)
1466 1466 Session().commit()
1467 1467 log.debug('Stored upload in DB as %s', entry)
1468 1468 except Exception:
1469 1469 raise
1470 1470
1471 1471 return request.route_path('download_file', fid=store_uid)
1472 1472
1473 1473 else:
1474 1474 return klass.generate_svg(svg_type=svg_type)
1475 1475
1476 1476
1477 1477 def gravatar_external(request, gravatar_url_tmpl, email_address, size=30):
1478 1478 return safe_str(gravatar_url_tmpl)\
1479 1479 .replace('{email}', email_address) \
1480 1480 .replace('{md5email}', md5_safe(email_address.lower())) \
1481 1481 .replace('{netloc}', request.host) \
1482 1482 .replace('{scheme}', request.scheme) \
1483 1483 .replace('{size}', safe_str(size))
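# Illustrative expansion (editor's sketch; the template below is an example,
# not a shipped default):
#   tmpl = 'https://www.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
#   gravatar_external(request, tmpl, 'john@doe.com', size=30)
#   # -> 'https://www.gravatar.com/avatar/<md5 of john@doe.com>?d=identicon&s=30'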
1484 1484
1485 1485
1486 1486 def gravatar_url(email_address, size=30, request=None):
1487 1487 request = request or get_current_request()
1488 1488 _use_gravatar = request.call_context.visual.use_gravatar
1489 1489
1490 1490 email_address = email_address or User.DEFAULT_USER_EMAIL
1491 1491 if isinstance(email_address, str):
1492 1492 # hashlib crashes on unicode items
1493 1493 email_address = safe_str(email_address)
1494 1494
1495 1495 # empty email or default user
1496 1496 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1497 1497 return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size)
1498 1498
1499 1499 if _use_gravatar:
1500 1500 gravatar_url_tmpl = request.call_context.visual.gravatar_url \
1501 1501 or User.DEFAULT_GRAVATAR_URL
1502 1502 return gravatar_external(request, gravatar_url_tmpl, email_address, size=size)
1503 1503
1504 1504 else:
1505 1505 return initials_gravatar(request, email_address, '', '', size=size)
1506 1506
1507 1507
1508 1508 def breadcrumb_repo_link(repo):
1509 1509 """
1510 1510 Makes a breadcrumbs path link to a repo
1511 1511
1512 1512 ex::
1513 1513 group >> subgroup >> repo
1514 1514
1515 1515 :param repo: a Repository instance
1516 1516 """
1517 1517
1518 1518 path = [
1519 1519 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name),
1520 1520 title='last change:{}'.format(format_date(group.last_commit_change)))
1521 1521 for group in repo.groups_with_parents
1522 1522 ] + [
1523 1523 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name),
1524 1524 title='last change:{}'.format(format_date(repo.last_commit_change)))
1525 1525 ]
1526 1526
1527 1527 return literal(' &raquo; '.join(path))
1528 1528
1529 1529
1530 1530 def breadcrumb_repo_group_link(repo_group):
1531 1531 """
1532 1532 Makes a breadcrumbs path link to a repo group
1533 1533
1534 1534 ex::
1535 1535 group >> subgroup
1536 1536
1537 1537 :param repo_group: a Repository Group instance
1538 1538 """
1539 1539
1540 1540 path = [
1541 1541 link_to(group.name,
1542 1542 route_path('repo_group_home', repo_group_name=group.group_name),
1543 1543 title='last change:{}'.format(format_date(group.last_commit_change)))
1544 1544 for group in repo_group.parents
1545 1545 ] + [
1546 1546 link_to(repo_group.name,
1547 1547 route_path('repo_group_home', repo_group_name=repo_group.group_name),
1548 1548 title='last change:{}'.format(format_date(repo_group.last_commit_change)))
1549 1549 ]
1550 1550
1551 1551 return literal(' &raquo; '.join(path))
1552 1552
1553 1553
1554 1554 def format_byte_size_binary(file_size):
1555 1555 """
1556 1556 Formats file/folder sizes to standard.
1557 1557 """
1558 1558 if file_size is None:
1559 1559 file_size = 0
1560 1560
1561 1561 formatted_size = format_byte_size(file_size, binary=True)
1562 1562 return formatted_size
1563 1563
1564 1564
1565 1565 def urlify_text(text_, safe=True, **href_attrs):
1566 1566 """
1567 1567 Extract urls from text and make html links out of them
1568 1568 """
1569 1569
1570 1570 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1571 1571 r'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1572 1572
1573 1573 def url_func(match_obj):
1574 1574 url_full = match_obj.groups()[0]
1575 1575 a_options = dict(href_attrs)
1576 1576 a_options['href'] = url_full
1577 1577 a_text = url_full
1578 1578 return HTML.tag("a", a_text, **a_options)
1579 1579
1580 1580 _new_text = url_pat.sub(url_func, text_)
1581 1581
1582 1582 if safe:
1583 1583 return literal(_new_text)
1584 1584 return _new_text
1585 1585
1586 1586
1587 1587 def urlify_commits(text_, repo_name):
1588 1588 """
1589 1589 Extract commit ids from text and make link from them
1590 1590
1591 1591 :param text_:
1592 1592 :param repo_name: repo name to build the URL with
1593 1593 """
1594 1594
1595 1595 url_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1596 1596
1597 1597 def url_func(match_obj):
1598 1598 commit_id = match_obj.groups()[1]
1599 1599 pref = match_obj.groups()[0]
1600 1600 suf = match_obj.groups()[2]
1601 1601
1602 1602 tmpl = (
1603 1603 '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">'
1604 1604 '%(commit_id)s</a>%(suf)s'
1605 1605 )
1606 1606 return tmpl % {
1607 1607 'pref': pref,
1608 1608 'cls': 'revision-link',
1609 1609 'url': route_url(
1610 1610 'repo_commit', repo_name=repo_name, commit_id=commit_id),
1611 1611 'commit_id': commit_id,
1612 1612 'suf': suf,
1613 1613 'hovercard_alt': 'Commit: {}'.format(commit_id),
1614 1614 'hovercard_url': route_url(
1615 1615 'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)
1616 1616 }
1617 1617
1618 1618 new_text = url_pat.sub(url_func, text_)
1619 1619
1620 1620 return new_text
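# Illustrative transformation (editor's sketch):
#   urlify_commits('fixed in deadbeef0badcafe1234', 'my-repo')
# wraps the 20-char hex id in an <a class="revision-link"> pointing at the
# repo_commit route, leaving the surrounding text untouched.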
1621 1621
1622 1622
1623 1623 def _process_url_func(match_obj, repo_name, uid, entry,
1624 1624 return_raw_data=False, link_format='html'):
1625 1625 pref = ''
1626 1626 if match_obj.group().startswith(' '):
1627 1627 pref = ' '
1628 1628
1629 1629 issue_id = ''.join(match_obj.groups())
1630 1630
1631 1631 if link_format == 'html':
1632 1632 tmpl = (
1633 1633 '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">'
1634 1634 '%(issue-prefix)s%(id-repr)s'
1635 1635 '</a>')
1636 1636 elif link_format == 'html+hovercard':
1637 1637 tmpl = (
1638 1638 '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">'
1639 1639 '%(issue-prefix)s%(id-repr)s'
1640 1640 '</a>')
1641 1641 elif link_format in ['rst', 'rst+hovercard']:
1642 1642 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1643 1643 elif link_format in ['markdown', 'markdown+hovercard']:
1644 1644 tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)'
1645 1645 else:
1646 1646 raise ValueError('Bad link_format:{}'.format(link_format))
1647 1647
1648 1648 (repo_name_cleaned,
1649 1649 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name)
1650 1650
1651 1651 # variables replacement
1652 1652 named_vars = {
1653 1653 'id': issue_id,
1654 1654 'repo': repo_name,
1655 1655 'repo_name': repo_name_cleaned,
1656 1656 'group_name': parent_group_name,
1657 1657 # set dummy keys so we always have them
1658 1658 'hostname': '',
1659 1659 'netloc': '',
1660 1660 'scheme': ''
1661 1661 }
1662 1662
1663 1663 request = get_current_request()
1664 1664 if request:
1665 1665 # exposes, hostname, netloc, scheme
1666 1666 host_data = get_host_info(request)
1667 1667 named_vars.update(host_data)
1668 1668
1669 1669 # named regex variables
1670 1670 named_vars.update(match_obj.groupdict())
1671 1671 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1672 1672 desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars)
1673 1673 hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars)
1674 1674
1675 1675 def quote_cleaner(input_str):
1676 1676 """Remove quotes as it's HTML"""
1677 1677 return input_str.replace('"', '')
1678 1678
1679 1679 data = {
1680 1680 'pref': pref,
1681 1681 'cls': quote_cleaner('issue-tracker-link'),
1682 1682 'url': quote_cleaner(_url),
1683 1683 'id-repr': issue_id,
1684 1684 'issue-prefix': entry['pref'],
1685 1685 'serv': entry['url'],
1686 1686 'title': sanitize_html(desc, strip=True),
1687 1687 'hovercard_url': hovercard_url
1688 1688 }
1689 1689
1690 1690 if return_raw_data:
1691 1691 return {
1692 1692 'id': issue_id,
1693 1693 'url': _url
1694 1694 }
1695 1695 return tmpl % data
1696 1696
1697 1697
1698 1698 def get_active_pattern_entries(repo_name):
1699 1699 repo = None
1700 1700 if repo_name:
1701 1701 # Retrieve the repo object to keep an invalid repo_name from exploding in
1702 1702 # IssueTrackerSettingsModel, while still passing the invalid name further down
1703 1703 repo = Repository.get_by_repo_name(repo_name, cache=True)
1704 1704
1705 1705 settings_model = IssueTrackerSettingsModel(repo=repo)
1706 1706 active_entries = settings_model.get_settings(cache=True)
1707 1707 return active_entries
1708 1708
1709 1709
1710 1710 pr_pattern_re = regex.compile(r'(?:(?:^!)|(?: !))(\d+)')
1711 1711
1712 1712 allowed_link_formats = [
1713 1713 'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard']
1714 1714
1715 1715 compile_cache = {
1716 1716
1717 1717 }
1718 1718
1719 1719
1720 1720 def process_patterns(text_string, repo_name, link_format='html', active_entries=None):
1721 1721
1722 1722 if link_format not in allowed_link_formats:
1723 1723 raise ValueError('Link format must be one of: {}, got: {}'.format(
1724 1724 allowed_link_formats, link_format))
1725 1725 issues_data = []
1726 1726 errors = []
1727 1727 new_text = text_string
1728 1728
1729 1729 if active_entries is None:
1730 1730 log.debug('Fetch active issue tracker patterns for repo: %s', repo_name)
1731 1731 active_entries = get_active_pattern_entries(repo_name)
1732 1732
1733 1733 log.debug('Got %s pattern entries to process', len(active_entries))
1734 1734
1735 1735 for uid, entry in list(active_entries.items()):
1736 1736
1737 1737 if not (entry['pat'] and entry['url']):
1738 1738 log.debug('skipping due to missing data')
1739 1739 continue
1740 1740
1741 1741 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s',
1742 1742 uid, entry['pat'], entry['url'], entry['pref'])
1743 1743
1744 1744 if entry.get('pat_compiled'):
1745 1745 pattern = entry['pat_compiled']
1746 1746 elif entry['pat'] in compile_cache:
1747 1747 pattern = compile_cache[entry['pat']]
1748 1748 else:
1749 1749 try:
1750 1750 pattern = regex.compile(r'%s' % entry['pat'])
1751 1751 except regex.error as e:
1752 1752 regex_err = ValueError('{}:{}'.format(entry['pat'], e))
1753 1753 log.exception('issue tracker pattern: `%s` failed to compile', regex_err)
1754 1754 errors.append(regex_err)
1755 1755 continue
1756 1756 compile_cache[entry['pat']] = pattern
1757 1757
1758 1758 data_func = partial(
1759 1759 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1760 1760 return_raw_data=True)
1761 1761
1762 1762 for match_obj in pattern.finditer(text_string):
1763 1763 issues_data.append(data_func(match_obj))
1764 1764
1765 1765 url_func = partial(
1766 1766 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1767 1767 link_format=link_format)
1768 1768
1769 1769 new_text = pattern.sub(url_func, new_text)
1770 1770 log.debug('processed prefix:uid `%s`', uid)
1771 1771
1772 1772 # finally use a global replace, eg !123 -> pr-link; these will not match
1773 1773 # if a similar pattern already exists
1774 1774 server_url = '${scheme}://${netloc}'
1775 1775 pr_entry = {
1776 1776 'pref': '!',
1777 1777 'url': server_url + '/_admin/pull-requests/${id}',
1778 1778 'desc': 'Pull Request !${id}',
1779 1779 'hovercard_url': server_url + '/_hovercard/pull_request/${id}'
1780 1780 }
1781 1781 pr_url_func = partial(
1782 1782 _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None,
1783 1783 link_format=link_format+'+hovercard')
1784 1784 new_text = pr_pattern_re.sub(pr_url_func, new_text)
1785 1785 log.debug('processed !pr pattern')
1786 1786
1787 1787 return new_text, issues_data, errors
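# Illustrative usage (editor's sketch; the entry keys mirror what the loop
# above reads, and the tracker URL is made up):
#   entries = {'jira': {
#       'pat': r'(?:^|\s)JIRA-(?P<issue_id>\d+)',
#       'url': 'https://jira.example.com/browse/JIRA-${id}',
#       'pref': 'JIRA-', 'desc': 'JIRA ${id}'}}
#   html, issues, errors = process_patterns(
#       'fixes JIRA-123', repo_name='my-repo', active_entries=entries)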
1788 1788
1789 1789
1790 1790 def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None,
1791 1791 issues_container_callback=None, error_container=None):
1792 1792 """
1793 1793 Parses given text message and makes proper links.
1794 1794 issues are linked to given issue-server, and rest is a commit link
1795 1795 """
1796 1796
1797 1797 def escaper(_text):
1798 1798 return _text.replace('<', '&lt;').replace('>', '&gt;')
1799 1799
1800 1800 new_text = escaper(commit_text)
1801 1801
1802 1802 # extract http/https links and make them real urls
1803 1803 new_text = urlify_text(new_text, safe=False)
1804 1804
1805 1805 # urlify commits - extract commit ids and make link out of them, if we have
1806 1806 # the scope of repository present.
1807 1807 if repository:
1808 1808 new_text = urlify_commits(new_text, repository)
1809 1809
1810 1810 # process issue tracker patterns
1811 1811 new_text, issues, errors = process_patterns(
1812 1812 new_text, repository or '', active_entries=active_pattern_entries)
1813 1813
1814 1814 if issues_container_callback is not None:
1815 1815 for issue in issues:
1816 1816 issues_container_callback(issue)
1817 1817
1818 1818 if error_container is not None:
1819 1819 error_container.extend(errors)
1820 1820
1821 1821 return literal(new_text)
1822 1822
1823 1823
1824 1824 def render_binary(repo_name, file_obj):
1825 1825 """
1826 1826 Choose how to render a binary file
1827 1827 """
1828 1828
1829 1829 # unicode filename of the file node
1830 1830 filename = file_obj.name
1831 1831
1832 1832 # images
1833 1833 for ext in ['*.png', '*.jpeg', '*.jpg', '*.ico', '*.gif']:
1834 1834 if fnmatch.fnmatch(filename, pat=ext):
1835 1835 src = route_path(
1836 1836 'repo_file_raw', repo_name=repo_name,
1837 1837 commit_id=file_obj.commit.raw_id,
1838 1838 f_path=file_obj.path)
1839 1839
1840 1840 return literal(
1841 1841 '<img class="rendered-binary" alt="rendered-image" src="{}">'.format(src))
1842 1842
1843 1843
1844 1844 def renderer_from_filename(filename, exclude=None):
1845 1845 """
1846 1846 choose a renderer based on filename, this works only for text based files
1847 1847 """
1848 1848
1849 1849 # ipython
1850 1850 for ext in ['*.ipynb']:
1851 1851 if fnmatch.fnmatch(filename, pat=ext):
1852 1852 return 'jupyter'
1853 1853
1854 1854 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1855 1855 if is_markup:
1856 1856 return is_markup
1857 1857 return None
1858 1858
1859 1859
1860 1860 def render(source, renderer='rst', mentions=False, relative_urls=None,
1861 1861 repo_name=None, active_pattern_entries=None, issues_container_callback=None):
1862 1862
1863 1863 def maybe_convert_relative_links(html_source):
1864 1864 if relative_urls:
1865 1865 return relative_links(html_source, relative_urls)
1866 1866 return html_source
1867 1867
1868 1868 if renderer == 'plain':
1869 1869 return literal(
1870 1870 MarkupRenderer.plain(source, leading_newline=False))
1871 1871
1872 1872 elif renderer == 'rst':
1873 1873 if repo_name:
1874 1874 # process patterns on comments if we pass in repo name
1875 1875 source, issues, errors = process_patterns(
1876 1876 source, repo_name, link_format='rst',
1877 1877 active_entries=active_pattern_entries)
1878 1878 if issues_container_callback is not None:
1879 1879 for issue in issues:
1880 1880 issues_container_callback(issue)
1881 1881
1882 1882 rendered_block = maybe_convert_relative_links(
1883 1883 MarkupRenderer.rst(source, mentions=mentions))
1884 1884
1885 1885 return literal(f'<div class="rst-block">{rendered_block}</div>')
1886 1886
1887 1887 elif renderer == 'markdown':
1888 1888 if repo_name:
1889 1889 # process patterns on comments if we pass in repo name
1890 1890 source, issues, errors = process_patterns(
1891 1891 source, repo_name, link_format='markdown',
1892 1892 active_entries=active_pattern_entries)
1893 1893 if issues_container_callback is not None:
1894 1894 for issue in issues:
1895 1895 issues_container_callback(issue)
1896 1896
1897 1897 rendered_block = maybe_convert_relative_links(
1898 1898 MarkupRenderer.markdown(source, flavored=True, mentions=mentions))
1899 1899 return literal(f'<div class="markdown-block">{rendered_block}</div>')
1900 1900
1901 1901 elif renderer == 'jupyter':
1902 1902 rendered_block = maybe_convert_relative_links(
1903 1903 MarkupRenderer.jupyter(source))
1904 1904 return literal(f'<div class="ipynb">{rendered_block}</div>')
1905 1905
1906 1906 # None means just show the file-source
1907 1907 return None
1908 1908
1909 1909
1910 1910 def commit_status(repo, commit_id):
1911 1911 return ChangesetStatusModel().get_status(repo, commit_id)
1912 1912
1913 1913
1914 1914 def commit_status_lbl(commit_status):
1915 1915 return dict(ChangesetStatus.STATUSES).get(commit_status)
1916 1916
1917 1917
1918 1918 def commit_time(repo_name, commit_id):
1919 1919 repo = Repository.get_by_repo_name(repo_name)
1920 1920 commit = repo.get_commit(commit_id=commit_id)
1921 1921 return commit.date
1922 1922
1923 1923
1924 1924 def get_permission_name(key):
1925 1925 return dict(Permission.PERMS).get(key)
1926 1926
1927 1927
1928 1928 def journal_filter_help(request):
1929 1929 _ = request.translate
1930 1930 from rhodecode.lib.audit_logger import ACTIONS
1931 1931 actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80))
1932 1932
1933 1933 return _(
1934 1934 'Example filter terms:\n' +
1935 1935 ' repository:vcs\n' +
1936 1936 ' username:marcin\n' +
1937 1937 ' username:(NOT marcin)\n' +
1938 1938 ' action:*push*\n' +
1939 1939 ' ip:127.0.0.1\n' +
1940 1940 ' date:20120101\n' +
1941 1941 ' date:[20120101100000 TO 20120102]\n' +
1942 1942 '\n' +
1943 1943 'Actions: {actions}\n' +
1944 1944 '\n' +
1945 1945 'Generate wildcards using \'*\' character:\n' +
1946 1946 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1947 1947 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1948 1948 '\n' +
1949 1949 'Optional AND / OR operators in queries\n' +
1950 1950 ' "repository:vcs OR repository:test"\n' +
1951 1951 ' "username:test AND repository:test*"\n'
1952 1952 ).format(actions=actions)
1953 1953
1954 1954
1955 1955 def not_mapped_error(repo_name):
1956 1956 from rhodecode.translation import _
1957 1957 flash(_('%s repository is not mapped to db; perhaps'
1958 1958 ' it was created or renamed on the filesystem.'
1959 1959 ' Please run the application again'
1960 1960 ' in order to rescan repositories') % repo_name, category='error')
1961 1961
1962 1962
1963 1963 def ip_range(ip_addr):
1964 1964 from rhodecode.model.db import UserIpMap
1965 1965 s, e = UserIpMap._get_ip_range(ip_addr)
1966 1966 return '%s - %s' % (s, e)
1967 1967
1968 1968
1969 1969 def form(url, method='post', needs_csrf_token=True, **attrs):
1970 1970 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1971 1971 if method.lower() != 'get' and needs_csrf_token:
1972 1972 raise Exception(
1973 1973 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1974 1974 'CSRF token. If the endpoint does not require such token you can ' +
1975 1975 'explicitly set the parameter needs_csrf_token to false.')
1976 1976
1977 1977 return insecure_form(url, method=method, **attrs)
1978 1978
1979 1979
1980 1980 def secure_form(form_url, method="POST", multipart=False, **attrs):
1981 1981 """Start a form tag that points the action to an url. This
1982 1982 form tag will also include the hidden field containing
1983 1983 the auth token.
1984 1984
1985 1985 The url options should be given either as a string, or as a
1986 1986 ``url()`` function. The method for the form defaults to POST.
1987 1987
1988 1988 Options:
1989 1989
1990 1990 ``multipart``
1991 1991 If set to True, the enctype is set to "multipart/form-data".
1992 1992 ``method``
1993 1993 The method to use when submitting the form, usually either
1994 1994 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1995 1995 hidden input with name _method is added to simulate the verb
1996 1996 over POST.
1997 1997
1998 1998 """
1999 1999
2000 2000 if 'request' in attrs:
2001 2001 session = attrs['request'].session
2002 2002 del attrs['request']
2003 2003 else:
2004 2004 raise ValueError(
2005 2005 'Calling this form requires request= to be passed as argument')
2006 2006
2007 2007 _form = insecure_form(form_url, method, multipart, **attrs)
2008 2008 token = literal(
2009 2009 '<input type="hidden" name="{}" value="{}">'.format(
2010 2010 csrf_token_key, get_csrf_token(session)))
2011 2011
2012 2012 return literal("%s\n%s" % (_form, token))
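# Illustrative usage from a Mako template (editor's sketch; the route name is
# made up, and end_form is assumed to be re-exported from webhelpers2):
#   ${h.secure_form(h.route_path('user_update', user_id=c.user.user_id),
#                   request=request)}
#   ... input fields ...
#   ${h.end_form()}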
2013 2013
2014 2014
2015 2015 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
2016 2016 select_html = select(name, selected, options, **attrs)
2017 2017
2018 2018 select2 = """
2019 2019 <script>
2020 2020 $(document).ready(function() {
2021 2021 $('#%s').select2({
2022 2022 containerCssClass: 'drop-menu %s',
2023 2023 dropdownCssClass: 'drop-menu-dropdown',
2024 2024 dropdownAutoWidth: true%s
2025 2025 });
2026 2026 });
2027 2027 </script>
2028 2028 """
2029 2029
2030 2030 filter_option = """,
2031 2031 minimumResultsForSearch: -1
2032 2032 """
2033 2033 input_id = attrs.get('id') or name
2034 2034 extra_classes = ' '.join(attrs.pop('extra_classes', []))
2035 2035 filter_enabled = "" if enable_filter else filter_option
2036 2036 select_script = literal(select2 % (input_id, extra_classes, filter_enabled))
2037 2037
2038 2038 return literal(select_html+select_script)
2039 2039
2040 2040
2041 2041 def get_visual_attr(tmpl_context_var, attr_name):
2042 2042 """
2043 2043 A safe way to get a variable from visual variable of template context
2044 2044
2045 2045 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
2046 2046 :param attr_name: name of the attribute we fetch from the c.visual
2047 2047 """
2048 2048 visual = getattr(tmpl_context_var, 'visual', None)
2049 2049 if not visual:
2050 2050 return
2051 2051 else:
2052 2052 return getattr(visual, attr_name, None)
2053 2053
2054 2054
2055 2055 def get_last_path_part(file_node):
2056 2056 if not file_node.path:
2057 2057 return '/'
2058 2058
2059 2059 path = safe_str(file_node.path.split('/')[-1])
2060 2060 return '../' + path
2061 2061
2062 2062
2063 2063 def route_url(*args, **kwargs):
2064 2064 """
2065 2065 Wrapper around pyramid's `route_url` (fully qualified url) function.
2066 2066 """
2067 2067 req = get_current_request()
2068 2068 return req.route_url(*args, **kwargs)
2069 2069
2070 2070
2071 2071 def route_path(*args, **kwargs):
2072 2072 """
2073 2073 Wrapper around pyramid's `route_path` function.
2074 2074 """
2075 2075 req = get_current_request()
2076 2076 return req.route_path(*args, **kwargs)
2077 2077
2078 2078
2079 2079 def route_path_or_none(*args, **kwargs):
2080 2080 try:
2081 2081 return route_path(*args, **kwargs)
2082 2082 except KeyError:
2083 2083 return None
2084 2084
2085 2085
2086 2086 def current_route_path(request, **kw):
2087 2087 new_args = request.GET.mixed()
2088 2088 new_args.update(kw)
2089 2089 return request.current_route_path(_query=new_args)
2090 2090
2091 2091
2092 2092 def curl_api_example(method, args):
2093 2093 args_json = json.dumps(OrderedDict([
2094 2094 ('id', 1),
2095 2095 ('auth_token', 'SECRET'),
2096 2096 ('method', method),
2097 2097 ('args', args)
2098 2098 ]))
2099 2099
2100 2100 return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format(
2101 2101 api_url=route_url('apiv2'),
2102 2102 args_json=args_json
2103 2103 )
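# Example output (editor's sketch; the api route and hostname are made up):
#   curl_api_example('get_repo', {'repoid': 'my-repo'})
#   # -> curl https://code.example.com/_admin/api -X POST \
#   #        -H 'content-type:text/plain' \
#   #        --data-binary '{"id": 1, "auth_token": "SECRET",
#   #                        "method": "get_repo", "args": {"repoid": "my-repo"}}'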
2104 2104
2105 2105
2106 2106 def api_call_example(method, args):
2107 2107 """
2108 2108 Generates an API call example via CURL
2109 2109 """
2110 2110 curl_call = curl_api_example(method, args)
2111 2111
2112 2112 return literal(
2113 2113 curl_call +
2114 2114 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2115 2115 "and needs to be of `api calls` role."
2116 2116 .format(token_url=route_url('my_account_auth_tokens')))
2117 2117
2118 2118
2119 2119 def notification_description(notification, request):
2120 2120 """
2121 2121 Generate notification human readable description based on notification type
2122 2122 """
2123 2123 from rhodecode.model.notification import NotificationModel
2124 2124 return NotificationModel().make_description(
2125 2125 notification, translate=request.translate)
2126 2126
2127 2127
2128 2128 def go_import_header(request, db_repo=None):
2129 2129 """
2130 2130 Creates a header for go-import functionality in Go
2131 2131 """
2132 2132
2133 2133 if not db_repo:
2134 2134 return
2135 2135 if 'go-get' not in request.GET:
2136 2136 return
2137 2137
2138 2138 clone_url = db_repo.clone_url()
2139 2139 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2140 2140 # we have a repo and go-get flag,
2141 2141 return literal('<meta name="go-import" content="{} {} {}">'.format(
2142 2142 prefix, db_repo.repo_type, clone_url))
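# Illustrative output (editor's sketch; hostname made up). For a clone url
# like https://code.example.com/my-repo on a git repo this emits:
#   <meta name="go-import" content="code.example.com/my-repo git https://code.example.com/my-repo">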
2143 2143
2144 2144
2145 2145 def reviewer_as_json(*args, **kwargs):
2146 2146 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
2147 2147 return _reviewer_as_json(*args, **kwargs)
2148 2148
2149 2149
2150 2150 def get_repo_view_type(request):
2151 2151 route_name = request.matched_route.name
2152 2152 route_to_view_type = {
2153 2153 'repo_changelog': 'commits',
2154 2154 'repo_commits': 'commits',
2155 2155 'repo_files': 'files',
2156 2156 'repo_summary': 'summary',
2157 2157 'repo_commit': 'commit'
2158 2158 }
2159 2159
2160 2160 return route_to_view_type.get(route_name)
2161 2161
2162 2162
2163 2163 def is_active(menu_entry, selected):
2164 2164 """
2165 2165 Returns active class for selecting menus in templates
2166 2166 <li class=${h.is_active('settings', current_active)}></li>
2167 2167 """
2168 2168 if not isinstance(menu_entry, list):
2169 2169 menu_entry = [menu_entry]
2170 2170
2171 2171 if selected in menu_entry:
2172 2172 return "active"
2173 2173
2174 2174
2175 2175 class IssuesRegistry(object):
2176 2176 """
2177 2177 issue_registry = IssuesRegistry()
2178 2178 some_func(issues_callback=issues_registry(...))
2179 2179 """
2180 2180
2181 2181 def __init__(self):
2182 2182 self.issues = []
2183 2183 self.unique_issues = collections.defaultdict(lambda: [])
2184 2184
2185 2185 def __call__(self, commit_dict=None):
2186 2186 def callback(issue):
2187 2187 if commit_dict and issue:
2188 2188 issue['commit'] = commit_dict
2189 2189 self.issues.append(issue)
2190 2190 self.unique_issues[issue['id']].append(issue)
2191 2191 return callback
2192 2192
2193 2193 def get_issues(self):
2194 2194 return self.issues
2195 2195
2196 2196 @property
2197 2197 def issues_unique_count(self):
2198 2198 return len(set(i['id'] for i in self.issues))
2199
2200
2201 def get_directory_statistics(start_path):
2202 """
2203 total_files, total_size, directory_stats = get_directory_statistics(start_path)
2204
2205 print(f"Directory statistics for: {start_path}\n")
2206 print(f"Total files: {total_files}")
2207 print(f"Total size: {format_size(total_size)}\n")
2208
2209 :param start_path: directory to walk and aggregate statistics for
2210 :return: tuple of (total_files, total_size, directory_stats)
2211 """
2212
2213 total_files = 0
2214 total_size = 0
2215 directory_stats = {}
2216
2217 for dir_path, dir_names, file_names in os.walk(start_path):
2218 dir_size = 0
2219 file_count = len(file_names)
2220
2221 for fname in file_names:
2222 filepath = os.path.join(dir_path, fname)
2223 if os.path.isfile(filepath):  # skip broken symlinks where getsize() would raise
2224 dir_size += os.path.getsize(filepath)
2225
2226 directory_stats[dir_path] = {'file_count': file_count, 'size': dir_size}
2227 total_files += file_count
2228 total_size += dir_size
2229
2230 return total_files, total_size, directory_stats
@@ -1,834 +1,827 b''
1 1 # Copyright (C) 2017-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 import os
21 21 import sys
22 22 import time
23 23 import platform
24 24 import collections
25 25 import psutil
26 26 from functools import wraps
27 27
28 28 import pkg_resources
29 29 import logging
30 30 import resource
31 31
32 32 import configparser
33 33
34 34 from rc_license.models import LicenseModel
35 35 from rhodecode.lib.str_utils import safe_str
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 _NA = 'NOT AVAILABLE'
41 41 _NA_FLOAT = 0.0
42 42
43 43 STATE_OK = 'ok'
44 44 STATE_ERR = 'error'
45 45 STATE_WARN = 'warning'
46 46
47 47 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
48 48
49 49
50 50 registered_helpers = {}
51 51
52 52
53 53 def register_sysinfo(func):
54 54 """
55 55 @register_sysinfo
56 56 def db_check():
57 57 pass
58 58
59 59 db_check == registered_helpers['db_check']
60 60 """
61 61 global registered_helpers
62 62 registered_helpers[func.__name__] = func
63 63
64 64 @wraps(func)
65 65 def _wrapper(*args, **kwargs):
66 66 return func(*args, **kwargs)
67 67 return _wrapper
68 68
69 69
70 70 # HELPERS
71 71 def percentage(part: int | float, whole: int | float):
72 72 whole = float(whole)
73 73 if whole > 0:
74 74 return round(100 * float(part) / whole, 1)
75 75 return 0.0
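# Illustrative results (editor's note):
#   percentage(50, 200)  # -> 25.0
#   percentage(1, 0)     # -> 0.0 (division by zero is guarded)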
76 76
77 77
78 78 def get_storage_size(storage_path):
79 79 sizes = []
80 80 for file_ in os.listdir(storage_path):
81 81 storage_file = os.path.join(storage_path, file_)
82 82 if os.path.isfile(storage_file):
83 83 try:
84 84 sizes.append(os.path.getsize(storage_file))
85 85 except OSError:
86 86 log.exception('Failed to get size of storage file %s', storage_file)
87 87 pass
88 88
89 89 return sum(sizes)
90 90
91 91
92 92 def get_resource(resource_type):
93 93 try:
94 94 return resource.getrlimit(resource_type)
95 95 except Exception:
96 96 return 'NOT_SUPPORTED'
97 97
98 98
99 99 def get_cert_path(ini_path):
100 100 default = '/etc/ssl/certs/ca-certificates.crt'
101 101 control_ca_bundle = os.path.join(
102 102 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
103 103 '/etc/ssl/certs/ca-certificates.crt')
104 104 if os.path.isfile(control_ca_bundle):
105 105 default = control_ca_bundle
106 106
107 107 return default
108 108
109 109
110 110 class SysInfoRes(object):
111 111 def __init__(self, value, state=None, human_value=None):
112 112 self.value = value
113 113 self.state = state or STATE_OK_DEFAULT
114 114 self.human_value = human_value or value
115 115
116 116 def __json__(self):
117 117 return {
118 118 'value': self.value,
119 119 'state': self.state,
120 120 'human_value': self.human_value,
121 121 }
122 122
123 123 def get_value(self):
124 124 return self.__json__()
125 125
126 126 def __str__(self):
127 127 return f'<SysInfoRes({self.__json__()})>'
128 128
129 129
130 130 class SysInfo(object):
131 131
132 132 def __init__(self, func_name, **kwargs):
133 133 self.function_name = func_name
134 134 self.value = _NA
135 135 self.state = None
136 136 self.kwargs = kwargs or {}
137 137
138 138 def __call__(self):
139 139 computed = self.compute(**self.kwargs)
140 140 if not isinstance(computed, SysInfoRes):
141 141 raise ValueError(
142 142 'computed value for {} is not instance of '
143 143 '{}, got {} instead'.format(
144 144 self.function_name, SysInfoRes, type(computed)))
145 145 return computed.__json__()
146 146
147 147 def __str__(self):
148 148 return f'<SysInfo({self.function_name})>'
149 149
150 150 def compute(self, **kwargs):
151 151 return self.function_name(**kwargs)
152 152
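# Illustrative usage (editor's sketch): wrap one of the registered helpers
# below and call it to get the JSON-serializable result, e.g.
#   SysInfo(python_info)()
#   # -> {'value': {...}, 'state': {'message': '', 'type': 'ok'}, 'human_value': {...}}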
153 153
154 154 # SysInfo functions
155 155 @register_sysinfo
156 156 def python_info():
157 157 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
158 158 executable=sys.executable)
159 159 return SysInfoRes(value=value)
160 160
161 161
162 162 @register_sysinfo
163 163 def py_modules():
164 164 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
165 165 for p in pkg_resources.working_set])
166 166
167 167 value = sorted(mods.items(), key=lambda k: k[0].lower())
168 168 return SysInfoRes(value=value)
169 169
170 170
171 171 @register_sysinfo
172 172 def platform_type():
173 173 from rhodecode.lib.utils import generate_platform_uuid
174 174
175 175 value = dict(
176 176 name=safe_str(platform.platform()),
177 177 uuid=generate_platform_uuid()
178 178 )
179 179 return SysInfoRes(value=value)
180 180
181 181
182 182 @register_sysinfo
183 183 def locale_info():
184 184 import locale
185 185
186 186 def safe_get_locale(locale_name):
187 187 try:
188 188 return locale.getlocale(locale_name)
189 189 except TypeError:
190 190 return f'FAILED_LOCALE_GET:{locale_name}'
191 191
192 192 value = dict(
193 193 locale_default=locale.getlocale(),
194 194 locale_lc_all=safe_get_locale(locale.LC_ALL),
195 195 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
196 196 lang_env=os.environ.get('LANG'),
197 197 lc_all_env=os.environ.get('LC_ALL'),
198 198 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
199 199 )
200 200 human_value = (
201 201 f"LANG: {value['lang_env']}, "
202 202 f"locale LC_ALL: {value['locale_lc_all']}, "
203 203 f"locale LC_CTYPE: {value['locale_lc_ctype']}, "
204 204 f"Default locales: {value['locale_default']}")
205 205
206 206 return SysInfoRes(value=value, human_value=human_value)
207 207
208 208
209 209 @register_sysinfo
210 210 def ulimit_info():
211 211 data = collections.OrderedDict([
212 212 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
213 213 ('file size', get_resource(resource.RLIMIT_FSIZE)),
214 214 ('stack size', get_resource(resource.RLIMIT_STACK)),
215 215 ('core file size', get_resource(resource.RLIMIT_CORE)),
216 216 ('address space size', get_resource(resource.RLIMIT_AS)),
217 217 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
218 218 ('heap size', get_resource(resource.RLIMIT_DATA)),
219 219 ('rss size', get_resource(resource.RLIMIT_RSS)),
220 220 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
221 221 ('open files', get_resource(resource.RLIMIT_NOFILE)),
222 222 ])
223 223
224 224 text = ', '.join(f'{k}:{v}' for k, v in data.items())
225 225
226 226 value = {
227 227 'limits': data,
228 228 'text': text,
229 229 }
230 230 return SysInfoRes(value=value)
231 231
232 232
233 233 @register_sysinfo
234 234 def uptime():
235 235 from rhodecode.lib.helpers import age, time_to_datetime
236 236 from rhodecode.translation import TranslationString
237 237
238 238 value = dict(boot_time=0, uptime=0, text='')
239 239 state = STATE_OK_DEFAULT
240 240
241 241 boot_time = psutil.boot_time()
242 242 value['boot_time'] = boot_time
243 243 value['uptime'] = time.time() - boot_time
244 244
245 245 date_or_age = age(time_to_datetime(boot_time))
246 246 if isinstance(date_or_age, TranslationString):
247 247 date_or_age = date_or_age.interpolate()
248 248
249 249 human_value = value.copy()
250 250 human_value['boot_time'] = time_to_datetime(boot_time)
251 251 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
252 252
253 253 human_value['text'] = f'Server started {date_or_age}'
254 254 return SysInfoRes(value=value, human_value=human_value)
255 255
256 256
257 257 @register_sysinfo
258 258 def memory():
259 259 from rhodecode.lib.helpers import format_byte_size_binary
260 260 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
261 261 percent_used=0, free=0, inactive=0, active=0, shared=0,
262 262 total=0, buffers=0, text='')
263 263
264 264 state = STATE_OK_DEFAULT
265 265
266 266 value.update(dict(psutil.virtual_memory()._asdict()))
267 267 value['used_real'] = value['total'] - value['available']
268 268 value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1)
269 269
270 270 human_value = value.copy()
271 271 human_value['text'] = '{}/{}, {}% used'.format(
272 272 format_byte_size_binary(value['used_real']),
273 273 format_byte_size_binary(value['total']),
274 274 value['percent_used'])
275 275
276 276 keys = list(value.keys())[::]
277 277 keys.pop(keys.index('percent'))
278 278 keys.pop(keys.index('percent_used'))
279 279 keys.pop(keys.index('text'))
280 280 for k in keys:
281 281 human_value[k] = format_byte_size_binary(value[k])
282 282
283 283 if state['type'] == STATE_OK and value['percent_used'] > 90:
284 284 msg = 'Critical: your available RAM memory is very low.'
285 285 state = {'message': msg, 'type': STATE_ERR}
286 286
287 287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
288 288 msg = 'Warning: your available RAM memory is running low.'
289 289 state = {'message': msg, 'type': STATE_WARN}
290 290
291 291 return SysInfoRes(value=value, state=state, human_value=human_value)
292 292
293 293
294 294 @register_sysinfo
295 295 def machine_load():
296 296 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
297 297 state = STATE_OK_DEFAULT
298 298
299 299 # load averages
300 300 if hasattr(psutil.os, 'getloadavg'):
301 301 value.update(dict(
302 302 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
303 303 ))
304 304
305 305 human_value = value.copy()
306 306 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
307 307 value['1_min'], value['5_min'], value['15_min'])
308 308
309 309 if state['type'] == STATE_OK and value['15_min'] > 5.0:
310 310 msg = 'Warning: your machine load is very high.'
311 311 state = {'message': msg, 'type': STATE_WARN}
312 312
313 313 return SysInfoRes(value=value, state=state, human_value=human_value)
314 314
315 315
316 316 @register_sysinfo
317 317 def cpu():
318 318 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
319 319 state = STATE_OK_DEFAULT
320 320
321 321 value['cpu'] = psutil.cpu_percent(0.5)
322 322 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
323 323 value['cpu_count'] = psutil.cpu_count()
324 324
325 325 human_value = value.copy()
326 326 human_value['text'] = f'{value["cpu_count"]} cores at {value["cpu"]} %'
327 327
328 328 return SysInfoRes(value=value, state=state, human_value=human_value)
329 329
330 330
331 331 @register_sysinfo
332 332 def storage():
333 333 from rhodecode.lib.helpers import format_byte_size_binary
334 334 from rhodecode.lib.utils import get_rhodecode_repo_store_path
335 335 path = get_rhodecode_repo_store_path()
336 336
337 337 value = dict(percent=0, used=0, total=0, path=path, text='')
338 338 state = STATE_OK_DEFAULT
339 339
340 340 try:
341 341 value.update(dict(psutil.disk_usage(path)._asdict()))
342 342 except Exception as e:
343 343 log.exception('Failed to fetch disk info')
344 344 state = {'message': str(e), 'type': STATE_ERR}
345 345
346 346 human_value = value.copy()
347 347 human_value['used'] = format_byte_size_binary(value['used'])
348 348 human_value['total'] = format_byte_size_binary(value['total'])
349 349 human_value['text'] = "{}/{}, {}% used".format(
350 350 format_byte_size_binary(value['used']),
351 351 format_byte_size_binary(value['total']),
352 352 value['percent'])
353 353
354 354 if state['type'] == STATE_OK and value['percent'] > 90:
355 355 msg = 'Critical: your disk space is very low.'
356 356 state = {'message': msg, 'type': STATE_ERR}
357 357
358 358 elif state['type'] == STATE_OK and value['percent'] > 70:
359 359 msg = 'Warning: your disk space is running low.'
360 360 state = {'message': msg, 'type': STATE_WARN}
361 361
362 362 return SysInfoRes(value=value, state=state, human_value=human_value)
363 363
364 364
365 365 @register_sysinfo
366 366 def storage_inodes():
367 367 from rhodecode.lib.utils import get_rhodecode_repo_store_path
368 368 path = get_rhodecode_repo_store_path()
369 369
370 370 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
371 371 state = STATE_OK_DEFAULT
372 372
373 373 try:
374 374 i_stat = os.statvfs(path)
375 375 value['free'] = i_stat.f_ffree
376 376 value['used'] = i_stat.f_files-i_stat.f_favail
377 377 value['total'] = i_stat.f_files
378 378 value['percent'] = percentage(value['used'], value['total'])
379 379 except Exception as e:
380 380 log.exception('Failed to fetch disk inodes info')
381 381 state = {'message': str(e), 'type': STATE_ERR}
382 382
383 383 human_value = value.copy()
384 384 human_value['text'] = "{}/{}, {}% used".format(
385 385 value['used'], value['total'], value['percent'])
386 386
387 387 if state['type'] == STATE_OK and value['percent'] > 90:
388 388 msg = 'Critical: your disk free inodes are very low.'
389 389 state = {'message': msg, 'type': STATE_ERR}
390 390
391 391 elif state['type'] == STATE_OK and value['percent'] > 70:
392 392 msg = 'Warning: your disk free inodes are running low.'
393 393 state = {'message': msg, 'type': STATE_WARN}
394 394
395 395 return SysInfoRes(value=value, state=state, human_value=human_value)
396 396
397 397
398 398 @register_sysinfo
399 399 def storage_archives():
400 400 import rhodecode
401 401 from rhodecode.lib.helpers import format_byte_size_binary
402 from rhodecode.lib.rc_cache.archive_cache.utils import get_directory_statistics
402 from rhodecode.lib.archive_cache import get_archival_cache_store
403 403
404 404 storage_type = rhodecode.ConfigGet().get_str('archive_cache.backend.type')
405 storage_key = 'archive_cache.filesystem.store_dir'
406 405
407 default_msg = 'Archive cache storage is controlled by '\
408 f'{storage_key}=/path/to/cache option in the .ini file'
409 path = rhodecode.ConfigGet().get_str(storage_key, missing=default_msg)
410
411 value = dict(percent=0, used=0, total=0, items=0, path=path, text='', type=storage_type)
406 value = dict(percent=0, used=0, total=0, items=0, path='', text='', type=storage_type)
412 407 state = STATE_OK_DEFAULT
413 408 try:
414 if storage_type != 'filesystem':
415 # raise Exc to stop reporting on different type
416 raise ValueError('Storage type must be "filesystem"')
409 d_cache = get_archival_cache_store(config=rhodecode.CONFIG)
417 410
418 total_files, total_size, _directory_stats = get_directory_statistics(path)
411 total_files, total_size, _directory_stats = d_cache.get_statistics()
419 412
420 413 value.update({
421 414 'percent': 100,
422 415 'used': total_size,
423 416 'total': total_size,
424 'items': total_files
417 'items': total_files,
418 'path': d_cache.storage_path
425 419 })
426 420
427 421 except Exception as e:
428 422 log.exception('failed to fetch archive cache storage')
429 423 state = {'message': str(e), 'type': STATE_ERR}
430 424
431 425 human_value = value.copy()
432 426 human_value['used'] = format_byte_size_binary(value['used'])
433 427 human_value['total'] = format_byte_size_binary(value['total'])
434 428 human_value['text'] = "{} ({} items)".format(
435 429 human_value['used'], value['items'])
436 430
437 431 return SysInfoRes(value=value, state=state, human_value=human_value)
438 432
439 433
440 434 @register_sysinfo
441 435 def storage_gist():
442 436 from rhodecode.model.gist import GIST_STORE_LOC
443 437 from rhodecode.lib.utils import safe_str, get_rhodecode_repo_store_path
444 from rhodecode.lib.helpers import format_byte_size_binary
445 from rhodecode.lib.rc_cache.archive_cache.utils import get_directory_statistics
438 from rhodecode.lib.helpers import format_byte_size_binary, get_directory_statistics
446 439
447 440 path = safe_str(os.path.join(
448 441 get_rhodecode_repo_store_path(), GIST_STORE_LOC))
449 442
450 443 # gist storage
451 444 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
452 445 state = STATE_OK_DEFAULT
453 446
454 447 try:
455 448 total_files, total_size, _directory_stats = get_directory_statistics(path)
456 449 value.update({
457 450 'percent': 100,
458 451 'used': total_size,
459 452 'total': total_size,
460 453 'items': total_files
461 454 })
462 455 except Exception as e:
463 456 log.exception('failed to fetch gist storage items')
464 457 state = {'message': str(e), 'type': STATE_ERR}
465 458
466 459 human_value = value.copy()
467 460 human_value['used'] = format_byte_size_binary(value['used'])
468 461 human_value['total'] = format_byte_size_binary(value['total'])
469 462 human_value['text'] = "{} ({} items)".format(
470 463 human_value['used'], value['items'])
471 464
472 465 return SysInfoRes(value=value, state=state, human_value=human_value)
473 466
474 467
475 468 @register_sysinfo
476 469 def storage_temp():
477 470 import tempfile
478 471 from rhodecode.lib.helpers import format_byte_size_binary
479 472
480 473 path = tempfile.gettempdir()
481 474 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
482 475 state = STATE_OK_DEFAULT
483 476
484 477 if not psutil:
485 478 return SysInfoRes(value=value, state=state)
486 479
487 480 try:
488 481 value.update(dict(psutil.disk_usage(path)._asdict()))
489 482 except Exception as e:
490 483 log.exception('Failed to fetch temp dir info')
491 484 state = {'message': str(e), 'type': STATE_ERR}
492 485
493 486 human_value = value.copy()
494 487 human_value['used'] = format_byte_size_binary(value['used'])
495 488 human_value['total'] = format_byte_size_binary(value['total'])
496 489 human_value['text'] = "{}/{}, {}% used".format(
497 490 format_byte_size_binary(value['used']),
498 491 format_byte_size_binary(value['total']),
499 492 value['percent'])
500 493
501 494 return SysInfoRes(value=value, state=state, human_value=human_value)
502 495
503 496
504 497 @register_sysinfo
505 498 def search_info():
506 499 import rhodecode
507 500 from rhodecode.lib.index import searcher_from_config
508 501
509 502 backend = rhodecode.CONFIG.get('search.module', '')
510 503 location = rhodecode.CONFIG.get('search.location', '')
511 504
512 505 try:
513 506 searcher = searcher_from_config(rhodecode.CONFIG)
514 507 searcher = searcher.__class__.__name__
515 508 except Exception:
516 509 searcher = None
517 510
518 511 value = dict(
519 512 backend=backend, searcher=searcher, location=location, text='')
520 513 state = STATE_OK_DEFAULT
521 514
522 515 human_value = value.copy()
523 516 human_value['text'] = "backend:`{}`".format(human_value['backend'])
524 517
525 518 return SysInfoRes(value=value, state=state, human_value=human_value)
526 519
527 520
528 521 @register_sysinfo
529 522 def git_info():
530 523 from rhodecode.lib.vcs.backends import git
531 524 state = STATE_OK_DEFAULT
532 525 value = human_value = ''
533 526 try:
534 527 value = git.discover_git_version(raise_on_exc=True)
535 528 human_value = f'version reported from VCSServer: {value}'
536 529 except Exception as e:
537 530 state = {'message': str(e), 'type': STATE_ERR}
538 531
539 532 return SysInfoRes(value=value, state=state, human_value=human_value)
540 533
541 534
542 535 @register_sysinfo
543 536 def hg_info():
544 537 from rhodecode.lib.vcs.backends import hg
545 538 state = STATE_OK_DEFAULT
546 539 value = human_value = ''
547 540 try:
548 541 value = hg.discover_hg_version(raise_on_exc=True)
549 542 human_value = f'version reported from VCSServer: {value}'
550 543 except Exception as e:
551 544 state = {'message': str(e), 'type': STATE_ERR}
552 545 return SysInfoRes(value=value, state=state, human_value=human_value)
553 546
554 547
555 548 @register_sysinfo
556 549 def svn_info():
557 550 from rhodecode.lib.vcs.backends import svn
558 551 state = STATE_OK_DEFAULT
559 552 value = human_value = ''
560 553 try:
561 554 value = svn.discover_svn_version(raise_on_exc=True)
562 555 human_value = f'version reported from VCSServer: {value}'
563 556 except Exception as e:
564 557 state = {'message': str(e), 'type': STATE_ERR}
565 558 return SysInfoRes(value=value, state=state, human_value=human_value)
566 559
567 560
568 561 @register_sysinfo
569 562 def vcs_backends():
570 563 import rhodecode
571 564 value = rhodecode.CONFIG.get('vcs.backends')
572 565 human_value = 'Enabled backends in order: {}'.format(','.join(value))
573 566 return SysInfoRes(value=value, human_value=human_value)
574 567
575 568
576 569 @register_sysinfo
577 570 def vcs_server():
578 571 import rhodecode
579 572 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
580 573
581 574 server_url = rhodecode.CONFIG.get('vcs.server')
582 575 enabled = rhodecode.CONFIG.get('vcs.server.enable')
583 576 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
584 577 state = STATE_OK_DEFAULT
585 578 version = None
586 579 workers = 0
587 580
588 581 try:
589 582 data = get_vcsserver_service_data()
590 583 if data and 'version' in data:
591 584 version = data['version']
592 585
593 586 if data and 'config' in data:
594 587 conf = data['config']
595 588 workers = conf.get('workers', 'NOT AVAILABLE')
596 589
597 590 connection = 'connected'
598 591 except Exception as e:
599 592 connection = 'failed'
600 593 state = {'message': str(e), 'type': STATE_ERR}
601 594
602 595 value = dict(
603 596 url=server_url,
604 597 enabled=enabled,
605 598 protocol=protocol,
606 599 connection=connection,
607 600 version=version,
608 601 text='',
609 602 )
610 603
611 604 human_value = value.copy()
612 605 human_value['text'] = \
613 606 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
614 607 url=server_url, ver=version, workers=workers, mode=protocol,
615 608 conn=connection)
616 609
617 610 return SysInfoRes(value=value, state=state, human_value=human_value)
618 611
619 612
620 613 @register_sysinfo
621 614 def vcs_server_config():
622 615 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
623 616 state = STATE_OK_DEFAULT
624 617
625 618 value = {}
626 619 try:
627 620 data = get_vcsserver_service_data()
628 621 value = data['app_config']
629 622 except Exception as e:
630 623 state = {'message': str(e), 'type': STATE_ERR}
631 624
632 625 human_value = value.copy()
633 626 human_value['text'] = 'VCS Server config'
634 627
635 628 return SysInfoRes(value=value, state=state, human_value=human_value)
636 629
637 630
638 631 @register_sysinfo
639 632 def rhodecode_app_info():
640 633 import rhodecode
641 634 edition = rhodecode.CONFIG.get('rhodecode.edition')
642 635
643 636 value = dict(
644 637 rhodecode_version=rhodecode.__version__,
645 638 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
646 639 text=''
647 640 )
648 641 human_value = value.copy()
649 642 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
650 643 edition=edition, ver=value['rhodecode_version']
651 644 )
652 645 return SysInfoRes(value=value, human_value=human_value)
653 646
654 647
655 648 @register_sysinfo
656 649 def rhodecode_config():
657 650 import rhodecode
658 651 path = rhodecode.CONFIG.get('__file__')
659 652 rhodecode_ini_safe = rhodecode.CONFIG.copy()
660 653 cert_path = get_cert_path(path)
661 654
662 655 try:
663 656 config = configparser.ConfigParser()
664 657 config.read(path)
665 658 parsed_ini = config
666 659 if parsed_ini.has_section('server:main'):
667 660 parsed_ini = dict(parsed_ini.items('server:main'))
668 661 except Exception:
669 662 log.exception('Failed to read .ini file for display')
670 663 parsed_ini = {}
671 664
672 665 rhodecode_ini_safe['server:main'] = parsed_ini
673 666
674 667 blacklist = [
675 668 f'rhodecode_{LicenseModel.LICENSE_DB_KEY}',
676 669 'routes.map',
677 670 'sqlalchemy.db1.url',
678 671 'channelstream.secret',
679 672 'beaker.session.secret',
680 673 'rhodecode.encrypted_values.secret',
681 674 'rhodecode_auth_github_consumer_key',
682 675 'rhodecode_auth_github_consumer_secret',
683 676 'rhodecode_auth_google_consumer_key',
684 677 'rhodecode_auth_google_consumer_secret',
685 678 'rhodecode_auth_bitbucket_consumer_secret',
686 679 'rhodecode_auth_bitbucket_consumer_key',
687 680 'rhodecode_auth_twitter_consumer_secret',
688 681 'rhodecode_auth_twitter_consumer_key',
689 682
690 683 'rhodecode_auth_twitter_secret',
691 684 'rhodecode_auth_github_secret',
692 685 'rhodecode_auth_google_secret',
693 686 'rhodecode_auth_bitbucket_secret',
694 687
695 688 'appenlight.api_key',
696 689 ('app_conf', 'sqlalchemy.db1.url')
697 690 ]
698 691 for k in blacklist:
699 692 if isinstance(k, tuple):
700 693 section, key = k
701 694 if section in rhodecode_ini_safe:
702 695 rhodecode_ini_safe[section] = '**OBFUSCATED**'
703 696 else:
704 697 rhodecode_ini_safe.pop(k, None)
705 698
706 699 # TODO: maybe put some CONFIG checks here ?
707 700 return SysInfoRes(value={'config': rhodecode_ini_safe,
708 701 'path': path, 'cert_path': cert_path})
709 702
710 703
711 704 @register_sysinfo
712 705 def database_info():
713 706 import rhodecode
714 707 from sqlalchemy.engine import url as engine_url
715 708 from rhodecode.model import meta
716 709 from rhodecode.model.meta import Session
717 710 from rhodecode.model.db import DbMigrateVersion
718 711
719 712 state = STATE_OK_DEFAULT
720 713
721 714 db_migrate = DbMigrateVersion.query().filter(
722 715 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
723 716
724 717 db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
725 718
726 719 try:
727 720 engine = meta.get_engine()
728 721 db_server_info = engine.dialect._get_server_version_info(
729 722 Session.connection(bind=engine))
730 723 db_version = '.'.join(map(str, db_server_info))
731 724 except Exception:
732 725 log.exception('failed to fetch db version')
733 726 db_version = 'UNKNOWN'
734 727
735 728 db_info = dict(
736 729 migrate_version=db_migrate.version,
737 730 type=db_url_obj.get_backend_name(),
738 731 version=db_version,
739 732 url=repr(db_url_obj)
740 733 )
741 734 current_version = db_migrate.version
742 735 expected_version = rhodecode.__dbversion__
743 736 if state['type'] == STATE_OK and current_version != expected_version:
744 737 msg = 'Critical: database schema mismatch, ' \
745 738 'expected version {}, got {}. ' \
746 739 'Please run migrations on your database.'.format(
747 740 expected_version, current_version)
748 741 state = {'message': msg, 'type': STATE_ERR}
749 742
750 743 human_value = db_info.copy()
751 744 human_value['url'] = "{} @ migration version: {}".format(
752 745 db_info['url'], db_info['migrate_version'])
753 746 human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
754 747 return SysInfoRes(value=db_info, state=state, human_value=human_value)
755 748
756 749
757 750 @register_sysinfo
758 751 def server_info(environ):
759 752 import rhodecode
760 753 from rhodecode.lib.base import get_server_ip_addr, get_server_port
761 754
762 755 value = {
763 756 'server_ip': '{}:{}'.format(
764 757 get_server_ip_addr(environ, log_errors=False),
765 758 get_server_port(environ)
766 759 ),
767 760 'server_id': rhodecode.CONFIG.get('instance_id'),
768 761 }
769 762 return SysInfoRes(value=value)
770 763
771 764
772 765 @register_sysinfo
773 766 def usage_info():
774 767 from rhodecode.model.db import User, Repository, true
775 768 value = {
776 769 'users': User.query().count(),
777 770 'users_active': User.query().filter(User.active == true()).count(),
778 771 'repositories': Repository.query().count(),
779 772 'repository_types': {
780 773 'hg': Repository.query().filter(
781 774 Repository.repo_type == 'hg').count(),
782 775 'git': Repository.query().filter(
783 776 Repository.repo_type == 'git').count(),
784 777 'svn': Repository.query().filter(
785 778 Repository.repo_type == 'svn').count(),
786 779 },
787 780 }
788 781 return SysInfoRes(value=value)
789 782
790 783
791 784 def get_system_info(environ):
792 785 environ = environ or {}
793 786 return {
794 787 'rhodecode_app': SysInfo(rhodecode_app_info)(),
795 788 'rhodecode_config': SysInfo(rhodecode_config)(),
796 789 'rhodecode_usage': SysInfo(usage_info)(),
797 790 'python': SysInfo(python_info)(),
798 791 'py_modules': SysInfo(py_modules)(),
799 792
800 793 'platform': SysInfo(platform_type)(),
801 794 'locale': SysInfo(locale_info)(),
802 795 'server': SysInfo(server_info, environ=environ)(),
803 796 'database': SysInfo(database_info)(),
804 797 'ulimit': SysInfo(ulimit_info)(),
805 798 'storage': SysInfo(storage)(),
806 799 'storage_inodes': SysInfo(storage_inodes)(),
807 800 'storage_archive': SysInfo(storage_archives)(),
808 801 'storage_gist': SysInfo(storage_gist)(),
809 802 'storage_temp': SysInfo(storage_temp)(),
810 803
811 804 'search': SysInfo(search_info)(),
812 805
813 806 'uptime': SysInfo(uptime)(),
814 807 'load': SysInfo(machine_load)(),
815 808 'cpu': SysInfo(cpu)(),
816 809 'memory': SysInfo(memory)(),
817 810
818 811 'vcs_backends': SysInfo(vcs_backends)(),
819 812 'vcs_server': SysInfo(vcs_server)(),
820 813
821 814 'vcs_server_config': SysInfo(vcs_server_config)(),
822 815
823 816 'git': SysInfo(git_info)(),
824 817 'hg': SysInfo(hg_info)(),
825 818 'svn': SysInfo(svn_info)(),
826 819 }
827 820
828 821
829 822 def load_system_info(key):
830 823 """
831 824 load_system_info('vcs_server')
832 825 load_system_info('database')
833 826 """
834 827 return SysInfo(registered_helpers[key])()
@@ -1,226 +1,226 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import pytest
21 21
22 22 from rhodecode.lib.config_utils import get_app_config
23 23 from rhodecode.tests.fixture import TestINI
24 24 from rhodecode.tests import TESTS_TMP_PATH
25 25 from rhodecode.tests.server_utils import RcVCSServer
26 26
27 27
28 28 @pytest.fixture(scope='session')
29 29 def vcsserver(request, vcsserver_port, vcsserver_factory):
30 30 """
31 31 Session scope VCSServer.
32 32
33 33 Tests which need the VCSServer have to rely on this fixture in order
34 34 to ensure it will be running.
35 35
36 36 For specific needs, the fixture vcsserver_factory can be used. It allows
37 37 adjusting the configuration file for the test run.
38 38
39 39 Command line args:
40 40
41 41 --without-vcsserver: Allows switching this fixture off. You have to
42 42 manually start the server.
43 43
44 44 --vcsserver-port: Will expect the VCSServer to listen on this port.
45 45 """
46 46
47 47 if not request.config.getoption('with_vcsserver'):
48 48 return None
49 49
50 50 return vcsserver_factory(
51 51 request, vcsserver_port=vcsserver_port)
52 52
53 53
54 54 @pytest.fixture(scope='session')
55 55 def vcsserver_factory(tmpdir_factory):
56 56 """
57 57 Use this if you need a running vcsserver with a special configuration.
58 58 """
59 59
60 60 def factory(request, overrides=(), vcsserver_port=None,
61 61 log_file=None, workers='2'):
62 62
63 63 if vcsserver_port is None:
64 64 vcsserver_port = get_available_port()
65 65
66 66 overrides = list(overrides)
67 67 overrides.append({'server:main': {'port': vcsserver_port}})
68 68
69 69 option_name = 'vcsserver_config_http'
70 70 override_option_name = 'vcsserver_config_override'
71 71 config_file = get_config(
72 72 request.config, option_name=option_name,
73 73 override_option_name=override_option_name, overrides=overrides,
74 74 basetemp=tmpdir_factory.getbasetemp().strpath,
75 75 prefix='test_vcs_')
76 76
77 77 server = RcVCSServer(config_file, log_file, workers)
78 78 server.start()
79 79
80 80 @request.addfinalizer
81 81 def cleanup():
82 82 server.shutdown()
83 83
84 84 server.wait_until_ready()
85 85 return server
86 86
87 87 return factory
88 88
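
A hedged usage sketch of the factory above; the fixture name and the override key are hypothetical, while the keyword arguments follow the factory signature shown:

    @pytest.fixture(scope='session')
    def custom_vcsserver(request, vcsserver_factory):
        # hypothetical fixture: start a VCSServer with four workers and an
        # extra [server:main] override; a free port is picked automatically
        return vcsserver_factory(
            request,
            overrides=[{'server:main': {'max_requests': '1000'}}],  # hypothetical key
            workers='4')
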
89 89
90 90 def _use_log_level(config):
91 91 level = config.getoption('test_loglevel') or 'critical'
92 92 return level.upper()
93 93
94 94
95 95 @pytest.fixture(scope='session')
96 96 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
97 97 option_name = 'pyramid_config'
98 98 log_level = _use_log_level(request.config)
99 99
100 100 overrides = [
101 101 {'server:main': {'port': rcserver_port}},
102 102 {'app:main': {
103 'cache_dir': '%(here)s/rc_data',
103 'cache_dir': '%(here)s/rc-tests/rc_data',
104 104 'vcs.server': f'localhost:{vcsserver_port}',
105 105 # johbo: We will always start the VCSServer on our own based on the
106 106 # fixtures of the test cases. For the test run it must always be
107 107 # off in the INI file.
108 108 'vcs.start_server': 'false',
109 109
110 110 'vcs.server.protocol': 'http',
111 111 'vcs.scm_app_implementation': 'http',
112 112 'vcs.svn.proxy.enabled': 'true',
113 113 'vcs.hooks.protocol': 'http',
114 114 'vcs.hooks.host': '*',
115 115 'repo_store.path': TESTS_TMP_PATH,
116 116 'app.service_api.token': 'service_secret_token',
117 117 }},
118 118
119 119 {'handler_console': {
120 120 'class': 'StreamHandler',
121 121 'args': '(sys.stderr,)',
122 122 'level': log_level,
123 123 }},
124 124
125 125 ]
126 126
127 127 filename = get_config(
128 128 request.config, option_name=option_name,
129 129 override_option_name='{}_override'.format(option_name),
130 130 overrides=overrides,
131 131 basetemp=tmpdir_factory.getbasetemp().strpath,
132 132 prefix='test_rce_')
133 133 return filename
134 134
135 135
136 136 @pytest.fixture(scope='session')
137 137 def ini_settings(ini_config):
138 138 ini_path = ini_config
139 139 return get_app_config(ini_path)
140 140
141 141
142 142 def get_available_port(min_port=40000, max_port=55555):
143 143 from rhodecode.lib.utils2 import get_available_port as _get_port
144 144 return _get_port(min_port, max_port)
145 145
146 146
147 147 @pytest.fixture(scope='session')
148 148 def rcserver_port(request):
149 149 port = get_available_port()
150 150 print(f'Using rhodecode port {port}')
151 151 return port
152 152
153 153
154 154 @pytest.fixture(scope='session')
155 155 def vcsserver_port(request):
156 156 port = request.config.getoption('--vcsserver-port')
157 157 if port is None:
158 158 port = get_available_port()
159 159 print(f'Using vcsserver port {port}')
160 160 return port
161 161
162 162
163 163 @pytest.fixture(scope='session')
164 164 def available_port_factory():
165 165 """
166 166 Returns a callable which returns free port numbers.
167 167 """
168 168 return get_available_port
169 169
170 170
171 171 @pytest.fixture()
172 172 def available_port(available_port_factory):
173 173 """
174 174 Gives you one free port for the current test.
175 175
176 176 Uses "available_port_factory" to retrieve the port.
177 177 """
178 178 return available_port_factory()
179 179
180 180
181 181 @pytest.fixture(scope='session')
182 182 def testini_factory(tmpdir_factory, ini_config):
183 183 """
184 184 Factory to create an INI file based on TestINI.
185 185
186 186 It will make sure to place the INI file in the correct directory.
187 187 """
188 188 basetemp = tmpdir_factory.getbasetemp().strpath
189 189 return TestIniFactory(basetemp, ini_config)
190 190
191 191
192 192 class TestIniFactory(object):
193 193
194 194 def __init__(self, basetemp, template_ini):
195 195 self._basetemp = basetemp
196 196 self._template_ini = template_ini
197 197
198 198 def __call__(self, ini_params, new_file_prefix='test'):
199 199 ini_file = TestINI(
200 200 self._template_ini, ini_params=ini_params,
201 201 new_file_prefix=new_file_prefix, dir=self._basetemp)
202 202 result = ini_file.create()
203 203 return result
204 204
205 205
206 206 def get_config(
207 207 config, option_name, override_option_name, overrides=None,
208 208 basetemp=None, prefix='test'):
209 209 """
210 210 Find a configuration file and apply overrides for the given `prefix`.
211 211 """
212 212 config_file = (
213 213 config.getoption(option_name) or config.getini(option_name))
214 214 if not config_file:
215 215 pytest.exit(
216 216 "Configuration error, could not extract {}.".format(option_name))
217 217
218 218 overrides = overrides or []
219 219 config_override = config.getoption(override_option_name)
220 220 if config_override:
221 221 overrides.append(config_override)
222 222 temp_ini_file = TestINI(
223 223 config_file, ini_params=overrides, new_file_prefix=prefix,
224 224 dir=basetemp)
225 225
226 226 return temp_ini_file.create()
@@ -1,785 +1,827 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = true
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Prefix middleware for RhodeCode.
45 45 ; recommended when using proxy setup.
46 46 ; allows setting RhodeCode under a URL prefix on the server.
47 47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 48 ; And set your prefix like: `prefix = /custom_prefix`
49 49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 50 ; to make your cookies only work on prefix url
51 51 [filter:proxy-prefix]
52 52 use = egg:PasteDeploy#prefix
53 53 prefix = /
54 54
55 55 [app:main]
56 56 ; The %(here)s variable will be replaced with the absolute path of the parent directory
57 57 ; of this file
58 58 ; Each option in the app:main section can be overridden by an environment variable
59 59 ;
60 60 ;To override an option:
61 61 ;
62 62 ;RC_<KeyName>
63 63 ;Everything should be uppercase, . and - should be replaced by _.
64 64 ;For example, if you have these configuration settings:
65 65 ;rc_cache.repo_object.backend = foo
66 66 ;can be overridden by
67 67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68 68
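
A small sketch of the override naming rule described above; pure string manipulation, with the assumption (consistent with the example given) that keys already starting with rc_ are not double-prefixed:

    def ini_key_to_env_var(key: str) -> str:
        # uppercase, '.' and '-' replaced by '_', RC_ prefix added once
        name = key.upper().replace('.', '_').replace('-', '_')
        return name if name.startswith('RC_') else 'RC_' + name

    assert ini_key_to_env_var('rc_cache.repo_object.backend') == 'RC_CACHE_REPO_OBJECT_BACKEND'
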
69 69 use = egg:rhodecode-enterprise-ce
70 70
71 71 ; enable proxy prefix middleware, defined above
72 72 #filter-with = proxy-prefix
73 73
74 74 ; encryption key used to encrypt social plugin tokens,
75 75 ; remote_urls with credentials etc, if not set it defaults to
76 76 ; `beaker.session.secret`
77 77 #rhodecode.encrypted_values.secret =
78 78
79 79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 81 #rhodecode.encrypted_values.strict = false
82 82
83 83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 84 ; fernet is safer, and we strongly recommend switching to it.
85 85 ; For backward compatibility, aes is used as the default.
86 86 #rhodecode.encrypted_values.algorithm = fernet
87 87
88 88 ; Return gzipped responses from RhodeCode (static files/application)
89 89 gzip_responses = false
90 90
91 91 ; Auto-generate javascript routes file on startup
92 92 generate_js_files = false
93 93
94 94 ; System global default language.
95 95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 96 lang = en
97 97
98 98 ; Perform a full repository scan and import on each server start.
99 99 ; Setting this to true could lead to a very long startup time.
100 100 startup.import_repos = true
101 101
102 102 ; URL at which the application is running. This is used for Bootstrapping
103 103 ; requests in context when no web request is available. Used in ishell, or
104 104 ; SSH calls. Set this for events to receive proper url for SSH calls.
105 105 app.base_url = http://rhodecode.local
106 106
107 107 ; Host at which the Service API is running.
108 108 app.service_api.host = http://rhodecode.local:10020
109 109
110 110 ; Secret for Service API authentication.
111 111 app.service_api.token =
112 112
113 113 ; Unique application ID. Should be a random unique string for security.
114 114 app_instance_uuid = rc-production
115 115
116 116 ; Cut off limit for large diffs (size in bytes). If the overall diff size of a
117 117 ; commit or pull request exceeds this limit, the diff will be displayed
118 118 ; partially. E.g 512000 == 512Kb
119 119 cut_off_limit_diff = 1024000
120 120
121 121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
122 122 ; file inside a diff which exceeds this limit will be displayed partially.
123 123 ; E.g 128000 == 128Kb
124 124 cut_off_limit_file = 256000
125 125
126 126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
127 127 vcs_full_cache = false
128 128
129 129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
130 130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
131 131 force_https = false
132 132
133 133 ; use Strict-Transport-Security headers
134 134 use_htsts = false
135 135
136 136 ; Set to true if your repos are exposed using the dumb protocol
137 137 git_update_server_info = false
138 138
139 139 ; RSS/ATOM feed options
140 140 rss_cut_off_limit = 256000
141 141 rss_items_per_page = 10
142 142 rss_include_diff = false
143 143
144 144 ; gist URL alias, used to create nicer urls for gist. This should be an
145 145 ; url that does rewrites to _admin/gists/{gistid}.
146 146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
147 147 ; RhodeCode url, i.e. http[s]://rhodecode.server/_admin/gists/{gistid}
148 148 gist_alias_url =
149 149
150 150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
151 151 ; used for access.
152 152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
153 153 ; came from the logged-in user who owns this authentication token.
154 154 ; Additionally, the @TOKEN syntax can be used to bind the view to a specific
155 155 ; authentication token. Such a view would only be accessible when used together
156 156 ; with this authentication token.
157 157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
158 158 ; The list should be "," separated and on a single line.
159 159 ; Most common views to enable:
160 160
161 161 # RepoCommitsView:repo_commit_download
162 162 # RepoCommitsView:repo_commit_patch
163 163 # RepoCommitsView:repo_commit_raw
164 164 # RepoCommitsView:repo_commit_raw@TOKEN
165 165 # RepoFilesView:repo_files_diff
166 166 # RepoFilesView:repo_archivefile
167 167 # RepoFilesView:repo_file_raw
168 168 # GistView:*
169 169 api_access_controllers_whitelist =
170 170
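
For illustration, fetching a whitelisted view with the query-string token described above; the host, repository path, and token value are hypothetical, and the `requests` library is assumed to be available:

    import requests

    # ?auth_token=... authenticates the request as the token's owner
    url = 'https://rhodecode.server/myrepo/archive/tip.zip'
    resp = requests.get(url, params={'auth_token': 'TOKEN_HASH'})
    resp.raise_for_status()
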
171 171 ; Default encoding used to convert from and to unicode
172 172 ; can be also a comma separated list of encoding in case of mixed encodings
173 173 default_encoding = UTF-8
174 174
175 175 ; instance-id prefix
176 176 ; a prefix key for this instance used for cache invalidation when running
177 177 ; multiple instances of RhodeCode, make sure it's globally unique for
178 178 ; all running RhodeCode instances. Leave empty if you don't use it
179 179 instance_id =
180 180
181 181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
182 182 ; of an authentication plugin even if it is disabled by its settings.
183 183 ; This could be useful if you are unable to log in to the system due to broken
184 184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
185 185 ; module to log in again and fix the settings.
186 186 ; Available builtin plugin IDs (hash is part of the ID):
187 187 ; egg:rhodecode-enterprise-ce#rhodecode
188 188 ; egg:rhodecode-enterprise-ce#pam
189 189 ; egg:rhodecode-enterprise-ce#ldap
190 190 ; egg:rhodecode-enterprise-ce#jasig_cas
191 191 ; egg:rhodecode-enterprise-ce#headers
192 192 ; egg:rhodecode-enterprise-ce#crowd
193 193
194 194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
195 195
196 196 ; Flag to control loading of legacy plugins in py:/path format
197 197 auth_plugin.import_legacy_plugins = true
198 198
199 199 ; alternative return HTTP header for failed authentication. Default HTTP
200 200 ; response is 401 HTTPUnauthorized. Currently HG clients have trouble
201 201 ; handling that, causing a series of failed authentication calls.
202 202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
203 203 ; This will be served instead of default 401 on bad authentication
204 204 auth_ret_code =
205 205
206 206 ; use special detection method when serving auth_ret_code, instead of serving
207 207 ; ret_code directly, use 401 initially (which triggers a credentials prompt)
208 208 ; and then serve auth_ret_code to clients
209 209 auth_ret_code_detection = false
210 210
211 211 ; locking return code. When repository is locked return this HTTP code. 2XX
212 212 ; codes don't break the transactions while 4XX codes do
213 213 lock_ret_code = 423
214 214
215 215 ; Filesystem location where repositories should be stored
216 216 repo_store.path = /var/opt/rhodecode_repo_store
217 217
218 218 ; allows setting up custom hooks in the settings page
219 219 allow_custom_hooks_settings = true
220 220
221 221 ; Generated license token required for EE edition license.
222 222 ; New generated token value can be found in Admin > settings > license page.
223 223 license_token = abra-cada-bra1-rce3
224 224
225 225 ; This flag hides sensitive information on the license page, such as the token and license data
226 226 license.hide_license_info = false
227 227
228 228 ; supervisor connection uri, for managing supervisor and logs.
229 229 supervisor.uri =
230 230
231 231 ; supervisord group name/id we only want this RC instance to handle
232 232 supervisor.group_id = dev
233 233
234 234 ; Display extended labs settings
235 235 labs_settings_active = true
236 236
237 237 ; Custom exception store path, defaults to TMPDIR
238 238 ; This is used to store exceptions from RhodeCode in a shared directory
239 239 #exception_tracker.store_path =
240 240
241 241 ; Send email with exception details when it happens
242 242 #exception_tracker.send_email = false
243 243
244 244 ; Comma separated list of recipients for exception emails,
245 245 ; e.g admin@rhodecode.com,devops@rhodecode.com
246 246 ; Can be left empty, then emails will be sent to ALL super-admins
247 247 #exception_tracker.send_email_recipients =
248 248
249 249 ; optional prefix to add to the email subject
250 250 #exception_tracker.email_prefix = [RHODECODE ERROR]
251 251
252 252 ; File store configuration. This is used to store and serve uploaded files
253 253 file_store.enabled = true
254 254
255 255 ; Storage backend, available options are: local
256 256 file_store.backend = local
257 257
258 258 ; path to store the uploaded binaries and artifacts
259 259 file_store.storage_path = /var/opt/rhodecode_data/file_store
260 260
261 ; Uncomment and set this path to control settings for archive download cache.
261
262 ; Redis url used to acquire/check locks for archive generation
263 archive_cache.locking.url = redis://redis:6379/1
264
265 ; Storage backend, only 'filesystem' and 'objectstore' are available now
266 archive_cache.backend.type = filesystem
267
268 ; url for s3 compatible storage that allows uploading artifacts
269 ; e.g http://minio:9000
270 archive_cache.objectstore.url = http://s3-minio:9000
271
272 ; key for s3 auth
273 archive_cache.objectstore.key = key
274
275 ; secret for s3 auth
276 archive_cache.objectstore.secret = secret
277
278 ; number of sharded buckets to create to distribute archives across
279 ; default is 8 shards
280 archive_cache.objectstore.bucket_shards = 8
281
282 ; if true, this cache will retry fetches up to retry_attempts=N times, waiting retry_backoff seconds between tries
283 archive_cache.objectstore.retry = false
284
285 ; number of seconds to wait before the next retry
286 archive_cache.objectstore.retry_backoff = 1
287
288 ; how many times to retry a fetch from this backend
289 archive_cache.objectstore.retry_attempts = 10
290
291 ; Default is $cache_dir/archive_cache if not set
262 292 ; Generated repo archives will be cached at this location
263 293 ; and served from the cache during subsequent requests for the same archive of
264 294 ; the repository. This path is important to be shared across filesystems and with
265 295 ; RhodeCode and vcsserver
266
267 ; Default is $cache_dir/archive_cache if not set
268 archive_cache.store_dir = /var/opt/rhodecode_data/tarballcache
296 archive_cache.filesystem.store_dir = %(here)s/rc-tests/archive_cache
269 297
270 298 ; The limit in GB sets how much data we cache before recycling the last used entries, defaults to 10 gb
271 archive_cache.cache_size_gb = 10
299 archive_cache.filesystem.cache_size_gb = 2
300
301 ; Eviction policy used to clear out after cache_size_gb limit is reached
302 archive_cache.filesystem.eviction_policy = least-recently-stored
272 303
273 304 ; By default the cache uses a sharding technique; this specifies how many shards there are
274 archive_cache.cache_shards = 10
305 ; default is 8 shards
306 archive_cache.filesystem.cache_shards = 8
307
308 ; if true, this cache will retry fetches up to retry_attempts=N times, waiting retry_backoff seconds between tries
309 archive_cache.filesystem.retry = false
310
311 ; number of seconds to wait before the next retry
312 archive_cache.filesystem.retry_backoff = 1
313
314 ; how many times to retry a fetch from this backend
315 archive_cache.filesystem.retry_attempts = 10
316
275 317
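
A sketch of how the sharding and retry knobs above are typically applied; the hash function and retry loop are illustrative assumptions, not the exact backend implementation:

    import hashlib
    import time

    def pick_shard(archive_key: str, shards: int = 8) -> int:
        # a stable hash of the key selects one of N shard buckets/directories,
        # spreading archives evenly across archive_cache.*.cache_shards
        digest = hashlib.sha256(archive_key.encode('utf-8')).hexdigest()
        return int(digest, 16) % shards

    def fetch_with_retry(fetch, attempts: int = 10, backoff: float = 1.0):
        # retry_attempts / retry_backoff semantics: try up to N times,
        # sleeping `backoff` seconds between tries, re-raising the last failure
        for i in range(attempts):
            try:
                return fetch()
            except OSError:
                if i == attempts - 1:
                    raise
                time.sleep(backoff)
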
276 318 ; #############
277 319 ; CELERY CONFIG
278 320 ; #############
279 321
280 322 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
281 323
282 324 use_celery = false
283 325
284 326 ; path to store schedule database
285 327 #celerybeat-schedule.path =
286 328
287 329 ; connection url to the message broker (default redis)
288 330 celery.broker_url = redis://redis:6379/8
289 331
290 332 ; results backend to get results for (default redis)
291 333 celery.result_backend = redis://redis:6379/8
292 334
293 335 ; rabbitmq example
294 336 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
295 337
296 338 ; maximum tasks to execute before worker restart
297 339 celery.max_tasks_per_child = 20
298 340
299 341 ; tasks will never be sent to the queue, but executed locally instead.
300 342 celery.task_always_eager = true
301 343 celery.task_store_eager_result = true
302 344
303 345 ; #############
304 346 ; DOGPILE CACHE
305 347 ; #############
306 348
307 349 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
308 350 ; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
309 351 cache_dir = /var/opt/rhodecode_data
310 352
311 353 ; *********************************************
312 354 ; `sql_cache_short` cache for heavy SQL queries
313 355 ; Only supported backend is `memory_lru`
314 356 ; *********************************************
315 357 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
316 358 rc_cache.sql_cache_short.expiration_time = 0
317 359
318 360
319 361 ; *****************************************************
320 362 ; `cache_repo_longterm` cache for repo object instances
321 363 ; Only supported backend is `memory_lru`
322 364 ; *****************************************************
323 365 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
324 366 ; by default we use 30 Days, cache is still invalidated on push
325 367 rc_cache.cache_repo_longterm.expiration_time = 2592000
326 368 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
327 369 rc_cache.cache_repo_longterm.max_size = 10000
328 370
329 371
330 372 ; *********************************************
331 373 ; `cache_general` cache for general purpose use
332 374 ; for simplicity use rc.file_namespace backend,
333 375 ; for performance and scale use rc.redis
334 376 ; *********************************************
335 377 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
336 378 rc_cache.cache_general.expiration_time = 43200
337 379 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
338 rc_cache.cache_general.arguments.filename = %(here)s/cache-backend/cache_general_db
380 rc_cache.cache_general.arguments.filename = %(here)s/rc-tests/cache-backend/cache_general_db
339 381
340 382 ; alternative `cache_general` redis backend with distributed lock
341 383 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
342 384 #rc_cache.cache_general.expiration_time = 300
343 385
344 386 ; redis_expiration_time needs to be greater than expiration_time
345 387 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
346 388
347 389 #rc_cache.cache_general.arguments.host = localhost
348 390 #rc_cache.cache_general.arguments.port = 6379
349 391 #rc_cache.cache_general.arguments.db = 0
350 392 #rc_cache.cache_general.arguments.socket_timeout = 30
351 393 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
352 394 #rc_cache.cache_general.arguments.distributed_lock = true
353 395
354 396 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
355 397 #rc_cache.cache_general.arguments.lock_auto_renewal = true
356 398
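
For orientation, a plain dogpile.cache region mirroring the expiration setting above; the rc.* backends in this file are RhodeCode-specific wrappers, so a stock backend name is used here:

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.memory',  # stock backend, for illustration only
        expiration_time=43200,   # mirrors rc_cache.cache_general.expiration_time
    )

    @region.cache_on_arguments()
    def expensive_lookup(key):
        # recomputed only after the expiration window passes
        return key.upper()
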
357 399 ; *************************************************
358 400 ; `cache_perms` cache for permission tree, auth TTL
359 401 ; for simplicity use rc.file_namespace backend,
360 402 ; for performance and scale use rc.redis
361 403 ; *************************************************
362 404 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
363 405 rc_cache.cache_perms.expiration_time = 0
364 406 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
365 rc_cache.cache_perms.arguments.filename = %(here)s/cache-backend/cache_perms_db
407 rc_cache.cache_perms.arguments.filename = %(here)s/rc-tests/cache-backend/cache_perms_db
366 408
367 409 ; alternative `cache_perms` redis backend with distributed lock
368 410 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
369 411 #rc_cache.cache_perms.expiration_time = 300
370 412
371 413 ; redis_expiration_time needs to be greater than expiration_time
372 414 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
373 415
374 416 #rc_cache.cache_perms.arguments.host = localhost
375 417 #rc_cache.cache_perms.arguments.port = 6379
376 418 #rc_cache.cache_perms.arguments.db = 0
377 419 #rc_cache.cache_perms.arguments.socket_timeout = 30
378 420 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
379 421 #rc_cache.cache_perms.arguments.distributed_lock = true
380 422
381 423 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
382 424 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
383 425
384 426 ; ***************************************************
385 427 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
386 428 ; for simplicity use rc.file_namespace backend,
387 429 ; for performance and scale use rc.redis
388 430 ; ***************************************************
389 431 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
390 432 rc_cache.cache_repo.expiration_time = 2592000
391 433 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
392 rc_cache.cache_repo.arguments.filename = %(here)s/cache-backend/cache_repo_db
434 rc_cache.cache_repo.arguments.filename = %(here)s/rc-tests/cache-backend/cache_repo_db
393 435
394 436 ; alternative `cache_repo` redis backend with distributed lock
395 437 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
396 438 #rc_cache.cache_repo.expiration_time = 2592000
397 439
398 440 ; redis_expiration_time needs to be greater than expiration_time
399 441 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
400 442
401 443 #rc_cache.cache_repo.arguments.host = localhost
402 444 #rc_cache.cache_repo.arguments.port = 6379
403 445 #rc_cache.cache_repo.arguments.db = 1
404 446 #rc_cache.cache_repo.arguments.socket_timeout = 30
405 447 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
406 448 #rc_cache.cache_repo.arguments.distributed_lock = true
407 449
408 450 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
409 451 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
410 452
411 453 ; ##############
412 454 ; BEAKER SESSION
413 455 ; ##############
414 456
415 457 ; beaker.session.type is the type of storage used for the logged-in users sessions. Currently allowed
416 458 ; types are file, ext:redis, ext:database, ext:memcached
417 459 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
418 460 beaker.session.type = file
419 461 beaker.session.data_dir = %(here)s/rc-tests/data/sessions
420 462
421 463 ; Redis based sessions
422 464 #beaker.session.type = ext:redis
423 465 #beaker.session.url = redis://redis:6379/2
424 466
425 467 ; DB based session, fast, and allows easy management of logged-in users
426 468 #beaker.session.type = ext:database
427 469 #beaker.session.table_name = db_session
428 470 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
429 471 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
430 472 #beaker.session.sa.pool_recycle = 3600
431 473 #beaker.session.sa.echo = false
432 474
433 475 beaker.session.key = rhodecode
434 476 beaker.session.secret = test-rc-uytcxaz
435 beaker.session.lock_dir = %(here)s/data/sessions/lock
477 beaker.session.lock_dir = %(here)s/rc-tests/data/sessions/lock
436 478
437 479 ; Secure encrypted cookie. Requires AES and AES python libraries
438 480 ; you must disable beaker.session.secret to use this
439 481 #beaker.session.encrypt_key = key_for_encryption
440 482 #beaker.session.validate_key = validation_key
441 483
442 484 ; Sets session as invalid (also logging out the user) if it has not been
443 485 ; accessed for the given amount of time, in seconds
444 486 beaker.session.timeout = 2592000
445 487 beaker.session.httponly = true
446 488
447 489 ; Path to use for the cookie. Set to prefix if you use prefix middleware
448 490 #beaker.session.cookie_path = /custom_prefix
449 491
450 492 ; Set https secure cookie
451 493 beaker.session.secure = false
452 494
453 495 ; default cookie expiration time in seconds, set to `true` to expire
454 496 ; at browser close
455 497 #beaker.session.cookie_expires = 3600
456 498
457 499 ; #############################
458 500 ; SEARCH INDEXING CONFIGURATION
459 501 ; #############################
460 502
461 503 ; Full text search indexer is available in rhodecode-tools under
462 504 ; `rhodecode-tools index` command
463 505
464 506 ; WHOOSH Backend, doesn't require additional services to run
465 507 ; it works well with a few dozen repos
466 508 search.module = rhodecode.lib.index.whoosh
467 search.location = %(here)s/data/index
509 search.location = %(here)s/rc-tests/data/index
468 510
469 511 ; ####################
470 512 ; CHANNELSTREAM CONFIG
471 513 ; ####################
472 514
473 515 ; channelstream enables persistent connections and live notifications
474 516 ; in the system. It's also used by the chat system
475 517
476 518 channelstream.enabled = false
477 519
478 520 ; server address for channelstream server on the backend
479 521 channelstream.server = channelstream:9800
480 522
481 523 ; location of the channelstream server from outside world
482 524 ; use ws:// for http or wss:// for https. This address needs to be handled
483 525 ; by external HTTP server such as Nginx or Apache
484 526 ; see Nginx/Apache configuration examples in our docs
485 527 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
486 528 channelstream.secret = ENV_GENERATED
487 channelstream.history.location = %(here)s/channelstream_history
529 channelstream.history.location = %(here)s/rc-tests/channelstream_history
488 530
489 531 ; Internal application path that Javascript uses to connect into.
490 532 ; If you use proxy-prefix the prefix should be added before /_channelstream
491 533 channelstream.proxy_path = /_channelstream
492 534
493 535
494 536 ; ##############################
495 537 ; MAIN RHODECODE DATABASE CONFIG
496 538 ; ##############################
497 539
498 540 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
499 541 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
500 542 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
501 543 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
502 544 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
503 545
504 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
546 sqlalchemy.db1.url = sqlite:///%(here)s/rc-tests/rhodecode_test.db?timeout=30
505 547
506 548 ; see sqlalchemy docs for other advanced settings
507 549 ; print the sql statements to output
508 550 sqlalchemy.db1.echo = false
509 551
510 552 ; recycle the connections after this amount of seconds
511 553 sqlalchemy.db1.pool_recycle = 3600
512 554
513 555 ; the number of connections to keep open inside the connection pool.
514 556 ; 0 indicates no limit
515 557 ; the general calculus with gevent is:
516 558 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
517 559 ; then increase pool size + max overflow so that they add up to 500.
518 560 #sqlalchemy.db1.pool_size = 5
519 561
520 562 ; The number of connections to allow in connection pool "overflow", that is
521 563 ; connections that can be opened above and beyond the pool_size setting,
522 564 ; which defaults to five.
523 565 #sqlalchemy.db1.max_overflow = 10
524 566
525 567 ; Connection check ping, used to detect broken database connections
526 568 ; could be enabled to better handle 'MySQL has gone away' errors
527 569 #sqlalchemy.db1.ping_connection = true
528 570
529 571 ; ##########
530 572 ; VCS CONFIG
531 573 ; ##########
532 574 vcs.server.enable = true
533 575 vcs.server = vcsserver:10010
534 576
535 577 ; Web server connectivity protocol, responsible for web based VCS operations
536 578 ; Available protocols are:
537 579 ; `http` - use http-rpc backend (default)
538 580 vcs.server.protocol = http
539 581
540 582 ; Push/Pull operations protocol, available options are:
541 583 ; `http` - use http-rpc backend (default)
542 584 vcs.scm_app_implementation = http
543 585
544 586 ; Push/Pull operations hooks protocol, available options are:
545 587 ; `http` - use http-rpc backend (default)
546 588 ; `celery` - use celery based hooks
547 589 vcs.hooks.protocol = http
548 590
549 591 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
550 592 ; accessible via the network.
551 593 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
552 594 vcs.hooks.host = *
553 595
554 596 ; Start VCSServer with this instance as a subprocess, useful for development
555 597 vcs.start_server = false
556 598
557 599 ; List of enabled VCS backends, available options are:
558 600 ; `hg` - mercurial
559 601 ; `git` - git
560 602 ; `svn` - subversion
561 603 vcs.backends = hg, git, svn
562 604
563 605 ; Wait this number of seconds before killing connection to the vcsserver
564 606 vcs.connection_timeout = 3600
565 607
566 608 ; Cache flag to cache vcsserver remote calls locally
567 609 ; It uses cache_region `cache_repo`
568 610 vcs.methods.cache = false
569 611
570 612 ; ####################################################
571 613 ; Subversion proxy support (mod_dav_svn)
572 614 ; Maps RhodeCode repo groups into SVN paths for Apache
573 615 ; ####################################################
574 616
575 617 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
576 618 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
577 619 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
578 620 #vcs.svn.compatible_version = 1.8
579 621
580 622 ; Enable SVN proxy of requests over HTTP
581 623 vcs.svn.proxy.enabled = true
582 624
583 625 ; host to connect to running SVN subsystem
584 626 vcs.svn.proxy.host = http://svn:8090
585 627
586 628 ; Enable or disable the config file generation.
587 629 svn.proxy.generate_config = false
588 630
589 631 ; Generate config file with `SVNListParentPath` set to `On`.
590 632 svn.proxy.list_parent_path = true
591 633
592 634 ; Set location and file name of generated config file.
593 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
635 svn.proxy.config_file_path = %(here)s/rc-tests/mod_dav_svn.conf
594 636
595 637 ; alternative mod_dav config template. This needs to be a valid mako template
596 638 ; Example template can be found in the source code:
597 639 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
598 640 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
599 641
600 642 ; Used as a prefix to the `Location` block in the generated config file.
601 643 ; In most cases it should be set to `/`.
602 644 svn.proxy.location_root = /
603 645
604 646 ; Command to reload the mod dav svn configuration on change.
605 647 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
606 648 ; Make sure user who runs RhodeCode process is allowed to reload Apache
607 649 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
608 650
609 651 ; If the timeout expires before the reload command finishes, the command will
610 652 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
611 653 #svn.proxy.reload_timeout = 10
612 654
613 655 ; ####################
614 656 ; SSH Support Settings
615 657 ; ####################
616 658
617 659 ; Defines if a custom authorized_keys file should be created and written on
618 660 ; any change of user SSH keys. Setting this to false also disables the possibility
619 661 ; of adding SSH keys by users from the web interface. Super admins can still
620 662 ; manage SSH Keys.
621 663 ssh.generate_authorized_keyfile = true
622 664
623 665 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
624 666 # ssh.authorized_keys_ssh_opts =
625 667
626 668 ; Path to the authorized_keys file where the generated entries are placed.
627 669 ; It is possible to have multiple key files specified in `sshd_config` e.g.
628 670 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
629 ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode
671 ssh.authorized_keys_file_path = %(here)s/rc-tests/authorized_keys_rhodecode
630 672
631 673 ; Command to execute the SSH wrapper. The binary is available in the
632 674 ; RhodeCode installation directory.
633 675 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
634 676 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
635 677 ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
636 678
637 679 ; Allow shell when executing the ssh-wrapper command
638 680 ssh.wrapper_cmd_allow_shell = false
639 681
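
A hedged sketch of the kind of authorized_keys entry the generator described above produces; the --user flag is an assumption, while the command="..." prefix and the option string are standard sshd authorized_keys syntax:

    def authorized_keys_line(wrapper_cmd: str, username: str, pubkey: str) -> str:
        # force every SSH login through the wrapper, with shell features disabled
        opts = 'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding'
        return f'command="{wrapper_cmd} --user {username}",{opts} {pubkey}'
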
640 682 ; Enables logging, and detailed output sent back to the client during SSH
641 683 ; operations. Useful for debugging, shouldn't be used in production.
642 684 ssh.enable_debug_logging = false
643 685
644 686 ; Paths to binary executable, by default they are the names, but we can
645 687 ; override them if we want to use a custom one
646 688 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
647 689 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
648 690 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
649 691
650 692 ; Enables SSH key generator web interface. Disabling this still allows users
651 693 ; to add their own keys.
652 694 ssh.enable_ui_key_generator = true
653 695
654 696 ; Statsd client config, this is used to send metrics to statsd
655 697 ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
656 698 #statsd.enabled = false
657 699 #statsd.statsd_host = 0.0.0.0
658 700 #statsd.statsd_port = 8125
659 701 #statsd.statsd_prefix =
660 702 #statsd.statsd_ipv6 = false
661 703
662 704 ; configure logging automatically at server startup; set to false
663 705 ; to use the custom logging config below.
664 706 ; RC_LOGGING_FORMATTER
665 707 ; RC_LOGGING_LEVEL
666 708 ; env variables can control the settings for logging in case of autoconfigure
667 709
668 710 logging.autoconfigure = false
669 711
670 712 ; specify your own custom logging config file to configure logging
671 713 #logging.logging_conf_file = /path/to/custom_logging.ini
672 714
673 715 ; Dummy marker to add new entries after.
674 716 ; Add any custom entries below. Please don't remove this marker.
675 717 custom.conf = 1
676 718
677 719
678 720 ; #####################
679 721 ; LOGGING CONFIGURATION
680 722 ; #####################
681 723
682 724 [loggers]
683 725 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper, dogpile
684 726
685 727 [handlers]
686 728 keys = console, console_sql
687 729
688 730 [formatters]
689 731 keys = generic, json, color_formatter, color_formatter_sql
690 732
691 733 ; #######
692 734 ; LOGGERS
693 735 ; #######
694 736 [logger_root]
695 737 level = NOTSET
696 738 handlers = console
697 739
698 740 [logger_routes]
699 741 level = DEBUG
700 742 handlers =
701 743 qualname = routes.middleware
702 744 ## "level = DEBUG" logs the route matched and routing variables.
703 745 propagate = 1
704 746
705 747 [logger_sqlalchemy]
706 748 level = INFO
707 749 handlers = console_sql
708 750 qualname = sqlalchemy.engine
709 751 propagate = 0
710 752
711 753 [logger_beaker]
712 754 level = DEBUG
713 755 handlers =
714 756 qualname = beaker.container
715 757 propagate = 1
716 758
717 759 [logger_dogpile]
718 760 level = INFO
719 761 handlers = console
720 762 qualname = dogpile
721 763 propagate = 1
722 764
723 765 [logger_rhodecode]
724 766 level = DEBUG
725 767 handlers =
726 768 qualname = rhodecode
727 769 propagate = 1
728 770
729 771 [logger_ssh_wrapper]
730 772 level = DEBUG
731 773 handlers =
732 774 qualname = ssh_wrapper
733 775 propagate = 1
734 776
735 777 [logger_celery]
736 778 level = DEBUG
737 779 handlers =
738 780 qualname = celery
739 781
740 782
741 783 ; ########
742 784 ; HANDLERS
743 785 ; ########
744 786
745 787 [handler_console]
746 788 class = StreamHandler
747 789 args = (sys.stderr, )
748 790 level = DEBUG
749 791 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
750 792 ; This allows sending properly formatted logs to grafana loki or elasticsearch
751 793 formatter = generic
752 794
753 795 [handler_console_sql]
754 796 ; "level = DEBUG" logs SQL queries and results.
755 797 ; "level = INFO" logs SQL queries.
756 798 ; "level = WARN" logs neither. (Recommended for production systems.)
757 799 class = StreamHandler
758 800 args = (sys.stderr, )
759 801 level = WARN
760 802 ; To enable JSON-formatted logs, replace 'generic/color_formatter_sql' with 'json'.
761 803 ; This allows sending properly formatted logs to Grafana Loki or Elasticsearch.
762 804 formatter = generic
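The three levels documented for console_sql gate what sqlalchemy's engine logger emits. The same filtering can be previewed at runtime without editing the INI; a sketch, assuming an already configured process:

    import logging

    sql_log = logging.getLogger('sqlalchemy.engine')
    sql_log.setLevel(logging.DEBUG)    # SQL queries and result rows
    sql_log.setLevel(logging.INFO)     # SQL queries only
    sql_log.setLevel(logging.WARNING)  # neither; recommended for production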
763 805
764 806 ; ##########
765 807 ; FORMATTERS
766 808 ; ##########
767 809
768 810 [formatter_generic]
769 811 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
770 812 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
771 813 datefmt = %Y-%m-%d %H:%M:%S
772 814
773 815 [formatter_color_formatter]
774 816 class = rhodecode.lib.logging_formatter.ColorFormatter
775 817 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
776 818 datefmt = %Y-%m-%d %H:%M:%S
777 819
778 820 [formatter_color_formatter_sql]
779 821 class = rhodecode.lib.logging_formatter.ColorFormatterSql
780 822 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
781 823 datefmt = %Y-%m-%d %H:%M:%S
782 824
783 825 [formatter_json]
784 826 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
785 827 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
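The generic format string can be previewed with the stdlib Formatter; ExceptionAwareFormatter layers exception handling on top, so using plain logging.Formatter here is an assumption made purely for illustration:

    import logging

    formatter = logging.Formatter(
        fmt='%(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
    )
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    log = logging.getLogger('demo')
    log.addHandler(handler)
    log.error('sample line')
    # -> 2024-01-01 12:00:00.123 [4242] ERROR [demo] sample line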
@@ -1,198 +1,198 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import datetime
21 21 import os
22 22 import shutil
23 23 import tarfile
24 24 import zipfile
25 25 import io
26 26
27 27 import mock
28 28 import pytest
29 29
30 30 import rhodecode
31 from rhodecode.lib.rc_cache.archive_cache import get_archival_config
31 from rhodecode.lib.archive_cache import get_archival_config
32 32 from rhodecode.lib.str_utils import ascii_bytes
33 33 from rhodecode.lib.vcs.backends import base
34 34 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError, VCSError
35 35 from rhodecode.lib.vcs.nodes import FileNode
36 36 from rhodecode.tests.vcs.conftest import BackendTestMixin
37 37
38 38
39 39 @pytest.fixture()
40 40 def d_cache_config():
41 41 return get_archival_config(config=rhodecode.CONFIG)
42 42
43 43
44 44 @pytest.mark.usefixtures("vcs_repository_support")
45 45 class TestArchives(BackendTestMixin):
46 46
47 47 @classmethod
48 48 def _get_commits(cls):
49 49 start_date = datetime.datetime(2010, 1, 1, 20)
50 50 yield {
51 51 'message': 'Initial Commit',
52 52 'author': 'Joe Doe <joe.doe@example.com>',
53 53 'date': start_date + datetime.timedelta(hours=12),
54 54 'added': [
55 55 FileNode(b'executable_0o100755', b'mode_755', mode=0o100755),
56 56 FileNode(b'executable_0o100500', b'mode_500', mode=0o100500),
57 57 FileNode(b'not_executable', b'mode_644', mode=0o100644),
58 58 ],
59 59 }
60 60 for x in range(5):
61 61 yield {
62 62 'message': 'Commit %d' % x,
63 63 'author': 'Joe Doe <joe.doe@example.com>',
64 64 'date': start_date + datetime.timedelta(hours=12 * x),
65 65 'added': [
66 66 FileNode(b'%d/file_%d.txt' % (x, x), content=b'Foobar %d' % x),
67 67 ],
68 68 }
69 69
70 70 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
71 71 def test_archive_tar(self, compressor, tmpdir, tmp_path, d_cache_config):
72 72
73 73 archive_node = tmp_path / 'archive-node'
74 74 archive_node.touch()
75 75
76 76 archive_lnk = self.tip.archive_repo(
77 77 str(archive_node), kind=f't{compressor}', archive_dir_name='repo', cache_config=d_cache_config)
78 78
79 79 out_dir = tmpdir
80 80 out_file = tarfile.open(str(archive_lnk), f'r|{compressor}')
81 81 out_file.extractall(out_dir)
82 82 out_file.close()
83 83
84 84 for x in range(5):
85 85 node_path = '%d/file_%d.txt' % (x, x)
86 86 with open(os.path.join(out_dir, 'repo/' + node_path), 'rb') as f:
87 87 file_content = f.read()
88 88 assert file_content == self.tip.get_node(node_path).content
89 89
90 90 shutil.rmtree(out_dir)
91 91
92 92 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
93 93 def test_archive_tar_symlink(self, compressor):
94 94 pytest.skip('Not supported')
95 95
96 96 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
97 97 def test_archive_tar_file_modes(self, compressor, tmpdir, tmp_path, d_cache_config):
98 98 archive_node = tmp_path / 'archive-node'
99 99 archive_node.touch()
100 100
101 101 archive_lnk = self.tip.archive_repo(
102 102 str(archive_node), kind=f't{compressor}', archive_dir_name='repo', cache_config=d_cache_config)
103 103
104 104 out_dir = tmpdir
105 105 out_file = tarfile.open(str(archive_lnk), f'r|{compressor}')
106 106 out_file.extractall(out_dir)
107 107 out_file.close()
108 108
109 109 def dest(inp):
110 110 return os.path.join(out_dir, "repo/" + inp)
111 111
112 112 assert oct(os.stat(dest('not_executable')).st_mode) == '0o100644'
113 113
114 114 def test_archive_zip(self, tmp_path, d_cache_config):
115 115 archive_node = tmp_path / 'archive-node'
116 116 archive_node.touch()
117 117
118 118 archive_lnk = self.tip.archive_repo(str(archive_node), kind='zip',
119 119 archive_dir_name='repo', cache_config=d_cache_config)
120 120 zip_file = zipfile.ZipFile(str(archive_lnk))
121 121
122 122 for x in range(5):
123 123 node_path = '%d/file_%d.txt' % (x, x)
124 124 data = zip_file.read(f'repo/{node_path}')
125 125
126 126 decompressed = io.BytesIO()
127 127 decompressed.write(data)
128 128 assert decompressed.getvalue() == \
129 129 self.tip.get_node(node_path).content
130 130 decompressed.close()
131 131
132 132 def test_archive_zip_with_metadata(self, tmp_path, d_cache_config):
133 133 archive_node = tmp_path / 'archive-node'
134 134 archive_node.touch()
135 135
136 136 archive_lnk = self.tip.archive_repo(
137 137 str(archive_node), kind='zip',
138 138 archive_dir_name='repo', write_metadata=True, cache_config=d_cache_config)
139 139
140 140 zip_file = zipfile.ZipFile(str(archive_lnk))
141 141 metafile = zip_file.read('repo/.archival.txt')
142 142
143 143 raw_id = ascii_bytes(self.tip.raw_id)
144 144 assert b'commit_id:%b' % raw_id in metafile
145 145
146 146 for x in range(5):
147 147 node_path = '%d/file_%d.txt' % (x, x)
148 148 data = zip_file.read(f'repo/{node_path}')
149 149 decompressed = io.BytesIO()
150 150 decompressed.write(data)
151 151 assert decompressed.getvalue() == \
152 152 self.tip.get_node(node_path).content
153 153 decompressed.close()
154 154
155 155 def test_archive_wrong_kind(self, tmp_path, d_cache_config):
156 156 archive_node = tmp_path / 'archive-node'
157 157 archive_node.touch()
158 158
159 159 with pytest.raises(ImproperArchiveTypeError):
160 160 self.tip.archive_repo(str(archive_node), kind='wrong kind', cache_config=d_cache_config)
161 161
162 162
163 163 @pytest.fixture()
164 164 def base_commit():
165 165 """
166 166 Prepare a `base.BaseCommit` just enough for `_validate_archive_prefix`.
167 167 """
168 168 commit = base.BaseCommit()
169 169 commit.repository = mock.Mock()
170 170 commit.repository.name = 'fake_repo'
171 171 commit.short_id = 'fake_id'
172 172 return commit
173 173
174 174
175 175 def test_validate_archive_prefix_enforces_non_ascii_as_prefix(base_commit):
176 176 with pytest.raises(VCSError):
177 177 base_commit._validate_archive_prefix("Ünïcödë")
178 178
179 179
180 180 def test_validate_archive_prefix_empty_prefix(base_commit):
181 181 # TODO: johbo: Should raise a ValueError here.
182 182 with pytest.raises(VCSError):
183 183 base_commit._validate_archive_prefix('')
184 184
185 185
186 186 def test_validate_archive_prefix_with_leading_slash(base_commit):
187 187 # TODO: johbo: Should raise a ValueError here.
188 188 with pytest.raises(VCSError):
189 189 base_commit._validate_archive_prefix('/any')
190 190
191 191
192 192 def test_validate_archive_prefix_falls_back_to_repository_name(base_commit):
193 193 prefix = base_commit._validate_archive_prefix(None)
194 194 expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(
195 195 repo_name='fake_repo',
196 196 short_id='fake_id')
197 197 assert isinstance(prefix, str)
198 198 assert prefix == expected_prefix