@@ -0,0 +1,17 @@
# Copyright (C) 2015-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,348 @@
# Copyright (C) 2015-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import functools
import logging
import typing
import time
import zlib

from ...ext_json import json
from ..utils import StatsDB, NOT_GIVEN, ShardFileReader, EVICTION_POLICY, format_size
from ..lock import GenerationLock

log = logging.getLogger(__name__)


class BaseShard:
    storage_type: str = ''
    fs = None

    @classmethod
    def hash(cls, key):
        """Compute portable hash for `key`.

        :param key: key to hash
        :return: hash value
        """
        mask = 0xFFFFFFFF
        return zlib.adler32(key.encode('utf-8')) & mask  # noqa

    def _write_file(self, full_path, read_iterator, mode):
        raise NotImplementedError

    def _get_keyfile(self, key):
        raise NotImplementedError

    def random_filename(self):
        raise NotImplementedError

    def _store(self, key, value_reader, metadata, mode):
        (filename,  # hash-name
         full_path  # full-path/hash-name
         ) = self.random_filename()

        key_file, key_file_path = self._get_keyfile(key)

        # STORE METADATA
        _metadata = {
            "version": "v1",

            "key_file": key_file,  # this is the .key.json file storing meta
            "key_file_path": key_file_path,  # full path to the key_file
            "archive_key": key,  # original name we stored the archive under, e.g. my-archive.zip
            "archive_filename": filename,  # the actual filename we stored that file under
            "archive_full_path": full_path,

            "store_time": time.time(),
            "access_count": 0,
            "access_time": 0,

            "size": 0
        }
        if metadata:
            _metadata.update(metadata)

        read_iterator = iter(functools.partial(value_reader.read, 2**22), b'')
        size, sha256 = self._write_file(full_path, read_iterator, mode)
        _metadata['size'] = size
        _metadata['sha256'] = sha256

        # after the archive is finished, we create a key to record the presence of the binary file
        with self.fs.open(key_file_path, 'wb') as f:
            f.write(json.dumps(_metadata))

        return key, filename, size, _metadata

    def _fetch(self, key, retry, retry_attempts, retry_backoff):
        if retry is NOT_GIVEN:
            retry = False
        if retry_attempts is NOT_GIVEN:
            retry_attempts = 0

        if retry and retry_attempts > 0:
            for attempt in range(1, retry_attempts + 1):
                if key in self:
                    break
                # we didn't find the key, wait retry_backoff seconds, and re-check
                time.sleep(retry_backoff)

        if key not in self:
            log.exception(f'requested key={key} not found in {self} retry={retry}, attempts={retry_attempts}')
            raise KeyError(key)

        key_file, key_file_path = self._get_keyfile(key)
        with self.fs.open(key_file_path, 'rb') as f:
            metadata = json.loads(f.read())

        archive_path = metadata['archive_full_path']

        try:
            return ShardFileReader(self.fs.open(archive_path, 'rb')), metadata
        finally:
            # update usage stats, count and accessed
            metadata["access_count"] = metadata.get("access_count", 0) + 1
            metadata["access_time"] = time.time()
            log.debug('Updated %s with access snapshot, access_count=%s access_time=%s',
                      key_file, metadata['access_count'], metadata['access_time'])
            with self.fs.open(key_file_path, 'wb') as f:
                f.write(json.dumps(metadata))

    def _remove(self, key):
        if key not in self:
            log.exception(f'requested key={key} not found in {self}')
            raise KeyError(key)

        key_file, key_file_path = self._get_keyfile(key)
        with self.fs.open(key_file_path, 'rb') as f:
            metadata = json.loads(f.read())

        archive_path = metadata['archive_full_path']
        self.fs.rm(archive_path)
        self.fs.rm(key_file_path)
        return 1

    @property
    def storage_medium(self):
        return getattr(self, self.storage_type)

    @property
    def key_suffix(self):
        return 'key.json'

    def __contains__(self, key):
        """Return `True` if an item matching `key` is found in the cache.

        :param key: key of the item
        :return: True if a matching item exists
        """
        key_file, key_file_path = self._get_keyfile(key)
        return self.fs.exists(key_file_path)


class BaseCache:
    _locking_url: str = ''
    _storage_path: str = ''
    _config = {}
    retry = False
    retry_attempts = 0
    retry_backoff = 1
    _shards = tuple()

    def __contains__(self, key):
        """Return `True` if an item matching `key` is found in the cache.

        :param key: key of the item
        :return: True if a matching item exists
        """
        return self.has_key(key)

    def __repr__(self):
        return f'<{self.__class__.__name__}(storage={self._storage_path})>'

    @classmethod
    def gb_to_bytes(cls, gb):
        return gb * (1024 ** 3)

    @property
    def storage_path(self):
        return self._storage_path

    @classmethod
    def get_stats_db(cls):
        return StatsDB()

    def get_conf(self, key, pop=False):
        if key not in self._config:
            raise ValueError(f"No configuration key '{key}', please make sure it exists in archive_cache config")
        val = self._config[key]
        if pop:
            del self._config[key]
        return val

    def _get_shard(self, key):
        raise NotImplementedError

    def _get_size(self, shard, archive_path):
        raise NotImplementedError

    def store(self, key, value_reader, metadata=None):
        shard = self._get_shard(key)
        return shard.store(key, value_reader, metadata)

    def fetch(self, key, retry=NOT_GIVEN, retry_attempts=NOT_GIVEN) -> tuple[typing.BinaryIO, dict]:
        """
        Return a file handle corresponding to `key` from the specific shard cache.
        """
        if retry is NOT_GIVEN:
            retry = self.retry
        if retry_attempts is NOT_GIVEN:
            retry_attempts = self.retry_attempts
        retry_backoff = self.retry_backoff

        shard = self._get_shard(key)
        return shard.fetch(key, retry=retry, retry_attempts=retry_attempts, retry_backoff=retry_backoff)

    def remove(self, key):
        shard = self._get_shard(key)
        return shard.remove(key)

    def has_key(self, archive_key):
        """Return `True` if an item matching `archive_key` is found in the cache.

        :param archive_key: key for the item; this is the unique archive name we store data under, e.g. my-archive-svn.zip
        :return: True if the key is found
        """
        shard = self._get_shard(archive_key)
        return archive_key in shard

    def iter_keys(self):
        for shard in self._shards:
            if shard.fs.exists(shard.storage_medium):
                for path, _dirs, _files in shard.fs.walk(shard.storage_medium):
                    for key_file_path in _files:
                        if key_file_path.endswith(shard.key_suffix):
                            yield shard, key_file_path

    def get_lock(self, lock_key):
        return GenerationLock(lock_key, self._locking_url)

    def evict(self, policy=None, size_limit=None) -> int:
        """
        Remove old items based on the given conditions.

        Explanation of this algorithm:
        Iterate over each shard, and for each shard iterate over the .key files,
        reading the metadata stored in them. This gives us a full list of keys,
        cached archives, their sizes, access data, creation times, and access counts.

        Store that into an in-memory DB so we can run different sorting strategies easily.
        Summing the sizes is a single SQL sum query.

        Then we run a sorting strategy based on the eviction policy.
        We iterate over the sorted keys, removing entries until the projected
        total size fits within the overall limit.
        """

        policy = policy or self._eviction_policy
        size_limit = size_limit or self._cache_size_limit

        select_policy = EVICTION_POLICY[policy]['evict']

        log.debug('Running eviction policy \'%s\', and checking for size limit: %s',
                  policy, format_size(size_limit))

        if select_policy is None:
            return 0

        db = self.get_stats_db()

        data = []
        cnt = 1

        for shard, key_file in self.iter_keys():
            with shard.fs.open(os.path.join(shard.storage_medium, key_file), 'rb') as f:
                metadata = json.loads(f.read())

            key_file_path = os.path.join(shard.storage_medium, key_file)

            archive_key = metadata['archive_key']
            archive_path = metadata['archive_full_path']

            size = metadata.get('size')
            if not size:
                # in case we don't have the size, re-calculate it...
                size = self._get_size(shard, archive_path)

            data.append([
                cnt,
                key_file,
                key_file_path,
                archive_key,
                archive_path,
                metadata.get('store_time', 0),
                metadata.get('access_time', 0),
                metadata.get('access_count', 0),
                size,
            ])
            cnt += 1

        # Insert bulk data using executemany
        db.bulk_insert(data)

        total_size = db.get_total_size()
        log.debug('Analyzed %s keys, occupying: %s, running eviction to match %s',
                  len(data), format_size(total_size), format_size(size_limit))

        removed_items = 0
        removed_size = 0
        for key_file, archive_key, size in db.get_sorted_keys(select_policy):
            # simulate removal impact BEFORE removal
            total_size -= size

            if total_size <= size_limit:
                # we obtained what we wanted...
                break

            self.remove(archive_key)
            removed_items += 1
            removed_size += size

        log.debug('Removed %s cache archives, and reduced size by: %s',
                  removed_items, format_size(removed_size))
        return removed_items

    def get_statistics(self):
        total_files = 0
        total_size = 0
        meta = {}

        for shard, key_file in self.iter_keys():
            json_key = f"{shard.storage_medium}/{key_file}"
            with shard.fs.open(json_key, 'rb') as f:
                total_files += 1
                metadata = json.loads(f.read())
                total_size += metadata['size']

        return total_files, total_size, meta
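Note: the evict() docstring above describes the whole flow; the core loop boils down to "sort by policy, subtract sizes, stop at the limit". A minimal standalone sketch of just that loop (plain Python, no RhodeCode imports; the sample keys, sizes, and limit are made up for illustration):

    import time

    # hypothetical entries: (archive_key, size_bytes, store_time)
    entries = [
        ('my-archive-a.zip', 4000, time.time() - 300),
        ('my-archive-b.zip', 6000, time.time() - 200),
        ('my-archive-c.zip', 2000, time.time() - 100),
    ]
    size_limit = 5000
    total_size = sum(size for _, size, _ in entries)

    removed = []
    # 'least-recently-stored' policy: oldest store_time is evicted first
    for key, size, _store_time in sorted(entries, key=lambda e: e[2]):
        # simulate removal impact BEFORE removal, mirroring evict() above
        total_size -= size
        if total_size <= size_limit:
            break
        removed.append(key)

    print(removed)  # ['my-archive-a.zip']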
@@ -0,0 +1,150 @@
# Copyright (C) 2015-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import codecs
import hashlib
import logging
import os

import fsspec

from .base import BaseCache, BaseShard
from ..utils import ShardFileReader, NOT_GIVEN
from ...type_utils import str2bool

log = logging.getLogger(__name__)


class S3Shard(BaseShard):

    def __init__(self, index, bucket, **settings):
        self._index = index
        self._bucket = bucket
        self.storage_type = 'bucket'

        endpoint_url = settings.pop('archive_cache.objectstore.url')
        key = settings.pop('archive_cache.objectstore.key')
        secret = settings.pop('archive_cache.objectstore.secret')

        self.fs = fsspec.filesystem('s3', anon=False, endpoint_url=endpoint_url, key=key, secret=secret)

    @property
    def bucket(self):
        """Cache bucket."""
        return self._bucket

    def _get_keyfile(self, archive_key) -> tuple[str, str]:
        key_file = f'{archive_key}-{self.key_suffix}'
        return key_file, os.path.join(self.bucket, key_file)

    def _get_writer(self, path, mode):
        return self.fs.open(path, 'wb')

    def _write_file(self, full_path, iterator, mode):
        # ensure bucket exists
        destination = self.bucket
        if not self.fs.exists(destination):
            self.fs.mkdir(destination, s3_additional_kwargs={})

        writer = self._get_writer(full_path, mode)

        digest = hashlib.sha256()
        with writer:
            size = 0
            for chunk in iterator:
                size += len(chunk)
                digest.update(chunk)
                writer.write(chunk)

        sha256 = digest.hexdigest()
        log.debug('written new archive cache under %s, sha256: %s', full_path, sha256)
        return size, sha256

    def store(self, key, value_reader, metadata: dict | None = None):
        return self._store(key, value_reader, metadata, mode='wb')

    def fetch(self, key, retry=NOT_GIVEN, retry_attempts=NOT_GIVEN, retry_backoff=1) -> tuple[ShardFileReader, dict]:
        return self._fetch(key, retry, retry_attempts, retry_backoff)

    def remove(self, key):
        return self._remove(key)

    def random_filename(self):
        """Return filename and full-path tuple for file storage.

        Filename will be a randomly generated 28 character hexadecimal string
        with ".archive_cache" suffixed. Two levels of sub-directories will be used to
        reduce the size of directories. On older filesystems, lookups in
        directories with many files may be slow.
        """

        hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8')

        archive_name = hex_name[4:] + '.archive_cache'
        filename = f"{hex_name[:2]}-{hex_name[2:4]}-{archive_name}"

        full_path = os.path.join(self.bucket, filename)
        return archive_name, full_path

    def __repr__(self):
        return f'{self.__class__.__name__}(index={self._index}, bucket={self.bucket})'


class ObjectStoreCache(BaseCache):

    def __init__(self, locking_url, **settings):
        """
        Initialize objectstore cache instance.

        :param str locking_url: redis url for a lock
        :param settings: settings dict
        """
        self._locking_url = locking_url
        self._config = settings

        objectstore_url = self.get_conf('archive_cache.objectstore.url')
        self._storage_path = objectstore_url

        self._count = int(self.get_conf('archive_cache.objectstore.bucket_shards', pop=True))

        self._eviction_policy = self.get_conf('archive_cache.objectstore.eviction_policy', pop=True)
        self._cache_size_limit = self.gb_to_bytes(int(self.get_conf('archive_cache.objectstore.cache_size_gb')))

        self.retry = str2bool(self.get_conf('archive_cache.objectstore.retry', pop=True))
        self.retry_attempts = int(self.get_conf('archive_cache.objectstore.retry_attempts', pop=True))
        self.retry_backoff = int(self.get_conf('archive_cache.objectstore.retry_backoff', pop=True))

        log.debug('Initializing archival cache instance under %s', objectstore_url)
        self._shards = tuple(
            S3Shard(
                index=num,
                bucket='rhodecode-archivecache-%03d' % num,
                **settings,
            )
            for num in range(self._count)
        )
        self._hash = self._shards[0].hash

    def _get_shard(self, key) -> S3Shard:
        index = self._hash(key) % self._count
        shard = self._shards[index]
        return shard

    def _get_size(self, shard, archive_path):
        return shard.fs.info(archive_path)['size']
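Note: shard routing in ObjectStoreCache._get_shard() is simply BaseShard.hash(key) % bucket_shards. A tiny standalone sketch of that mapping (the keys are made up; the bucket naming follows the 'rhodecode-archivecache-%03d' pattern used above):

    import zlib

    def shard_index(key: str, shard_count: int = 8) -> int:
        # same portable hash as BaseShard.hash: adler32 masked to 32 bits
        return (zlib.adler32(key.encode('utf-8')) & 0xFFFFFFFF) % shard_count

    for key in ('my-archive-abc-1.zip', 'my-archive-abc-2.zip'):
        print(key, '->', 'rhodecode-archivecache-%03d' % shard_index(key))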
@@ -0,0 +1,105 @@
# Copyright (C) 2016-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import time
import pytest
import rhodecode
import os
import shutil
from tempfile import mkdtemp

from rhodecode.lib import archive_cache


def file_reader(temp_store):
    with open(temp_store, 'w') as f:
        for cnt in range(10000):
            f.write(str(cnt))
    return open(temp_store, 'rb')


@pytest.fixture()
def d_cache_instance(ini_settings):
    config = ini_settings
    d_cache = archive_cache.get_archival_cache_store(config=config, always_init=True)
    return d_cache


@pytest.mark.usefixtures('app')
class TestArchiveCaches(object):

    def test_archivecache_empty_stats(self, d_cache_instance):
        d_cache = d_cache_instance
        shutil.rmtree(d_cache._directory)

        stats = d_cache.get_statistics()
        assert (0, 0, {}) == stats

    def test_archivecache_store_keys(self, d_cache_instance, tmp_path):
        d_cache = d_cache_instance
        shutil.rmtree(d_cache._directory)

        for n in range(100):

            archive_name = f'my-archive-abc-{n}.zip'
            temp_archive_path = os.path.join(tmp_path, archive_name)
            d_cache.store(archive_name, file_reader(temp_archive_path), {'foo': 'bar'})
            reader, meta = d_cache.fetch(archive_name)
            content = reader.read()
            assert content == open(temp_archive_path, 'rb').read()

        stats = d_cache.get_statistics()
        assert (100, 3889000, {}) == stats

    def test_archivecache_remove_keys(self, d_cache_instance, tmp_path):
        d_cache = d_cache_instance
        shutil.rmtree(d_cache._directory)

        n = 1
        archive_name = f'my-archive-abc-{n}.zip'
        temp_archive_path = os.path.join(tmp_path, archive_name)

        d_cache.store(archive_name, file_reader(temp_archive_path), {'foo': 'bar'})
        stats = d_cache.get_statistics()
        assert (1, 38890, {}) == stats

        assert 1 == d_cache.remove(archive_name)

        stats = d_cache.get_statistics()
        assert (0, 0, {}) == stats

    def test_archivecache_evict_keys(self, d_cache_instance, tmp_path):
        d_cache = d_cache_instance
        shutil.rmtree(d_cache._directory)
        tries = 500
        for n in range(tries):

            archive_name = f'my-archive-abc-{n}.zip'
            temp_archive_path = os.path.join(tmp_path, archive_name)
            d_cache.store(archive_name, file_reader(temp_archive_path), {'foo': 'bar'})

        stats = d_cache.get_statistics()
        assert (tries, 19445000, {}) == stats
        evict_to = 0.005  # around 5MB
        evicted_items = d_cache.evict(size_limit=d_cache.gb_to_bytes(evict_to))
        evicted = 361
        assert evicted == evicted_items

        stats = d_cache.get_statistics()
        assert (tries - evicted, 5405710, {}) == stats
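Note: the size assertions in the tests above follow directly from file_reader(), which writes the decimal digits of 0..9999 into each archive; a quick check of that arithmetic:

    # bytes per stored file: 10*1 + 90*2 + 900*3 + 9000*4 = 38890
    size = sum(len(str(n)) for n in range(10000))
    print(size)        # 38890 -> matches test_archivecache_remove_keys
    print(size * 100)  # 3889000 -> matches test_archivecache_store_keys
    print(size * 500)  # 19445000 -> matches test_archivecache_evict_keys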
@@ -1,813 +1,845 b'' | |||||
1 |
|
1 | |||
2 | ; ######################################### |
|
2 | ; ######################################### | |
3 | ; RHODECODE COMMUNITY EDITION CONFIGURATION |
|
3 | ; RHODECODE COMMUNITY EDITION CONFIGURATION | |
4 | ; ######################################### |
|
4 | ; ######################################### | |
5 |
|
5 | |||
6 | [DEFAULT] |
|
6 | [DEFAULT] | |
7 | ; Debug flag sets all loggers to debug, and enables request tracking |
|
7 | ; Debug flag sets all loggers to debug, and enables request tracking | |
8 | debug = true |
|
8 | debug = true | |
9 |
|
9 | |||
10 | ; ######################################################################## |
|
10 | ; ######################################################################## | |
11 | ; EMAIL CONFIGURATION |
|
11 | ; EMAIL CONFIGURATION | |
12 | ; These settings will be used by the RhodeCode mailing system |
|
12 | ; These settings will be used by the RhodeCode mailing system | |
13 | ; ######################################################################## |
|
13 | ; ######################################################################## | |
14 |
|
14 | |||
15 | ; prefix all emails subjects with given prefix, helps filtering out emails |
|
15 | ; prefix all emails subjects with given prefix, helps filtering out emails | |
16 | #email_prefix = [RhodeCode] |
|
16 | #email_prefix = [RhodeCode] | |
17 |
|
17 | |||
18 | ; email FROM address all mails will be sent |
|
18 | ; email FROM address all mails will be sent | |
19 | #app_email_from = rhodecode-noreply@localhost |
|
19 | #app_email_from = rhodecode-noreply@localhost | |
20 |
|
20 | |||
21 | #smtp_server = mail.server.com |
|
21 | #smtp_server = mail.server.com | |
22 | #smtp_username = |
|
22 | #smtp_username = | |
23 | #smtp_password = |
|
23 | #smtp_password = | |
24 | #smtp_port = |
|
24 | #smtp_port = | |
25 | #smtp_use_tls = false |
|
25 | #smtp_use_tls = false | |
26 | #smtp_use_ssl = true |
|
26 | #smtp_use_ssl = true | |
27 |
|
27 | |||
28 | [server:main] |
|
28 | [server:main] | |
29 | ; COMMON HOST/IP CONFIG, This applies mostly to develop setup, |
|
29 | ; COMMON HOST/IP CONFIG, This applies mostly to develop setup, | |
30 | ; Host port for gunicorn are controlled by gunicorn_conf.py |
|
30 | ; Host port for gunicorn are controlled by gunicorn_conf.py | |
31 | host = 127.0.0.1 |
|
31 | host = 127.0.0.1 | |
32 | port = 10020 |
|
32 | port = 10020 | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | ; ########################### |
|
35 | ; ########################### | |
36 | ; GUNICORN APPLICATION SERVER |
|
36 | ; GUNICORN APPLICATION SERVER | |
37 | ; ########################### |
|
37 | ; ########################### | |
38 |
|
38 | |||
39 | ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py |
|
39 | ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py | |
40 |
|
40 | |||
41 | ; Module to use, this setting shouldn't be changed |
|
41 | ; Module to use, this setting shouldn't be changed | |
42 | use = egg:gunicorn#main |
|
42 | use = egg:gunicorn#main | |
43 |
|
43 | |||
44 | ; Prefix middleware for RhodeCode. |
|
44 | ; Prefix middleware for RhodeCode. | |
45 | ; recommended when using proxy setup. |
|
45 | ; recommended when using proxy setup. | |
46 | ; allows to set RhodeCode under a prefix in server. |
|
46 | ; allows to set RhodeCode under a prefix in server. | |
47 | ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. |
|
47 | ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. | |
48 | ; And set your prefix like: `prefix = /custom_prefix` |
|
48 | ; And set your prefix like: `prefix = /custom_prefix` | |
49 | ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need |
|
49 | ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need | |
50 | ; to make your cookies only work on prefix url |
|
50 | ; to make your cookies only work on prefix url | |
51 | [filter:proxy-prefix] |
|
51 | [filter:proxy-prefix] | |
52 | use = egg:PasteDeploy#prefix |
|
52 | use = egg:PasteDeploy#prefix | |
53 | prefix = / |
|
53 | prefix = / | |
54 |
|
54 | |||
55 | [app:main] |
|
55 | [app:main] | |
56 | ; The %(here)s variable will be replaced with the absolute path of parent directory |
|
56 | ; The %(here)s variable will be replaced with the absolute path of parent directory | |
57 | ; of this file |
|
57 | ; of this file | |
58 | ; Each option in the app:main can be override by an environmental variable |
|
58 | ; Each option in the app:main can be override by an environmental variable | |
59 | ; |
|
59 | ; | |
60 | ;To override an option: |
|
60 | ;To override an option: | |
61 | ; |
|
61 | ; | |
62 | ;RC_<KeyName> |
|
62 | ;RC_<KeyName> | |
63 | ;Everything should be uppercase, . and - should be replaced by _. |
|
63 | ;Everything should be uppercase, . and - should be replaced by _. | |
64 | ;For example, if you have these configuration settings: |
|
64 | ;For example, if you have these configuration settings: | |
65 | ;rc_cache.repo_object.backend = foo |
|
65 | ;rc_cache.repo_object.backend = foo | |
66 | ;can be overridden by |
|
66 | ;can be overridden by | |
67 | ;export RC_CACHE_REPO_OBJECT_BACKEND=foo |
|
67 | ;export RC_CACHE_REPO_OBJECT_BACKEND=foo | |
68 |
|
68 | |||
69 | use = egg:rhodecode-enterprise-ce |
|
69 | use = egg:rhodecode-enterprise-ce | |
70 |
|
70 | |||
71 | ; enable proxy prefix middleware, defined above |
|
71 | ; enable proxy prefix middleware, defined above | |
72 | #filter-with = proxy-prefix |
|
72 | #filter-with = proxy-prefix | |
73 |
|
73 | |||
74 | ; ############# |
|
74 | ; ############# | |
75 | ; DEBUG OPTIONS |
|
75 | ; DEBUG OPTIONS | |
76 | ; ############# |
|
76 | ; ############# | |
77 |
|
77 | |||
78 | pyramid.reload_templates = true |
|
78 | pyramid.reload_templates = true | |
79 |
|
79 | |||
80 | # During development the we want to have the debug toolbar enabled |
|
80 | # During development the we want to have the debug toolbar enabled | |
81 | pyramid.includes = |
|
81 | pyramid.includes = | |
82 | pyramid_debugtoolbar |
|
82 | pyramid_debugtoolbar | |
83 |
|
83 | |||
84 | debugtoolbar.hosts = 0.0.0.0/0 |
|
84 | debugtoolbar.hosts = 0.0.0.0/0 | |
85 | debugtoolbar.exclude_prefixes = |
|
85 | debugtoolbar.exclude_prefixes = | |
86 | /css |
|
86 | /css | |
87 | /fonts |
|
87 | /fonts | |
88 | /images |
|
88 | /images | |
89 | /js |
|
89 | /js | |
90 |
|
90 | |||
91 | ## RHODECODE PLUGINS ## |
|
91 | ## RHODECODE PLUGINS ## | |
92 | rhodecode.includes = |
|
92 | rhodecode.includes = | |
93 | rhodecode.api |
|
93 | rhodecode.api | |
94 |
|
94 | |||
95 |
|
95 | |||
96 | # api prefix url |
|
96 | # api prefix url | |
97 | rhodecode.api.url = /_admin/api |
|
97 | rhodecode.api.url = /_admin/api | |
98 |
|
98 | |||
99 | ; enable debug style page |
|
99 | ; enable debug style page | |
100 | debug_style = true |
|
100 | debug_style = true | |
101 |
|
101 | |||
102 | ; ################# |
|
102 | ; ################# | |
103 | ; END DEBUG OPTIONS |
|
103 | ; END DEBUG OPTIONS | |
104 | ; ################# |
|
104 | ; ################# | |
105 |
|
105 | |||
106 | ; encryption key used to encrypt social plugin tokens, |
|
106 | ; encryption key used to encrypt social plugin tokens, | |
107 | ; remote_urls with credentials etc, if not set it defaults to |
|
107 | ; remote_urls with credentials etc, if not set it defaults to | |
108 | ; `beaker.session.secret` |
|
108 | ; `beaker.session.secret` | |
109 | #rhodecode.encrypted_values.secret = |
|
109 | #rhodecode.encrypted_values.secret = | |
110 |
|
110 | |||
111 | ; decryption strict mode (enabled by default). It controls if decryption raises |
|
111 | ; decryption strict mode (enabled by default). It controls if decryption raises | |
112 | ; `SignatureVerificationError` in case of wrong key, or damaged encryption data. |
|
112 | ; `SignatureVerificationError` in case of wrong key, or damaged encryption data. | |
113 | #rhodecode.encrypted_values.strict = false |
|
113 | #rhodecode.encrypted_values.strict = false | |
114 |
|
114 | |||
115 | ; Pick algorithm for encryption. Either fernet (more secure) or aes (default) |
|
115 | ; Pick algorithm for encryption. Either fernet (more secure) or aes (default) | |
116 | ; fernet is safer, and we strongly recommend switching to it. |
|
116 | ; fernet is safer, and we strongly recommend switching to it. | |
117 | ; Due to backward compatibility aes is used as default. |
|
117 | ; Due to backward compatibility aes is used as default. | |
118 | #rhodecode.encrypted_values.algorithm = fernet |
|
118 | #rhodecode.encrypted_values.algorithm = fernet | |
119 |
|
119 | |||
120 | ; Return gzipped responses from RhodeCode (static files/application) |
|
120 | ; Return gzipped responses from RhodeCode (static files/application) | |
121 | gzip_responses = false |
|
121 | gzip_responses = false | |
122 |
|
122 | |||
123 | ; Auto-generate javascript routes file on startup |
|
123 | ; Auto-generate javascript routes file on startup | |
124 | generate_js_files = false |
|
124 | generate_js_files = false | |
125 |
|
125 | |||
126 | ; System global default language. |
|
126 | ; System global default language. | |
127 | ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
127 | ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh | |
128 | lang = en |
|
128 | lang = en | |
129 |
|
129 | |||
130 | ; Perform a full repository scan and import on each server start. |
|
130 | ; Perform a full repository scan and import on each server start. | |
131 | ; Settings this to true could lead to very long startup time. |
|
131 | ; Settings this to true could lead to very long startup time. | |
132 | startup.import_repos = false |
|
132 | startup.import_repos = false | |
133 |
|
133 | |||
134 | ; URL at which the application is running. This is used for Bootstrapping |
|
134 | ; URL at which the application is running. This is used for Bootstrapping | |
135 | ; requests in context when no web request is available. Used in ishell, or |
|
135 | ; requests in context when no web request is available. Used in ishell, or | |
136 | ; SSH calls. Set this for events to receive proper url for SSH calls. |
|
136 | ; SSH calls. Set this for events to receive proper url for SSH calls. | |
137 | app.base_url = http://rhodecode.local |
|
137 | app.base_url = http://rhodecode.local | |
138 |
|
138 | |||
139 | ; Host at which the Service API is running. |
|
139 | ; Host at which the Service API is running. | |
140 | app.service_api.host = http://rhodecode.local:10020 |
|
140 | app.service_api.host = http://rhodecode.local:10020 | |
141 |
|
141 | |||
142 | ; Secret for Service API authentication. |
|
142 | ; Secret for Service API authentication. | |
143 | app.service_api.token = |
|
143 | app.service_api.token = | |
144 |
|
144 | |||
145 | ; Unique application ID. Should be a random unique string for security. |
|
145 | ; Unique application ID. Should be a random unique string for security. | |
146 | app_instance_uuid = rc-production |
|
146 | app_instance_uuid = rc-production | |
147 |
|
147 | |||
148 | ; Cut off limit for large diffs (size in bytes). If overall diff size on |
|
148 | ; Cut off limit for large diffs (size in bytes). If overall diff size on | |
149 | ; commit, or pull request exceeds this limit this diff will be displayed |
|
149 | ; commit, or pull request exceeds this limit this diff will be displayed | |
150 | ; partially. E.g 512000 == 512Kb |
|
150 | ; partially. E.g 512000 == 512Kb | |
151 | cut_off_limit_diff = 512000 |
|
151 | cut_off_limit_diff = 512000 | |
152 |
|
152 | |||
153 | ; Cut off limit for large files inside diffs (size in bytes). Each individual |
|
153 | ; Cut off limit for large files inside diffs (size in bytes). Each individual | |
154 | ; file inside diff which exceeds this limit will be displayed partially. |
|
154 | ; file inside diff which exceeds this limit will be displayed partially. | |
155 | ; E.g 128000 == 128Kb |
|
155 | ; E.g 128000 == 128Kb | |
156 | cut_off_limit_file = 128000 |
|
156 | cut_off_limit_file = 128000 | |
157 |
|
157 | |||
158 | ; Use cached version of vcs repositories everywhere. Recommended to be `true` |
|
158 | ; Use cached version of vcs repositories everywhere. Recommended to be `true` | |
159 | vcs_full_cache = true |
|
159 | vcs_full_cache = true | |
160 |
|
160 | |||
161 | ; Force https in RhodeCode, fixes https redirects, assumes it's always https. |
|
161 | ; Force https in RhodeCode, fixes https redirects, assumes it's always https. | |
162 | ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache |
|
162 | ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache | |
163 | force_https = false |
|
163 | force_https = false | |
164 |
|
164 | |||
165 | ; use Strict-Transport-Security headers |
|
165 | ; use Strict-Transport-Security headers | |
166 | use_htsts = false |
|
166 | use_htsts = false | |
167 |
|
167 | |||
168 | ; Set to true if your repos are exposed using the dumb protocol |
|
168 | ; Set to true if your repos are exposed using the dumb protocol | |
169 | git_update_server_info = false |
|
169 | git_update_server_info = false | |
170 |
|
170 | |||
171 | ; RSS/ATOM feed options |
|
171 | ; RSS/ATOM feed options | |
172 | rss_cut_off_limit = 256000 |
|
172 | rss_cut_off_limit = 256000 | |
173 | rss_items_per_page = 10 |
|
173 | rss_items_per_page = 10 | |
174 | rss_include_diff = false |
|
174 | rss_include_diff = false | |
175 |
|
175 | |||
176 | ; gist URL alias, used to create nicer urls for gist. This should be an |
|
176 | ; gist URL alias, used to create nicer urls for gist. This should be an | |
177 | ; url that does rewrites to _admin/gists/{gistid}. |
|
177 | ; url that does rewrites to _admin/gists/{gistid}. | |
178 | ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
178 | ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal | |
179 | ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} |
|
179 | ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} | |
180 | gist_alias_url = |
|
180 | gist_alias_url = | |
181 |
|
181 | |||
182 | ; List of views (using glob pattern syntax) that AUTH TOKENS could be |
|
182 | ; List of views (using glob pattern syntax) that AUTH TOKENS could be | |
183 | ; used for access. |
|
183 | ; used for access. | |
184 | ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it |
|
184 | ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it | |
185 | ; came from the the logged in user who own this authentication token. |
|
185 | ; came from the the logged in user who own this authentication token. | |
186 | ; Additionally @TOKEN syntax can be used to bound the view to specific |
|
186 | ; Additionally @TOKEN syntax can be used to bound the view to specific | |
187 | ; authentication token. Such view would be only accessible when used together |
|
187 | ; authentication token. Such view would be only accessible when used together | |
188 | ; with this authentication token |
|
188 | ; with this authentication token | |
189 | ; list of all views can be found under `/_admin/permissions/auth_token_access` |
|
189 | ; list of all views can be found under `/_admin/permissions/auth_token_access` | |
190 | ; The list should be "," separated and on a single line. |
|
190 | ; The list should be "," separated and on a single line. | |
191 | ; Most common views to enable: |
|
191 | ; Most common views to enable: | |
192 |
|
192 | |||
193 | # RepoCommitsView:repo_commit_download |
|
193 | # RepoCommitsView:repo_commit_download | |
194 | # RepoCommitsView:repo_commit_patch |
|
194 | # RepoCommitsView:repo_commit_patch | |
195 | # RepoCommitsView:repo_commit_raw |
|
195 | # RepoCommitsView:repo_commit_raw | |
196 | # RepoCommitsView:repo_commit_raw@TOKEN |
|
196 | # RepoCommitsView:repo_commit_raw@TOKEN | |
197 | # RepoFilesView:repo_files_diff |
|
197 | # RepoFilesView:repo_files_diff | |
198 | # RepoFilesView:repo_archivefile |
|
198 | # RepoFilesView:repo_archivefile | |
199 | # RepoFilesView:repo_file_raw |
|
199 | # RepoFilesView:repo_file_raw | |
200 | # GistView:* |
|
200 | # GistView:* | |
201 | api_access_controllers_whitelist = |
|
201 | api_access_controllers_whitelist = | |
202 |
|
202 | |||
203 | ; Default encoding used to convert from and to unicode |
|
203 | ; Default encoding used to convert from and to unicode | |
204 | ; can be also a comma separated list of encoding in case of mixed encodings |
|
204 | ; can be also a comma separated list of encoding in case of mixed encodings | |
205 | default_encoding = UTF-8 |
|
205 | default_encoding = UTF-8 | |
206 |
|
206 | |||
207 | ; instance-id prefix |
|
207 | ; instance-id prefix | |
208 | ; a prefix key for this instance used for cache invalidation when running |
|
208 | ; a prefix key for this instance used for cache invalidation when running | |
209 | ; multiple instances of RhodeCode, make sure it's globally unique for |
|
209 | ; multiple instances of RhodeCode, make sure it's globally unique for | |
210 | ; all running RhodeCode instances. Leave empty if you don't use it |
|
210 | ; all running RhodeCode instances. Leave empty if you don't use it | |
211 | instance_id = |
|
211 | instance_id = | |
212 |
|
212 | |||
213 | ; Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
213 | ; Fallback authentication plugin. Set this to a plugin ID to force the usage | |
214 | ; of an authentication plugin also if it is disabled by it's settings. |
|
214 | ; of an authentication plugin also if it is disabled by it's settings. | |
215 | ; This could be useful if you are unable to log in to the system due to broken |
|
215 | ; This could be useful if you are unable to log in to the system due to broken | |
216 | ; authentication settings. Then you can enable e.g. the internal RhodeCode auth |
|
216 | ; authentication settings. Then you can enable e.g. the internal RhodeCode auth | |
217 | ; module to log in again and fix the settings. |
|
217 | ; module to log in again and fix the settings. | |
218 | ; Available builtin plugin IDs (hash is part of the ID): |
|
218 | ; Available builtin plugin IDs (hash is part of the ID): | |
219 | ; egg:rhodecode-enterprise-ce#rhodecode |
|
219 | ; egg:rhodecode-enterprise-ce#rhodecode | |
220 | ; egg:rhodecode-enterprise-ce#pam |
|
220 | ; egg:rhodecode-enterprise-ce#pam | |
221 | ; egg:rhodecode-enterprise-ce#ldap |
|
221 | ; egg:rhodecode-enterprise-ce#ldap | |
222 | ; egg:rhodecode-enterprise-ce#jasig_cas |
|
222 | ; egg:rhodecode-enterprise-ce#jasig_cas | |
223 | ; egg:rhodecode-enterprise-ce#headers |
|
223 | ; egg:rhodecode-enterprise-ce#headers | |
224 | ; egg:rhodecode-enterprise-ce#crowd |
|
224 | ; egg:rhodecode-enterprise-ce#crowd | |
225 |
|
225 | |||
226 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
226 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode | |
227 |
|
227 | |||
228 | ; Flag to control loading of legacy plugins in py:/path format |
|
228 | ; Flag to control loading of legacy plugins in py:/path format | |
229 | auth_plugin.import_legacy_plugins = true |
|
229 | auth_plugin.import_legacy_plugins = true | |
230 |
|
230 | |||
231 | ; alternative return HTTP header for failed authentication. Default HTTP |
|
231 | ; alternative return HTTP header for failed authentication. Default HTTP | |
232 | ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
232 | ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with | |
233 | ; handling that causing a series of failed authentication calls. |
|
233 | ; handling that causing a series of failed authentication calls. | |
234 | ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
234 | ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code | |
235 | ; This will be served instead of default 401 on bad authentication |
|
235 | ; This will be served instead of default 401 on bad authentication | |
236 | auth_ret_code = |
|
236 | auth_ret_code = | |
237 |
|
237 | |||
238 | ; use special detection method when serving auth_ret_code, instead of serving |
|
238 | ; use special detection method when serving auth_ret_code, instead of serving | |
239 | ; ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
239 | ; ret_code directly, use 401 initially (Which triggers credentials prompt) | |
240 | ; and then serve auth_ret_code to clients |
|
240 | ; and then serve auth_ret_code to clients | |
241 | auth_ret_code_detection = false |
|
241 | auth_ret_code_detection = false | |
242 |
|
242 | |||
243 | ; locking return code. When repository is locked return this HTTP code. 2XX |
|
243 | ; locking return code. When repository is locked return this HTTP code. 2XX | |
244 | ; codes don't break the transactions while 4XX codes do |
|
244 | ; codes don't break the transactions while 4XX codes do | |
245 | lock_ret_code = 423 |
|
245 | lock_ret_code = 423 | |
246 |
|
246 | |||
247 | ; Filesystem location were repositories should be stored |
|
247 | ; Filesystem location were repositories should be stored | |
248 | repo_store.path = /var/opt/rhodecode_repo_store |
|
248 | repo_store.path = /var/opt/rhodecode_repo_store | |
249 |
|
249 | |||
250 | ; allows to setup custom hooks in settings page |
|
250 | ; allows to setup custom hooks in settings page | |
251 | allow_custom_hooks_settings = true |
|
251 | allow_custom_hooks_settings = true | |
252 |
|
252 | |||
253 | ; Generated license token required for EE edition license. |
|
253 | ; Generated license token required for EE edition license. | |
254 | ; New generated token value can be found in Admin > settings > license page. |
|
254 | ; New generated token value can be found in Admin > settings > license page. | |
255 | license_token = |
|
255 | license_token = | |
256 |
|
256 | |||
257 | ; This flag hides sensitive information on the license page such as token, and license data |
|
257 | ; This flag hides sensitive information on the license page such as token, and license data | |
258 | license.hide_license_info = false |
|
258 | license.hide_license_info = false | |
259 |
|
259 | |||
260 | ; supervisor connection uri, for managing supervisor and logs. |
|
260 | ; supervisor connection uri, for managing supervisor and logs. | |
261 | supervisor.uri = |
|
261 | supervisor.uri = | |
262 |
|
262 | |||
263 | ; supervisord group name/id we only want this RC instance to handle |
|
263 | ; supervisord group name/id we only want this RC instance to handle | |
264 | supervisor.group_id = dev |
|
264 | supervisor.group_id = dev | |
265 |
|
265 | |||
266 | ; Display extended labs settings |
|
266 | ; Display extended labs settings | |
267 | labs_settings_active = true |
|
267 | labs_settings_active = true | |
268 |
|
268 | |||
269 | ; Custom exception store path, defaults to TMPDIR |
|
269 | ; Custom exception store path, defaults to TMPDIR | |
270 | ; This is used to store exception from RhodeCode in shared directory |
|
270 | ; This is used to store exception from RhodeCode in shared directory | |
271 | #exception_tracker.store_path = |
|
271 | #exception_tracker.store_path = | |
272 |
|
272 | |||
273 | ; Send email with exception details when it happens |
|
273 | ; Send email with exception details when it happens | |
274 | #exception_tracker.send_email = false |
|
274 | #exception_tracker.send_email = false | |
275 |
|
275 | |||
276 | ; Comma separated list of recipients for exception emails, |
|
276 | ; Comma separated list of recipients for exception emails, | |
277 | ; e.g admin@rhodecode.com,devops@rhodecode.com |
|
277 | ; e.g admin@rhodecode.com,devops@rhodecode.com | |
278 | ; Can be left empty, then emails will be sent to ALL super-admins |
|
278 | ; Can be left empty, then emails will be sent to ALL super-admins | |
279 | #exception_tracker.send_email_recipients = |
|
279 | #exception_tracker.send_email_recipients = | |
280 |
|
280 | |||
281 | ; optional prefix to Add to email Subject |
|
281 | ; optional prefix to Add to email Subject | |
282 | #exception_tracker.email_prefix = [RHODECODE ERROR] |
|
282 | #exception_tracker.email_prefix = [RHODECODE ERROR] | |
283 |
|
283 | |||
284 | ; File store configuration. This is used to store and serve uploaded files |
|
284 | ; File store configuration. This is used to store and serve uploaded files | |
285 | file_store.enabled = true |
|
285 | file_store.enabled = true | |
286 |
|
286 | |||
287 | ; Storage backend, available options are: local |
|
287 | ; Storage backend, available options are: local | |
288 | file_store.backend = local |
|
288 | file_store.backend = local | |
289 |
|
289 | |||
290 | ; path to store the uploaded binaries and artifacts |
|
290 | ; path to store the uploaded binaries and artifacts | |
291 | file_store.storage_path = /var/opt/rhodecode_data/file_store |
|
291 | file_store.storage_path = /var/opt/rhodecode_data/file_store | |
292 |
|
292 | |||
293 | ; Uncomment and set this path to control settings for archive download cache. |
|
293 | ||
|
294 | ; Redis url to acquire/check generation of archives locks | |||
|
295 | archive_cache.locking.url = redis://redis:6379/1 | |||
|
296 | ||||
|
297 | ; Storage backend, only 'filesystem' and 'objectstore' are available now | |||
|
298 | archive_cache.backend.type = filesystem | |||
|
299 | ||||
|
300 | ; url for s3 compatible storage that allows to upload artifacts | |||
|
301 | ; e.g http://minio:9000 | |||
|
302 | archive_cache.objectstore.url = http://s3-minio:9000 | |||
|
303 | ||||
|
304 | ; key for s3 auth | |||
|
305 | archive_cache.objectstore.key = key | |||
|
306 | ||||
|
307 | ; secret for s3 auth | |||
|
308 | archive_cache.objectstore.secret = secret | |||
|
309 | ||||
|
310 | ; number of sharded buckets to create to distribute archives across | |||
|
311 | ; default is 8 shards | |||
|
312 | archive_cache.objectstore.bucket_shards = 8 | |||
|
313 | ||||
|
314 | ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time | |||
|
315 | archive_cache.objectstore.retry = false | |||
|
316 | ||||
|
317 | ; number of seconds to wait for next try using retry | |||
|
318 | archive_cache.objectstore.retry_backoff = 1 | |||
|
319 | ||||
|
320 | ; how many tries do do a retry fetch from this backend | |||
|
321 | archive_cache.objectstore.retry_attempts = 10 | |||
|
322 | ||||
|
323 | ; Default is $cache_dir/archive_cache if not set | |||
294 | ; Generated repo archives will be cached at this location |
|
324 | ; Generated repo archives will be cached at this location | |
295 | ; and served from the cache during subsequent requests for the same archive of |
|
325 | ; and served from the cache during subsequent requests for the same archive of | |
296 | ; the repository. This path is important to be shared across filesystems and with |
|
326 | ; the repository. This path is important to be shared across filesystems and with | |
297 | ; RhodeCode and vcsserver |
|
327 | ; RhodeCode and vcsserver | |
298 |
|
||||
299 | ; Redis url to acquire/check generation of archives locks |
|
|||
300 | archive_cache.locking.url = redis://redis:6379/1 |
|
|||
301 |
|
||||
302 | ; Storage backend, only 'filesystem' is available now |
|
|||
303 | archive_cache.backend.type = filesystem |
|
|||
304 |
|
||||
305 | ; Default is $cache_dir/archive_cache if not set |
|
|||
306 | archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache |
|
328 | archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache | |
307 |
|
329 | |||
308 | ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb |
|
330 | ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb | |
309 | archive_cache.filesystem.cache_size_gb = 1 |
|
331 | archive_cache.filesystem.cache_size_gb = 1 | |
310 |
|
332 | |||
311 | ; Eviction policy used to clear out after cache_size_gb limit is reached |
|
333 | ; Eviction policy used to clear out after cache_size_gb limit is reached | |
312 | archive_cache.filesystem.eviction_policy = least-recently-stored |
|
334 | archive_cache.filesystem.eviction_policy = least-recently-stored | |
313 |
|
335 | |||
314 | ; By default cache uses sharding technique, this specifies how many shards are there |
|
336 | ; By default cache uses sharding technique, this specifies how many shards are there | |
|
337 | ; default is 8 shards | |||
315 | archive_cache.filesystem.cache_shards = 8 |
|
338 | archive_cache.filesystem.cache_shards = 8 | |
316 |
|
339 | |||
|
340 | ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time | |||
|
341 | archive_cache.filesystem.retry = false | |||
|
342 | ||||
|
343 | ; number of seconds to wait for next try using retry | |||
|
344 | archive_cache.filesystem.retry_backoff = 1 | |||
|
345 | ||||
|
346 | ; how many tries do do a retry fetch from this backend | |||
|
347 | archive_cache.filesystem.retry_attempts = 10 | |||
|
348 | ||||
317 |
|
349 | |||
; #############
; CELERY CONFIG
; #############

; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini

use_celery = true

; path to store schedule database
#celerybeat-schedule.path =

; connection url to the message broker (default redis)
celery.broker_url = redis://redis:6379/8

; results backend to get results for (default redis)
celery.result_backend = redis://redis:6379/8

; rabbitmq example
#celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost

; maximum tasks to execute before worker restart
celery.max_tasks_per_child = 20

; tasks will never be sent to the queue, but executed locally instead.
celery.task_always_eager = false

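; Note: like any other option in this file, the celery urls can be overridden
; with environment variables using the RC_<KeyName> rule described in the
; [app:main] section, e.g. (illustrative values):
; export RC_CELERY_BROKER_URL=redis://redis:6379/8
; export RC_CELERY_RESULT_BACKEND=redis://redis:6379/8
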
; #############
; DOGPILE CACHE
; #############

; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; eg. /tmpfs/data_ramdisk, however this directory might require a large amount of space
cache_dir = /var/opt/rhodecode_data

; *********************************************
; `sql_cache_short` cache for heavy SQL queries
; Only supported backend is `memory_lru`
; *********************************************
rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
rc_cache.sql_cache_short.expiration_time = 30


; *****************************************************
; `cache_repo_longterm` cache for repo object instances
; Only supported backend is `memory_lru`
; *****************************************************
rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
; by default we use 30 days (2592000 seconds), the cache is still invalidated on push
rc_cache.cache_repo_longterm.expiration_time = 2592000
; max items in the LRU cache, set to a smaller number to save memory and expire the least recently used caches
rc_cache.cache_repo_longterm.max_size = 10000


; *********************************************
; `cache_general` cache for general purpose use
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *********************************************
rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_general.expiration_time = 43200
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_general.arguments.filename = /tmp/cache_general_db

; alternative `cache_general` redis backend with distributed lock
#rc_cache.cache_general.backend = dogpile.cache.rc.redis
#rc_cache.cache_general.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_general.arguments.redis_expiration_time = 7200

#rc_cache.cache_general.arguments.host = localhost
#rc_cache.cache_general.arguments.port = 6379
#rc_cache.cache_general.arguments.db = 0
#rc_cache.cache_general.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_general.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_general.arguments.lock_auto_renewal = true

; *************************************************
; `cache_perms` cache for permission tree, auth TTL
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *************************************************
rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_perms.expiration_time = 3600
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db

; alternative `cache_perms` redis backend with distributed lock
#rc_cache.cache_perms.backend = dogpile.cache.rc.redis
#rc_cache.cache_perms.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_perms.arguments.redis_expiration_time = 7200

#rc_cache.cache_perms.arguments.host = localhost
#rc_cache.cache_perms.arguments.port = 6379
#rc_cache.cache_perms.arguments.db = 0
#rc_cache.cache_perms.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_perms.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_perms.arguments.lock_auto_renewal = true

; ***************************************************
; `cache_repo` cache for file tree, Readme, RSS FEEDS
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; ***************************************************
rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_repo.expiration_time = 2592000
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db

; alternative `cache_repo` redis backend with distributed lock
#rc_cache.cache_repo.backend = dogpile.cache.rc.redis
#rc_cache.cache_repo.expiration_time = 2592000

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_repo.arguments.redis_expiration_time = 2678400

#rc_cache.cache_repo.arguments.host = localhost
#rc_cache.cache_repo.arguments.port = 6379
#rc_cache.cache_repo.arguments.db = 1
#rc_cache.cache_repo.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_repo.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_repo.arguments.lock_auto_renewal = true

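; Note how the commented redis example above satisfies the "greater than" rule:
; redis_expiration_time 2678400 (31 days) > expiration_time 2592000 (30 days).
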
; ##############
; BEAKER SESSION
; ##############

; beaker.session.type is the type of storage used for the logged-in users' sessions. Currently allowed
; types are file, ext:redis, ext:database, ext:memcached
; Fastest ones are ext:redis and ext:database, DO NOT use the memory type for sessions
#beaker.session.type = file
#beaker.session.data_dir = %(here)s/data/sessions

; Redis based sessions
beaker.session.type = ext:redis
beaker.session.url = redis://redis:6379/2

; DB based sessions, fast, and allow easy management of logged-in users
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = develop-rc-uytcxaz
beaker.session.lock_dir = /data_ramdisk/lock

; Secure encrypted cookie. Requires AES and AES python libraries
; you must disable beaker.session.secret to use this
#beaker.session.encrypt_key = key_for_encryption
#beaker.session.validate_key = validation_key

; Sets the session as invalid (also logging out the user) if it has not been
; accessed for the given amount of time in seconds (2592000 seconds = 30 days)
beaker.session.timeout = 2592000
beaker.session.httponly = true

; Path to use for the cookie. Set to prefix if you use prefix middleware
#beaker.session.cookie_path = /custom_prefix

; Set https secure cookie
beaker.session.secure = false

; default cookie expiration time in seconds, set to `true` to make cookies expire
; at browser close
#beaker.session.cookie_expires = 3600

; #############################
; SEARCH INDEXING CONFIGURATION
; #############################

; Full text search indexer is available in rhodecode-tools under the
; `rhodecode-tools index` command

; WHOOSH Backend, doesn't require additional services to run
; it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index

; ####################
; CHANNELSTREAM CONFIG
; ####################

; channelstream enables persistent connections and live notifications
; in the system. It's also used by the chat system

channelstream.enabled = true

; server address for the channelstream server on the backend
channelstream.server = channelstream:9800

; location of the channelstream server from the outside world
; use ws:// for http or wss:// for https. This address needs to be handled
; by an external HTTP server such as Nginx or Apache
; see Nginx/Apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
channelstream.secret = ENV_GENERATED
channelstream.history.location = /var/opt/rhodecode_data/channelstream_history

; Internal application path that Javascript uses to connect to.
; If you use proxy-prefix the prefix should be added before /_channelstream
channelstream.proxy_path = /_channelstream


; ##############################
; MAIN RHODECODE DATABASE CONFIG
; ##############################

#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
; pymysql is an alternative driver for MySQL, use it in case of problems with the default one
#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode

sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30

; see sqlalchemy docs for other advanced settings
; print the sql statements to output
sqlalchemy.db1.echo = false

; recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600

; the number of connections to keep open inside the connection pool.
; 0 indicates no limit
; the general calculation with gevent is:
; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
; then increase pool size + max overflow so that they add up to 500.
#sqlalchemy.db1.pool_size = 5

; The number of connections to allow in connection pool "overflow", that is
; connections that can be opened above and beyond the pool_size setting,
; which defaults to five.
#sqlalchemy.db1.max_overflow = 10

; Connection check ping, used to detect broken database connections
; can be enabled to better handle "MySQL has gone away" errors
#sqlalchemy.db1.ping_connection = true

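; A worked example of the sizing rule above (hypothetical numbers, tune to
; your own load): for 500 concurrent greenlets you could use
; pool_size = 100 and max_overflow = 400, since 100 + 400 = 500.
#sqlalchemy.db1.pool_size = 100
#sqlalchemy.db1.max_overflow = 400
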
; ##########
; VCS CONFIG
; ##########
vcs.server.enable = true
vcs.server = vcsserver:10010

; Web server connectivity protocol, responsible for web based VCS operations
; Available protocols are:
; `http` - use http-rpc backend (default)
vcs.server.protocol = http

; Push/Pull operations protocol, available options are:
; `http` - use http-rpc backend (default)
vcs.scm_app_implementation = http

; Push/Pull operations hooks protocol, available options are:
; `http` - use http-rpc backend (default)
; `celery` - use celery based hooks
vcs.hooks.protocol = http

; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
; accessible via network.
; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
vcs.hooks.host = *

; Start VCSServer with this instance as a subprocess, useful for development
vcs.start_server = false

; List of enabled VCS backends, available options are:
; `hg` - mercurial
; `git` - git
; `svn` - subversion
vcs.backends = hg, git, svn

; Wait this number of seconds before killing the connection to the vcsserver
vcs.connection_timeout = 3600

; Cache flag to cache vcsserver remote calls locally
; It uses cache_region `cache_repo`
vcs.methods.cache = true

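; e.g. to switch to the celery based hooks listed above (this file already
; sets use_celery = true):
#vcs.hooks.protocol = celery
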
; ####################################################
; Subversion proxy support (mod_dav_svn)
; Maps RhodeCode repo groups into SVN paths for Apache
; ####################################################

; Compatibility version when creating SVN repositories. Defaults to the newest version when commented out.
; Set a numeric version for your current SVN e.g 1.8, or 1.12
; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
#vcs.svn.compatible_version = 1.8

; Enable SVN proxying of requests over HTTP
vcs.svn.proxy.enabled = true

; host to connect to the running SVN subsystem
vcs.svn.proxy.host = http://svn:8090

; Enable or disable the config file generation.
svn.proxy.generate_config = true

; Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true

; Set location and file name of the generated config file.
svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf

; alternative mod_dav config template. This needs to be a valid mako template
; Example template can be found in the source code:
; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako

; Used as a prefix to the `Location` block in the generated config file.
; In most cases it should be set to `/`.
svn.proxy.location_root = /

; Command to reload the mod dav svn configuration on change.
; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
; Make sure the user who runs the RhodeCode process is allowed to reload Apache
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload

; If the timeout expires before the reload command finishes, the command will
; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10

; ####################
; SSH Support Settings
; ####################

; Defines if a custom authorized_keys file should be created and written on
; any change of user ssh keys. Setting this to false also disables the possibility
; of adding SSH keys by users from the web interface. Super admins can still
; manage SSH Keys.
ssh.generate_authorized_keyfile = true

; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =

; Path to the authorized_keys file where the generated entries are placed.
; It is possible to have multiple key files specified in `sshd_config` e.g.
; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode

; Command to execute the SSH wrapper. The binary is available in the
; RhodeCode installation directory.
; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper

; Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false

; Enables logging, and detailed output sent back to the client during SSH
; operations. Useful for debugging, shouldn't be used in production.
ssh.enable_debug_logging = true

; Paths to binary executables, by default they are just the names, but we can
; override them if we want to use a custom one
ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve

; Enables the SSH key generator web interface. Disabling this still allows users
; to add their own keys.
ssh.enable_ui_key_generator = true

; Statsd client config, this is used to send metrics to statsd
; We recommend exporting the metrics (e.g. via statsd_exporter) and scraping them using Prometheus
#statsd.enabled = false
#statsd.statsd_host = 0.0.0.0
#statsd.statsd_port = 8125
#statsd.statsd_prefix =
#statsd.statsd_ipv6 = false

; Logging is configured automatically at server startup. Set this to false
; to use the custom logging config below instead.
; In case of autoconfigure, the env variables
; RC_LOGGING_FORMATTER
; RC_LOGGING_LEVEL
; can control the settings for logging

#logging.autoconfigure = true

; specify your own custom logging config file to configure logging
#logging.logging_conf_file = /path/to/custom_logging.ini

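; e.g. a minimal sketch of driving the autoconfigured logging from the
; environment, using the variables listed above (values are illustrative):
; export RC_LOGGING_FORMATTER=json
; export RC_LOGGING_LEVEL=INFO
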
; Dummy marker to add new entries after.
; Add any custom entries below. Please don't remove this marker.
custom.conf = 1


; #####################
; LOGGING CONFIGURATION
; #####################

[loggers]
keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper

[handlers]
keys = console, console_sql

[formatters]
keys = generic, json, color_formatter, color_formatter_sql

; #######
; LOGGERS
; #######
[logger_root]
level = NOTSET
handlers = console

[logger_sqlalchemy]
level = INFO
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_ssh_wrapper]
level = DEBUG
handlers =
qualname = ssh_wrapper
propagate = 1

[logger_celery]
level = DEBUG
handlers =
qualname = celery


; ########
; HANDLERS
; ########

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
; This allows sending properly formatted logs to grafana loki or elasticsearch
formatter = color_formatter

[handler_console_sql]
; "level = DEBUG" logs SQL queries and results.
; "level = INFO" logs SQL queries.
; "level = WARN" logs neither. (Recommended for production systems.)
class = StreamHandler
args = (sys.stderr, )
level = WARN
; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
; This allows sending properly formatted logs to grafana loki or elasticsearch
formatter = color_formatter_sql

; ##########
; FORMATTERS
; ##########

[formatter_generic]
class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_json]
format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,781 +1,813 b'' | |||||

; #########################################
; RHODECODE COMMUNITY EDITION CONFIGURATION
; #########################################

[DEFAULT]
; Debug flag sets all loggers to debug, and enables request tracking
debug = false

; ########################################################################
; EMAIL CONFIGURATION
; These settings will be used by the RhodeCode mailing system
; ########################################################################

; prefix all email subjects with the given prefix, helps filtering out emails
#email_prefix = [RhodeCode]

; email FROM address from which all mails will be sent
#app_email_from = rhodecode-noreply@localhost

#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true

[server:main]
; COMMON HOST/IP CONFIG. This applies mostly to a develop setup,
; host and port for gunicorn are controlled by gunicorn_conf.py
host = 127.0.0.1
port = 10020


; ###########################
; GUNICORN APPLICATION SERVER
; ###########################

; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py

; Module to use, this setting shouldn't be changed
use = egg:gunicorn#main

; Prefix middleware for RhodeCode.
; recommended when using a proxy setup.
; allows serving RhodeCode under a prefix on the server,
; eg https://server.com/custom_prefix. Enable the `filter-with =` option below as well.
; And set your prefix like: `prefix = /custom_prefix`
; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
; to make your cookies only work on the prefix url
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /

[app:main]
; The %(here)s variable will be replaced with the absolute path of the parent directory
; of this file
; Each option in the app:main section can be overridden by an environment variable
;
;To override an option:
;
;RC_<KeyName>
;Everything should be uppercase, . and - should be replaced by _.
;For example, if you have these configuration settings:
;rc_cache.repo_object.backend = foo
;can be overridden by
;export RC_CACHE_REPO_OBJECT_BACKEND=foo

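;The same rule applies to dashed keys, e.g. (illustrative value):
;celerybeat-schedule.path = /tmp/celerybeat-schedule
;can be overridden by
;export RC_CELERYBEAT_SCHEDULE_PATH=/tmp/celerybeat-schedule
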
use = egg:rhodecode-enterprise-ce

; enable proxy prefix middleware, defined above
#filter-with = proxy-prefix

; encryption key used to encrypt social plugin tokens,
; remote_urls with credentials etc, if not set it defaults to
; `beaker.session.secret`
#rhodecode.encrypted_values.secret =

; decryption strict mode (enabled by default). It controls whether decryption raises
; `SignatureVerificationError` in case of a wrong key or damaged encryption data.
#rhodecode.encrypted_values.strict = false

; Pick the algorithm for encryption. Either fernet (more secure) or aes (default)
; fernet is safer, and we strongly recommend switching to it.
; Due to backward compatibility aes is used as the default.
#rhodecode.encrypted_values.algorithm = fernet

; Return gzipped responses from RhodeCode (static files/application)
gzip_responses = false

; Auto-generate javascript routes file on startup
generate_js_files = false

; System global default language.
; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

; Perform a full repository scan and import on each server start.
; Setting this to true could lead to very long startup times.
startup.import_repos = false

; URL at which the application is running. This is used for bootstrapping
; requests in context when no web request is available. Used in ishell, or
; SSH calls. Set this for events to receive a proper url for SSH calls.
app.base_url = http://rhodecode.local

; Host at which the Service API is running.
app.service_api.host = http://rhodecode.local:10020

; Secret for Service API authentication.
app.service_api.token =

; Unique application ID. Should be a random unique string for security.
app_instance_uuid = rc-production

; Cut off limit for large diffs (size in bytes). If the overall diff size on a
; commit or pull request exceeds this limit the diff will be displayed
; partially. E.g 512000 == 512 KB
cut_off_limit_diff = 512000

; Cut off limit for large files inside diffs (size in bytes). Each individual
; file inside a diff which exceeds this limit will be displayed partially.
; E.g 128000 == 128 KB
cut_off_limit_file = 128000

; Use cached version of vcs repositories everywhere. Recommended to be `true`
vcs_full_cache = true

; Force https in RhodeCode, fixes https redirects, assumes it's always https.
; Normally this is controlled by proper flags sent from an http server such as Nginx or Apache
force_https = false

; use Strict-Transport-Security headers
use_htsts = false

; Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

; RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

144 | ; gist URL alias, used to create nicer urls for gist. This should be an |
|
144 | ; gist URL alias, used to create nicer urls for gist. This should be an | |
145 | ; url that does rewrites to _admin/gists/{gistid}. |
|
145 | ; url that does rewrites to _admin/gists/{gistid}. | |
146 | ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
146 | ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal | |
147 | ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} |
|
147 | ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} | |
148 | gist_alias_url = |
|
148 | gist_alias_url = | |
149 |
|
149 | |||
150 | ; List of views (using glob pattern syntax) that AUTH TOKENS could be |
|
150 | ; List of views (using glob pattern syntax) that AUTH TOKENS could be | |
151 | ; used for access. |
|
151 | ; used for access. | |
152 | ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it |
|
152 | ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it | |
153 | ; came from the the logged in user who own this authentication token. |
|
153 | ; came from the the logged in user who own this authentication token. | |
154 | ; Additionally @TOKEN syntax can be used to bound the view to specific |
|
154 | ; Additionally @TOKEN syntax can be used to bound the view to specific | |
155 | ; authentication token. Such view would be only accessible when used together |
|
155 | ; authentication token. Such view would be only accessible when used together | |
156 | ; with this authentication token |
|
156 | ; with this authentication token | |
157 | ; list of all views can be found under `/_admin/permissions/auth_token_access` |
|
157 | ; list of all views can be found under `/_admin/permissions/auth_token_access` | |
158 | ; The list should be "," separated and on a single line. |
|
158 | ; The list should be "," separated and on a single line. | |
159 | ; Most common views to enable: |
|
159 | ; Most common views to enable: | |
160 |
|
160 | |||
161 | # RepoCommitsView:repo_commit_download |
|
161 | # RepoCommitsView:repo_commit_download | |
162 | # RepoCommitsView:repo_commit_patch |
|
162 | # RepoCommitsView:repo_commit_patch | |
163 | # RepoCommitsView:repo_commit_raw |
|
163 | # RepoCommitsView:repo_commit_raw | |
164 | # RepoCommitsView:repo_commit_raw@TOKEN |
|
164 | # RepoCommitsView:repo_commit_raw@TOKEN | |
165 | # RepoFilesView:repo_files_diff |
|
165 | # RepoFilesView:repo_files_diff | |
166 | # RepoFilesView:repo_archivefile |
|
166 | # RepoFilesView:repo_archivefile | |
167 | # RepoFilesView:repo_file_raw |
|
167 | # RepoFilesView:repo_file_raw | |
168 | # GistView:* |
|
168 | # GistView:* | |
169 | api_access_controllers_whitelist = |
|
169 | api_access_controllers_whitelist = | |
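
; Illustrative example only (commented out): enabling raw commit access,
; raw file access and all gist views, using names from the list above;
; the chosen combination is just a sketch, not a recommendation:
#api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw, RepoFilesView:repo_file_raw, GistView:*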

; Default encoding used to convert from and to unicode
; can also be a comma separated list of encodings in case of mixed encodings
default_encoding = UTF-8
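
; Illustrative example (commented out) of a comma separated fallback chain
; for mixed encodings; the second encoding is a hypothetical choice:
#default_encoding = UTF-8,iso8859-1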

; instance-id prefix
; a prefix key for this instance used for cache invalidation when running
; multiple instances of RhodeCode, make sure it's globally unique for
; all running RhodeCode instances. Leave empty if you don't use it
instance_id =

; Fallback authentication plugin. Set this to a plugin ID to force the usage
; of an authentication plugin even if it is disabled by its settings.
; This could be useful if you are unable to log in to the system due to broken
; authentication settings. Then you can enable e.g. the internal RhodeCode auth
; module to log in again and fix the settings.
; Available builtin plugin IDs (hash is part of the ID):
; egg:rhodecode-enterprise-ce#rhodecode
; egg:rhodecode-enterprise-ce#pam
; egg:rhodecode-enterprise-ce#ldap
; egg:rhodecode-enterprise-ce#jasig_cas
; egg:rhodecode-enterprise-ce#headers
; egg:rhodecode-enterprise-ce#crowd

#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode

; Flag to control loading of legacy plugins in py:/path format
auth_plugin.import_legacy_plugins = true

; alternative HTTP return code for failed authentication. The default HTTP
; response is 401 HTTPUnauthorized. Currently HG clients have trouble
; handling that, causing a series of failed authentication calls.
; Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
; This will be served instead of the default 401 on bad authentication.
auth_ret_code =
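
; Illustrative example (commented out): return HTTPForbidden instead of the
; default 401, as described above:
#auth_ret_code = 403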

; use special detection method when serving auth_ret_code, instead of serving
; ret_code directly, use 401 initially (which triggers a credentials prompt)
; and then serve auth_ret_code to clients
auth_ret_code_detection = false

; locking return code. When a repository is locked, return this HTTP code. 2XX
; codes don't break the transactions while 4XX codes do
lock_ret_code = 423

; Filesystem location where repositories should be stored
repo_store.path = /var/opt/rhodecode_repo_store

; allows setting up custom hooks in the settings page
allow_custom_hooks_settings = true

; Generated license token, required for the EE edition license.
; A newly generated token value can be found in the Admin > settings > license page.
license_token =

; This flag hides sensitive information on the license page such as the token and license data
license.hide_license_info = false

; supervisor connection uri, for managing supervisor and logs.
supervisor.uri =

; supervisord group name/id that this RC instance should handle
supervisor.group_id = prod

; Display extended labs settings
labs_settings_active = true

; Custom exception store path, defaults to TMPDIR
; This is used to store exceptions from RhodeCode in a shared directory
#exception_tracker.store_path =

; Send email with exception details when it happens
#exception_tracker.send_email = false

; Comma separated list of recipients for exception emails,
; e.g. admin@rhodecode.com,devops@rhodecode.com
; Can be left empty, then emails will be sent to ALL super-admins
#exception_tracker.send_email_recipients =

; optional prefix to add to the email subject
#exception_tracker.email_prefix = [RHODECODE ERROR]

; File store configuration. This is used to store and serve uploaded files
file_store.enabled = true

; Storage backend, available options are: local
file_store.backend = local

; path to store the uploaded binaries and artifacts
file_store.storage_path = /var/opt/rhodecode_data/file_store

; Redis url to acquire/check generation of archives locks
archive_cache.locking.url = redis://redis:6379/1

; Storage backend, only 'filesystem' and 'objectstore' are available now
archive_cache.backend.type = filesystem

; url for s3 compatible storage that allows uploading artifacts
; e.g. http://minio:9000
archive_cache.objectstore.url = http://s3-minio:9000

; key for s3 auth
archive_cache.objectstore.key = key

; secret for s3 auth
archive_cache.objectstore.secret = secret

; number of sharded buckets to create to distribute archives across
; default is 8 shards
archive_cache.objectstore.bucket_shards = 8

; if true, this cache will retry a fetch retry_attempts=N times, waiting retry_backoff time between tries
archive_cache.objectstore.retry = false

; number of seconds to wait before the next try when using retry
archive_cache.objectstore.retry_backoff = 1

; how many times to retry a fetch from this backend
archive_cache.objectstore.retry_attempts = 10
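
; Illustrative sketch (commented out) of switching to the objectstore backend;
; the MinIO endpoint and credentials below are hypothetical placeholders:
#archive_cache.backend.type = objectstore
#archive_cache.objectstore.url = http://minio:9000
#archive_cache.objectstore.key = minio-access-key
#archive_cache.objectstore.secret = minio-secret-key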

; Default is $cache_dir/archive_cache if not set
; Generated repo archives will be cached at this location
; and served from the cache during subsequent requests for the same archive of
; the repository. It is important that this path is shared across filesystems and with
; RhodeCode and vcsserver
archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache

; The limit in GB sets how much data we cache before recycling the last used, defaults to 10 gb
archive_cache.filesystem.cache_size_gb = 40

; Eviction policy used to clear out after the cache_size_gb limit is reached
archive_cache.filesystem.eviction_policy = least-recently-stored

; By default the cache uses a sharding technique, this specifies how many shards there are
; default is 8 shards
archive_cache.filesystem.cache_shards = 8

; if true, this cache will retry a fetch retry_attempts=N times, waiting retry_backoff time between tries
archive_cache.filesystem.retry = false

; number of seconds to wait before the next try when using retry
archive_cache.filesystem.retry_backoff = 1

; how many times to retry a fetch from this backend
archive_cache.filesystem.retry_attempts = 10

; #############
; CELERY CONFIG
; #############

; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini

use_celery = true

; path to store schedule database
#celerybeat-schedule.path =

; connection url to the message broker (default redis)
celery.broker_url = redis://redis:6379/8

; results backend to get results for (default redis)
celery.result_backend = redis://redis:6379/8

; rabbitmq example
#celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost

; maximum tasks to execute before worker restart
celery.max_tasks_per_child = 20

; tasks will never be sent to the queue, but executed locally instead.
celery.task_always_eager = false

; #############
; DOGPILE CACHE
; #############

; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space
cache_dir = /var/opt/rhodecode_data

; *********************************************
; `sql_cache_short` cache for heavy SQL queries
; Only supported backend is `memory_lru`
; *********************************************
rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
rc_cache.sql_cache_short.expiration_time = 30

; *****************************************************
; `cache_repo_longterm` cache for repo object instances
; Only supported backend is `memory_lru`
; *****************************************************
rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
; by default we use 30 days, the cache is still invalidated on push
rc_cache.cache_repo_longterm.expiration_time = 2592000
; max items in LRU cache, set to a smaller number to save memory, and expire last used caches
rc_cache.cache_repo_longterm.max_size = 10000

; *********************************************
; `cache_general` cache for general purpose use
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *********************************************
rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_general.expiration_time = 43200
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_general.arguments.filename = /tmp/cache_general_db

; alternative `cache_general` redis backend with distributed lock
#rc_cache.cache_general.backend = dogpile.cache.rc.redis
#rc_cache.cache_general.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_general.arguments.redis_expiration_time = 7200

#rc_cache.cache_general.arguments.host = localhost
#rc_cache.cache_general.arguments.port = 6379
#rc_cache.cache_general.arguments.db = 0
#rc_cache.cache_general.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_general.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_general.arguments.lock_auto_renewal = true

; *************************************************
; `cache_perms` cache for permission tree, auth TTL
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *************************************************
rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_perms.expiration_time = 3600
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db

; alternative `cache_perms` redis backend with distributed lock
#rc_cache.cache_perms.backend = dogpile.cache.rc.redis
#rc_cache.cache_perms.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_perms.arguments.redis_expiration_time = 7200

#rc_cache.cache_perms.arguments.host = localhost
#rc_cache.cache_perms.arguments.port = 6379
#rc_cache.cache_perms.arguments.db = 0
#rc_cache.cache_perms.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_perms.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_perms.arguments.lock_auto_renewal = true

; ***************************************************
; `cache_repo` cache for file tree, Readme, RSS FEEDS
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; ***************************************************
rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_repo.expiration_time = 2592000
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db

; alternative `cache_repo` redis backend with distributed lock
#rc_cache.cache_repo.backend = dogpile.cache.rc.redis
#rc_cache.cache_repo.expiration_time = 2592000

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_repo.arguments.redis_expiration_time = 2678400

#rc_cache.cache_repo.arguments.host = localhost
#rc_cache.cache_repo.arguments.port = 6379
#rc_cache.cache_repo.arguments.db = 1
#rc_cache.cache_repo.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_repo.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_repo.arguments.lock_auto_renewal = true

; ##############
; BEAKER SESSION
; ##############

; beaker.session.type is the type of storage used for the logged-in users' sessions. Currently allowed
; types are file, ext:redis, ext:database, ext:memcached
; Fastest ones are ext:redis and ext:database; DO NOT use the memory type for sessions
#beaker.session.type = file
#beaker.session.data_dir = %(here)s/data/sessions

; Redis based sessions
beaker.session.type = ext:redis
beaker.session.url = redis://redis:6379/2
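
; Illustrative variant (commented out) assuming a password-protected Redis;
; the password is a hypothetical placeholder:
#beaker.session.url = redis://:redis-secret@redis:6379/2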

; DB based session, fast, and allows easy management of logged-in users
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = production-rc-uytcxaz
beaker.session.lock_dir = /data_ramdisk/lock

; Secure encrypted cookie. Requires AES and AES python libraries
; you must disable beaker.session.secret to use this
#beaker.session.encrypt_key = key_for_encryption
#beaker.session.validate_key = validation_key

; Sets the session as invalid (also logging out the user) if it has not been
; accessed for the given amount of time in seconds
beaker.session.timeout = 2592000
beaker.session.httponly = true

; Path to use for the cookie. Set to prefix if you use prefix middleware
#beaker.session.cookie_path = /custom_prefix

; Set https secure cookie
beaker.session.secure = false

; default cookie expiration time in seconds, set to `true` to expire
; at browser close
#beaker.session.cookie_expires = 3600

; #############################
; SEARCH INDEXING CONFIGURATION
; #############################

; Full text search indexer is available in rhodecode-tools under
; the `rhodecode-tools index` command

; WHOOSH Backend, doesn't require additional services to run
; it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index

; ####################
; CHANNELSTREAM CONFIG
; ####################

; channelstream enables persistent connections and live notifications
; in the system. It's also used by the chat system

channelstream.enabled = true

; server address for the channelstream server on the backend
channelstream.server = channelstream:9800

; location of the channelstream server from the outside world
; use ws:// for http or wss:// for https. This address needs to be handled
; by an external HTTP server such as Nginx or Apache
; see Nginx/Apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
channelstream.secret = ENV_GENERATED
channelstream.history.location = /var/opt/rhodecode_data/channelstream_history

; Internal application path that Javascript uses to connect into.
; If you use proxy-prefix the prefix should be added before /_channelstream
channelstream.proxy_path = /_channelstream


; ##############################
; MAIN RHODECODE DATABASE CONFIG
; ##############################

#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
; pymysql is an alternative driver for MySQL, use in case of problems with the default one
#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode

sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode

; see sqlalchemy docs for other advanced settings
; print the sql statements to output
sqlalchemy.db1.echo = false

; recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600

; the number of connections to keep open inside the connection pool.
; 0 indicates no limit
; the general calculus with gevent is:
; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
; then increase pool size + max overflow so that they add up to 500.
#sqlalchemy.db1.pool_size = 5

; The number of connections to allow in connection pool "overflow", that is
; connections that can be opened above and beyond the pool_size setting,
; which defaults to five.
#sqlalchemy.db1.max_overflow = 10
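
; Worked example (commented out, illustrative): for 500 concurrent greenlets
; that all do database access, pool_size + max_overflow should add up to 500:
#sqlalchemy.db1.pool_size = 400
#sqlalchemy.db1.max_overflow = 100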

; Connection check ping, used to detect broken database connections
; could be enabled to better handle cases of MySQL 'has gone away' errors
#sqlalchemy.db1.ping_connection = true

; ##########
; VCS CONFIG
; ##########
vcs.server.enable = true
vcs.server = vcsserver:10010

; Web server connectivity protocol, responsible for web based VCS operations
; Available protocols are:
; `http` - use http-rpc backend (default)
vcs.server.protocol = http

; Push/Pull operations protocol, available options are:
; `http` - use http-rpc backend (default)
vcs.scm_app_implementation = http

; Push/Pull operations hooks protocol, available options are:
; `http` - use http-rpc backend (default)
; `celery` - use celery based hooks
vcs.hooks.protocol = http
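
; Illustrative alternative (commented out) using celery based hooks; this
; assumes use_celery = true and the broker configured in the CELERY CONFIG section:
#vcs.hooks.protocol = celery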

; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
; accessible via the network.
; Use vcs.hooks.host = "*" to bind to the current hostname (for Docker)
vcs.hooks.host = *

; Start VCSServer with this instance as a subprocess, useful for development
vcs.start_server = false

; List of enabled VCS backends, available options are:
; `hg` - mercurial
; `git` - git
; `svn` - subversion
vcs.backends = hg, git, svn

; Wait this number of seconds before killing the connection to the vcsserver
vcs.connection_timeout = 3600

; Cache flag to cache vcsserver remote calls locally
; It uses cache_region `cache_repo`
vcs.methods.cache = true

; ####################################################
; Subversion proxy support (mod_dav_svn)
; Maps RhodeCode repo groups into SVN paths for Apache
; ####################################################

; Compatibility version when creating SVN repositories. Defaults to the newest version when commented out.
; Set a numeric version for your current SVN, e.g. 1.8 or 1.12
; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
#vcs.svn.compatible_version = 1.8

; Enable SVN proxying of requests over HTTP
vcs.svn.proxy.enabled = true

; host to connect to the running SVN subsystem
vcs.svn.proxy.host = http://svn:8090

; Enable or disable the config file generation.
svn.proxy.generate_config = true

; Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true

; Set location and file name of the generated config file.
svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf

; alternative mod_dav config template. This needs to be a valid mako template
; An example template can be found in the source code:
; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako

; Used as a prefix to the `Location` block in the generated config file.
; In most cases it should be set to `/`.
svn.proxy.location_root = /

; Command to reload the mod dav svn configuration on change.
; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
; Make sure the user who runs the RhodeCode process is allowed to reload Apache
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload

; If the timeout expires before the reload command finishes, the command will
; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10

; ####################
; SSH Support Settings
; ####################

; Defines if a custom authorized_keys file should be created and written on
; any change of user SSH keys. Setting this to false also disables the possibility
; of users adding SSH keys from the web interface. Super admins can still
; manage SSH Keys.
ssh.generate_authorized_keyfile = true

; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =

; Path to the authorized_keys file where the generated entries are placed.
; It is possible to have multiple key files specified in `sshd_config` e.g.
; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode

; Command to execute the SSH wrapper. The binary is available in the
; RhodeCode installation directory.
; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper

; Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false

; Enables logging, and detailed output sent back to the client during SSH
; operations. Useful for debugging, shouldn't be used in production.
ssh.enable_debug_logging = false

; Paths to binary executables; by default they are just the names, but we can
; override them if we want to use custom ones
ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve

; Enables the SSH key generator web interface. Disabling this still allows users
; to add their own keys.
ssh.enable_ui_key_generator = true

; Statsd client config, this is used to send metrics to statsd
; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
#statsd.enabled = false
#statsd.statsd_host = 0.0.0.0
#statsd.statsd_port = 8125
#statsd.statsd_prefix =
#statsd.statsd_ipv6 = false

; Configure logging automatically at server startup. Set to false
; to use the custom logging config below.
; The RC_LOGGING_FORMATTER and RC_LOGGING_LEVEL
; env variables can control the settings for logging in case of autoconfigure

#logging.autoconfigure = true

; specify your own custom logging config file to configure logging
#logging.logging_conf_file = /path/to/custom_logging.ini

; Dummy marker to add new entries after.
; Add any custom entries below. Please don't remove this marker.
custom.conf = 1


; #####################
; LOGGING CONFIGURATION
; #####################

[loggers]
keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper

[handlers]
keys = console, console_sql

[formatters]
keys = generic, json, color_formatter, color_formatter_sql

; #######
; LOGGERS
; #######
[logger_root]
level = NOTSET
handlers = console

[logger_sqlalchemy]
level = INFO
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_ssh_wrapper]
level = DEBUG
handlers =
qualname = ssh_wrapper
propagate = 1

[logger_celery]
level = DEBUG
handlers =
qualname = celery


; ########
; HANDLERS
; ########

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = INFO
; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
; This allows sending properly formatted logs to grafana loki or elasticsearch
formatter = generic
748 |
|
780 | |||
749 | [handler_console_sql] |
|
781 | [handler_console_sql] | |
750 | ; "level = DEBUG" logs SQL queries and results. |
|
782 | ; "level = DEBUG" logs SQL queries and results. | |
751 | ; "level = INFO" logs SQL queries. |
|
783 | ; "level = INFO" logs SQL queries. | |
752 | ; "level = WARN" logs neither. (Recommended for production systems.) |
|
784 | ; "level = WARN" logs neither. (Recommended for production systems.) | |
753 | class = StreamHandler |
|
785 | class = StreamHandler | |
754 | args = (sys.stderr, ) |
|
786 | args = (sys.stderr, ) | |
755 | level = WARN |
|
787 | level = WARN | |
756 | ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json' |
|
788 | ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json' | |
757 | ; This allows sending properly formatted logs to grafana loki or elasticsearch |
|
789 | ; This allows sending properly formatted logs to grafana loki or elasticsearch | |
758 | formatter = generic |
|
790 | formatter = generic | |
759 |
|
791 | |||
760 | ; ########## |
|
792 | ; ########## | |
761 | ; FORMATTERS |
|
793 | ; FORMATTERS | |
762 | ; ########## |
|
794 | ; ########## | |
763 |
|
795 | |||
764 | [formatter_generic] |
|
796 | [formatter_generic] | |
765 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
797 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter | |
766 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
798 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
767 | datefmt = %Y-%m-%d %H:%M:%S |
|
799 | datefmt = %Y-%m-%d %H:%M:%S | |
768 |
|
800 | |||
769 | [formatter_color_formatter] |
|
801 | [formatter_color_formatter] | |
770 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
802 | class = rhodecode.lib.logging_formatter.ColorFormatter | |
771 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
803 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
772 | datefmt = %Y-%m-%d %H:%M:%S |
|
804 | datefmt = %Y-%m-%d %H:%M:%S | |
773 |
|
805 | |||
774 | [formatter_color_formatter_sql] |
|
806 | [formatter_color_formatter_sql] | |
775 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
807 | class = rhodecode.lib.logging_formatter.ColorFormatterSql | |
776 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
808 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
777 | datefmt = %Y-%m-%d %H:%M:%S |
|
809 | datefmt = %Y-%m-%d %H:%M:%S | |
778 |
|
810 | |||
779 | [formatter_json] |
|
811 | [formatter_json] | |
780 | format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s |
|
812 | format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s | |
781 | class = rhodecode.lib._vendor.jsonlogger.JsonFormatter |
|
813 | class = rhodecode.lib._vendor.jsonlogger.JsonFormatter |
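
A minimal sketch of how an ini-style logging config like the one above can be
consumed when autoconfigure is disabled: Python's standard
logging.config.fileConfig loads this format directly. The path below is
hypothetical (mirroring the commented-out logging.logging_conf_file option);
whether RhodeCode uses exactly this call internally is not shown here.

    import logging
    import logging.config

    # load the ini-style logging config; the path is hypothetical and
    # mirrors the commented-out logging.logging_conf_file option above
    logging.config.fileConfig(
        '/path/to/custom_logging.ini',
        disable_existing_loggers=False,  # keep loggers created before this call
    )

    log = logging.getLogger('rhodecode')
    log.debug('logging configured from the custom ini file')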
@@ -1,286 +1,315 b'' | |||||
# deps, generated via pipdeptree --exclude setuptools,wheel,pipdeptree,pip -f | tr '[:upper:]' '[:lower:]'

alembic==1.13.1
mako==1.2.4
markupsafe==2.1.2
sqlalchemy==1.4.52
greenlet==3.0.3
typing_extensions==4.9.0
async-timeout==4.0.3
babel==2.12.1
beaker==1.12.1
celery==5.3.6
billiard==4.2.0
click==8.1.3
click-didyoumean==0.3.0
click==8.1.3
click-plugins==1.1.1
click==8.1.3
click-repl==0.2.0
click==8.1.3
prompt-toolkit==3.0.38
wcwidth==0.2.6
six==1.16.0
kombu==5.3.5
amqp==5.2.0
vine==5.1.0
vine==5.1.0
python-dateutil==2.8.2
six==1.16.0
tzdata==2024.1
vine==5.1.0
channelstream==0.7.1
gevent==24.2.1
greenlet==3.0.3
zope.event==5.0.0
zope.interface==6.3.0
itsdangerous==1.1.0
marshmallow==2.18.0
pyramid==2.0.2
hupper==1.12
plaster==1.1.2
plaster-pastedeploy==1.0.1
pastedeploy==3.1.0
plaster==1.1.2
translationstring==1.4
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
zope.interface==6.3.0
pyramid-apispec==0.3.3
apispec==1.3.3
pyramid-jinja2==2.10
jinja2==3.1.2
markupsafe==2.1.2
markupsafe==2.1.2
pyramid==2.0.2
hupper==1.12
plaster==1.1.2
plaster-pastedeploy==1.0.1
pastedeploy==3.1.0
plaster==1.1.2
translationstring==1.4
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
zope.interface==6.3.0
zope.deprecation==5.0.0
python-dateutil==2.8.2
six==1.16.0
requests==2.28.2
certifi==2022.12.7
charset-normalizer==3.1.0
idna==3.4
urllib3==1.26.14
ws4py==0.5.1
deform==2.0.15
chameleon==3.10.2
colander==2.0
iso8601==1.1.0
translationstring==1.4
iso8601==1.1.0
peppercorn==0.6
translationstring==1.4
zope.deprecation==5.0.0
docutils==0.19
dogpile.cache==1.3.3
decorator==5.1.1
stevedore==5.1.0
pbr==5.11.1
formencode==2.1.0
six==1.16.0
+fsspec==2024.6.0
gunicorn==21.2.0
packaging==24.0
gevent==24.2.1
greenlet==3.0.3
zope.event==5.0.0
zope.interface==6.3.0
ipython==8.14.0
backcall==0.2.0
decorator==5.1.1
jedi==0.19.0
parso==0.8.3
matplotlib-inline==0.1.6
traitlets==5.9.0
pexpect==4.8.0
ptyprocess==0.7.0
pickleshare==0.7.5
prompt-toolkit==3.0.38
wcwidth==0.2.6
pygments==2.15.1
stack-data==0.6.2
asttokens==2.2.1
six==1.16.0
executing==1.2.0
pure-eval==0.2.2
traitlets==5.9.0
markdown==3.4.3
msgpack==1.0.8
mysqlclient==2.1.1
nbconvert==7.7.3
beautifulsoup4==4.12.3
soupsieve==2.5
bleach==6.1.0
six==1.16.0
webencodings==0.5.1
defusedxml==0.7.1
jinja2==3.1.2
markupsafe==2.1.2
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.9.0
jupyterlab-pygments==0.2.2
markupsafe==2.1.2
mistune==2.0.5
nbclient==0.8.0
jupyter_client==8.3.0
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.9.0
python-dateutil==2.8.2
six==1.16.0
pyzmq==25.0.0
tornado==6.2
traitlets==5.9.0
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.9.0
nbformat==5.9.2
fastjsonschema==2.18.0
jsonschema==4.18.6
attrs==22.2.0
pyrsistent==0.19.3
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.9.0
traitlets==5.9.0
traitlets==5.9.0
nbformat==5.9.2
fastjsonschema==2.18.0
jsonschema==4.18.6
attrs==22.2.0
pyrsistent==0.19.3
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.9.0
traitlets==5.9.0
pandocfilters==1.5.0
pygments==2.15.1
tinycss2==1.2.1
webencodings==0.5.1
traitlets==5.9.0
orjson==3.10.3
paste==3.10.1
premailer==3.10.0
cachetools==5.3.3
cssselect==1.2.0
cssutils==2.6.0
lxml==4.9.3
requests==2.28.2
certifi==2022.12.7
charset-normalizer==3.1.0
idna==3.4
urllib3==1.26.14
psutil==5.9.8
psycopg2==2.9.9
py-bcrypt==0.4
pycmarkgfm==1.2.0
cffi==1.16.0
pycparser==2.21
pycryptodome==3.17
pycurl==7.45.3
pymysql==1.0.3
pyotp==2.8.0
pyparsing==3.1.1
pyramid-debugtoolbar==4.12.1
pygments==2.15.1
pyramid==2.0.2
hupper==1.12
plaster==1.1.2
plaster-pastedeploy==1.0.1
pastedeploy==3.1.0
plaster==1.1.2
translationstring==1.4
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
zope.interface==6.3.0
pyramid-mako==1.1.0
mako==1.2.4
markupsafe==2.1.2
pyramid==2.0.2
hupper==1.12
plaster==1.1.2
plaster-pastedeploy==1.0.1
pastedeploy==3.1.0
plaster==1.1.2
translationstring==1.4
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
zope.interface==6.3.0
pyramid-mailer==0.15.1
pyramid==2.0.2
hupper==1.12
plaster==1.1.2
plaster-pastedeploy==1.0.1
pastedeploy==3.1.0
plaster==1.1.2
translationstring==1.4
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
zope.interface==6.3.0
repoze.sendmail==4.4.1
transaction==3.1.0
zope.interface==6.3.0
zope.interface==6.3.0
transaction==3.1.0
zope.interface==6.3.0
python-ldap==3.4.3
pyasn1==0.4.8
pyasn1-modules==0.2.8
pyasn1==0.4.8
python-memcached==1.59
six==1.16.0
python-pam==2.0.2
python3-saml==1.15.0
isodate==0.6.1
six==1.16.0
lxml==4.9.3
xmlsec==1.3.13
lxml==4.9.3
pyyaml==6.0.1
redis==5.0.4
async-timeout==4.0.3
regex==2022.10.31
routes==2.5.1
repoze.lru==0.7
six==1.16.0
+s3fs==2024.6.0
+aiobotocore==2.13.0
+aiohttp==3.9.5
+aiosignal==1.3.1
+frozenlist==1.4.1
+attrs==22.2.0
+frozenlist==1.4.1
+multidict==6.0.5
+yarl==1.9.4
+idna==3.4
+multidict==6.0.5
+aioitertools==0.11.0
+botocore==1.34.106
+jmespath==1.0.1
+python-dateutil==2.8.2
+six==1.16.0
+urllib3==1.26.14
+wrapt==1.16.0
+aiohttp==3.9.5
+aiosignal==1.3.1
+frozenlist==1.4.1
+attrs==22.2.0
+frozenlist==1.4.1
+multidict==6.0.5
+yarl==1.9.4
+idna==3.4
+multidict==6.0.5
+fsspec==2024.6.0
simplejson==3.19.2
sshpubkeys==3.3.1
cryptography==40.0.2
cffi==1.16.0
pycparser==2.21
ecdsa==0.18.0
six==1.16.0
sqlalchemy==1.4.52
greenlet==3.0.3
typing_extensions==4.9.0
supervisor==4.2.5
tzlocal==4.3
pytz-deprecation-shim==0.1.0.post0
tzdata==2024.1
tempita==0.5.2
unidecode==1.3.6
urlobject==2.4.3
waitress==3.0.0
webhelpers2==2.1
markupsafe==2.1.2
six==1.16.0
whoosh==2.7.4
zope.cachedescriptors==5.0.0
qrcode==7.4.2

## uncomment to add the debug libraries
#-r requirements_debug.txt
@@ -1,1716 +1,1716 b'' | |||||
# Copyright (C) 2011-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import itertools
import logging
import os
import collections
import urllib.request
import urllib.parse
import urllib.error
import pathlib
import time
import random

from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound

from pyramid.renderers import render
from pyramid.response import Response

import rhodecode
from rhodecode.apps._base import RepoAppView


from rhodecode.lib import diffs, helpers as h, rc_cache
from rhodecode.lib import audit_logger
from rhodecode.lib.hash_utils import sha1_safe
-from rhodecode.lib.
+from rhodecode.lib.archive_cache import (
    get_archival_cache_store, get_archival_config, ArchiveCacheGenerationLock, archive_iterator)
from rhodecode.lib.str_utils import safe_bytes, convert_special_chars
from rhodecode.lib.view_utils import parse_path_ref
from rhodecode.lib.exceptions import NonRelativePathError
from rhodecode.lib.codeblocks import (
    filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
from rhodecode.lib.utils2 import convert_line_endings, detect_mode
from rhodecode.lib.type_utils import str2bool
from rhodecode.lib.str_utils import safe_str, safe_int
from rhodecode.lib.auth import (
    LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
from rhodecode.lib.vcs import path as vcspath
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.exceptions import (
    RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
    ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
    NodeDoesNotExistError, CommitError, NodeError)

from rhodecode.model.scm import ScmModel
from rhodecode.model.db import Repository

log = logging.getLogger(__name__)


def get_archive_name(db_repo_id, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
    # original backward compat name of archive
    clean_name = safe_str(convert_special_chars(db_repo_name).replace('/', '_'))

    # e.g. vcsserver-id-abcd-sub-1-abcfdef-archive-all.zip
    # vcsserver-id-abcd-sub-0-abcfdef-COMMIT_SHA-PATH_SHA.zip
    id_sha = sha1_safe(str(db_repo_id))[:4]
    sub_repo = 'sub-1' if subrepos else 'sub-0'
    commit = commit_sha if with_hash else 'archive'
    path_marker = (path_sha if with_hash else '') or 'all'
    archive_name = f'{clean_name}-id-{id_sha}-{sub_repo}-{commit}-{path_marker}{ext}'

    return archive_name


def get_path_sha(at_path):
    return safe_str(sha1_safe(at_path)[:8])
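# A worked example of the two helpers above (illustrative values only):
# sha1('1') starts with '356a', so for db_repo_id=1, subrepos=False and
# with_hash=True,
#   get_archive_name(1, 'my-repo', commit_sha='abcdef12', ext='.zip',
#                    path_sha=get_path_sha('/docs'))
# yields 'my-repo-id-356a-sub-0-abcdef12-<path_sha>.zip', assuming
# sha1_safe behaves like a plain sha1 hexdigest here.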


def _get_archive_spec(fname):
    log.debug('Detecting archive spec for: `%s`', fname)

    fileformat = None
    ext = None
    content_type = None
    for a_type, content_type, extension in settings.ARCHIVE_SPECS:

        if fname.endswith(extension):
            fileformat = a_type
            log.debug('archive is of type: %s', fileformat)
            ext = extension
            break

    if not fileformat:
        raise ValueError()

    # left over part of whole fname is the commit
    commit_id = fname[:-len(ext)]

    return commit_id, ext, fileformat, content_type
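# Illustration (hypothetical ARCHIVE_SPECS entry): if the specs contain
# ('tgz', 'application/x-gzip', '.tar.gz'), then
#   _get_archive_spec('deadbeef.tar.gz')
# returns ('deadbeef', '.tar.gz', 'tgz', 'application/x-gzip'); a file
# name with no registered extension raises ValueError instead.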


class RepoFilesView(RepoAppView):

    @staticmethod
    def adjust_file_path_for_svn(f_path, repo):
        """
        Computes the relative path of `f_path`.

        This is mainly based on prefix matching of the recognized tags and
        branches in the underlying repository.
        """
        tags_and_branches = itertools.chain(
            repo.branches.keys(),
            repo.tags.keys())
        tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)

        for name in tags_and_branches:
            if f_path.startswith(f'{name}/'):
                f_path = vcspath.relpath(f_path, name)
                break
        return f_path
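    # Illustration (hypothetical SVN layout): if repo.branches contains
    # 'branches/feature-x', then
    #   adjust_file_path_for_svn('branches/feature-x/setup.py', repo)
    # returns the in-branch path 'setup.py'. Names are matched
    # longest-first, so nested prefixes do not shadow each other.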

    def load_default_context(self):
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = self.rhodecode_vcs_repo
        c.enable_downloads = self.db_repo.enable_downloads
        return c

    def _ensure_not_locked(self, commit_id='tip'):
        _ = self.request.translate

        repo = self.db_repo
        if repo.enable_locking and repo.locked[0]:
            h.flash(_('This repository has been locked by %s on %s')
                    % (h.person_by_id(repo.locked[0]),
                       h.format_date(h.time_to_datetime(repo.locked[1]))),
                    'warning')
            files_url = h.route_path(
                'repo_files:default_path',
                repo_name=self.db_repo_name, commit_id=commit_id)
            raise HTTPFound(files_url)

    def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
        _ = self.request.translate

        if not is_head:
            message = _('Cannot modify file. '
                        'Given commit `{}` is not head of a branch.').format(commit_id)
            h.flash(message, category='warning')

            if json_mode:
                return message

            files_url = h.route_path(
                'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
                f_path=f_path)
            raise HTTPFound(files_url)

    def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
        _ = self.request.translate

        rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
            self.db_repo_name, branch_name)
        if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
            message = _('Branch `{}` changes forbidden by rule {}.').format(
                h.escape(branch_name), h.escape(rule))
            h.flash(message, 'warning')

            if json_mode:
                return message

            files_url = h.route_path(
                'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)

            raise HTTPFound(files_url)

    def _get_commit_and_path(self):
        default_commit_id = self.db_repo.landing_ref_name
        default_f_path = '/'

        commit_id = self.request.matchdict.get(
            'commit_id', default_commit_id)
        f_path = self._get_f_path(self.request.matchdict, default_f_path)
        return commit_id, f_path

    def _get_default_encoding(self, c):
        enc_list = getattr(c, 'default_encodings', [])
        return enc_list[0] if enc_list else 'UTF-8'

    def _get_commit_or_redirect(self, commit_id, redirect_after=True):
        """
        This is a safe way to get a commit. If an error occurs, it redirects
        to tip with a proper message.

        :param commit_id: id of commit to fetch
        :param redirect_after: toggle redirection
        """
        _ = self.request.translate

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id)
        except EmptyRepositoryError:
            if not redirect_after:
                return None

            add_new = upload_new = ""
            if h.HasRepoPermissionAny(
                    'repository.write', 'repository.admin')(self.db_repo_name):
                _url = h.route_path(
                    'repo_files_add_file',
                    repo_name=self.db_repo_name, commit_id=0, f_path='')
                add_new = h.link_to(
                    _('add a new file'), _url, class_="alert-link")

                _url_upld = h.route_path(
                    'repo_files_upload_file',
                    repo_name=self.db_repo_name, commit_id=0, f_path='')
                upload_new = h.link_to(
                    _('upload a new file'), _url_upld, class_="alert-link")

            h.flash(h.literal(
                _('There are no files yet. Click here to %s or %s.') % (add_new, upload_new)), category='warning')
            raise HTTPFound(
                h.route_path('repo_summary', repo_name=self.db_repo_name))

        except (CommitDoesNotExistError, LookupError) as e:
            msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            h.flash(h.escape(safe_str(e)), category='error')
            raise HTTPNotFound()

    def _get_filenode_or_redirect(self, commit_obj, path, pre_load=None):
        """
        Returns the file node. If an error occurs or the given path is a
        directory, it redirects to the top-level path.
        """
        _ = self.request.translate

        try:
            file_node = commit_obj.get_node(path, pre_load=pre_load)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')
        except CommitDoesNotExistError:
            log.exception('No such commit exists for this repository')
            h.flash(_('No such commit exists for this repository'), category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
            h.flash(h.escape(safe_str(e)), category='error')
            raise HTTPNotFound()

        return file_node

    def _is_valid_head(self, commit_id, repo, landing_ref):
        branch_name = sha_commit_id = ''
        is_head = False
        log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)

        for _branch_name, branch_commit_id in repo.branches.items():
            # simple case we pass in branch name, it's a HEAD
            if commit_id == _branch_name:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break
            # case when we pass in full sha commit_id, which is a head
            elif commit_id == branch_commit_id:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break

        if h.is_svn(repo) and not repo.is_empty():
            # Note: Subversion only has one head.
            if commit_id == repo.get_commit(commit_idx=-1).raw_id:
                is_head = True
            return branch_name, sha_commit_id, is_head

        # checked branches, means we only need to try to get the branch/commit_sha
        if repo.is_empty():
            is_head = True
            branch_name = landing_ref
            sha_commit_id = EmptyCommit().raw_id
        else:
            commit = repo.get_commit(commit_id=commit_id)
            if commit:
                branch_name = commit.branch
                sha_commit_id = commit.raw_id

        return branch_name, sha_commit_id, is_head
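    # Expected outcomes, sketched with hypothetical values: a branch name
    # such as 'default' (or its head sha) returns that branch, its sha and
    # is_head=True; a non-head sha of an existing commit returns that
    # commit's branch and sha with is_head=False; an empty repository
    # reports the landing ref as head with the EmptyCommit sha.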

    def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):

        repo_id = self.db_repo.repo_id
        force_recache = self.get_recache_flag()

        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
        cache_on = not force_recache and cache_seconds > 0
        log.debug(
            'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, f_path, cache_on, cache_seconds or 0))

        cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
        region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
        def compute_file_tree(_name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
            log.debug('Generating cached file tree for repo_id: %s, %s, %s',
                      _repo_id, _commit_id, _f_path)

            c.full_load = _full_load
            return render(
                'rhodecode:templates/files/files_browser_tree.mako',
                self._get_template_context(c), self.request, _at_rev)

        return compute_file_tree(
            self.db_repo.repo_name_hash, self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)

    def create_pure_path(self, *parts):
        # Split paths and sanitize them, removing any ../ etc
        sanitized_path = [
            x for x in pathlib.PurePath(*parts).parts
            if x not in ['.', '..']]

        pure_path = pathlib.PurePath(*sanitized_path)
        return pure_path
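    # Example: create_pure_path('docs', '../../etc', 'passwd') drops the
    # '..' segments and returns PurePath('docs/etc/passwd'), so path
    # traversal outside the tree is neutralized before the parts are joined.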
340 |
|
340 | |||
341 | def _is_lf_enabled(self, target_repo): |
|
341 | def _is_lf_enabled(self, target_repo): | |
342 | lf_enabled = False |
|
342 | lf_enabled = False | |
343 |
|
343 | |||
344 | lf_key_for_vcs_map = { |
|
344 | lf_key_for_vcs_map = { | |
345 | 'hg': 'extensions_largefiles', |
|
345 | 'hg': 'extensions_largefiles', | |
346 | 'git': 'vcs_git_lfs_enabled' |
|
346 | 'git': 'vcs_git_lfs_enabled' | |
347 | } |
|
347 | } | |
348 |
|
348 | |||
349 | lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type) |
|
349 | lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type) | |
350 |
|
350 | |||
351 | if lf_key_for_vcs: |
|
351 | if lf_key_for_vcs: | |
352 | lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs) |
|
352 | lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs) | |
353 |
|
353 | |||
354 | return lf_enabled |
|
354 | return lf_enabled | |
355 |
|
355 | |||
356 | @LoginRequired() |
|
356 | @LoginRequired() | |
357 | @HasRepoPermissionAnyDecorator( |
|
357 | @HasRepoPermissionAnyDecorator( | |
358 | 'repository.read', 'repository.write', 'repository.admin') |
|
358 | 'repository.read', 'repository.write', 'repository.admin') | |
359 | def repo_archivefile(self): |
|
359 | def repo_archivefile(self): | |
360 | # archive cache config |
|
360 | # archive cache config | |
361 | from rhodecode import CONFIG |
|
361 | from rhodecode import CONFIG | |
362 | _ = self.request.translate |
|
362 | _ = self.request.translate | |
363 | self.load_default_context() |
|
363 | self.load_default_context() | |
364 | default_at_path = '/' |
|
364 | default_at_path = '/' | |
365 | fname = self.request.matchdict['fname'] |
|
365 | fname = self.request.matchdict['fname'] | |
366 | subrepos = self.request.GET.get('subrepos') == 'true' |
|
366 | subrepos = self.request.GET.get('subrepos') == 'true' | |
367 | with_hash = str2bool(self.request.GET.get('with_hash', '1')) |
|
367 | with_hash = str2bool(self.request.GET.get('with_hash', '1')) | |
368 | at_path = self.request.GET.get('at_path') or default_at_path |
|
368 | at_path = self.request.GET.get('at_path') or default_at_path | |
369 |
|
369 | |||
370 | if not self.db_repo.enable_downloads: |
|
370 | if not self.db_repo.enable_downloads: | |
371 | return Response(_('Downloads disabled')) |
|
371 | return Response(_('Downloads disabled')) | |
372 |
|
372 | |||
373 | try: |
|
373 | try: | |
374 | commit_id, ext, fileformat, content_type = \ |
|
374 | commit_id, ext, fileformat, content_type = \ | |
375 | _get_archive_spec(fname) |
|
375 | _get_archive_spec(fname) | |
376 | except ValueError: |
|
376 | except ValueError: | |
377 | return Response(_('Unknown archive type for: `{}`').format( |
|
377 | return Response(_('Unknown archive type for: `{}`').format( | |
378 | h.escape(fname))) |
|
378 | h.escape(fname))) | |
379 |
|
379 | |||
380 | try: |
|
380 | try: | |
381 | commit = self.rhodecode_vcs_repo.get_commit(commit_id) |
|
381 | commit = self.rhodecode_vcs_repo.get_commit(commit_id) | |
382 | except CommitDoesNotExistError: |
|
382 | except CommitDoesNotExistError: | |
383 | return Response(_('Unknown commit_id {}').format( |
|
383 | return Response(_('Unknown commit_id {}').format( | |
384 | h.escape(commit_id))) |
|
384 | h.escape(commit_id))) | |
385 | except EmptyRepositoryError: |
|
385 | except EmptyRepositoryError: | |
386 | return Response(_('Empty repository')) |
|
386 | return Response(_('Empty repository')) | |
387 |
|
387 | |||
388 | # we used a ref, or a shorter version, lets redirect client ot use explicit hash |
|
388 | # we used a ref, or a shorter version, lets redirect client ot use explicit hash | |
389 | if commit_id != commit.raw_id: |
|
389 | if commit_id != commit.raw_id: | |
390 | fname=f'{commit.raw_id}{ext}' |
|
390 | fname=f'{commit.raw_id}{ext}' | |
391 | raise HTTPFound(self.request.current_route_path(fname=fname)) |
|
391 | raise HTTPFound(self.request.current_route_path(fname=fname)) | |
392 |
|
392 | |||
393 | try: |
|
393 | try: | |
394 | at_path = commit.get_node(at_path).path or default_at_path |
|
394 | at_path = commit.get_node(at_path).path or default_at_path | |
395 | except Exception: |
|
395 | except Exception: | |
396 | return Response(_('No node at path {} for this repository').format(h.escape(at_path))) |
|
396 | return Response(_('No node at path {} for this repository').format(h.escape(at_path))) | |
397 |
|
397 | |||
398 | path_sha = get_path_sha(at_path) |
|
398 | path_sha = get_path_sha(at_path) | |
399 |
|
399 | |||
400 | # used for cache etc, consistent unique archive name |
|
        # used for cache etc, consistent unique archive name
        archive_name_key = get_archive_name(
            self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
            path_sha=path_sha, with_hash=True)

        if not with_hash:
            path_sha = ''

        # what end client gets served
        response_archive_name = get_archive_name(
            self.db_repo.repo_id, self.db_repo_name, commit_sha=commit.short_id, ext=ext, subrepos=subrepos,
            path_sha=path_sha, with_hash=with_hash)

        # remove extension from our archive directory name
        archive_dir_name = response_archive_name[:-len(ext)]

        archive_cache_disable = self.request.GET.get('no_cache')

        d_cache = get_archival_cache_store(config=CONFIG)

        # NOTE: we get the config to pass to a call to lazy-init the SAME type of cache on vcsserver
        d_cache_conf = get_archival_config(config=CONFIG)

        # This is also a cache key, and lock key
        reentrant_lock_key = archive_name_key + '.lock'

        use_cached_archive = False
        if not archive_cache_disable and archive_name_key in d_cache:
            reader, metadata = d_cache.fetch(archive_name_key)

            use_cached_archive = True
            log.debug('Found cached archive as key=%s tag=%s, serving archive from cache reader=%s',
                      archive_name_key, metadata, reader.name)
        else:
            reader = None
            log.debug('Archive with key=%s is not yet cached, creating one now...', archive_name_key)

        if not reader:
            # generate new archive, as previous was not found in the cache
            try:
                with d_cache.get_lock(reentrant_lock_key):
                    try:
                        commit.archive_repo(archive_name_key, archive_dir_name=archive_dir_name,
                                            kind=fileformat, subrepos=subrepos,
                                            archive_at_path=at_path, cache_config=d_cache_conf)
                    except ImproperArchiveTypeError:
                        return _('Unknown archive type')

            except ArchiveCacheGenerationLock:
                retry_after = round(random.uniform(0.3, 3.0), 1)
                time.sleep(retry_after)

                location = self.request.url
                response = Response(
                    f"archive {archive_name_key} generation in progress, Retry-After={retry_after}, Location={location}"
                )
                response.headers["Retry-After"] = str(retry_after)
                response.status_code = 307  # temporary redirect

                response.location = location
                return response

            reader, metadata = d_cache.fetch(archive_name_key, retry=True, retry_attempts=30)

        response = Response(app_iter=archive_iterator(reader))
        response.content_disposition = f'attachment; filename={response_archive_name}'
        response.content_type = str(content_type)

        try:
            return response
        finally:
            # store download action
            audit_logger.store_web(
                'repo.archive.download', action_data={
                    'user_agent': self.request.user_agent,
                    'archive_name': archive_name_key,
                    'archive_spec': fname,
                    'archive_cached': use_cached_archive},
                user=self._rhodecode_user,
                repo=self.db_repo,
                commit=True
            )

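    # --- Illustrative sketch (not part of the original view code) --------
    # The 307 + Retry-After flow above expects the client to re-request the
    # same URL until the archive has been generated. A minimal polling
    # client could look like the hypothetical helper below; `http_get`
    # stands in for any HTTP callable (e.g. requests.get) and is an
    # assumption, not an API this module provides.
    @staticmethod
    def _example_poll_archive(http_get, archive_url, max_attempts=10):
        """Hypothetical client-side loop for the archive 307 retry flow."""
        for _attempt in range(max_attempts):
            response = http_get(archive_url, allow_redirects=False)
            if response.status_code != 307:
                # archive is ready (200) or failed for another reason
                return response
            # honour the server's hint while the archive is being built
            time.sleep(float(response.headers.get('Retry-After', 1)))
        raise RuntimeError(f'archive not ready after {max_attempts} attempts')
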
    def _get_file_node(self, commit_id, f_path):
        if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
            try:
                node = commit.get_node(f_path)
                if node.is_dir():
                    raise NodeError(f'{node} path is a {type(node)} not a file')
            except NodeDoesNotExistError:
                commit = EmptyCommit(
                    commit_id=commit_id,
                    idx=commit.idx,
                    repo=commit.repository,
                    alias=commit.repository.alias,
                    message=commit.message,
                    author=commit.author,
                    date=commit.date)
                node = FileNode(safe_bytes(f_path), b'', commit=commit)
        else:
            commit = EmptyCommit(
                repo=self.rhodecode_vcs_repo,
                alias=self.rhodecode_vcs_repo.alias)
            node = FileNode(safe_bytes(f_path), b'', commit=commit)
        return node

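    # --- Illustrative note (not part of the original view code) ----------
    # `_get_file_node` falls back to an empty FileNode on an EmptyCommit
    # when the path is missing at that commit, so "file added" and "file
    # deleted" diffs reduce to ordinary two-node comparisons, e.g.:
    #
    #     old = self._get_file_node('', f_path)          # empty side
    #     new = self._get_file_node(commit_id, f_path)   # real side
    #     # diffs.get_gitdiff(old, new, ...) then renders a pure addition
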
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_diff(self):
        c = self.load_default_context()
        f_path = self._get_f_path(self.request.matchdict)
        diff1 = self.request.GET.get('diff1', '')
        diff2 = self.request.GET.get('diff2', '')

        path1, diff1 = parse_path_ref(diff1, default_path=f_path)

        ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
        line_context = self.request.GET.get('context', 3)

        if not any((diff1, diff2)):
            h.flash(
                'Need query parameter "diff1" or "diff2" to generate a diff.',
                category='error')
            raise HTTPBadRequest()

        c.action = self.request.GET.get('diff')
        if c.action not in ['download', 'raw']:
            compare_url = h.route_path(
                'repo_compare',
                repo_name=self.db_repo_name,
                source_ref_type='rev',
                source_ref=diff1,
                target_repo=self.db_repo_name,
                target_ref_type='rev',
                target_ref=diff2,
                _query=dict(f_path=f_path))
            # redirect to new view if we render diff
            raise HTTPFound(compare_url)

        try:
            node1 = self._get_file_node(diff1, path1)
            node2 = self._get_file_node(diff2, f_path)
        except (RepositoryError, NodeError):
            log.exception("Exception while trying to get node from repository")
            raise HTTPFound(
                h.route_path('repo_files', repo_name=self.db_repo_name,
                             commit_id='tip', f_path=f_path))

        if all(isinstance(node.commit, EmptyCommit)
               for node in (node1, node2)):
            raise HTTPNotFound()

        c.commit_1 = node1.commit
        c.commit_2 = node2.commit

        if c.action == 'download':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            # NOTE: this was using diff_format='gitdiff'
            diff = diffs.DiffProcessor(_diff, diff_format='newdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            response.content_disposition = (
                f'attachment; filename={f_path}_{diff1}_vs_{diff2}.diff'
            )
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        elif c.action == 'raw':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            # NOTE: this was using diff_format='gitdiff'
            diff = diffs.DiffProcessor(_diff, diff_format='newdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        # in case we ever end up here
        raise HTTPNotFound()

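    # --- Illustrative note (not part of the original view code) ----------
    # The view above is driven entirely by query parameters; a typical raw
    # diff request (the URL shape is hypothetical) would look like:
    #
    #     GET /<repo>/diff/<f_path>?diff1=<old_sha>&diff2=<new_sha>&diff=raw
    #
    # Anything other than diff=download / diff=raw is redirected to the
    # richer repo_compare view.
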
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_diff_2way_redirect(self):
        """
        Kept only to make OLD links work
        """
        f_path = self._get_f_path_unchecked(self.request.matchdict)
        diff1 = self.request.GET.get('diff1', '')
        diff2 = self.request.GET.get('diff2', '')

        if not any((diff1, diff2)):
            h.flash(
                'Need query parameter "diff1" or "diff2" to generate a diff.',
                category='error')
            raise HTTPBadRequest()

        compare_url = h.route_path(
            'repo_compare',
            repo_name=self.db_repo_name,
            source_ref_type='rev',
            source_ref=diff1,
            target_ref_type='rev',
            target_ref=diff2,
            _query=dict(f_path=f_path, diffmode='sideside',
                        target_repo=self.db_repo_name,))
        raise HTTPFound(compare_url)

    @LoginRequired()
    def repo_files_default_commit_redirect(self):
        """
        Special page that redirects to the landing page of files based on
        the default commit for the repository
        """
        c = self.load_default_context()
        ref_name = c.rhodecode_db_repo.landing_ref_name
        landing_url = h.repo_files_by_ref_url(
            c.rhodecode_db_repo.repo_name,
            c.rhodecode_db_repo.repo_type,
            f_path='',
            ref_name=ref_name,
            commit_id='tip',
            query=dict(at=ref_name)
        )

        raise HTTPFound(landing_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files(self):
        c = self.load_default_context()

        view_name = getattr(self.request.matched_route, 'name', None)

        c.annotate = view_name == 'repo_files:annotated'
        # default is false, but .rst/.md files later are auto rendered, we can
        # overwrite auto rendering by setting this GET flag
        c.renderer = view_name == 'repo_files:rendered' or not self.request.GET.get('no-render', False)

        commit_id, f_path = self._get_commit_and_path()

        c.commit = self._get_commit_or_redirect(commit_id)
        c.branch = self.request.GET.get('branch', None)
        c.f_path = f_path
        at_rev = self.request.GET.get('at')

        # files or dirs
        try:
            c.file = c.commit.get_node(f_path, pre_load=['is_binary', 'size', 'data'])

            c.file_author = True
            c.file_tree = ''

            # prev link
            try:
                prev_commit = c.commit.prev(c.branch)
                c.prev_commit = prev_commit
                c.url_prev = h.route_path(
                    'repo_files', repo_name=self.db_repo_name,
                    commit_id=prev_commit.raw_id, f_path=f_path)
                if c.branch:
                    c.url_prev += '?branch=%s' % c.branch
            except (CommitDoesNotExistError, VCSError):
                c.url_prev = '#'
                c.prev_commit = EmptyCommit()

            # next link
            try:
                next_commit = c.commit.next(c.branch)
                c.next_commit = next_commit
                c.url_next = h.route_path(
                    'repo_files', repo_name=self.db_repo_name,
                    commit_id=next_commit.raw_id, f_path=f_path)
                if c.branch:
                    c.url_next += '?branch=%s' % c.branch
            except (CommitDoesNotExistError, VCSError):
                c.url_next = '#'
                c.next_commit = EmptyCommit()

            # load file content
            if c.file.is_file():

                c.lf_node = {}

                has_lf_enabled = self._is_lf_enabled(self.db_repo)
                if has_lf_enabled:
                    c.lf_node = c.file.get_largefile_node()

                c.file_source_page = 'true'
                c.file_last_commit = c.file.last_commit

                c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file

                if not (c.file_size_too_big or c.file.is_binary):
                    if c.annotate:  # annotation has precedence over renderer
                        c.annotated_lines = filenode_as_annotated_lines_tokens(
                            c.file
                        )
                    else:
                        c.renderer = (
                            c.renderer and h.renderer_from_filename(c.file.path)
                        )
                        if not c.renderer:
                            c.lines = filenode_as_lines_tokens(c.file)

                _branch_name, _sha_commit_id, is_head = \
                    self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                        landing_ref=self.db_repo.landing_ref_name)
                c.on_branch_head = is_head

                branch = c.commit.branch if (
                    c.commit.branch and '/' not in c.commit.branch) else None
                c.branch_or_raw_id = branch or c.commit.raw_id
                c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)

                author = c.file_last_commit.author
                c.authors = [[
                    h.email(author),
                    h.person(author, 'username_or_name_or_email'),
                    1
                ]]

            else:  # load tree content at path
                c.file_source_page = 'false'
                c.authors = []
                # this loads a simple tree without metadata to speed things up
                # later via ajax we call repo_nodetree_full and fetch the whole
                # tree with metadata
                c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)

            c.readme_data, c.readme_file = \
                self._get_readme_data(self.db_repo, c.visual.default_renderer,
                                      c.commit.raw_id, f_path)

        except RepositoryError as e:
            h.flash(h.escape(safe_str(e)), category='error')
            raise HTTPNotFound()

        if self.request.environ.get('HTTP_X_PJAX'):
            html = render('rhodecode:templates/files/files_pjax.mako',
                          self._get_template_context(c), self.request)
        else:
            html = render('rhodecode:templates/files/files.mako',
                          self._get_template_context(c), self.request)
        return Response(html)

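    # --- Illustrative sketch (not part of the original view code) --------
    # The precedence above (annotate > renderer > plain token lines, with
    # ?no-render=1 disabling auto-rendering of .rst/.md) can be restated as
    # a small decision function. This is a simplified, hypothetical
    # restatement for illustration; it is not an API of this module and it
    # ignores the size/binary guards the real view applies first.
    @staticmethod
    def _example_render_mode(view_name, no_render_flag, is_markup_file):
        """Hypothetical restatement of the file-view rendering precedence."""
        if view_name == 'repo_files:annotated':
            return 'annotate'            # annotation always wins
        wants_renderer = (
            view_name == 'repo_files:rendered' or not no_render_flag)
        if wants_renderer and is_markup_file:
            return 'rendered'            # e.g. README.md shown as HTML
        return 'source'                  # plain highlighted token lines
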
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_annotated_previous(self):
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        prev_commit_id = commit.raw_id
        line_anchor = self.request.GET.get('line_anchor')
        is_file = False
        try:
            _file = commit.get_node(f_path)
            is_file = _file.is_file()
        except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
            pass

        if is_file:
            history = commit.get_path_history(f_path)
            prev_commit_id = history[1].raw_id \
                if len(history) > 1 else prev_commit_id
        prev_url = h.route_path(
            'repo_files:annotated', repo_name=self.db_repo_name,
            commit_id=prev_commit_id, f_path=f_path,
            _anchor=f'L{line_anchor}')

        raise HTTPFound(prev_url)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_nodetree_full(self):
        """
        Returns rendered HTML of the file tree, containing commit date,
        author and commit_id for the specified combination of
        repo, commit_id and file path
        """
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        try:
            dir_node = commit.get_node(f_path)
        except RepositoryError as e:
            return Response(f'error: {h.escape(safe_str(e))}')

        if dir_node.is_file():
            return Response('')

        c.file = dir_node
        c.commit = commit
        at_rev = self.request.GET.get('at')

        html = self._get_tree_at_commit(
            c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)

        return Response(html)

    def _get_attachement_headers(self, f_path):
        f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
        safe_path = f_name.replace('"', '\\"')
        encoded_path = urllib.parse.quote(f_name)

        headers = "attachment; " \
                  "filename=\"{}\"; " \
                  "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)

        return safe_bytes(headers).decode('latin-1', errors='replace')

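    # --- Illustrative sketch (not part of the original view code) --------
    # The header built above follows RFC 6266: a plain `filename="..."` for
    # legacy clients plus an RFC 5987 `filename*=UTF-8''...` parameter so
    # non-ASCII names survive. A self-contained, hypothetical sketch of the
    # same dual-filename idea (without the latin-1 round-trip the real
    # method applies for WSGI header safety):
    @staticmethod
    def _example_content_disposition(file_name):
        """Hypothetical stand-alone version of the dual-filename header."""
        ascii_fallback = file_name.replace('"', '\\"')
        utf8_encoded = urllib.parse.quote(file_name)  # %-encodes non-ASCII
        return (f'attachment; filename="{ascii_fallback}"; '
                f"filename*=UTF-8''{utf8_encoded}")
    # e.g. _example_content_disposition('zażółć.txt') ->
    #   attachment; filename="zażółć.txt"; filename*=UTF-8''za%C5%BC%C3%B3%C5%82%C4%87.txt
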
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_raw(self):
        """
        Action for show as raw, some mimetypes are "rendered",
        those include images, icons.
        """
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        raw_mimetype_mapping = {
            # map original mimetype to a mimetype used for "show as raw"
            # you can also provide a content-disposition to override the
            # default "attachment" disposition.
            # orig_type: (new_type, new_dispo)

            # show images inline:
            # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
            # for example render an SVG with javascript inside or even render
            # HTML.
            'image/x-icon': ('image/x-icon', 'inline'),
            'image/png': ('image/png', 'inline'),
            'image/gif': ('image/gif', 'inline'),
            'image/jpeg': ('image/jpeg', 'inline'),
            'application/pdf': ('application/pdf', 'inline'),
        }

        mimetype = file_node.mimetype
        try:
            mimetype, disposition = raw_mimetype_mapping[mimetype]
        except KeyError:
            # we don't know anything special about this, handle it safely
            if file_node.is_binary:
                # do same as download raw for binary files
                mimetype, disposition = 'application/octet-stream', 'attachment'
            else:
                # do not just use the original mimetype, but force text/plain,
                # otherwise it would serve text/html and that might be unsafe.
                # Note: underlying vcs library fakes text/plain mimetype if the
                # mimetype can not be determined and it thinks it is not
                # binary. This might lead to erroneous text display in some
                # cases, but helps in other cases, like with text files
                # without extension.
                mimetype, disposition = 'text/plain', 'inline'

        if disposition == 'attachment':
            disposition = self._get_attachement_headers(f_path)

        stream_content = file_node.stream_bytes()

        response = Response(app_iter=stream_content)
        response.content_disposition = disposition
        response.content_type = mimetype

        charset = self._get_default_encoding(c)
        if charset:
            response.charset = charset

        return response

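    # --- Illustrative sketch (not part of the original view code) --------
    # The fallback logic above summarises to a pure function: known safe
    # types render inline, unknown binaries download as octet-stream, and
    # everything else is forced to text/plain so the browser never
    # interprets repository content as HTML. A hedged restatement:
    @staticmethod
    def _example_safe_raw_type(mimetype, is_binary, safe_inline_types):
        """Hypothetical restatement of the safe "show as raw" decision."""
        if mimetype in safe_inline_types:
            return mimetype, 'inline'          # e.g. image/png
        if is_binary:
            return 'application/octet-stream', 'attachment'
        return 'text/plain', 'inline'          # never echo text/html back
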
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_download(self):
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        if self.request.GET.get('lf'):
            # only if lf get flag is passed, we download this file
            # as LFS/Largefile
            lf_node = file_node.get_largefile_node()
            if lf_node:
                # overwrite our pointer with the REAL large-file
                file_node = lf_node

        disposition = self._get_attachement_headers(f_path)

        stream_content = file_node.stream_bytes()

        response = Response(app_iter=stream_content)
        response.content_disposition = disposition
        response.content_type = file_node.mimetype

        charset = self._get_default_encoding(c)
        if charset:
            response.charset = charset

        return response

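    # --- Illustrative note (not part of the original view code) ----------
    # Without ?lf=1 the download serves the small LFS/largefile *pointer*
    # stored in the repository; with it, get_largefile_node() swaps in the
    # real blob, e.g. (the URL shape is hypothetical):
    #
    #     GET /<repo>/download/<commit>/big.iso        -> pointer text
    #     GET /<repo>/download/<commit>/big.iso?lf=1   -> actual content
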
    def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):

        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
        cache_on = cache_seconds > 0
        log.debug(
            'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, f_path, cache_on, cache_seconds or 0))

        cache_namespace_uid = f'repo.{repo_id}'
        region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
        def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
            log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
                      _repo_id, commit_id, f_path)
            try:
                _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
            except (RepositoryError, CommitDoesNotExistError, Exception) as e:
                log.exception(safe_str(e))
                h.flash(h.escape(safe_str(e)), category='error')
                raise HTTPFound(h.route_path(
                    'repo_files', repo_name=self.db_repo_name,
                    commit_id='tip', f_path='/'))

            return _d + _f

        result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
                                     commit_id, f_path)
        return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)

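    # --- Illustrative sketch (not part of the original view code) --------
    # `conditional_cache_on_arguments(condition=cache_on)` computes the
    # value straight through when caching is disabled (TTL <= 0) and
    # memoises it otherwise. A dependency-free sketch of that gating idea,
    # using a plain dict instead of the real rc_cache region (an assumption
    # for brevity; the real region also handles TTL expiry and namespaces):
    @staticmethod
    def _example_conditional_cache(condition, cache_store):
        """Hypothetical decorator mirroring the condition-gated caching."""
        def decorator(func):
            def wrapper(*args):
                if not condition:          # caching disabled: always compute
                    return func(*args)
                if args not in cache_store:
                    cache_store[args] = func(*args)
                return cache_store[args]
            return wrapper
        return decorator
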
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_nodelist(self):
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)

        metadata = self._get_nodelist_at_commit(
            self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
        return {'nodes': [x for x in metadata]}

    def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
        items = []
        for name, commit_id in branches_or_tags.items():
            sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
            items.append((sym_ref, name, ref_type))
        return items

    def _symbolic_reference(self, commit_id, name, f_path, ref_type):
        return commit_id

    def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
        return commit_id

        # NOTE(dan): old code we used in "diff" mode compare
        new_f_path = vcspath.join(name, f_path)
        return f'{new_f_path}@{commit_id}'

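    # --- Illustrative note (not part of the original view code) ----------
    # `_create_references` flattens a {name: commit_id} mapping into
    # (symbolic_ref, name, ref_type) tuples, e.g. for branches (values are
    # hypothetical):
    #
    #     self._create_references({'default': 'abc123'},
    #                             self._symbolic_reference,
    #                             'README.rst', 'branch')
    #     # -> [('abc123', 'default', 'branch')]
    #
    # The unreachable code kept below the early return in
    # _symbolic_reference_svn documents the old "diff" mode behaviour
    # (path@commit_id style references).
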
    def _get_node_history(self, commit_obj, f_path, commits=None):
        """
        get commit history for given node

        :param commit_obj: commit to calculate history
        :param f_path: path for node to calculate history for
        :param commits: if passed, don't calculate history and use the
            commits defined in this list
        """
        _ = self.request.translate

        # calculate history based on tip
        tip = self.rhodecode_vcs_repo.get_commit()
        if commits is None:
            pre_load = ["author", "branch"]
            try:
                commits = tip.get_path_history(f_path, pre_load=pre_load)
            except (NodeDoesNotExistError, CommitError):
                # this node is not present at tip!
                commits = commit_obj.get_path_history(f_path, pre_load=pre_load)

        history = []
        commits_group = ([], _("Changesets"))
        for commit in commits:
            branch = ' (%s)' % commit.branch if commit.branch else ''
            n_desc = f'r{commit.idx}:{commit.short_id}{branch}'
            commits_group[0].append((commit.raw_id, n_desc, 'sha'))
        history.append(commits_group)

        symbolic_reference = self._symbolic_reference

        if self.rhodecode_vcs_repo.alias == 'svn':
            adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
                f_path, self.rhodecode_vcs_repo)
            if adjusted_f_path != f_path:
                log.debug(
                    'Recognized svn tag or branch in file "%s", using svn '
                    'specific symbolic references', f_path)
                f_path = adjusted_f_path
                symbolic_reference = self._symbolic_reference_svn

        branches = self._create_references(
            self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
        branches_group = (branches, _("Branches"))

        tags = self._create_references(
            self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
        tags_group = (tags, _("Tags"))

        history.append(branches_group)
        history.append(tags_group)

        return history, commits

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_history(self):
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        if file_node.is_file():
            file_history, _hist = self._get_node_history(commit, f_path)

            res = []
            for section_items, section in file_history:
                items = []
                for obj_id, obj_text, obj_type in section_items:
                    at_rev = ''
                    if obj_type in ['branch', 'bookmark', 'tag']:
                        at_rev = obj_text
                    entry = {
                        'id': obj_id,
                        'text': obj_text,
                        'type': obj_type,
                        'at_rev': at_rev
                    }

                    items.append(entry)

                res.append({
                    'text': section,
                    'children': items
                })

            data = {
                'more': False,
                'results': res
            }
            return data

        log.warning('Cannot fetch history for directory')
        raise HTTPBadRequest()

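    # --- Illustrative note (not part of the original view code) ----------
    # The payload above is shaped for a grouped select widget; the values
    # here are hypothetical:
    #
    #     {'more': False,
    #      'results': [
    #          {'text': 'Changesets',
    #           'children': [{'id': '<raw_id>', 'text': 'r1:abc123 (default)',
    #                         'type': 'sha', 'at_rev': ''}]},
    #          {'text': 'Branches', 'children': [...]},
    #          {'text': 'Tags', 'children': [...]},
    #      ]}
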
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_authors(self):
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        if not file_node.is_file():
            raise HTTPBadRequest()

        c.file_last_commit = file_node.last_commit
        if self.request.GET.get('annotate') == '1':
            # use _hist from annotation if annotation mode is on
            commit_ids = {x[1] for x in file_node.annotate}
            _hist = (
                self.rhodecode_vcs_repo.get_commit(commit_id)
                for commit_id in commit_ids)
        else:
            _f_history, _hist = self._get_node_history(commit, f_path)
        c.file_author = False

        unique = collections.OrderedDict()
        for commit in _hist:
            author = commit.author
            if author not in unique:
                unique[commit.author] = [
                    h.email(author),
                    h.person(author, 'username_or_name_or_email'),
                    1  # counter
                ]

            else:
                # increase counter
                unique[commit.author][2] += 1

        c.authors = [val for val in unique.values()]

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_check_head(self):
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        new_path = self.request.POST.get('path')
        operation = self.request.POST.get('operation')
        path_exist = ''

        if new_path and operation in ['create', 'upload']:
            new_f_path = os.path.join(f_path.lstrip('/'), new_path)
            try:
                commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
                # NOTE(dan): construct whole path without leading /
                file_node = commit_obj.get_node(new_f_path)
                if file_node is not None:
                    path_exist = new_f_path
            except EmptyRepositoryError:
                pass
            except Exception:
                pass

        return {
            'branch': _branch_name,
            'sha': _sha_commit_id,
            'is_head': is_head,
            'path_exists': path_exist
        }

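    # --- Illustrative note (not part of the original view code) ----------
    # The JSON returned above lets the editing UI decide, before a commit
    # is attempted, whether the target is a branch head and whether the new
    # path would clash with an existing node; the values are hypothetical:
    #
    #     POST path=docs/index.rst operation=create
    #     -> {'branch': 'default', 'sha': '<sha>', 'is_head': True,
    #         'path_exists': ''}
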
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_remove_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path)
        self.check_branch_permission(_branch_name)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        c.default_message = _(
            'Deleted file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_delete_file(self):
        _ = self.request.translate

        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path)
        self.check_branch_permission(_branch_name)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        c.default_message = _(
            'Deleted file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path
        node_path = f_path
        author = self._rhodecode_db_user.full_contact
        message = self.request.POST.get('message') or c.default_message
        try:
            nodes = {
                safe_bytes(node_path): {
                    'content': b''
                }
            }
            ScmModel().delete_nodes(
                user=self._rhodecode_db_user.user_id, repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )

            h.flash(
                _('Successfully deleted file `{}`').format(
                    h.escape(f_path)), category='success')
        except Exception:
            log.exception('Error during commit operation')
            h.flash(_('Error occurred during commit'), category='error')
        raise HTTPFound(
            h.route_path('repo_commit', repo_name=self.db_repo_name,
                         commit_id='tip'))

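    # NOTE: the b'' content in the nodes mapping above is effectively a
    # placeholder; a deletion commit only needs the node paths as keys.
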
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_edit_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        if c.file.is_binary:
            files_url = h.route_path(
                'repo_files',
                repo_name=self.db_repo_name,
                commit_id=c.commit.raw_id, f_path=f_path)
            raise HTTPFound(files_url)

        c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_update_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        if c.file.is_binary:
            raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
                                         commit_id=c.commit.raw_id, f_path=f_path))

        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        old_content = c.file.str_content
        sl = old_content.splitlines(True)
        first_line = sl[0] if sl else ''

        r_post = self.request.POST
        # line endings: 0 - Unix, 1 - Mac, 2 - DOS
        line_ending_mode = detect_mode(first_line, 0)
        content = convert_line_endings(r_post.get('content', ''), line_ending_mode)

        message = r_post.get('message') or c.default_message

        org_node_path = c.file.str_path
        filename = r_post['filename']

        root_path = c.file.dir_path
        pure_path = self.create_pure_path(root_path, filename)
        node_path = pure_path.as_posix()

        default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
                                            commit_id=commit_id)
        if content == old_content and node_path == org_node_path:
            h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
                    category='warning')
            raise HTTPFound(default_redirect_url)

        try:
            mapping = {
                c.file.bytes_path: {
                    'org_filename': org_node_path,
                    'filename': safe_bytes(node_path),
                    'content': safe_bytes(content),
                    'lexer': '',
                    'op': 'mod',
                    'mode': c.file.mode
                }
            }

            commit = ScmModel().update_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=mapping,
                parent_commit=c.commit,
            )

            h.flash(_('Successfully committed changes to file `{}`').format(
                h.escape(f_path)), category='success')
            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except Exception:
            log.exception('Error occurred during commit')
            h.flash(_('Error occurred during commit'), category='error')

        raise HTTPFound(default_redirect_url)

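    # NOTE: the web edit above preserves the file's existing line-ending style
    # by sampling the first line. Assuming detect_mode follows the 0/1/2
    # mapping in the inline comment, a file starting with 'x\r\n' would yield
    # mode 2 (DOS), and convert_line_endings('a\nb', 2) would return 'a\r\nb'.
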
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_add_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        # Check if we need to use this page to upload binary
        upload_binary = str2bool(self.request.params.get('upload_binary', False))

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        if self.rhodecode_vcs_repo.is_empty():
            # for an empty repository we cannot check the current branch, so we
            # rely on c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
        else:
            _branch_name, _sha_commit_id, is_head = \
                self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                    landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = _('Added file via RhodeCode Enterprise') \
            if not upload_binary else _('Edited file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path.lstrip('/')  # ensure not relative path
        c.replace_binary = upload_binary

        return self._get_template_context(c)

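    # NOTE: requesting this view with ?upload_binary=true (any str2bool-truthy
    # value) switches the add-file page into binary replacement mode; the
    # actual replacement commit is then handled by repo_files_replace_file
    # further below.
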
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_create_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        # calculate redirect URL
        if self.rhodecode_vcs_repo.is_empty():
            default_redirect_url = h.route_path(
                'repo_summary', repo_name=self.db_repo_name)
        else:
            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id='tip')

        if self.rhodecode_vcs_repo.is_empty():
            # for an empty repository we cannot check the current branch, so we
            # rely on c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
        else:
            _branch_name, _sha_commit_id, is_head = \
                self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                    landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = _('Added file via RhodeCode Enterprise')
        c.f_path = f_path

        r_post = self.request.POST
        message = r_post.get('message') or c.default_message
        filename = r_post.get('filename')
        unix_mode = 0

        if not filename:
            # If there's no commit, redirect to the repo summary
            if type(c.commit) is EmptyCommit:
                redirect_url = h.route_path(
                    'repo_summary', repo_name=self.db_repo_name)
            else:
                redirect_url = default_redirect_url
            h.flash(_('No filename specified'), category='warning')
            raise HTTPFound(redirect_url)

        root_path = f_path
        pure_path = self.create_pure_path(root_path, filename)
        node_path = pure_path.as_posix().lstrip('/')

        author = self._rhodecode_db_user.full_contact
        content = convert_line_endings(r_post.get('content', ''), unix_mode)
        nodes = {
            safe_bytes(node_path): {
                'content': safe_bytes(content)
            }
        }

        try:
            commit = ScmModel().create_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )

            h.flash(_('Successfully committed new file `{}`').format(
                h.escape(node_path)), category='success')

            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except NonRelativePathError:
            log.exception('Non-relative path found')
            h.flash(_('The location specified must be a relative path and must not '
                      'contain .. in the path'), category='warning')
            raise HTTPFound(default_redirect_url)
        except (NodeError, NodeAlreadyExistsError) as e:
            h.flash(h.escape(safe_str(e)), category='error')
        except Exception:
            log.exception('Error occurred during commit')
            h.flash(_('Error occurred during commit'), category='error')

        raise HTTPFound(default_redirect_url)

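    # NOTE: create_nodes consumes a mapping of byte paths to node descriptors,
    # e.g. (illustrative content):
    #   {b'docs/readme.rst': {'content': b'hello\n'}}
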
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_upload_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        # calculate redirect URL
        if self.rhodecode_vcs_repo.is_empty():
            default_redirect_url = h.route_path(
                'repo_summary', repo_name=self.db_repo_name)
        else:
            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id='tip')

        if self.rhodecode_vcs_repo.is_empty():
            # for an empty repository we cannot check the current branch, so we
            # rely on c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
        else:
            _branch_name, _sha_commit_id, is_head = \
                self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                    landing_ref=self.db_repo.landing_ref_name)

        error = self.forbid_non_head(is_head, f_path, json_mode=True)
        if error:
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }
        error = self.check_branch_permission(_branch_name, json_mode=True)
        if error:
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }

        c.default_message = _('Added file via RhodeCode Enterprise')
        c.f_path = f_path

        r_post = self.request.POST

        message = c.default_message
        user_message = r_post.getall('message')
        if isinstance(user_message, list) and user_message:
            # we take the first of duplicated results if it's not empty
            message = user_message[0] if user_message[0] else message

        nodes = {}

        for file_obj in r_post.getall('files_upload') or []:
            content = file_obj.file
            filename = file_obj.filename

            root_path = f_path
            pure_path = self.create_pure_path(root_path, filename)
            node_path = pure_path.as_posix().lstrip('/')

            nodes[safe_bytes(node_path)] = {
                'content': content
            }

        if not nodes:
            error = 'missing files'
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }

        author = self._rhodecode_db_user.full_contact

        try:
            commit = ScmModel().create_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )
            if len(nodes) == 1:
                flash_message = _('Successfully committed 1 new file')
            else:
                flash_message = _('Successfully committed {} new files').format(len(nodes))

            h.flash(flash_message, category='success')

            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except NonRelativePathError:
            log.exception('Non-relative path found')
            error = _('The location specified must be a relative path and must not '
                      'contain .. in the path')
            h.flash(error, category='warning')

            return {
                'error': error,
                'redirect_url': default_redirect_url
            }
        except (NodeError, NodeAlreadyExistsError) as e:
            error = h.escape(e)
            h.flash(error, category='error')

            return {
                'error': error,
                'redirect_url': default_redirect_url
            }
        except Exception:
            log.exception('Error occurred during commit')
            error = _('Error occurred during commit')
            h.flash(error, category='error')
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }

        return {
            'error': None,
            'redirect_url': default_redirect_url
        }

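    # NOTE: unlike the HTML form views above, this endpoint answers JSON; the
    # uploader UI is expected to follow the contract
    #   {'error': <message or None>, 'redirect_url': <url>}
    # and redirect on success.
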
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_replace_file(self):
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        if self.rhodecode_vcs_repo.is_empty():
            default_redirect_url = h.route_path(
                'repo_summary', repo_name=self.db_repo_name)
        else:
            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id='tip')

        if self.rhodecode_vcs_repo.is_empty():
            # for an empty repository we cannot check the current branch, so we
            # rely on c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
        else:
            _branch_name, _sha_commit_id, is_head = \
                self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                    landing_ref=self.db_repo.landing_ref_name)

        error = self.forbid_non_head(is_head, f_path, json_mode=True)
        if error:
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }
        error = self.check_branch_permission(_branch_name, json_mode=True)
        if error:
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }

        c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        r_post = self.request.POST

        message = c.default_message
        user_message = r_post.getall('message')
        if isinstance(user_message, list) and user_message:
            # we take the first of duplicated results if it's not empty
            message = user_message[0] if user_message[0] else message

        data_for_replacement = r_post.getall('files_upload') or []
        if (objects_count := len(data_for_replacement)) > 1:
            return {
                'error': 'too many files for replacement',
                'redirect_url': default_redirect_url
            }
        elif not objects_count:
            return {
                'error': 'missing files',
                'redirect_url': default_redirect_url
            }

        content = data_for_replacement[0].file
        retrieved_filename = data_for_replacement[0].filename

        if retrieved_filename.split('.')[-1] != f_path.split('.')[-1]:
            return {
                'error': "file extension of uploaded file doesn't match the original file's extension",
                'redirect_url': default_redirect_url
            }

        author = self._rhodecode_db_user.full_contact

        try:
            commit = ScmModel().update_binary_node(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                node={
                    'content': content,
                    'file_path': f_path.encode(),
                },
                parent_commit=c.commit,
                author=author,
            )

            h.flash(_('Successfully committed 1 new file'), category='success')

            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except (NodeError, NodeAlreadyExistsError) as e:
            error = h.escape(e)
            h.flash(error, category='error')

            return {
                'error': error,
                'redirect_url': default_redirect_url
            }
        except Exception:
            log.exception('Error occurred during commit')
            error = _('Error occurred during commit')
            h.flash(error, category='error')
            return {
                'error': error,
                'redirect_url': default_redirect_url
            }

        return {
            'error': None,
            'redirect_url': default_redirect_url
        }
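
    # NOTE: the extension guard in repo_files_replace_file compares the last
    # '.'-separated segment of each name; a roughly equivalent pathlib sketch
    # (not the code used here) would be:
    #   PurePosixPath(retrieved_filename).suffix != PurePosixPath(f_path).suffix
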
@@ -1,205 +1,221 b''
# Copyright (C) 2010-2023 RhodeCode GmbH
# (standard RhodeCode AGPLv3 dual-license header)

import os
import tempfile
import logging

from pyramid.settings import asbool

from rhodecode.config.settings_maker import SettingsMaker
from rhodecode.config import utils as config_utils

log = logging.getLogger(__name__)


def sanitize_settings_and_apply_defaults(global_config, settings):
    """
    Applies settings defaults and does all type conversion.

    We would move all settings parsing and preparation into this place, so that
    we have only one place left which deals with this part. The remaining parts
    of the application would start to rely fully on well-prepared settings.

    This piece would later be split up per topic to avoid a big fat monster
    function.
    """
    jn = os.path.join

    global_settings_maker = SettingsMaker(global_config)
    global_settings_maker.make_setting('debug', default=False, parser='bool')
    debug_enabled = asbool(global_config.get('debug'))

    settings_maker = SettingsMaker(settings)

    settings_maker.make_setting(
        'logging.autoconfigure',
        default=False,
        parser='bool')

    logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
    settings_maker.enable_logging(logging_conf, level='DEBUG' if debug_enabled else 'INFO')

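    # NOTE: with parser='bool' a make_setting call coerces the usual ini
    # spellings (presumably asbool-style), so `debug = true`, `1`, `yes` or
    # `on` all become the boolean True, and a missing key falls back to the
    # declared default.
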
    # Default includes, possible for a user to change
    pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
    log.debug(
        "Using the following pyramid.includes: %s",
        pyramid_includes)

    settings_maker.make_setting('rhodecode.edition', 'Community Edition')
    settings_maker.make_setting('rhodecode.edition_id', 'CE')

    if 'mako.default_filters' not in settings:
        # set custom default filters if we don't have them defined
        settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
        settings['mako.default_filters'] = 'h_filter'

    if 'mako.directories' not in settings:
        mako_directories = settings.setdefault('mako.directories', [
            # Base templates of the original application
            'rhodecode:templates',
        ])
        log.debug(
            "Using the following Mako template directories: %s",
            mako_directories)

    # NOTE(marcink): fix redis requirement for schema of connection since 3.X
    if settings.get('beaker.session.type') == 'ext:redis':
        raw_url = settings['beaker.session.url']
        if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
            settings['beaker.session.url'] = 'redis://' + raw_url
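    # e.g. a legacy value `beaker.session.url = localhost:6379` is rewritten
    # to 'redis://localhost:6379', the scheme-qualified form that redis-py 3.x
    # requires.
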
    settings_maker.make_setting('__file__', global_config.get('__file__'))

    # TODO: johbo: Re-think this, usually the call to config.include
    # should allow passing in a prefix.
    settings_maker.make_setting('rhodecode.api.url', '/_admin/api')

    # Sanitize generic settings.
    settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
    settings_maker.make_setting('gzip_responses', False, parser='bool')
    settings_maker.make_setting('startup.import_repos', 'false', parser='bool')

    # statsd
    settings_maker.make_setting('statsd.enabled', False, parser='bool')
    settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
    settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
    settings_maker.make_setting('statsd.statsd_prefix', '')
    settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')

    settings_maker.make_setting('vcs.svn.compatible_version', '')
    settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
    settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
    settings_maker.make_setting('vcs.hooks.protocol', 'http')
    settings_maker.make_setting('vcs.hooks.host', '*')
    settings_maker.make_setting('vcs.scm_app_implementation', 'http')
    settings_maker.make_setting('vcs.server', '')
    settings_maker.make_setting('vcs.server.protocol', 'http')
    settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
    settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
    settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
    settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
    settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')

    settings_maker.make_setting('vcs.methods.cache', True, parser='bool')

    # repo_store path
    settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
    # Support legacy values of vcs.scm_app_implementation. Legacy
    # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
    # 'vcsserver.scm_app' (disabled since 4.13); both are now mapped to 'http'.
    scm_app_impl = settings['vcs.scm_app_implementation']
    if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
        settings['vcs.scm_app_implementation'] = 'http'

    settings_maker.make_setting('appenlight', False, parser='bool')

    temp_store = tempfile.gettempdir()
    tmp_cache_dir = jn(temp_store, 'rc_cache')

    # save the default cache dir and use it for all backends later.
    default_cache_dir = settings_maker.make_setting(
        'cache_dir',
        default=tmp_cache_dir, default_when_empty=True,
        parser='dir:ensured')

    # exception store cache
    settings_maker.make_setting(
        'exception_tracker.store_path',
        default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
        parser='dir:ensured'
    )

    settings_maker.make_setting(
        'celerybeat-schedule.path',
        default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
        parser='file:ensured'
    )

    settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
    settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)

157 | # sessions, ensure file since no-value is memory |
|
157 | # sessions, ensure file since no-value is memory | |
158 | settings_maker.make_setting('beaker.session.type', 'file') |
|
158 | settings_maker.make_setting('beaker.session.type', 'file') | |
159 | settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data')) |
|
159 | settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data')) | |
160 |
|
160 | |||
161 | # cache_general |
|
161 | # cache_general | |
162 | settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace') |
|
162 | settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace') | |
163 | settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int') |
|
163 | settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int') | |
164 | settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db')) |
|
164 | settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db')) | |
165 |
|
165 | |||
166 | # cache_perms |
|
166 | # cache_perms | |
167 | settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace') |
|
167 | settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace') | |
168 | settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int') |
|
168 | settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int') | |
169 | settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db')) |
|
169 | settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db')) | |
170 |
|
170 | |||
171 | # cache_repo |
|
171 | # cache_repo | |
172 | settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace') |
|
172 | settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace') | |
173 | settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int') |
|
173 | settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int') | |
174 | settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db')) |
|
174 | settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db')) | |
175 |
|
175 | |||
176 | # cache_license |
|
176 | # cache_license | |
177 | settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace') |
|
177 | settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace') | |
178 | settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int') |
|
178 | settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int') | |
179 | settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db')) |
|
179 | settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db')) | |
180 |
|
180 | |||
181 | # cache_repo_longterm memory, 96H |
|
181 | # cache_repo_longterm memory, 96H | |
182 | settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru') |
|
182 | settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru') | |
183 | settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int') |
|
183 | settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int') | |
184 | settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int') |
|
184 | settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int') | |
185 |
|
185 | |||
186 | # sql_cache_short |
|
186 | # sql_cache_short | |
187 | settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru') |
|
187 | settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru') | |
188 | settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int') |
|
188 | settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int') | |
189 | settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int') |
|
189 | settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int') | |
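# --- illustrative example (editor note, not part of the changeset) ---
# Each rc_cache.<region>.* group above feeds one dogpile.cache region; the
# 'cache_perms' settings, for instance, correspond roughly to:
#
#   from dogpile.cache import make_region
#   region = make_region().configure(
#       'dogpile.cache.rc.file_namespace',
#       expiration_time=60 * 60,
#       arguments={'filename': jn(default_cache_dir, 'rhodecode_cache_perms_db')},
#   )
#
# ('dogpile.cache.rc.*' names are RhodeCode's registered dogpile backends.)
# --- end example ---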

# archive_cache
settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1')
settings_maker.make_setting('archive_cache.backend.type', 'filesystem')

settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
-settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int')
+settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored')

+settings_maker.make_setting('archive_cache.filesystem.retry', False, parser='bool')
+settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int')
+settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int')
+
+settings_maker.make_setting('archive_cache.objectstore.url', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
+settings_maker.make_setting('archive_cache.objectstore.key', '')
+settings_maker.make_setting('archive_cache.objectstore.secret', '')
+settings_maker.make_setting('archive_cache.objectstore.bucket_shards', 8, parser='int')
+
+settings_maker.make_setting('archive_cache.objectstore.cache_size_gb', 10, parser='float')
+settings_maker.make_setting('archive_cache.objectstore.eviction_policy', 'least-recently-stored')
+
+settings_maker.make_setting('archive_cache.objectstore.retry', False, parser='bool')
+settings_maker.make_setting('archive_cache.objectstore.retry_backoff', 1, parser='int')
+settings_maker.make_setting('archive_cache.objectstore.retry_attempts', 10, parser='int')

settings_maker.env_expand()

# configure instance id
config_utils.set_instance_id(settings)

return settings
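# --- illustrative example (editor note, not part of the changeset) ---
# The archive_cache.* defaults above imply two interchangeable backends.
# A minimal sketch of the ini overrides that would select the objectstore
# backend (endpoint and credentials are placeholders, not shipped defaults):
#
#   archive_cache.backend.type = objectstore
#   archive_cache.objectstore.url = http://s3-endpoint:9000
#   archive_cache.objectstore.key = <access-key>
#   archive_cache.objectstore.secret = <secret-key>
#   archive_cache.locking.url = redis://redis:6379/1
# --- end example ---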
@@ -1,466 +1,466 b''
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import sys
import collections

import time
import logging.config

from paste.gzipper import make_gzip_middleware
import pyramid.events
from pyramid.wsgi import wsgiapp
from pyramid.config import Configurator
from pyramid.settings import asbool, aslist
from pyramid.httpexceptions import (
    HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
from pyramid.renderers import render_to_response

from rhodecode.model import meta
from rhodecode.config import patches

from rhodecode.config.environment import load_pyramid_environment

import rhodecode.events
from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
from rhodecode.lib.middleware.vcs import VCSMiddleware
from rhodecode.lib.request import Request
from rhodecode.lib.vcs import VCSCommunicationError
from rhodecode.lib.exceptions import VCSServerUnavailable
from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
from rhodecode.lib.middleware.https_fixup import HttpsFixup
from rhodecode.lib.plugins.utils import register_rhodecode_plugin
from rhodecode.lib.utils2 import AttributeDict
from rhodecode.lib.exc_tracking import store_exception, format_exc
from rhodecode.subscribers import (
    scan_repositories_if_enabled, write_js_routes_if_enabled,
    write_metadata_if_needed, write_usage_data)
from rhodecode.lib.statsd_client import StatsdClient

log = logging.getLogger(__name__)


def is_http_error(response):
    # error which should have a traceback
    return response.status_code > 499


def should_load_all():
    """
    Returns True if all application components should be loaded. In some cases
    it's desirable to skip app loading for faster shell-script execution.
    """
    ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER')
    if ssh_cmd:
        return False

    return True

def make_pyramid_app(global_config, **settings):
    """
    Constructs the WSGI application based on Pyramid.

    Specials:

    * The application can also be integrated like a plugin via the call to
      `includeme`. This is accompanied with the other utility functions which
      are called. Changing this should be done with great care to not break
      cases when these fragments are assembled from another place.

    """
    start_time = time.time()
    log.info('Pyramid app config starting')

    sanitize_settings_and_apply_defaults(global_config, settings)

    # init and bootstrap StatsdClient
    StatsdClient.setup(settings)

    config = Configurator(settings=settings)
    # Init our statsd at very start
    config.registry.statsd = StatsdClient.statsd

    # Apply compatibility patches
    patches.inspect_getargspec()

    load_pyramid_environment(global_config, settings)

    # Static file view comes first
    includeme_first(config)

    includeme(config)

    pyramid_app = config.make_wsgi_app()
    pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
    pyramid_app.config = config

    celery_settings = get_celery_config(settings)
    config.configure_celery(celery_settings)

    # creating the app uses a connection - return it after we are done
    meta.Session.remove()

    total_time = time.time() - start_time
    log.info('Pyramid app created and configured in %.2fs', total_time)
    return pyramid_app


def get_celery_config(settings):
    """
    Converts basic ini configuration into celery 4.X options
    """

    def key_converter(key_name):
        pref = 'celery.'
        if key_name.startswith(pref):
            return key_name[len(pref):].replace('.', '_').lower()

    def type_converter(parsed_key, value):
        # cast to int
        if value.isdigit():
            return int(value)

        # cast to bool
        if value.lower() in ['true', 'false', 'True', 'False']:
            return value.lower() == 'true'
        return value

    celery_config = {}
    for k, v in settings.items():
        pref = 'celery.'
        if k.startswith(pref):
            celery_config[key_converter(k)] = type_converter(key_converter(k), v)

    # TODO: rethink if we want to support celerybeat based file config, probably NOT
    # beat_config = {}
    # for section in parser.sections():
    #     if section.startswith('celerybeat:'):
    #         name = section.split(':', 1)[1]
    #         beat_config[name] = get_beat_config(parser, section)

    # final compose of settings
    celery_settings = {}

    if celery_config:
        celery_settings.update(celery_config)
    # if beat_config:
    #     celery_settings.update({'beat_schedule': beat_config})

    return celery_settings

def not_found_view(request):
    """
    This creates the view which should be registered as not-found-view to
    pyramid.
    """

    if not getattr(request, 'vcs_call', None):
        # handle like regular case with our error_handler
        return error_handler(HTTPNotFound(), request)

    # handle not found view as a vcs call
    settings = request.registry.settings
    ae_client = getattr(request, 'ae_client', None)
    vcs_app = VCSMiddleware(
        HTTPNotFound(), request.registry, settings,
        appenlight_client=ae_client)

    return wsgiapp(vcs_app)(None, request)


def error_handler(exception, request):
    import rhodecode
    from rhodecode.lib import helpers

    rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'

    base_response = HTTPInternalServerError()
    # prefer original exception for the response since it may have headers set
    if isinstance(exception, HTTPException):
        base_response = exception
    elif isinstance(exception, VCSCommunicationError):
        base_response = VCSServerUnavailable()

    if is_http_error(base_response):
        traceback_info = format_exc(request.exc_info)
        log.error(
            'error occurred handling this request for path: %s, \n%s',
            request.path, traceback_info)

    error_explanation = base_response.explanation or str(base_response)
    if base_response.status_code == 404:
        error_explanation += " Possibly you don't have permission to access this page."
    c = AttributeDict()
    c.error_message = base_response.status
    c.error_explanation = error_explanation
    c.visual = AttributeDict()

    c.visual.rhodecode_support_url = (
        request.registry.settings.get('rhodecode_support_url') or
        request.route_url('rhodecode_support')
    )
    c.redirect_time = 0
    c.rhodecode_name = rhodecode_title
    if not c.rhodecode_name:
        c.rhodecode_name = 'Rhodecode'

    c.causes = []
    if is_http_error(base_response):
        c.causes.append('Server is overloaded.')
        c.causes.append('Server database connection is lost.')
        c.causes.append('Server encountered an unexpected, unhandled error.')

    if hasattr(base_response, 'causes'):
        c.causes = base_response.causes

    c.messages = helpers.flash.pop_messages(request=request)
    exc_info = sys.exc_info()
    c.exception_id = id(exc_info)
    c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
        or base_response.status_code > 499
    c.exception_id_url = request.route_url(
        'admin_settings_exception_tracker_show', exception_id=c.exception_id)

    debug_mode = rhodecode.ConfigGet().get_bool('debug')
    if c.show_exception_id:
        store_exception(c.exception_id, exc_info)
        c.exception_debug = debug_mode
        c.exception_config_ini = rhodecode.CONFIG.get('__file__')

    if debug_mode:
        try:
            from rich.traceback import install
            install(show_locals=True)
            log.debug('Installing rich tracebacks...')
        except ImportError:
            pass

    response = render_to_response(
        '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
        response=base_response)

    response.headers["X-RC-Exception-Id"] = str(c.exception_id)

    statsd = request.registry.statsd
    if statsd and base_response.status_code > 499:
        exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
        statsd.incr('rhodecode_exception_total',
                    tags=["exc_source:web",
                          f"http_code:{base_response.status_code}",
                          f"type:{exc_type}"])

    return response
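# --- illustrative example (editor note, not part of the changeset) ---
# For an unhandled ValueError escaping a view, the exc_type tag built above
# resolves to 'builtins.ValueError' (module + class name), so the counter is
# emitted roughly as:
#
#   rhodecode_exception_total:1|c|#exc_source:web,http_code:500,type:builtins.ValueError
#
# (Datadog-style statsd tag syntax assumed; the exact wire format depends on
# the client configured in StatsdClient.)
# --- end example ---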


def includeme_first(config):
    # redirect automatic browser favicon.ico requests to correct place
    def favicon_redirect(context, request):
        return HTTPFound(
            request.static_path('rhodecode:public/images/favicon.ico'))

    config.add_view(favicon_redirect, route_name='favicon')
    config.add_route('favicon', '/favicon.ico')

    def robots_redirect(context, request):
        return HTTPFound(
            request.static_path('rhodecode:public/robots.txt'))

    config.add_view(robots_redirect, route_name='robots')
    config.add_route('robots', '/robots.txt')

    config.add_static_view(
        '_static/deform', 'deform:static')
    config.add_static_view(
        '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)


ce_auth_resources = [
    'rhodecode.authentication.plugins.auth_crowd',
    'rhodecode.authentication.plugins.auth_headers',
    'rhodecode.authentication.plugins.auth_jasig_cas',
    'rhodecode.authentication.plugins.auth_ldap',
    'rhodecode.authentication.plugins.auth_pam',
    'rhodecode.authentication.plugins.auth_rhodecode',
    'rhodecode.authentication.plugins.auth_token',
]


def includeme(config, auth_resources=None):
    from rhodecode.lib.celerylib.loader import configure_celery
    log.debug('Initializing main includeme from %s', os.path.basename(__file__))
    settings = config.registry.settings
    config.set_request_factory(Request)

    # plugin information
    config.registry.rhodecode_plugins = collections.OrderedDict()

    config.add_directive(
        'register_rhodecode_plugin', register_rhodecode_plugin)

    config.add_directive('configure_celery', configure_celery)

    if settings.get('appenlight', False):
        config.include('appenlight_client.ext.pyramid_tween')

    load_all = should_load_all()

    # Includes which are required. The application would fail without them.
    config.include('pyramid_mako')
    config.include('rhodecode.lib.rc_beaker')
    config.include('rhodecode.lib.rc_cache')
-    config.include('rhodecode.lib.rc_cache.archive_cache')
+    config.include('rhodecode.lib.archive_cache')

    config.include('rhodecode.apps._base.navigation')
    config.include('rhodecode.apps._base.subscribers')
    config.include('rhodecode.tweens')
    config.include('rhodecode.authentication')

    if load_all:

        # load CE authentication plugins

        if auth_resources:
            ce_auth_resources.extend(auth_resources)

        for resource in ce_auth_resources:
            config.include(resource)

        # Auto discover authentication plugins and include their configuration.
        if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')):
            from rhodecode.authentication import discover_legacy_plugins
            discover_legacy_plugins(config)

    # apps
    if load_all:
        log.debug('Starting config.include() calls')
        config.include('rhodecode.api.includeme')
        config.include('rhodecode.apps._base.includeme')
        config.include('rhodecode.apps._base.navigation.includeme')
        config.include('rhodecode.apps._base.subscribers.includeme')
        config.include('rhodecode.apps.hovercards.includeme')
        config.include('rhodecode.apps.ops.includeme')
        config.include('rhodecode.apps.channelstream.includeme')
        config.include('rhodecode.apps.file_store.includeme')
        config.include('rhodecode.apps.admin.includeme')
        config.include('rhodecode.apps.login.includeme')
        config.include('rhodecode.apps.home.includeme')
        config.include('rhodecode.apps.journal.includeme')

        config.include('rhodecode.apps.repository.includeme')
        config.include('rhodecode.apps.repo_group.includeme')
        config.include('rhodecode.apps.user_group.includeme')
        config.include('rhodecode.apps.search.includeme')
        config.include('rhodecode.apps.user_profile.includeme')
        config.include('rhodecode.apps.user_group_profile.includeme')
        config.include('rhodecode.apps.my_account.includeme')
        config.include('rhodecode.apps.gist.includeme')

        config.include('rhodecode.apps.svn_support.includeme')
        config.include('rhodecode.apps.ssh_support.includeme')
        config.include('rhodecode.apps.debug_style')

    if load_all:
        config.include('rhodecode.integrations.includeme')
        config.include('rhodecode.integrations.routes.includeme')

    config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
    settings['default_locale_name'] = settings.get('lang', 'en')
    config.add_translation_dirs('rhodecode:i18n/')

    # Add subscribers.
    if load_all:
        log.debug('Adding subscribers...')
        config.add_subscriber(scan_repositories_if_enabled,
                              pyramid.events.ApplicationCreated)
        config.add_subscriber(write_metadata_if_needed,
                              pyramid.events.ApplicationCreated)
        config.add_subscriber(write_usage_data,
                              pyramid.events.ApplicationCreated)
        config.add_subscriber(write_js_routes_if_enabled,
                              pyramid.events.ApplicationCreated)

    # Set the default renderer for HTML templates to mako.
    config.add_mako_renderer('.html')

    config.add_renderer(
        name='json_ext',
        factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')

    config.add_renderer(
        name='string_html',
        factory='rhodecode.lib.string_renderer.html')

    # include RhodeCode plugins
    includes = aslist(settings.get('rhodecode.includes', []))
    log.debug('processing rhodecode.includes data...')
    for inc in includes:
        config.include(inc)

    # custom not-found view: if our pyramid app doesn't know how to handle
    # the request, pass it to the potential VCS handling app
    config.add_notfound_view(not_found_view)
    if not settings.get('debugtoolbar.enabled', False):
        # with debugtoolbar disabled, handle all exceptions via the error_handler
        config.add_view(error_handler, context=Exception)

    # all errors including 403/404/50X
    config.add_view(error_handler, context=HTTPError)


def wrap_app_in_wsgi_middlewares(pyramid_app, config):
    """
    Apply outer WSGI middlewares around the application.
    """
    registry = config.registry
    settings = registry.settings

    # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
    pyramid_app = HttpsFixup(pyramid_app, settings)

    pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
        pyramid_app, settings)
    registry.ae_client = _ae_client

    if settings['gzip_responses']:
        pyramid_app = make_gzip_middleware(
            pyramid_app, settings, compress_level=1)

    # this should be the outermost middleware in the wsgi stack since
    # middleware like Routes make database calls
    def pyramid_app_with_cleanup(environ, start_response):
        start = time.time()
        try:
            return pyramid_app(environ, start_response)
        finally:
            # Dispose current database session and rollback uncommitted
            # transactions.
            meta.Session.remove()

            # In single-threaded server mode, on a non-sqlite db, we should see
            # '0 Current Checked out connections' at the end of a request;
            # if not, then something, somewhere is leaving a connection open
            pool = meta.get_engine().pool
            log.debug('sa pool status: %s', pool.status())
            total = time.time() - start
            log.debug('Request processing finalized: %.4fs', total)

    return pyramid_app_with_cleanup
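# --- illustrative example (editor note, not part of the changeset) ---
# Resulting WSGI onion, outermost wrapper first (the gzip and appenlight
# layers are only present when their settings enable them):
#
#   pyramid_app_with_cleanup        # timing + meta.Session.remove(), outermost
#     gzip middleware               # settings['gzip_responses']
#       appenlight middleware       # settings.get('appenlight')
#         HttpsFixup                # honours HTTP_X_URL_SCHEME from the proxy
#           pyramid_app             # the Configurator-built application
# --- end example ---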
@@ -1,29 +1,78 b''
# Copyright (C) 2015-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

-from .fanout_cache import get_archival_cache_store
-from .fanout_cache import get_archival_config
+import logging
+
+from .backends.fanout_cache import FileSystemFanoutCache
+from .backends.objectstore_cache import ObjectStoreCache

-from .utils import archive_iterator
-from .…
+from .utils import archive_iterator  # noqa
+from .lock import ArchiveCacheGenerationLock  # noqa
+
+log = logging.getLogger(__name__)
+
+
+cache_meta = None


def includeme(config):
    # init our cache at start
    settings = config.get_settings()
    get_archival_cache_store(settings)
+
+
+def get_archival_config(config):
+
+    final_config = {}
+
+    for k, v in config.items():
+        if k.startswith('archive_cache'):
+            final_config[k] = v
+
+    return final_config
+
+
+def get_archival_cache_store(config, always_init=False):
+
+    global cache_meta
+    if cache_meta is not None and not always_init:
+        return cache_meta
+
+    config = get_archival_config(config)
+    backend = config['archive_cache.backend.type']
+
+    archive_cache_locking_url = config['archive_cache.locking.url']
+
+    match backend:
+        case 'filesystem':
+            d_cache = FileSystemFanoutCache(
+                locking_url=archive_cache_locking_url,
+                **config
+            )
+        case 'objectstore':
+            d_cache = ObjectStoreCache(
+                locking_url=archive_cache_locking_url,
+                **config
+            )
+        case _:
+            raise ValueError(f'archive_cache.backend.type only supports "filesystem" or "objectstore", got: {backend}')
+
+    cache_meta = d_cache
+    return cache_meta
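# --- illustrative usage sketch (editor note, not part of the changeset) ---
# The module above memoizes the backend in `cache_meta`; callers only need the
# parsed settings dict (module path taken from the config.include() wiring):

from rhodecode.lib.archive_cache import get_archival_cache_store

settings = {'archive_cache.backend.type': 'filesystem',
            'archive_cache.locking.url': 'redis://redis:6379/1'}  # plus the other archive_cache.* keys

d_cache = get_archival_cache_store(config=settings)        # first call builds the backend
d_cache_again = get_archival_cache_store(config=settings)  # returns the cached instance
rebuilt = get_archival_cache_store(config=settings, always_init=True)  # forces a rebuild
# --- end example ---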
@@ -1,456 +1,166 b''
# Copyright (C) 2015-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import codecs
-import contextlib
-import functools
-import os
+import hashlib
import logging
-import time
-import typing
-import zlib
-import sqlite3
+import os

-from ...ext_json import json
-from .lock import GenerationLock
-from .utils import …
+import fsspec
+
+from .base import BaseCache, BaseShard
+from ..utils import ShardFileReader, NOT_GIVEN
+from ...type_utils import str2bool

log = logging.getLogger(__name__)

-cache_meta = None

-UNKNOWN = -241
-NO_VAL = -917
-
-MODE_BINARY = 'BINARY'
-
-
-EVICTION_POLICY = {
-    'none': {
-        'evict': None,
-    },
-    'least-recently-stored': {
-        'evict': 'SELECT {fields} FROM archive_cache ORDER BY store_time',
-    },
-    'least-recently-used': {
-        'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_time',
-    },
-    'least-frequently-used': {
-        'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_count',
-    },
-}
-
-
-class DB:
-
-    def __init__(self):
-        self.connection = sqlite3.connect(':memory:')
-        self._init_db()
-
-    def _init_db(self):
-        qry = '''
-        CREATE TABLE IF NOT EXISTS archive_cache (
-            rowid INTEGER PRIMARY KEY,
-            key_file TEXT,
-            key_file_path TEXT,
-            filename TEXT,
-            full_path TEXT,
-            store_time REAL,
-            access_time REAL,
-            access_count INTEGER DEFAULT 0,
-            size INTEGER DEFAULT 0
-        )
-        '''
-
-        self.sql(qry)
-        self.connection.commit()
-
-    @property
-    def sql(self):
-        return self.connection.execute
-
-    def bulk_insert(self, rows):
-        qry = '''
-        INSERT INTO archive_cache (
-            rowid,
-            key_file,
-            key_file_path,
-            filename,
-            full_path,
-            store_time,
-            access_time,
-            access_count,
-            size
-        )
-        VALUES (
-            ?, ?, ?, ?, ?, ?, ?, ?, ?
-        )
-        '''
-        cursor = self.connection.cursor()
-        cursor.executemany(qry, rows)
-        self.connection.commit()
-
-
-class FileSystemCache:
+
+class FileSystemShard(BaseShard):

    def __init__(self, index, directory, **settings):
        self._index = index
        self._directory = directory
+        self.storage_type = 'directory'
+        self.fs = fsspec.filesystem('file')

    @property
    def directory(self):
        """Cache directory."""
        return self._directory

-    def _write_file(self, full_path, iterator, mode, encoding=None):
-        full_dir, _ = os.path.split(full_path)
+    def _get_keyfile(self, archive_key) -> tuple[str, str]:
+        key_file = f'{archive_key}.{self.key_suffix}'
+        return key_file, os.path.join(self.directory, key_file)

+    def _get_writer(self, path, mode):
        for count in range(1, 11):
-            with contextlib.suppress(OSError):
-                os.makedirs(full_dir)
-
            try:
                # Another cache may have deleted the directory before
                # the file could be opened.
-                writer = open(full_path, mode, encoding=encoding)
+                return self.fs.open(path, mode)
            except OSError:
                if count == 10:
                    # Give up after 10 tries to open the file.
                    raise
                continue

-        with writer:
-            size = 0
-            for chunk in iterator:
-                size += len(chunk)
-                writer.write(chunk)
-            writer.flush()
-            # Get the file descriptor
-            fd = writer.fileno()
-
-            # Sync the file descriptor to disk, helps with NFS cases...
-            os.fsync(fd)
-        log.debug('written new archive cache under %s', full_path)
-        return size
-
-    def _get_keyfile(self, key):
-        return os.path.join(self._directory, f'{key}.key')
-
-    def store(self, key, value_reader, metadata):
-        filename, full_path = self.random_filename()
-        key_file = self._get_keyfile(key)
-
-        # STORE METADATA
-        _metadata = {
-            "version": "v1",
-            "filename": filename,
-            "full_path": full_path,
-            "key_file": key_file,
-            "store_time": time.time(),
-            "access_count": 1,
-            "access_time": 0,
-            "size": 0
-        }
-        if metadata:
-            _metadata.update(metadata)
-
-        reader = functools.partial(value_reader.read, 2**22)
-
-        iterator = iter(reader, b'')
-        size = self._write_file(full_path, iterator, 'xb')
-        metadata['size'] = size
-
-        # after archive is finished, we create a key to save the presence of the binary file
-        with open(key_file, 'wb') as f:
-            f.write(json.dumps(_metadata))
-
-        return key, size, MODE_BINARY, filename, _metadata
-
-    def fetch(self, key, retry=False, retry_attempts=10) -> tuple[typing.BinaryIO, dict]:
-
-        if retry:
-            for attempt in range(retry_attempts):
+    def _write_file(self, full_path, iterator, mode):
+        # ensure dir exists
+        destination, _ = os.path.split(full_path)
+        if not self.fs.exists(destination):
+            self.fs.makedirs(destination)
+
+        writer = self._get_writer(full_path, mode)
+
+        digest = hashlib.sha256()
+        with writer:
+            size = 0
+            for chunk in iterator:
+                size += len(chunk)
+                digest.update(chunk)
+                writer.write(chunk)
+            writer.flush()
+            # Get the file descriptor
+            fd = writer.fileno()
|
|||
188 | if key in self: |
|
|||
189 | break |
|
|||
190 | # we dind't find the key, wait 1s, and re-check |
|
|||
191 | time.sleep(1) |
|
|||
192 |
|
80 | |||
193 | if key not in self: |
|
81 | # Sync the file descriptor to disk, helps with NFS cases... | |
194 | log.exception('requested {key} not found in {self}', key, self) |
|
82 | os.fsync(fd) | |
195 | raise KeyError(key) |
|
83 | sha256 = digest.hexdigest() | |
196 |
|
84 | log.debug('written new archive cache under %s, sha256: %s', full_path, sha256) | ||
197 | key_file = self._get_keyfile(key) |
|
85 | return size, sha256 | |
198 | with open(key_file, 'rb') as f: |
|
|||
199 | metadata = json.loads(f.read()) |
|
|||
200 |
|
||||
201 | filename = metadata['filename'] |
|
|||
202 |
|
86 | |||
203 | try: |
|
87 | def store(self, key, value_reader, metadata: dict | None = None): | |
204 | return open(os.path.join(self.directory, filename), 'rb'), metadata |
|
88 | return self._store(key, value_reader, metadata, mode='xb') | |
205 | finally: |
|
|||
206 | # update usage stats, count and accessed |
|
|||
207 | metadata["access_count"] = metadata.get("access_count", 0) + 1 |
|
|||
208 | metadata["access_time"] = time.time() |
|
|||
209 |
|
89 | |||
210 | with open(key_file, 'wb') as f: |
|
90 | def fetch(self, key, retry=NOT_GIVEN, retry_attempts=NOT_GIVEN, retry_backoff=1) -> tuple[ShardFileReader, dict]: | |
211 | f.write(json.dumps(metadata)) |
|
91 | return self._fetch(key, retry, retry_attempts, retry_backoff) | |
|
92 | ||||
|
93 | def remove(self, key): | |||
|
94 | return self._remove(key) | |||
212 |
|
95 | |||
213 | def random_filename(self): |
|
96 | def random_filename(self): | |
214 | """Return filename and full-path tuple for file storage. |
|
97 | """Return filename and full-path tuple for file storage. | |
215 |
|
98 | |||
216 | Filename will be a randomly generated 28 character hexadecimal string |
|
99 | Filename will be a randomly generated 28 character hexadecimal string | |
217 | with ".archive_cache" suffixed. Two levels of sub-directories will be used to |
|
100 | with ".archive_cache" suffixed. Two levels of sub-directories will be used to | |
218 | reduce the size of directories. On older filesystems, lookups in |
|
101 | reduce the size of directories. On older filesystems, lookups in | |
219 | directories with many files may be slow. |
|
102 | directories with many files may be slow. | |
220 | """ |
|
103 | """ | |
221 |
|
104 | |||
222 | hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8') |
|
105 | hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8') | |
223 | sub_dir = os.path.join(hex_name[:2], hex_name[2:4]) |
|
|||
224 | name = hex_name[4:] + '.archive_cache' |
|
|||
225 | filename = os.path.join(sub_dir, name) |
|
|||
226 | full_path = os.path.join(self.directory, filename) |
|
|||
227 | return filename, full_path |
|
|||
228 |
|
||||
229 | def hash(self, key): |
|
|||
230 | """Compute portable hash for `key`. |
|
|||
231 |
|
||||
232 | :param key: key to hash |
|
|||
233 | :return: hash value |
|
|||
234 |
|
106 | |||
235 | """ |
|
107 | archive_name = hex_name[4:] + '.archive_cache' | |
236 | mask = 0xFFFFFFFF |
|
108 | filename = f"{hex_name[:2]}/{hex_name[2:4]}/{archive_name}" | |
237 | return zlib.adler32(key.encode('utf-8')) & mask # noqa |
|
|||
238 |
|
||||
239 | def __contains__(self, key): |
|
|||
240 | """Return `True` if `key` matching item is found in cache. |
|
|||
241 |
|
109 | |||
242 | :param key: key matching item |
|
110 | full_path = os.path.join(self.directory, filename) | |
243 | :return: True if key matching item |
|
111 | return archive_name, full_path | |
244 |
|
||||
245 | """ |
|
|||
246 | key_file = self._get_keyfile(key) |
|
|||
247 | return os.path.exists(key_file) |
|
|||
248 |
|
112 | |||
249 | def __repr__(self): |
|
113 | def __repr__(self): | |
250 |
return f' |
|
114 | return f'{self.__class__.__name__}(index={self._index}, dir={self.directory})' | |
251 |
|
115 | |||
252 |
|
116 | |||
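For orientation, the scheme above spreads archives across two directory levels taken from the first four hex characters; a quick illustration with a made-up hex string (the real value comes from os.urandom):

    hex_name = '00112233445566778899aabbccddeeff'  # illustrative only; real value is random
    archive_name = hex_name[4:] + '.archive_cache'
    filename = f"{hex_name[:2]}/{hex_name[2:4]}/{archive_name}"
    # -> '00/11/2233445566778899aabbccddeeff.archive_cache'
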
-class FanoutCache:
-    """Cache that shards keys and values."""
-
-    def __init__(
-            self, directory=None, **settings
-    ):
-        """Initialize cache instance.
-
-        :param str directory: cache directory
-        :param settings: settings dict
-
-        """
-        if directory is None:
-            raise ValueError('directory cannot be None')
-
-        directory = str(directory)
+class FileSystemFanoutCache(BaseCache):
+
+    def __init__(self, locking_url, **settings):
+        """
+        Initialize file system cache instance.
+
+        :param str locking_url: redis url for a lock
+        :param settings: settings dict
+
+        """
+        self._locking_url = locking_url
+        self._config = settings
+        cache_dir = self.get_conf('archive_cache.filesystem.store_dir')
+        directory = str(cache_dir)
         directory = os.path.expanduser(directory)
         directory = os.path.expandvars(directory)
         self._directory = directory
+        self._storage_path = directory
 
-        self._count = settings.pop('cache_shards')
-        self._locking_url = settings.pop('locking_url')
-
-        self._eviction_policy = settings.pop('cache_eviction_policy')
-        self._cache_size_limit = settings.pop('cache_size_limit')
+        # check if it's ok to write, and re-create the archive cache
+        if not os.path.isdir(self._directory):
+            os.makedirs(self._directory, exist_ok=True)
+
+        self._count = int(self.get_conf('archive_cache.filesystem.cache_shards', pop=True))
+
+        self._eviction_policy = self.get_conf('archive_cache.filesystem.eviction_policy', pop=True)
+        self._cache_size_limit = self.gb_to_bytes(int(self.get_conf('archive_cache.filesystem.cache_size_gb')))
 
+        self.retry = str2bool(self.get_conf('archive_cache.filesystem.retry', pop=True))
+        self.retry_attempts = int(self.get_conf('archive_cache.filesystem.retry_attempts', pop=True))
+        self.retry_backoff = int(self.get_conf('archive_cache.filesystem.retry_backoff', pop=True))
+
+        log.debug('Initializing archival cache instance under %s', self._directory)
         self._shards = tuple(
-            FileSystemCache(
+            FileSystemShard(
                 index=num,
                 directory=os.path.join(directory, 'shard_%03d' % num),
                 **settings,
             )
             for num in range(self._count)
         )
         self._hash = self._shards[0].hash
 
-    @property
-    def directory(self):
-        """Cache directory."""
-        return self._directory
-
-    def get_lock(self, lock_key):
-        return GenerationLock(lock_key, self._locking_url)
-
-    def _get_shard(self, key) -> FileSystemCache:
+    def _get_shard(self, key) -> FileSystemShard:
         index = self._hash(key) % self._count
         shard = self._shards[index]
         return shard
 
-    def store(self, key, value_reader, metadata=None):
-        shard = self._get_shard(key)
-        return shard.store(key, value_reader, metadata)
-
-    def fetch(self, key, retry=False, retry_attempts=10):
-        """Return file handle corresponding to `key` from cache.
-        """
-        shard = self._get_shard(key)
-        return shard.fetch(key, retry=retry, retry_attempts=retry_attempts)
-
-    def has_key(self, key):
-        """Return `True` if `key` matching item is found in cache.
-
-        :param key: key for item
-        :return: True if key is found
-
-        """
-        shard = self._get_shard(key)
-        return key in shard
-
-    def __contains__(self, item):
-        return self.has_key(item)
-
-    def evict(self, policy=None, size_limit=None):
-        """
-        Remove old items based on the conditions
-
-
-        explanation of this algo:
-        iterate over each shard, then for each shard iterate over the .key files
-        read the key files metadata stored. This gives us a full list of keys, cached_archived, their size and
-        access data, time creation, and access counts.
-
-        Store that into a memory DB so we can run different sorting strategies easily.
-        Summing the size is a sum sql query.
-
-        Then we run a sorting strategy based on eviction policy.
-        We iterate over sorted keys, and remove each checking if we hit the overall limit.
-        """
-
-        policy = policy or self._eviction_policy
-        size_limit = size_limit or self._cache_size_limit
-
-        select_policy = EVICTION_POLICY[policy]['evict']
-
-        log.debug('Running eviction policy \'%s\', and checking for size limit: %s',
-                  policy, format_size(size_limit))
-
-        if select_policy is None:
-            return 0
-
-        db = DB()
-
-        data = []
-        cnt = 1
-        for shard in self._shards:
-            for key_file in os.listdir(shard.directory):
-                if key_file.endswith('.key'):
-                    key_file_path = os.path.join(shard.directory, key_file)
-                    with open(key_file_path, 'rb') as f:
-                        metadata = json.loads(f.read())
-
-                    size = metadata.get('size')
-                    filename = metadata.get('filename')
-                    full_path = metadata.get('full_path')
-
-                    if not size:
-                        # in case we don't have size re-calc it...
-                        size = os.stat(full_path).st_size
-
-                    data.append([
-                        cnt,
-                        key_file,
-                        key_file_path,
-                        filename,
-                        full_path,
-                        metadata.get('store_time', 0),
-                        metadata.get('access_time', 0),
-                        metadata.get('access_count', 0),
-                        size,
-                    ])
-                    cnt += 1
-
-        # Insert bulk data using executemany
-        db.bulk_insert(data)
-
-        ((total_size,),) = db.sql('SELECT COALESCE(SUM(size), 0) FROM archive_cache').fetchall()
-        log.debug('Analyzed %s keys, occupied: %s', len(data), format_size(total_size))
-        select_policy_qry = select_policy.format(fields='key_file_path, full_path, size')
-        sorted_keys = db.sql(select_policy_qry).fetchall()
-
-        removed_items = 0
-        removed_size = 0
-        for key, cached_file, size in sorted_keys:
-            # simulate removal impact BEFORE removal
-            total_size -= size
-
-            if total_size <= size_limit:
-                # we obtained what we wanted...
-                break
-
-            os.remove(cached_file)
-            os.remove(key)
-            removed_items += 1
-            removed_size += size
-
-        log.debug('Removed %s cache archives, and reduced size: %s', removed_items, format_size(removed_size))
-        return removed_items
-
-
-def get_archival_config(config):
-
-    final_config = {
-
-    }
-
-    for k, v in config.items():
-        if k.startswith('archive_cache'):
-            final_config[k] = v
-
-    return final_config
-
-
-def get_archival_cache_store(config):
-
-    global cache_meta
-    if cache_meta is not None:
-        return cache_meta
-
-    config = get_archival_config(config)
-    backend = config['archive_cache.backend.type']
-    if backend != 'filesystem':
-        raise ValueError('archive_cache.backend.type only supports "filesystem"')
-
-    archive_cache_locking_url = config['archive_cache.locking.url']
-    archive_cache_dir = config['archive_cache.filesystem.store_dir']
-    archive_cache_size_gb = config['archive_cache.filesystem.cache_size_gb']
-    archive_cache_shards = config['archive_cache.filesystem.cache_shards']
-    archive_cache_eviction_policy = config['archive_cache.filesystem.eviction_policy']
-
-    log.debug('Initializing archival cache instance under %s', archive_cache_dir)
-
-    # check if it's ok to write, and re-create the archive cache
-    if not os.path.isdir(archive_cache_dir):
-        os.makedirs(archive_cache_dir, exist_ok=True)
-
-    d_cache = FanoutCache(
-        archive_cache_dir,
-        locking_url=archive_cache_locking_url,
-        cache_shards=archive_cache_shards,
-        cache_size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,
-        cache_eviction_policy=archive_cache_eviction_policy
-    )
-    cache_meta = d_cache
-    return cache_meta
+    def _get_size(self, shard, archive_path):
+        return os.stat(archive_path).st_size
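Taken together, a minimal usage sketch of the refactored backend, not part of the changeset: the module path, config values and archive key below are assumptions, and it presumes BaseCache keeps the has_key/__contains__ behaviour of the old FanoutCache:

    import io

    from rhodecode.lib.archive_cache.backends.filesystem import FileSystemFanoutCache  # assumed path

    settings = {
        'archive_cache.filesystem.store_dir': '/tmp/archive_cache',  # illustrative values
        'archive_cache.filesystem.cache_shards': 8,
        'archive_cache.filesystem.eviction_policy': 'least-recently-stored',
        'archive_cache.filesystem.cache_size_gb': 1,
        'archive_cache.filesystem.retry': 'false',
        'archive_cache.filesystem.retry_attempts': 10,
        'archive_cache.filesystem.retry_backoff': 1,
    }
    d_cache = FileSystemFanoutCache(locking_url='redis://localhost:6379/0', **settings)

    archive_key = 'repo-id-1.tar.gz'  # illustrative key
    if archive_key not in d_cache:
        # the value_reader only needs a .read(chunk_size) method
        d_cache.store(archive_key, io.BytesIO(b'archive-bytes'), {'archive_name': archive_key})

    reader, metadata = d_cache.fetch(archive_key)
    payload = reader.read(-1)
    reader.close()
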
@@ -1,59 +1,62 @@
 # Copyright (C) 2015-2024 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import redis
-from ..._vendor import redis_lock
-from .utils import ArchiveCacheGenerationLock
+from .._vendor import redis_lock
+
+
+class ArchiveCacheGenerationLock(Exception):
+    pass
 
 
 class GenerationLock:
     """
     Locking mechanism that detects if a lock is acquired
 
     with GenerationLock(lock_key):
         compute_archive()
     """
     lock_timeout = 7200
 
     def __init__(self, lock_key, url):
         self.lock_key = lock_key
         self._create_client(url)
         self.lock = self.get_lock()
 
     def _create_client(self, url):
         connection_pool = redis.ConnectionPool.from_url(url)
         self.writer_client = redis.StrictRedis(
             connection_pool=connection_pool
         )
         self.reader_client = self.writer_client
 
     def get_lock(self):
         return redis_lock.Lock(
             redis_client=self.writer_client,
             name=self.lock_key,
             expire=self.lock_timeout,
             strict=True
         )
 
     def __enter__(self):
         acquired = self.lock.acquire(blocking=False)
         if not acquired:
             raise ArchiveCacheGenerationLock('Failed to create a lock')
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         self.lock.release()
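As the docstring above notes, the lock is acquired with blocking=False, so a concurrent generation attempt fails fast instead of queueing. A sketch of the intended call pattern; the Redis URL and key are illustrative and compute_archive is a placeholder:

    lock_key = 'archive_generate.repo-id-1.tar.gz'  # illustrative key
    try:
        with GenerationLock(lock_key, 'redis://localhost:6379/0'):
            compute_archive()  # placeholder for the real archive generation step
    except ArchiveCacheGenerationLock:
        # another worker is already generating this archive; poll the cache instead
        pass
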
@@ -1,72 +1,134 @@
 # Copyright (C) 2015-2024 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
-import os
+import sqlite3
+import s3fs.core
+
+NOT_GIVEN = -917
 
 
-class ArchiveCacheGenerationLock(Exception):
-    pass
+EVICTION_POLICY = {
+    'none': {
+        'evict': None,
+    },
+    'least-recently-stored': {
+        'evict': 'SELECT {fields} FROM archive_cache ORDER BY store_time',
+    },
+    'least-recently-used': {
+        'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_time',
+    },
+    'least-frequently-used': {
+        'evict': 'SELECT {fields} FROM archive_cache ORDER BY access_count',
+    },
+}
 
 
 def archive_iterator(_reader, block_size: int = 4096 * 512):
     # 4096 * 64 = 64KB
     while 1:
         data = _reader.read(block_size)
         if not data:
             break
         yield data
 
 
-def get_directory_statistics(start_path):
-    """
-    total_files, total_size, directory_stats = get_directory_statistics(start_path)
-
-    print(f"Directory statistics for: {start_path}\n")
-    print(f"Total files: {total_files}")
-    print(f"Total size: {format_size(total_size)}\n")
-
-    :param start_path:
-    :return:
-    """
-
-    total_files = 0
-    total_size = 0
-    directory_stats = {}
-
-    for dir_path, dir_names, file_names in os.walk(start_path):
-        dir_size = 0
-        file_count = len(file_names)
-
-        for file in file_names:
-            filepath = os.path.join(dir_path, file)
-            file_size = os.path.getsize(filepath)
-            dir_size += file_size
-
-        directory_stats[dir_path] = {'file_count': file_count, 'size': dir_size}
-        total_files += file_count
-        total_size += dir_size
-
-    return total_files, total_size, directory_stats
-
-
 def format_size(size):
     # Convert size in bytes to a human-readable format (e.g., KB, MB, GB)
     for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
         if size < 1024:
             return f"{size:.2f} {unit}"
         size /= 1024
+
+
+class StatsDB:
+
+    def __init__(self):
+        self.connection = sqlite3.connect(':memory:')
+        self._init_db()
+
+    def _init_db(self):
+        qry = '''
+        CREATE TABLE IF NOT EXISTS archive_cache (
+            rowid INTEGER PRIMARY KEY,
+            key_file TEXT,
+            key_file_path TEXT,
+            archive_key TEXT,
+            archive_path TEXT,
+            store_time REAL,
+            access_time REAL,
+            access_count INTEGER DEFAULT 0,
+            size INTEGER DEFAULT 0
+        )
+        '''
+
+        self.sql(qry)
+        self.connection.commit()
+
+    @property
+    def sql(self):
+        return self.connection.execute
+
+    def bulk_insert(self, rows):
+        qry = '''
+        INSERT INTO archive_cache (
+            rowid,
+            key_file,
+            key_file_path,
+            archive_key,
+            archive_path,
+            store_time,
+            access_time,
+            access_count,
+            size
+        )
+        VALUES (
+            ?, ?, ?, ?, ?, ?, ?, ?, ?
+        )
+        '''
+        cursor = self.connection.cursor()
+        cursor.executemany(qry, rows)
+        self.connection.commit()
+
+    def get_total_size(self):
+        qry = 'SELECT COALESCE(SUM(size), 0) FROM archive_cache'
+        ((total_size,),) = self.sql(qry).fetchall()
+        return total_size
+
+    def get_sorted_keys(self, select_policy):
+        select_policy_qry = select_policy.format(fields='key_file, archive_key, size')
+        return self.sql(select_policy_qry).fetchall()
+
+
+class ShardFileReader:
+
+    def __init__(self, file_like_reader):
+        self._file_like_reader = file_like_reader
+
+    def __getattr__(self, item):
+        if isinstance(self._file_like_reader, s3fs.core.S3File):
+            match item:
+                case 'name':
+                    # S3 FileWrapper doesn't support name attribute, and we use it
+                    return self._file_like_reader.full_name
+                case _:
+                    return getattr(self._file_like_reader, item)
+        else:
+            return getattr(self._file_like_reader, item)
+
+    def __repr__(self):
+        return f'<{self.__class__.__name__}={self._file_like_reader}>'
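A self-contained sketch of how StatsDB and EVICTION_POLICY are meant to combine during eviction; the two rows are made up, in the real flow they are read from the shards' key files:

    import time

    db = StatsDB()
    now = time.time()
    # rowid, key_file, key_file_path, archive_key, archive_path, store_time, access_time, access_count, size
    db.bulk_insert([
        (1, 'a.key', '/store/a.key', 'a', '/store/a.archive_cache', now - 300, now - 10, 5, 100),
        (2, 'b.key', '/store/b.key', 'b', '/store/b.archive_cache', now - 200, now - 5, 1, 200),
    ])
    assert db.get_total_size() == 300
    # least-recently-stored orders by store_time, so 'a.key' is the first eviction candidate
    candidates = db.get_sorted_keys(EVICTION_POLICY['least-recently-stored']['evict'])
    assert candidates[0] == ('a.key', 'a', 100)
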
@@ -1,607 +1,607 @@
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
The base Controller API
Provides the BaseController class for subclassing. And usage in different
controllers
"""

import logging
import socket
import base64

import markupsafe
import ipaddress

import paste.httpheaders
from paste.auth.basic import AuthBasicAuthenticator
from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception

import rhodecode
from rhodecode.authentication.base import VCS_TYPE
from rhodecode.lib import auth, utils2
from rhodecode.lib import helpers as h
from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
from rhodecode.lib.exceptions import UserCreationError
from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
from rhodecode.lib.utils2 import AttributeDict
from rhodecode.lib.str_utils import ascii_bytes, safe_int, safe_str
from rhodecode.lib.type_utils import aslist, str2bool
from rhodecode.lib.hash_utils import sha1
from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
from rhodecode.model.notification import NotificationModel
from rhodecode.model.settings import VcsSettingsModel, SettingsModel

log = logging.getLogger(__name__)


def _filter_proxy(ip):
    """
    Passed in IP addresses in HEADERS can be in a special format of multiple
    ips. Those comma separated IPs are passed from various proxies in the
    chain of request processing. The left-most being the original client.
    We only care about the first IP which came from the org. client.

    :param ip: ip string from headers
    """
    if ',' in ip:
        _ips = ip.split(',')
        _first_ip = _ips[0].strip()
        log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
        return _first_ip
    return ip


def _filter_port(ip):
    """
    Removes a port from ip, there are 4 main cases to handle here.
    - ipv4 eg. 127.0.0.1
    - ipv6 eg. ::1
    - ipv4+port eg. 127.0.0.1:8080
    - ipv6+port eg. [::1]:8080

    :param ip:
    """
    def is_ipv6(ip_addr):
        if hasattr(socket, 'inet_pton'):
            try:
                socket.inet_pton(socket.AF_INET6, ip_addr)
            except socket.error:
                return False
        else:
            # fallback to ipaddress
            try:
                ipaddress.IPv6Address(safe_str(ip_addr))
            except Exception:
                return False
        return True

    if ':' not in ip:  # must be ipv4 pure ip
        return ip

    if '[' in ip and ']' in ip:  # ipv6 with port
        return ip.split(']')[0][1:].lower()

    # must be ipv6 or ipv4 with port
    if is_ipv6(ip):
        return ip
    else:
        ip, _port = ip.split(':')[:2]  # means ipv4+port
        return ip


def get_ip_addr(environ):
    proxy_key = 'HTTP_X_REAL_IP'
    proxy_key2 = 'HTTP_X_FORWARDED_FOR'
    def_key = 'REMOTE_ADDR'

    def ip_filters(ip_):
        return _filter_port(_filter_proxy(ip_))

    ip = environ.get(proxy_key)
    if ip:
        return ip_filters(ip)

    ip = environ.get(proxy_key2)
    if ip:
        return ip_filters(ip)

    ip = environ.get(def_key, '0.0.0.0')
    return ip_filters(ip)

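The three helpers above compose as get_ip_addr -> _filter_proxy -> _filter_port; a few illustrative inputs and the values they should reduce to (doctest-style sketch, not from the changeset):

    assert _filter_proxy('203.0.113.1, 10.0.0.1') == '203.0.113.1'  # left-most (client) entry wins
    assert _filter_port('127.0.0.1:8080') == '127.0.0.1'            # ipv4 + port
    assert _filter_port('[::1]:8080') == '::1'                      # bracketed ipv6 + port
    assert get_ip_addr({'HTTP_X_REAL_IP': '198.51.100.7:443'}) == '198.51.100.7'
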
def get_server_ip_addr(environ, log_errors=True):
    hostname = environ.get('SERVER_NAME')
    try:
        return socket.gethostbyname(hostname)
    except Exception as e:
        if log_errors:
            # in some cases this lookup is not possible, and we don't want to
            # make it an exception in logs
            log.exception('Could not retrieve server ip address: %s', e)
        return hostname


def get_server_port(environ):
    return environ.get('SERVER_PORT')



def get_user_agent(environ):
    return environ.get('HTTP_USER_AGENT')


def vcs_operation_context(
        environ, repo_name, username, action, scm, check_locking=True,
        is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
    """
    Generate the context for a vcs operation, e.g. push or pull.

    This context is passed over the layers so that hooks triggered by the
    vcs operation know details like the user, the user's IP address etc.

    :param check_locking: Allows to switch of the computation of the locking
        data. This serves mainly the need of the simplevcs middleware to be
        able to disable this for certain operations.

    """
    # Tri-state value: False: unlock, None: nothing, True: lock
    make_lock = None
    locked_by = [None, None, None]
    is_anonymous = username == User.DEFAULT_USER
    user = User.get_by_username(username)
    if not is_anonymous and check_locking:
        log.debug('Checking locking on repository "%s"', repo_name)
        repo = Repository.get_by_repo_name(repo_name)
        make_lock, __, locked_by = repo.get_locking_state(
            action, user.user_id)
    user_id = user.user_id
    settings_model = VcsSettingsModel(repo=repo_name)
    ui_settings = settings_model.get_ui_settings()

    # NOTE(marcink): This should be also in sync with
    # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
    store = [x for x in ui_settings if x.key == '/']
    repo_store = ''
    if store:
        repo_store = store[0].value

    scm_data = {
        'ip': get_ip_addr(environ),
        'username': username,
        'user_id': user_id,
        'action': action,
        'repository': repo_name,
        'scm': scm,
        'config': rhodecode.CONFIG['__file__'],
        'repo_store': repo_store,
        'make_lock': make_lock,
        'locked_by': locked_by,
        'server_url': utils2.get_server_url(environ),
        'user_agent': get_user_agent(environ),
        'hooks': get_enabled_hook_classes(ui_settings),
        'is_shadow_repo': is_shadow_repo,
        'detect_force_push': detect_force_push,
        'check_branch_perms': check_branch_perms,
    }
    return scm_data


class BasicAuth(AuthBasicAuthenticator):

    def __init__(self, realm, authfunc, registry, auth_http_code=None,
                 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
        super().__init__(realm=realm, authfunc=authfunc)
        self.realm = realm
        self.rc_realm = rc_realm
        self.initial_call = initial_call_detection
        self.authfunc = authfunc
        self.registry = registry
        self.acl_repo_name = acl_repo_name
        self._rc_auth_http_code = auth_http_code

    def _get_response_from_code(self, http_code, fallback):
        try:
            return get_exception(safe_int(http_code))
        except Exception:
            log.exception('Failed to fetch response class for code %s, using fallback: %s', http_code, fallback)
            return fallback

    def get_rc_realm(self):
        return safe_str(self.rc_realm)

    def build_authentication(self):
        header = [('WWW-Authenticate', f'Basic realm="{self.realm}"')]

        # NOTE: the initial_Call detection seems to be not working/not needed witg latest Mercurial
        # investigate if we still need it.
        if self._rc_auth_http_code and not self.initial_call:
            # return alternative HTTP code if alternative http return code
            # is specified in RhodeCode config, but ONLY if it's not the
            # FIRST call
            custom_response_klass = self._get_response_from_code(self._rc_auth_http_code, fallback=HTTPUnauthorized)
            log.debug('Using custom response class: %s', custom_response_klass)
            return custom_response_klass(headers=header)
        return HTTPUnauthorized(headers=header)

    def authenticate(self, environ):
        authorization = paste.httpheaders.AUTHORIZATION(environ)
        if not authorization:
            return self.build_authentication()
        (auth_meth, auth_creds_b64) = authorization.split(' ', 1)
        if 'basic' != auth_meth.lower():
            return self.build_authentication()

        credentials = safe_str(base64.b64decode(auth_creds_b64.strip()))
        _parts = credentials.split(':', 1)
        if len(_parts) == 2:
            username, password = _parts
            auth_data = self.authfunc(
                username, password, environ, VCS_TYPE,
                registry=self.registry, acl_repo_name=self.acl_repo_name)
            if auth_data:
                return {'username': username, 'auth_data': auth_data}
            if username and password:
                # we mark that we actually executed authentication once, at
                # that point we can use the alternative auth code
                self.initial_call = False

        return self.build_authentication()

    __call__ = authenticate

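For reference, authenticate() above parses a standard Basic authorization header; a sketch of the environ shape it expects, with illustrative credentials:

    import base64

    creds_b64 = base64.b64encode(b'someuser:somepass').decode('ascii')
    environ = {'HTTP_AUTHORIZATION': f'Basic {creds_b64}'}
    # paste.httpheaders.AUTHORIZATION(environ) yields 'Basic c29tZXVzZXI6c29tZXBhc3M=',
    # which authenticate() splits into the auth method and the base64 credential payload
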
270 | def calculate_version_hash(config): |
|
270 | def calculate_version_hash(config): | |
271 | return sha1( |
|
271 | return sha1( | |
272 | config.get(b'beaker.session.secret', b'') + ascii_bytes(rhodecode.__version__) |
|
272 | config.get(b'beaker.session.secret', b'') + ascii_bytes(rhodecode.__version__) | |
273 | )[:8] |
|
273 | )[:8] | |
274 |
|
274 | |||
275 |
|
275 | |||
276 | def get_current_lang(request): |
|
276 | def get_current_lang(request): | |
277 | return getattr(request, '_LOCALE_', request.locale_name) |
|
277 | return getattr(request, '_LOCALE_', request.locale_name) | |
278 |
|
278 | |||
279 |
|
279 | |||
280 | def attach_context_attributes(context, request, user_id=None, is_api=None): |
|
280 | def attach_context_attributes(context, request, user_id=None, is_api=None): | |
281 | """ |
|
281 | """ | |
282 | Attach variables into template context called `c`. |
|
282 | Attach variables into template context called `c`. | |
283 | """ |
|
283 | """ | |
284 | config = request.registry.settings |
|
284 | config = request.registry.settings | |
285 |
|
285 | |||
286 | rc_config = SettingsModel().get_all_settings(cache=True, from_request=False) |
|
286 | rc_config = SettingsModel().get_all_settings(cache=True, from_request=False) | |
287 | context.rc_config = rc_config |
|
287 | context.rc_config = rc_config | |
288 | context.rhodecode_version = rhodecode.__version__ |
|
288 | context.rhodecode_version = rhodecode.__version__ | |
289 | context.rhodecode_edition = config.get('rhodecode.edition') |
|
289 | context.rhodecode_edition = config.get('rhodecode.edition') | |
290 | context.rhodecode_edition_id = config.get('rhodecode.edition_id') |
|
290 | context.rhodecode_edition_id = config.get('rhodecode.edition_id') | |
291 | # unique secret + version does not leak the version but keep consistency |
|
291 | # unique secret + version does not leak the version but keep consistency | |
292 | context.rhodecode_version_hash = calculate_version_hash(config) |
|
292 | context.rhodecode_version_hash = calculate_version_hash(config) | |
293 |
|
293 | |||
294 | # Default language set for the incoming request |
|
294 | # Default language set for the incoming request | |
295 | context.language = get_current_lang(request) |
|
295 | context.language = get_current_lang(request) | |
296 |
|
296 | |||
    # Visual options
    context.visual = AttributeDict({})

    # DB stored Visual Items
    context.visual.show_public_icon = str2bool(
        rc_config.get('rhodecode_show_public_icon'))
    context.visual.show_private_icon = str2bool(
        rc_config.get('rhodecode_show_private_icon'))
    context.visual.stylify_metatags = str2bool(
        rc_config.get('rhodecode_stylify_metatags'))
    context.visual.dashboard_items = safe_int(
        rc_config.get('rhodecode_dashboard_items', 100))
    context.visual.admin_grid_items = safe_int(
        rc_config.get('rhodecode_admin_grid_items', 100))
    context.visual.show_revision_number = str2bool(
        rc_config.get('rhodecode_show_revision_number', True))
    context.visual.show_sha_length = safe_int(
        rc_config.get('rhodecode_show_sha_length', 100))
    context.visual.repository_fields = str2bool(
        rc_config.get('rhodecode_repository_fields'))
    context.visual.show_version = str2bool(
        rc_config.get('rhodecode_show_version'))
    context.visual.use_gravatar = str2bool(
        rc_config.get('rhodecode_use_gravatar'))
    context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
    context.visual.default_renderer = rc_config.get(
        'rhodecode_markup_renderer', 'rst')
    context.visual.comment_types = ChangesetComment.COMMENT_TYPES
    context.visual.rhodecode_support_url = \
        rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')

    context.visual.affected_files_cut_off = 60

    context.pre_code = rc_config.get('rhodecode_pre_code')
    context.post_code = rc_config.get('rhodecode_post_code')
    context.rhodecode_name = rc_config.get('rhodecode_title')
    context.default_encodings = aslist(config.get('default_encoding'), sep=',')
    # if default_encoding is specified in the request, it takes priority
    if request.GET.get('default_encoding'):
        context.default_encodings.insert(0, request.GET.get('default_encoding'))
    context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
    context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
    context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')
    # INI stored
    context.labs_active = str2bool(
        config.get('labs_settings_active', 'false'))
    context.ssh_enabled = str2bool(
        config.get('ssh.generate_authorized_keyfile', 'false'))
    context.ssh_key_generator_enabled = str2bool(
        config.get('ssh.enable_ui_key_generator', 'true'))

    context.visual.allow_custom_hooks_settings = str2bool(
        config.get('allow_custom_hooks_settings', True))
    context.debug_style = str2bool(config.get('debug_style', False))

    context.rhodecode_instanceid = config.get('instance_id')

    context.visual.cut_off_limit_diff = safe_int(
        config.get('cut_off_limit_diff'), default=0)
    context.visual.cut_off_limit_file = safe_int(
        config.get('cut_off_limit_file'), default=0)

    context.license = AttributeDict({})
    context.license.hide_license_info = str2bool(
        config.get('license.hide_license_info', False))

    # AppEnlight
    context.appenlight_enabled = config.get('appenlight', False)
    context.appenlight_api_public_key = config.get(
        'appenlight.api_public_key', '')
    context.appenlight_server_url = config.get('appenlight.server_url', '')

    diffmode = {
        "unified": "unified",
        "sideside": "sideside"
    }.get(request.GET.get('diffmode'))

    if is_api is not None:
        is_api = hasattr(request, 'rpc_user')
    session_attrs = {
        # defaults
        "clone_url_format": "http",
        "diffmode": "sideside",
        "license_fingerprint": request.session.get('license_fingerprint')
    }

    if not is_api:
        # don't access pyramid session for API calls
        if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
            request.session['rc_user_session_attr.diffmode'] = diffmode

        # session settings per user

        for k, v in list(request.session.items()):
            pref = 'rc_user_session_attr.'
            if k and k.startswith(pref):
                k = k[len(pref):]
                session_attrs[k] = v
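        # Illustrative note (added comment): a session entry such as
        # 'rc_user_session_attr.diffmode' = 'unified' surfaces to templates
        # as session_attrs['diffmode'] = 'unified', overriding the defaults
        # declared above.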

    context.user_session_attrs = session_attrs

    # JS template context
    context.template_context = {
        'repo_name': None,
        'repo_type': None,
        'repo_landing_commit': None,
        'rhodecode_user': {
            'username': None,
            'email': None,
            'notification_status': False
        },
        'session_attrs': session_attrs,
        'visual': {
            'default_renderer': None
        },
        'commit_data': {
            'commit_id': None
        },
        'pull_request_data': {'pull_request_id': None},
        'timeago': {
            'refresh_time': 120 * 1000,
            'cutoff_limit': 1000 * 60 * 60 * 24 * 7
        },
        'pyramid_dispatch': {

        },
        'extra': {'plugins': {}}
    }
    # END CONFIG VARS
    if is_api:
        csrf_token = None
    else:
        csrf_token = auth.get_csrf_token(session=request.session)

    context.csrf_token = csrf_token
    context.backends = list(rhodecode.BACKENDS.keys())

    unread_count = 0
    user_bookmark_list = []
    if user_id:
        unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
        user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
    context.unread_notifications = unread_count
    context.bookmark_items = user_bookmark_list

    # web case
    if hasattr(request, 'user'):
        context.auth_user = request.user
        context.rhodecode_user = request.user

    # api case
    if hasattr(request, 'rpc_user'):
        context.auth_user = request.rpc_user
        context.rhodecode_user = request.rpc_user

    # attach the whole call context to the request
    request.set_call_context(context)


def get_auth_user(request):
    environ = request.environ
    session = request.session

    ip_addr = get_ip_addr(environ)

    # make sure that we update permissions each time we call a controller
    _auth_token = (
        # ?auth_token=XXX
        request.GET.get('auth_token', '')
        # ?api_key=XXX !LEGACY
        or request.GET.get('api_key', '')
        # or headers....
        or request.headers.get('X-Rc-Auth-Token', '')
    )
    if not _auth_token and request.matchdict:
        url_auth_token = request.matchdict.get('_auth_token')
        _auth_token = url_auth_token
        if _auth_token:
            log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])

    if _auth_token:
        # when using an API key we assume the user exists, and
        # doesn't need auth based on cookies.
        auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
        authenticated = False
    else:
        cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
        try:
            auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
                                 ip_addr=ip_addr)
        except UserCreationError as e:
            h.flash(e, 'error')
            # container auth or other auth functions that create users
            # on the fly can throw this exception, signaling that there's
            # an issue with user creation; an explanation should be provided
            # in the exception itself. We then create a simple blank
            # AuthUser
            auth_user = AuthUser(ip_addr=ip_addr)

        # in case someone changes a user's password, it triggers a session
        # flush and forces a re-login
        if password_changed(auth_user, session):
            session.invalidate()
            cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
            auth_user = AuthUser(ip_addr=ip_addr)

        authenticated = cookie_store.get('is_authenticated')

    if not auth_user.is_authenticated and auth_user.is_user_object:
        # user is not authenticated and not empty
        auth_user.set_authenticated(authenticated)

    return auth_user, _auth_token
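
# Illustrative usage sketch (added comment, not part of the original module):
# a tween or a view could resolve the calling identity like this, `request`
# being a regular pyramid request:
#
#   auth_user, auth_token = get_auth_user(request)
#   request.user = auth_user
#   if auth_token:
#       log.debug('request was authenticated via token')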


def h_filter(s):
    """
    Custom filter for Mako templates. Mako by default uses `markupsafe.escape`;
    we wrap this with additional functionality that converts None to empty
    strings.
    """
    if s is None:
        return markupsafe.Markup()
    return markupsafe.escape(s)
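
# Behaviour sketch (added comment):
#
#   >>> h_filter(None)
#   Markup('')
#   >>> h_filter('<b>bold</b>')
#   Markup('&lt;b&gt;bold&lt;/b&gt;')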


def add_events_routes(config):
    """
    Adds routing that can be used in events. Because some events are triggered
    outside of the pyramid context, we need to bootstrap the request with some
    routing registered.
    """

    from rhodecode.apps._base import ADMIN_PREFIX

    config.add_route(name='home', pattern='/')
    config.add_route(name='main_page_repos_data', pattern='/_home_repos')
    config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')

    config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
    config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
    config.add_route(name='repo_summary', pattern='/{repo_name}')
    config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
    config.add_route(name='repo_group_home', pattern='/{repo_group_name}')

    config.add_route(name='pullrequest_show',
                     pattern='/{repo_name}/pull-request/{pull_request_id}')
    config.add_route(name='pull_requests_global',
                     pattern='/pull-request/{pull_request_id}')

    config.add_route(name='repo_commit',
                     pattern='/{repo_name}/changeset/{commit_id}')
    config.add_route(name='repo_files',
                     pattern='/{repo_name}/files/{commit_id}/{f_path}')

    config.add_route(name='hovercard_user',
                     pattern='/_hovercard/user/{user_id}')

    config.add_route(name='hovercard_user_group',
                     pattern='/_hovercard/user_group/{user_group_id}')

    config.add_route(name='hovercard_pull_request',
                     pattern='/_hovercard/pull_request/{pull_request_id}')

    config.add_route(name='hovercard_repo_commit',
                     pattern='/_hovercard/commit/{repo_name}/{commit_id}')
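
# Illustrative sketch (added comment): together with the bootstrap helpers
# below, this allows generating URLs outside of a web request, e.g. from a
# celery task:
#
#   request = bootstrap_request()
#   bootstrap_config(request)
#   url = request.route_path('repo_summary', repo_name='some-repo')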


def bootstrap_config(request, registry_name='RcTestRegistry'):
    from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
    import pyramid.testing
    registry = pyramid.testing.Registry(registry_name)

    global_config = {'__file__': ''}

    config = pyramid.testing.setUp(registry=registry, request=request)
    sanitize_settings_and_apply_defaults(global_config, config.registry.settings)

    # allow pyramid lookup in testing
    config.include('pyramid_mako')
    config.include('rhodecode.lib.rc_beaker')
    config.include('rhodecode.lib.rc_cache')
    config.include('rhodecode.lib.archive_cache')
    add_events_routes(config)

    return config


def bootstrap_request(**kwargs):
    """
    Returns a thin version of the Request object, used in non-web contexts
    like testing/celery.
    """

    import pyramid.testing
    from rhodecode.lib.request import ThinRequest as _ThinRequest

    class ThinRequest(_ThinRequest):
        application_url = kwargs.pop('application_url', 'http://example.com')
        host = kwargs.pop('host', 'example.com:80')
        domain = kwargs.pop('domain', 'example.com')

    class ThinSession(pyramid.testing.DummySession):
        def save(*arg, **kw):
            pass

    request = ThinRequest(**kwargs)
    request.session = ThinSession()

    return request
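
# Illustrative sketch (added comment), keyword names as accepted above:
#
#   request = bootstrap_request(application_url='https://code.example.com',
#                               host='code.example.com:443',
#                               domain='code.example.com')
#   request.session['flash'] = []  # ThinSession accepts writes; save() is a no-op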
@@ -1,2198 +1,2230 @@
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Helper functions

Consists of functions typically used within templates, but also available
to Controllers. This module is available to both as 'h'.
"""
import base64
import collections

import os
import random
import hashlib
import io
import textwrap
import urllib.request
import urllib.parse
import urllib.error
import math
import logging
import re
import time
import string
import regex
from collections import OrderedDict

import pygments
import itertools
import fnmatch

from datetime import datetime
from functools import partial
from pygments.formatters.html import HtmlFormatter
from pygments.lexers import (
    get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)

from pyramid.threadlocal import get_current_request
from tempita import looper
from webhelpers2.html import literal, HTML, escape
from webhelpers2.html._autolink import _auto_link_urls
from webhelpers2.html.tools import (
    button_to, highlight, js_obfuscate, strip_links, strip_tags)

from webhelpers2.text import (
    chop_at, collapse, convert_accented_entities,
    convert_misc_entities, lchop, plural, rchop, remove_formatting,
    replace_whitespace, urlify, truncate, wrap_paragraphs)
from webhelpers2.date import time_ago_in_words

from webhelpers2.html.tags import (
    _input, NotGiven, _make_safe_id_component as safeid,
    form as insecure_form,
    auto_discovery_link, checkbox, end_form, file,
    hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
    stylesheet_link, submit, text, password, textarea,
    ul, radio, Options)

from webhelpers2.number import format_byte_size
# python3.11 backport fixes for webhelpers2
from rhodecode import ConfigGet
from rhodecode.lib._vendor.webhelpers_backports import raw_select

from rhodecode.lib.action_parser import action_parser
from rhodecode.lib.html_filters import sanitize_html
from rhodecode.lib.pagination import Page, RepoPage, SqlPage
from rhodecode.lib import ext_json
from rhodecode.lib.ext_json import json
from rhodecode.lib.str_utils import safe_bytes, convert_special_chars, base64_to_str
from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
from rhodecode.lib.str_utils import safe_str
from rhodecode.lib.utils2 import (
    str2bool,
    get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
    AttributeDict, safe_int, md5, md5_safe, get_host_info)
from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
from rhodecode.lib.index.search_utils import get_matching_line_offsets
from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
from rhodecode.model.repo_group import RepoGroupModel
from rhodecode.model.settings import IssueTrackerSettingsModel


log = logging.getLogger(__name__)


DEFAULT_USER = User.DEFAULT_USER
DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL

def asset(path, ver=None, **kwargs):
    """
    Helper to generate a static asset file path for rhodecode assets

    eg. h.asset('images/image.png', ver='3923')

    :param path: path of asset
    :param ver: optional version query param to append as ?ver=
    """
    request = get_current_request()
    query = {}
    query.update(kwargs)
    if ver:
        query = {'ver': ver}
    return request.static_path(
        f'rhodecode:public/{path}', _query=query)
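
# Illustrative result (added comment; the exact prefix depends on how the
# 'rhodecode:public' static view is configured):
#
#   asset('images/image.png', ver='3923')
#   # -> e.g. '/_static/rhodecode/images/image.png?ver=3923'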


default_html_escape_table = {
    ord('&'): '&amp;',
    ord('<'): '&lt;',
    ord('>'): '&gt;',
    ord('"'): '&quot;',
    ord("'"): '&#39;',
}


def html_escape(text, html_escape_table=default_html_escape_table):
    """Produce entities within text."""
    return text.translate(html_escape_table)
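
# Behaviour sketch (added comment):
#
#   >>> html_escape('<a href="#">Tom & Jerry</a>')
#   '&lt;a href=&quot;#&quot;&gt;Tom &amp; Jerry&lt;/a&gt;'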


def str_json(*args, **kwargs):
    return ext_json.str_json(*args, **kwargs)


def formatted_str_json(*args, **kwargs):
    return ext_json.formatted_str_json(*args, **kwargs)


def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
    """
    Truncate string ``s`` at the first occurrence of ``sub``.

    If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
    """
    suffix_if_chopped = suffix_if_chopped or ''
    pos = s.find(sub)
    if pos == -1:
        return s

    if inclusive:
        pos += len(sub)

    chopped = s[:pos]
    left = s[pos:].strip()

    if left and suffix_if_chopped:
        chopped += suffix_if_chopped

    return chopped
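
# Behaviour sketch (added comment):
#
#   >>> chop_at_smart('ERROR: disk full', ':')
#   'ERROR'
#   >>> chop_at_smart('ERROR: disk full', ':', inclusive=True, suffix_if_chopped='...')
#   'ERROR:...'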


def shorter(text, size=20, prefix=False):
    postfix = '...'
    if len(text) > size:
        if prefix:
            # shorten in front
            return postfix + text[-(size - len(postfix)):]
        else:
            return text[:size - len(postfix)] + postfix
    return text
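
# Behaviour sketch (added comment):
#
#   >>> shorter('0123456789abcdefghij', size=10)
#   '0123456...'
#   >>> shorter('0123456789abcdefghij', size=10, prefix=True)
#   '...defghij'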


def reset(name, value=None, id=NotGiven, type="reset", **attrs):
    """
    Reset button
    """
    return _input(type, name, value, id, attrs)


def select(name, selected_values, options, id=NotGiven, **attrs):

    if isinstance(options, (list, tuple)):
        options_iter = options
        # Handle old (value, label) lists, where a value can itself be a
        # list of (value, label) pairs that becomes an optgroup
        options = Options()
        for opt in options_iter:
            if isinstance(opt, tuple) and len(opt) == 2:
                value, label = opt
            elif isinstance(opt, str):
                value = label = opt
            else:
                raise ValueError('invalid select option type %r' % type(opt))

            if isinstance(value, (list, tuple)):
                option_group = options.add_optgroup(label)
                for opt2 in value:
                    if isinstance(opt2, tuple) and len(opt2) == 2:
                        group_value, group_label = opt2
                    elif isinstance(opt2, str):
                        group_value = group_label = opt2
                    else:
                        raise ValueError('invalid select option type %r' % type(opt2))

                    option_group.add_option(group_label, group_value)
            else:
                options.add_option(label, value)

    return raw_select(name, selected_values, options, id=id, **attrs)
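
# Illustrative usage (added comment), including the optgroup form handled
# above, where the first tuple element is itself a list of options and the
# second is the group label:
#
#   select('landing_rev', 'branch:default',
#          [('branch:default', 'default'),
#           ([('tag:v1.0', 'v1.0'), ('tag:v2.0', 'v2.0')], 'Tags')])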


def branding(name, length=40):
    return truncate(name, length, indicator="")


def FID(raw_id, path):
    """
    Creates a unique ID for a filenode based on its hash of path and commit;
    it's safe to use in URLs.

    :param raw_id:
    :param path:
    """

    return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
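
# Shape of the generated ID (added comment; values illustrative):
#
#   FID('1b3c7a0e9d42...', 'docs/index.rst')
#   # -> 'c-' + short commit id + '-' + first 12 chars of md5(path),
#   #    e.g. 'c-1b3c7a0e9d42-<md5[:12]>'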


class _GetError(object):
    """Get error from form_errors, and represent it as a span-wrapped error
    message

    :param field_name: field to fetch errors for
    :param form_errors: form errors dict
    """

    def __call__(self, field_name, form_errors):
        tmpl = """<span class="error_msg">%s</span>"""
        if form_errors and field_name in form_errors:
            return literal(tmpl % form_errors.get(field_name))


get_error = _GetError()


class _ToolTip(object):

    def __call__(self, tooltip_title, trim_at=50):
        """
        Special function just to wrap our text into nicely formatted,
        auto-wrapped text

        :param tooltip_title:
        """
        tooltip_title = escape(tooltip_title)
        tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
        return tooltip_title


tooltip = _ToolTip()

files_icon = '<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>'


def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None,
                      limit_items=False, linkify_last_item=False, hide_last_item=False,
                      copy_path_icon=True):

    if at_ref:
        route_qry = {'at': at_ref}
        default_landing_ref = at_ref or landing_ref_name or commit_id
    else:
        route_qry = None
        default_landing_ref = commit_id

    # first segment is a `HOME` link to repo files root location
    root_name = literal('<i class="icon-home"></i>')

    url_segments = [
        link_to(
            root_name,
            repo_files_by_ref_url(
                repo_name,
                repo_type,
                f_path=None,  # None here is a special case for SVN repos,
                              # that won't prefix with a ref
                ref_name=default_landing_ref,
                commit_id=commit_id,
                query=route_qry
            )
        )]

    path_segments = file_path.split('/')
    last_cnt = len(path_segments) - 1
    for cnt, segment in enumerate(path_segments):
        if not segment:
            continue
        segment_html = escape(segment)

        last_item = cnt == last_cnt

        if last_item and hide_last_item:
            # iterate over and hide last element
            continue

        if last_item and linkify_last_item is False:
            # plain version
            url_segments.append(segment_html)
        else:
            url_segments.append(
                link_to(
                    segment_html,
                    repo_files_by_ref_url(
                        repo_name,
                        repo_type,
                        f_path='/'.join(path_segments[:cnt + 1]),
                        ref_name=default_landing_ref,
                        commit_id=commit_id,
                        query=route_qry
                    ),
                ))

    limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
    if limit_items and len(limited_url_segments) < len(url_segments):
        url_segments = limited_url_segments

    full_path = file_path
    if copy_path_icon:
        icon = files_icon.format(escape(full_path))
    else:
        icon = ''

    if file_path == '':
        return root_name
    else:
        return literal(' / '.join(url_segments) + icon)


def files_url_data(request):
    matchdict = request.matchdict

    if 'f_path' not in matchdict:
        matchdict['f_path'] = ''
    else:
        matchdict['f_path'] = urllib.parse.quote(safe_str(matchdict['f_path']))
    if 'commit_id' not in matchdict:
        matchdict['commit_id'] = 'tip'

    return ext_json.str_json(matchdict)


def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None):
    _is_svn = is_svn(db_repo_type)
    final_f_path = f_path

    if _is_svn:
        """
        For SVN the ref_name cannot be used as a commit_id; it needs to be
        prefixed with the actual commit_id followed by the ref_name. This
        should be done only in case this is an initial landing URL, without
        additional paths.

        like: /1000/tags/1.0.0/?at=tags/1.0.0
        """

        if ref_name and ref_name != 'tip':
            # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it
            # for SVN we only do this magic prefix if it's root, e.g. landing revision
            # of files link. If we are in the tree we don't need this since we traverse the url
            # that has everything stored
            if f_path in ['', '/']:
                final_f_path = '/'.join([ref_name, f_path])

        # SVN always needs a commit_id explicitly, without a named REF
        default_commit_id = commit_id
    else:
        """
        For git and mercurial we construct a new URL using the names instead of commit_id
        like: /master/some_path?at=master
        """
        # We currently do not support branches with slashes
        if '/' in ref_name:
            default_commit_id = commit_id
        else:
            default_commit_id = ref_name

    # sometimes we pass f_path as None, to indicate explicit no prefix,
    # we translate it to string to not have None
    final_f_path = final_f_path or ''

    files_url = route_path(
        'repo_files',
        repo_name=db_repo_name,
        commit_id=default_commit_id,
        f_path=final_f_path,
        _query=query
    )
    return files_url


def code_highlight(code, lexer, formatter, use_hl_filter=False):
    """
    Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
    The result is returned as a string.
    """
    if use_hl_filter:
        # add HL filter
        from rhodecode.lib.index import search_utils
        lexer.add_filter(search_utils.ElasticSearchHLFilter())
    return pygments.format(pygments.lex(code, lexer), formatter)
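
# Illustrative sketch (added comment) wiring this up with a pygments lexer
# and the formatter class defined below; the formatter options are standard
# pygments HtmlFormatter keywords:
#
#   lexer = get_lexer_by_name('python')
#   formatter = CodeHtmlFormatter(linenos='table', cssclass='code-highlight')
#   html = code_highlight("print('hello')", lexer, formatter)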


class CodeHtmlFormatter(HtmlFormatter):
    """
    My code Html Formatter for source codes
    """

    def wrap(self, source):
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))

    def _wrap_code(self, source):
        for cnt, it in enumerate(source):
            i, t = it
            t = f'<div id="L{cnt+1}">{t}</div>'
            yield i, t

    def _wrap_tablelinenos(self, inner):
        dummyoutfile = io.StringIO()
        lncount = 0
        for t, line in inner:
            if t:
                lncount += 1
            dummyoutfile.write(line)

        fl = self.linenostart
        mw = len(str(lncount + fl - 1))
        sp = self.linenospecial
        st = self.linenostep
        la = self.lineanchors
        aln = self.anchorlinenos
        nocls = self.noclasses
        if sp:
            lines = []

            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if i % sp == 0:
                        if aln:
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
                                         (la, i, mw, i))
                        else:
                            lines.append('<span class="special">%*d</span>' % (mw, i))
                    else:
                        if aln:
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                        else:
                            lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)
        else:
            lines = []
            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if aln:
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                    else:
                        lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)

        # in case you wonder about the seemingly redundant <div> here: since the
        # content in the other cell also is wrapped in a div, some browsers in
        # some configurations seem to mess up the formatting...
        if nocls:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td><div class="linenodiv" '
                      'style="background-color: #f0f0f0; padding-right: 10px">'
                      '<pre style="line-height: 125%">' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        else:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        yield 0, dummyoutfile.getvalue()
        yield 0, '</td></tr></table>'
500 |
|
500 | |||
501 |
|
501 | |||
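

# Illustrative sketch (not part of the original module): how this formatter is
# typically driven through pygments. `code_highlight` used elsewhere in this
# module wraps the same call; the snippet below goes through pygments directly.
def _example_code_html_formatter():  # pragma: no cover - documentation sketch
    from pygments import highlight
    from pygments.lexers import PythonLexer

    source = "def add(a, b):\n    return a + b\n"
    # `linenos='table'` routes rendering through _wrap_tablelinenos above,
    # producing a two-column table: line numbers, then the highlighted code.
    formatter = CodeHtmlFormatter(linenos='table', lineanchors='L')
    return highlight(source, PythonLexer(), formatter)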


class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
    def __init__(self, **kw):
        # only show these line numbers if set
        self.only_lines = kw.pop('only_line_numbers', [])
        self.query_terms = kw.pop('query_terms', [])
        self.max_lines = kw.pop('max_lines', 5)
        self.line_context = kw.pop('line_context', 3)
        self.url = kw.pop('url', None)

        # NOTE: this skips CodeHtmlFormatter.__init__ in the MRO and calls
        # the next base class initializer directly.
        super(CodeHtmlFormatter, self).__init__(**kw)

    def _wrap_code(self, source):
        for cnt, it in enumerate(source):
            i, t = it
            t = '<pre>%s</pre>' % t
            yield i, t

    def _wrap_tablelinenos(self, inner):
        yield 0, '<table class="code-highlight %stable">' % self.cssclass

        last_shown_line_number = 0
        current_line_number = 1

        for t, line in inner:
            if not t:
                yield t, line
                continue

            if current_line_number in self.only_lines:
                if last_shown_line_number + 1 != current_line_number:
                    yield 0, '<tr>'
                    yield 0, '<td class="line">...</td>'
                    yield 0, '<td id="hlcode" class="code"></td>'
                    yield 0, '</tr>'

                yield 0, '<tr>'
                if self.url:
                    yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
                        self.url, current_line_number, current_line_number)
                else:
                    yield 0, '<td class="line"><a href="">%i</a></td>' % (
                        current_line_number)
                yield 0, '<td id="hlcode" class="code">' + line + '</td>'
                yield 0, '</tr>'

                last_shown_line_number = current_line_number

            current_line_number += 1

        yield 0, '</table>'
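

# Illustrative sketch (hypothetical values, not part of the original module):
# rendering a search hit so that only the matched lines, linked back to the
# file view, end up in the output table.
def _example_search_formatter():  # pragma: no cover - documentation sketch
    from pygments import highlight
    from pygments.lexers import PythonLexer

    source = "a = 1\nb = 2\nc = 3\nd = 4\n"
    formatter = SearchContentCodeHtmlFormatter(
        linenos='table',
        only_line_numbers=[2, 4],          # render just the matched lines
        url='/repo/files/tip/example.py',  # hypothetical target of line links
    )
    # lines 1 and 3 are skipped; a '...' row marks each gap
    return highlight(source, PythonLexer(), formatter)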


def hsv_to_rgb(h, s, v):
    """Convert HSV color values (each in the 0-1 range) to an RGB tuple."""

    if s == 0.0:
        return v, v, v
    i = int(h * 6.0)  # XXX assume int() truncates!
    f = (h * 6.0) - i
    p = v * (1.0 - s)
    q = v * (1.0 - s * f)
    t = v * (1.0 - s * (1.0 - f))
    i = i % 6
    if i == 0:
        return v, t, p
    if i == 1:
        return q, v, p
    if i == 2:
        return p, v, t
    if i == 3:
        return p, q, v
    if i == 4:
        return t, p, v
    if i == 5:
        return v, p, q
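

# Sanity-check sketch (not part of the original module): this is the classic
# sector-based HSV->RGB conversion, equivalent to the stdlib implementation.
def _example_hsv_to_rgb():  # pragma: no cover - documentation sketch
    import colorsys

    assert hsv_to_rgb(0.0, 1.0, 1.0) == (1.0, 0.0, 0.0)  # pure red
    assert hsv_to_rgb(0.5, 1.0, 1.0) == colorsys.hsv_to_rgb(0.5, 1.0, 1.0)  # cyan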


def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
    """
    Generator yielding up to `n` evenly distributed colors, computed in HSV
    space by stepping the hue with the golden ratio. It always yields the
    colors in the same order.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: yields lists of three RGB component strings
    """

    golden_ratio = 0.618033988749895
    h = 0.22717784590367374

    for _ in range(n):
        h += golden_ratio
        h %= 1
        HSV_tuple = [h, saturation, lightness]
        RGB_tuple = hsv_to_rgb(*HSV_tuple)
        yield [str(int(x * 256)) for x in RGB_tuple]
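

# Usage sketch (not part of the original module): the golden-ratio hue step
# spreads consecutive hues far apart on the color wheel, so even the first few
# colors are visually distinct. The sequence is fully deterministic.
def _example_unique_colors():  # pragma: no cover - documentation sketch
    colors = [','.join(rgb) for rgb in unique_color_generator(n=3)]
    # three "r,g,b" strings; the same triples are produced on every call
    return colors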


def color_hasher(n=10000, saturation=0.10, lightness=0.95):
    """
    Returns a function which, when called with an argument, returns a unique
    color for that argument, e.g.:

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: css RGB string

    >>> color_hash = color_hasher()
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('other')
    'rgb(90, 224, 159)'
    """

    color_dict = {}
    cgenerator = unique_color_generator(
        saturation=saturation, lightness=lightness)

    def get_color_string(thing):
        if thing in color_dict:
            col = color_dict[thing]
        else:
            col = color_dict[thing] = next(cgenerator)
        return "rgb(%s)" % (', '.join(col))

    return get_color_string


def get_lexer_safe(mimetype=None, filepath=None):
    """
    Tries to return a relevant pygments lexer using mimetype/filepath name,
    defaulting to plain text if none could be found.
    """
    lexer = None
    try:
        if mimetype:
            lexer = get_lexer_for_mimetype(mimetype)
        if not lexer:
            lexer = get_lexer_for_filename(filepath)
    except pygments.util.ClassNotFound:
        pass

    if not lexer:
        lexer = get_lexer_by_name('text')

    return lexer
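

# Usage sketch (not part of the original module): the lookup degrades
# gracefully. An unknown mimetype or extension never raises; it just falls
# back to the plain 'text' lexer.
def _example_get_lexer_safe():  # pragma: no cover - documentation sketch
    python_lexer = get_lexer_safe(mimetype='text/x-python')
    fallback_lexer = get_lexer_safe(filepath='README.unknown-extension')
    return python_lexer, fallback_lexer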


def get_lexer_for_filenode(filenode):
    lexer = get_custom_lexer(filenode.extension) or filenode.lexer
    return lexer


def pygmentize(filenode, **kwargs):
    """
    pygmentize function using pygments

    :param filenode: file node whose content should be highlighted
    """
    lexer = get_lexer_for_filenode(filenode)
    return literal(code_highlight(filenode.content, lexer,
                                  CodeHtmlFormatter(**kwargs)))


def is_following_repo(repo_name, user_id):
    from rhodecode.model.scm import ScmModel
    return ScmModel().is_following_repo(repo_name, user_id)


class _Message(object):
    """A message returned by ``Flash.pop_messages()``.

    Converting the message to a string returns the message text. Instances
    also have the following attributes:

    * ``message``: the message text.
    * ``category``: the category specified when the message was created.
    """

    def __init__(self, category, message, sub_data=None):
        self.category = category
        self.message = message
        self.sub_data = sub_data or {}

    def __str__(self):
        return self.message

    # legacy Python 2 alias, kept for backward compatibility
    __unicode__ = __str__

    def __html__(self):
        return escape(safe_str(self.message))


class Flash(object):
    # List of allowed categories. If None, allow any category.
    categories = ["warning", "notice", "error", "success"]

    # Default category if none is specified.
    default_category = "notice"

    def __init__(self, session_key="flash", categories=None,
                 default_category=None):
        """
        Instantiate a ``Flash`` object.

        ``session_key`` is the key to save the messages under in the user's
        session.

        ``categories`` is an optional list which overrides the default list
        of categories.

        ``default_category`` overrides the default category used for messages
        when none is specified.
        """
        self.session_key = session_key
        if categories is not None:
            self.categories = categories
        if default_category is not None:
            self.default_category = default_category
        if self.categories and self.default_category not in self.categories:
            raise ValueError(
                "unrecognized default category %r" % (self.default_category,))

    def pop_messages(self, session=None, request=None):
        """
        Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        messages = []

        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                sub_data = {}
                if hasattr(msg, 'rsplit'):
                    flash_data = msg.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        sub_data = json.loads(flash_data[1])
                else:
                    org_message = msg

                messages.append(_Message(cat, org_message, sub_data=sub_data))

        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self, session=None, request=None):
        payloads = []
        messages = flash.pop_messages(session=session, request=request) or []
        for message in messages:
            payloads.append({
                'message': {
                    'message': '{}'.format(message.message),
                    'level': message.category,
                    'force': True,
                    'subdata': message.sub_data
                }
            })
        return safe_str(json.dumps(payloads))

    def __call__(self, message, category=None, ignore_duplicate=True,
                 session=None, request=None):

        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        session.flash(
            message, queue=category, allow_duplicate=not ignore_duplicate)


flash = Flash()
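

# Usage sketch (not part of the original module): `flash` is a module-level
# singleton; calling it queues a message in the request session, and templates
# later drain the queue with `pop_messages()` / `json_alerts()`.
def _example_flash_usage(request):  # pragma: no cover - documentation sketch
    flash('Repository created', category='success', request=request)
    # later, typically when rendering the page:
    for msg in flash.pop_messages(request=request):
        print(msg.category, str(msg))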


#==============================================================================
# SCM FILTERS available via h.
#==============================================================================
from rhodecode.lib.vcs.utils import author_name, author_email
from rhodecode.lib.utils2 import age, age_from_seconds
from rhodecode.model.db import User, ChangesetStatus


email = author_email


def capitalize(raw_text):
    return raw_text.capitalize()


def short_id(long_id):
    return long_id[:12]


def hide_credentials(url):
    from rhodecode.lib.utils2 import credentials_filter
    return credentials_filter(url)

import zoneinfo
import tzlocal
local_timezone = tzlocal.get_localzone()


def get_timezone(datetime_iso, time_is_local=False):
    tzinfo = '+00:00'

    # detect if we have timezone info; otherwise, add it
    if time_is_local and isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
        force_timezone = os.environ.get('RC_TIMEZONE', '')
        if force_timezone:
            force_timezone = zoneinfo.ZoneInfo(force_timezone)
        timezone = force_timezone or local_timezone

        offset = datetime_iso.replace(tzinfo=timezone).strftime('%z')
        tzinfo = '{}:{}'.format(offset[:-2], offset[-2:])
    return tzinfo
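

# Behaviour sketch (not part of the original module): for a naive local
# datetime the function returns the local UTC offset as '+HH:MM'; anything
# else keeps the '+00:00' default.
def _example_get_timezone():  # pragma: no cover - documentation sketch
    from datetime import datetime as dt

    naive = dt(2024, 1, 15, 12, 0, 0)
    return (
        get_timezone(naive, time_is_local=True),   # e.g. '+01:00' in CET
        get_timezone(naive, time_is_local=False),  # always '+00:00'
    )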


def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True):
    title = value or format_date(datetime_iso)
    tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local)

    return literal(
        '<time class="timeago {cls}" title="{tt_title}" datetime="{dt}{tzinfo}">{title}</time>'.format(
            cls='tooltip' if tooltip else '',
            tt_title=('{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)) if tooltip else '',
            title=title, dt=datetime_iso, tzinfo=tzinfo
        ))


def _shorten_commit_id(commit_id, commit_len=None):
    if commit_len is None:
        request = get_current_request()
        commit_len = request.call_context.visual.show_sha_length
    return commit_id[:commit_len]


def show_id(commit, show_idx=None, commit_len=None):
    """
    Configurable function that shows a commit ID;
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    """
    if show_idx is None:
        request = get_current_request()
        show_idx = request.call_context.visual.show_revision_number

    raw_id = _shorten_commit_id(commit.raw_id, commit_len=commit_len)
    if show_idx:
        return 'r%s:%s' % (commit.idx, raw_id)
    else:
        return '%s' % (raw_id, )
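

# Behaviour sketch (not part of the original module, hypothetical commit):
# with `show_idx` the numeric revision index is prefixed, otherwise only the
# shortened SHA is returned.
def _example_show_id(commit):  # pragma: no cover - documentation sketch
    return (
        show_id(commit, show_idx=True, commit_len=12),   # 'r123:fffeeefffeee'
        show_id(commit, show_idx=False, commit_len=12),  # 'fffeeefffeee'
    )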


def format_date(date):
    """
    Use standardized formatting for dates used in RhodeCode.

    :param date: date/datetime object
    :return: formatted date
    """

    if date:
        _fmt = "%a, %d %b %Y %H:%M:%S"
        return safe_str(date.strftime(_fmt))

    return ""


class _RepoChecker(object):

    def __init__(self, backend_alias):
        self._backend_alias = backend_alias

    def __call__(self, repository):
        if hasattr(repository, 'alias'):
            _type = repository.alias
        elif hasattr(repository, 'repo_type'):
            _type = repository.repo_type
        else:
            _type = repository
        return _type == self._backend_alias


is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')
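

# Usage sketch (not part of the original module): the checkers accept a
# Repository model object (via `alias`/`repo_type`) or a plain alias string.
def _example_repo_checkers():  # pragma: no cover - documentation sketch
    assert is_git('git') and not is_git('hg')
    assert is_svn('svn')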


def get_repo_type_by_name(repo_name):
    repo = Repository.get_by_repo_name(repo_name)
    if repo:
        return repo.repo_type


def is_svn_without_proxy(repository):
    if is_svn(repository):
        return not ConfigGet().get_bool('vcs.svn.proxy.enabled')
    return False


def discover_user(author):
    """
    Tries to discover a RhodeCode User based on the author string. The author
    string is typically `FirstName LastName <email@address.com>`
    """

    # if author is already an instance use it for extraction
    if isinstance(author, User):
        return author

    # valid email in the attribute passed, see if that user is in the system
    _email = author_email(author)
    if _email != '':
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return user

    # maybe it's a username? Try to extract it and fetch by username
    _author = author_name(author)
    user = User.get_by_username(_author, case_insensitive=True, cache=True)
    if user is not None:
        return user

    return None


def email_or_none(author):
    # extract email from the commit string
    _email = author_email(author)

    # If we have an email, use it; otherwise
    # see if it contains a username we can get an email from
    if _email != '':
        return _email
    else:
        user = User.get_by_username(
            author_name(author), case_insensitive=True, cache=True)

        if user is not None:
            return user.email

    # No valid email, not a valid user in the system, none!
    return None


def link_to_user(author, length=0, **kwargs):
    user = discover_user(author)
    # user can be None, but if we have it already it means we can re-use it
    # in the person() function, saving one expensive query
    if user:
        author = user

    display_person = person(author, 'username_or_name_or_email')
    if length:
        display_person = shorter(display_person, length)

    if user and user.username != user.DEFAULT_USER:
        return link_to(
            escape(display_person),
            route_path('user_profile', username=user.username),
            **kwargs)
    else:
        return escape(display_person)


def link_to_group(users_group_name, **kwargs):
    return link_to(
        escape(users_group_name),
        route_path('user_group_profile', user_group_name=users_group_name),
        **kwargs)


def person(author, show_attr="username_and_name"):
    user = discover_user(author)
    if user:
        return getattr(user, show_attr)
    else:
        _author = author_name(author)
        _email = email(author)
        return _author or _email


def author_string(email):
    if email:
        user = User.get_by_email(email, case_insensitive=True, cache=True)
        if user:
            if user.first_name or user.last_name:
                return '%s %s <%s>' % (
                    user.first_name, user.last_name, email)
            else:
                return email
        else:
            return email
    else:
        return None


def person_by_id(id_, show_attr="username_and_name"):
    # attr to return from fetched user
    def person_getter(usr):
        return getattr(usr, show_attr)

    # maybe it's an ID?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            return person_getter(user)
    return id_


def gravatar_with_user(request, author, show_disabled=False, tooltip=False):
    _render = request.get_partial_renderer('rhodecode:templates/base/base.mako')
    return _render('gravatar_with_user', author, show_disabled=show_disabled, tooltip=tooltip)


tags_patterns = OrderedDict(
    (
        (
            "lang",
            (
                re.compile(r"\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+\.]*)\]"),
                '<div class="metatag" tag="lang">\\2</div>',
            ),
        ),
        (
            "see",
            (
                re.compile(r"\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]"),
                '<div class="metatag" tag="see">see: \\1 </div>',
            ),
        ),
        (
            "url",
            (
                re.compile(
                    r"\[url\ \=\>\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]"
                ),
                '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>',
            ),
        ),
        (
            "license",
            (
                re.compile(
                    r"\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]"
                ),
                # don't make it a raw string here...
                '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>',
            ),
        ),
        (
            "ref",
            (
                re.compile(
                    r"\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]"
                ),
                '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>',
            ),
        ),
        (
            "state",
            (
                re.compile(r"\[(stable|featured|stale|dead|dev|deprecated)\]"),
                '<div class="metatag" tag="state \\1">\\1</div>',
            ),
        ),
        # label in grey
        (
            "label",
            (re.compile(r"\[([a-z]+)\]"), '<div class="metatag" tag="label">\\1</div>'),
        ),
        # generic catch all in grey
        (
            "generic",
            (
                re.compile(r"\[([a-zA-Z0-9\.\-\_]+)\]"),
                '<div class="metatag" tag="generic">\\1</div>',
            ),
        ),
    )
)


def extract_metatags(value):
    """
    Extract supported meta-tags from given text value
    """
    tags = []
    if not value:
        return tags, ''

    for key, val in list(tags_patterns.items()):
        pat, replace_html = val
        tags.extend([(key, x.group()) for x in pat.finditer(value)])
        value = pat.sub('', value)

    return tags, value


def style_metatag(tag_type, value):
    """
    Converts a metatag of the given `tag_type` found in `value` into its
    HTML equivalent.
    """
    if not value:
        return ''

    html_value = value
    tag_data = tags_patterns.get(tag_type)
    if tag_data:
        pat, replace_html = tag_data
        # convert to plain `str` instead of a markup tag to be used in
        # regex expressions. safe_str doesn't work here
        html_value = pat.sub(replace_html, value)

    return html_value
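

# Usage sketch (not part of the original module): metatags are bracketed
# markers embedded in free text, e.g. a repository description.
def _example_metatags():  # pragma: no cover - documentation sketch
    tags, rest = extract_metatags('[stable] [lang => python] core repo')
    # tags -> [('lang', '[lang => python]'), ('state', '[stable]')]
    # rest -> the remaining text with the tag markers stripped out
    html = style_metatag('state', '[stable]')
    # html -> '<div class="metatag" tag="state stable">stable</div>'
    return tags, rest, html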


def bool2icon(value, show_at_false=True):
    """
    Returns the boolean state of the given value, rendered as an html element
    with classes that represent true/false icons

    :param value: given value to convert to html node
    """

    if value:  # does bool conversion
        return HTML.tag('i', class_="icon-true", title='True')
    else:  # not true as bool
        if show_at_false:
            return HTML.tag('i', class_="icon-false", title='False')
        return HTML.tag('i')


def b64(inp):
    return base64.b64encode(safe_bytes(inp))

#==============================================================================
# PERMS
#==============================================================================
from rhodecode.lib.auth import (
    HasPermissionAny, HasPermissionAll,
    HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
    HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
    csrf_token_key, AuthUser)


#==============================================================================
# GRAVATAR URL
#==============================================================================
class InitialsGravatar(object):
    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]
1240 | def rgb_to_hex_color(self, rgb_tuple): |
|
1240 | def rgb_to_hex_color(self, rgb_tuple): | |
1241 | """ |
|
1241 | """ | |
1242 | Converts an rgb_tuple passed to an hex color. |
|
1242 | Converts an rgb_tuple passed to an hex color. | |
1243 |
|
1243 | |||
1244 | :param rgb_tuple: tuple with 3 ints represents rgb color space |
|
1244 | :param rgb_tuple: tuple with 3 ints represents rgb color space | |
1245 | """ |
|
1245 | """ | |
1246 | return '#' + ("".join(map(chr, rgb_tuple)).encode('hex')) |
|
1246 | return '#' + ("".join(map(chr, rgb_tuple)).encode('hex')) | |
1247 |
|
1247 | |||
1248 | def email_to_int_list(self, email_str): |
|
1248 | def email_to_int_list(self, email_str): | |
1249 | """ |
|
1249 | """ | |
1250 | Get every byte of the hex digest value of email and turn it to integer. |
|
1250 | Get every byte of the hex digest value of email and turn it to integer. | |
1251 | It's going to be always between 0-255 |
|
1251 | It's going to be always between 0-255 | |
1252 | """ |
|
1252 | """ | |
1253 | digest = md5_safe(email_str.lower()) |
|
1253 | digest = md5_safe(email_str.lower()) | |
1254 | return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)] |
|
1254 | return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)] | |
1255 |
|
1255 | |||
    def pick_color_bank_index(self, email_str, color_bank):
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Maps an email to a color using a stable algorithm, so the same
        input always yields the same color.

        :param email_str: email address to map to a color
        """
        color_bank = self.get_color_bank()
        # pick a position (modulo the bank's length) so we always find an
        # entry, even if the bank holds fewer than 256 values
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

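    # Usage sketch (assuming an instance ``ig`` of this class); repeated
    # calls with the same email always return the same bank entry:
    #   ig.str2color('jane@example.com')  # -> e.g. '#8a994d'
    #   ig.str2color('jane@example.com')  # -> same value again
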
    def normalize_email(self, email_address):
        # default host used to fill in the fake/missing email
        default_host = 'localhost'

        if not email_address:
            email_address = f'{User.DEFAULT_USER}@{default_host}'

        email_address = safe_str(email_address)

        if '@' not in email_address:
            email_address = f'{email_address}@{default_host}'

        if email_address.endswith('@'):
            email_address = f'{email_address}{default_host}'

        email_address = convert_special_chars(email_address)

        return email_address

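    # Normalization examples (illustrative; the first assumes
    # User.DEFAULT_USER == 'default'):
    #   ''          -> 'default@localhost'
    #   'john'      -> 'john@localhost'
    #   'john@'     -> 'john@localhost'
    #   'jóhn@x.eu' -> 'john@x.eu'   (special chars converted to ascii)
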
    def get_initials(self):
        """
        Returns 2-letter initials calculated from the input.
        The algorithm picks the first given email address, takes the first
        letter of the part before the @, and then the first letter of the
        server name. If the part before the @ has the form
        `somestring.somestring2`, the server letter is replaced with the
        first letter of somestring2.

        If the function was initialized with both first and last name, this
        overrides the extraction from email: we take the first letter of the
        first and last name instead. One special case: if the full name is
        compound, like Guido Von Rossum, we use the last part of the last
        name (Von Rossum), picking `R`.

        The function also normalizes non-ascii characters to their ascii
        representation, e.g. Ą => A
        """
        # replace non-ascii to ascii
        first_name = convert_special_chars(self.first_name)
        last_name = convert_special_chars(self.last_name)
        # multi-word last names, e.g. Guido Von Rossum: take the last part only
        last_name = last_name.split(' ', 1)[-1]

        # do NFKD encoding, and also make sure email has proper format
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'first_name.last_name' syntax
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2 and _dot_split[1]:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # get first letter of first and last names to create initials
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()

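    # Examples of the resulting initials (hypothetical inputs):
    #   email='john.doe@example.com'                -> 'JD'
    #   email='marcin@rhodecode.com'                -> 'MR'
    #   first_name='Guido', last_name='Von Rossum'  -> 'GR'
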
    def get_img_data_by_type(self, font_family, img_type):
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        font-family="{font_family}"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        return {
            "default_user": default_user
        }[img_type]

    def get_img_data(self, svg_type=None):
        """
        Generates the svg metadata for the image.
        """
        fonts = [
            '-apple-system',
            'BlinkMacSystemFont',
            'Segoe UI',
            'Roboto',
            'Oxygen-Sans',
            'Ubuntu',
            'Cantarell',
            'Helvetica Neue',
            'sans-serif'
        ]
        font_family = ','.join(fonts)
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                  pointer-events="auto" fill="{text_color}"
                  font-family="{font_family}"
                  style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/2.05,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data

    def generate_svg(self, svg_type=None):
        img_data = base64_to_str(self.get_img_data(svg_type))
        return "data:image/svg+xml;base64,{}".format(img_data)


def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False):

    svg_type = None
    if email_address == User.DEFAULT_USER_EMAIL:
        svg_type = 'default_user'

    klass = InitialsGravatar(email_address, first_name, last_name, size)

    if store_on_disk:
        from rhodecode.apps.file_store import utils as store_utils
        from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
            FileOverSizeException
        from rhodecode.model.db import Session

        image_key = md5_safe(email_address.lower()
                             + first_name.lower() + last_name.lower())

        storage = store_utils.get_file_storage(request.registry.settings)
        filename = '{}.svg'.format(image_key)
        subdir = 'gravatars'
        # since the final name carries a counter suffix, apply 0 as the
        # initial counter to get the name the storage will actually use
        uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False))
        store_uid = os.path.join(subdir, uid)

        db_entry = FileStore.get_by_store_uid(store_uid)
        if db_entry:
            return request.route_path('download_file', fid=store_uid)

        img_data = klass.get_img_data(svg_type=svg_type)
        img_file = store_utils.bytes_to_file_obj(img_data)

        try:
            store_uid, metadata = storage.save_file(
                img_file, filename, directory=subdir,
                extensions=['.svg'], randomized_name=False)
        except (FileNotAllowedException, FileOverSizeException):
            raise

        entry = FileStore.create(
            file_uid=store_uid, filename=metadata["filename"],
            file_hash=metadata["sha256"], file_size=metadata["size"],
            file_display_name=filename,
            file_description=f'user gravatar `{safe_str(filename)}`',
            hidden=True, check_acl=False, user_id=1
        )
        Session().add(entry)
        Session().commit()
        log.debug('Stored upload in DB as %s', entry)

        return request.route_path('download_file', fid=store_uid)

    else:
        return klass.generate_svg(svg_type=svg_type)


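# Usage sketch (illustrative; ``request`` is a Pyramid request object):
#
#   url = initials_gravatar(request, 'jane@example.com', 'Jane', 'Doe', size=30)
#   # -> inline 'data:image/svg+xml;base64,...' URI, or a 'download_file'
#   #    route path when called with store_on_disk=True

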
def gravatar_external(request, gravatar_url_tmpl, email_address, size=30):
    return safe_str(gravatar_url_tmpl)\
        .replace('{email}', email_address) \
        .replace('{md5email}', md5_safe(email_address.lower())) \
        .replace('{netloc}', request.host) \
        .replace('{scheme}', request.scheme) \
        .replace('{size}', safe_str(size))


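# With a template such as (hypothetical, Gravatar-style)
#   'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
# the call fills in the md5 of the lower-cased email and the pixel size,
# e.g. '...avatar/9ad6e...?d=identicon&s=30'.

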
def gravatar_url(email_address, size=30, request=None):
    request = request or get_current_request()
    _use_gravatar = request.call_context.visual.use_gravatar

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, str):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size)

    if _use_gravatar:
        gravatar_url_tmpl = request.call_context.visual.gravatar_url \
            or User.DEFAULT_GRAVATAR_URL
        return gravatar_external(request, gravatar_url_tmpl, email_address, size=size)

    else:
        return initials_gravatar(request, email_address, '', '', size=size)


def breadcrumb_repo_link(repo):
    """
    Makes a breadcrumb path link to a repo.

    ex::
        group >> subgroup >> repo

    :param repo: a Repository instance
    """

    path = [
        link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name),
                title='last change:{}'.format(format_date(group.last_commit_change)))
        for group in repo.groups_with_parents
    ] + [
        link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name),
                title='last change:{}'.format(format_date(repo.last_commit_change)))
    ]

    return literal(' » '.join(path))


def breadcrumb_repo_group_link(repo_group):
    """
    Makes a breadcrumb path link to a repository group.

    ex::
        group >> subgroup

    :param repo_group: a Repository Group instance
    """

    path = [
        link_to(group.name,
                route_path('repo_group_home', repo_group_name=group.group_name),
                title='last change:{}'.format(format_date(group.last_commit_change)))
        for group in repo_group.parents
    ] + [
        link_to(repo_group.name,
                route_path('repo_group_home', repo_group_name=repo_group.group_name),
                title='last change:{}'.format(format_date(repo_group.last_commit_change)))
    ]

    return literal(' » '.join(path))


def format_byte_size_binary(file_size):
    """
    Formats file/folder sizes into a human-readable binary (1024-based)
    representation.
    """
    if file_size is None:
        file_size = 0

    formatted_size = format_byte_size(file_size, binary=True)
    return formatted_size


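# For instance (output format is illustrative):
#   format_byte_size_binary(0)        # -> '0 B'
#   format_byte_size_binary(2048)     # -> '2.0 KiB'
#   format_byte_size_binary(5242880)  # -> '5.0 MiB'

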
def urlify_text(text_, safe=True, **href_attrs):
    """
    Extracts urls from text and makes html links out of them.
    """

    url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                         r'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')

    def url_func(match_obj):
        url_full = match_obj.groups()[0]
        a_options = dict(href_attrs)
        a_options['href'] = url_full
        a_text = url_full
        return HTML.tag("a", a_text, **a_options)

    _new_text = url_pat.sub(url_func, text_)

    if safe:
        return literal(_new_text)
    return _new_text


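# Example transformation (extra attributes shown are hypothetical, and the
# attribute order in the output may vary):
#   urlify_text('see https://example.com for details', target='_blank')
#   -> 'see <a href="https://example.com" target="_blank">https://example.com</a> for details'

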
def urlify_commits(text_, repo_name):
    """
    Extracts commit ids from text and makes links out of them.

    :param text_: text to parse for commit ids
    :param repo_name: repo name to build the URL with
    """

    url_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

    def url_func(match_obj):
        commit_id = match_obj.groups()[1]
        pref = match_obj.groups()[0]
        suf = match_obj.groups()[2]

        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">'
            '%(commit_id)s</a>%(suf)s'
        )
        return tmpl % {
            'pref': pref,
            'cls': 'revision-link',
            'url': route_url(
                'repo_commit', repo_name=repo_name, commit_id=commit_id),
            'commit_id': commit_id,
            'suf': suf,
            'hovercard_alt': 'Commit: {}'.format(commit_id),
            'hovercard_url': route_url(
                'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)
        }

    new_text = url_pat.sub(url_func, text_)

    return new_text


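# The pattern matches bare 12-40 character hex strings delimited by
# whitespace, so a message like (hash below is made up):
#   'fixes regression from deadbeefcafe1234567890'
# gets the hash wrapped in an <a> pointing at the 'repo_commit' route.

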
def _process_url_func(match_obj, repo_name, uid, entry,
                      return_raw_data=False, link_format='html'):
    pref = ''
    if match_obj.group().startswith(' '):
        pref = ' '

    issue_id = ''.join(match_obj.groups())

    if link_format == 'html':
        tmpl = (
            '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format == 'html+hovercard':
        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format in ['rst', 'rst+hovercard']:
        tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
    elif link_format in ['markdown', 'markdown+hovercard']:
        tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)'
    else:
        raise ValueError('Bad link_format: {}'.format(link_format))

    (repo_name_cleaned,
     parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name)

    # variables replacement
    named_vars = {
        'id': issue_id,
        'repo': repo_name,
        'repo_name': repo_name_cleaned,
        'group_name': parent_group_name,
        # set dummy keys so we always have them
        'hostname': '',
        'netloc': '',
        'scheme': ''
    }

    request = get_current_request()
    if request:
        # exposes hostname, netloc, scheme
        host_data = get_host_info(request)
        named_vars.update(host_data)

    # named regex variables
    named_vars.update(match_obj.groupdict())
    _url = string.Template(entry['url']).safe_substitute(**named_vars)
    desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars)
    hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars)

    def quote_cleaner(input_str):
        """Remove quotes, as the value is embedded in HTML attributes"""
        return input_str.replace('"', '')

    data = {
        'pref': pref,
        'cls': quote_cleaner('issue-tracker-link'),
        'url': quote_cleaner(_url),
        'id-repr': issue_id,
        'issue-prefix': entry['pref'],
        'serv': entry['url'],
        'title': sanitize_html(desc, strip=True),
        'hovercard_url': hovercard_url
    }

    if return_raw_data:
        return {
            'id': issue_id,
            'url': _url
        }
    return tmpl % data


def get_active_pattern_entries(repo_name):
    repo = None
    if repo_name:
        # Retrieve the repo so an invalid repo_name does not explode inside
        # IssueTrackerSettingsModel; the name itself is still passed further down
        repo = Repository.get_by_repo_name(repo_name, cache=True)

    settings_model = IssueTrackerSettingsModel(repo=repo)
    active_entries = settings_model.get_settings(cache=True)
    return active_entries


pr_pattern_re = regex.compile(r'(?:(?:^!)|(?: !))(\d+)')

allowed_link_formats = [
    'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard']

compile_cache = {}


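# ``pr_pattern_re`` matches pull-request references like '!123' at the start
# of the text or after a single space; group(1) captures the bare number:
#   pr_pattern_re.search('merged via !42').group(1)  # -> '42'

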
def process_patterns(text_string, repo_name, link_format='html', active_entries=None):

    if link_format not in allowed_link_formats:
        raise ValueError('Link format must be one of {}, got {}'.format(
            allowed_link_formats, link_format))
    issues_data = []
    errors = []
    new_text = text_string

    if active_entries is None:
        log.debug('Fetch active issue tracker patterns for repo: %s', repo_name)
        active_entries = get_active_pattern_entries(repo_name)

    log.debug('Got %s pattern entries to process', len(active_entries))

    for uid, entry in list(active_entries.items()):

        if not (entry['pat'] and entry['url']):
            log.debug('skipping due to missing data')
            continue

        log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s',
                  uid, entry['pat'], entry['url'], entry['pref'])

        if entry.get('pat_compiled'):
            pattern = entry['pat_compiled']
        elif entry['pat'] in compile_cache:
            pattern = compile_cache[entry['pat']]
        else:
            try:
                pattern = regex.compile(r'%s' % entry['pat'])
            except regex.error as e:
                regex_err = ValueError('{}:{}'.format(entry['pat'], e))
                log.exception('issue tracker pattern: `%s` failed to compile', regex_err)
                errors.append(regex_err)
                continue
            compile_cache[entry['pat']] = pattern

        data_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            return_raw_data=True)

        for match_obj in pattern.finditer(text_string):
            issues_data.append(data_func(match_obj))

        url_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            link_format=link_format)

        new_text = pattern.sub(url_func, new_text)
        log.debug('processed prefix:uid `%s`', uid)

    # finally run a global replace, e.g. !123 -> pr-link; these will not be
    # caught if a similar pattern already matched above
    server_url = '${scheme}://${netloc}'
    pr_entry = {
        'pref': '!',
        'url': server_url + '/_admin/pull-requests/${id}',
        'desc': 'Pull Request !${id}',
        'hovercard_url': server_url + '/_hovercard/pull_request/${id}'
    }
    pr_url_func = partial(
        _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None,
        link_format=link_format+'+hovercard')
    new_text = pr_pattern_re.sub(pr_url_func, new_text)
    log.debug('processed !pr pattern')

    return new_text, issues_data, errors


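# Usage sketch (pattern entry values are made up):
#   entries = {'1': {'pat': r'#(?P<issue_id>\d+)', 'url': 'https://tracker/${id}',
#                    'pref': '#', 'desc': 'Issue ${id}'}}
#   html, issues, errors = process_patterns('fixes #42', 'myrepo',
#                                           active_entries=entries)
#   # html now carries an <a> link for #42; issues holds its raw id/url data

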
def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None,
                          issues_container_callback=None, error_container=None):
    """
    Parses the given text message and makes proper links.
    Issues are linked to the configured issue server; commit ids become
    commit links.
    """

    def escaper(_text):
        return _text.replace('<', '&lt;').replace('>', '&gt;')

    new_text = escaper(commit_text)

    # extract http/https links and make them real urls
    new_text = urlify_text(new_text, safe=False)

    # urlify commits - extract commit ids and make links out of them, if we
    # have the scope of the repository present.
    if repository:
        new_text = urlify_commits(new_text, repository)

    # process issue tracker patterns
    new_text, issues, errors = process_patterns(
        new_text, repository or '', active_entries=active_pattern_entries)

    if issues_container_callback is not None:
        for issue in issues:
            issues_container_callback(issue)

    if error_container is not None:
        error_container.extend(errors)

    return literal(new_text)


def render_binary(repo_name, file_obj):
    """
    Choose how to render a binary file
    """

    # unicode
    filename = file_obj.name

    # images
    for ext in ['*.png', '*.jpeg', '*.jpg', '*.ico', '*.gif']:
        if fnmatch.fnmatch(filename, pat=ext):
            src = route_path(
                'repo_file_raw', repo_name=repo_name,
                commit_id=file_obj.commit.raw_id,
                f_path=file_obj.path)

            return literal(
                '<img class="rendered-binary" alt="rendered-image" src="{}">'.format(src))


def renderer_from_filename(filename, exclude=None):
    """
    Chooses a renderer based on filename; this works only for text-based files.
    """

    # ipython
    for ext in ['*.ipynb']:
        if fnmatch.fnmatch(filename, pat=ext):
            return 'jupyter'

    is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
    if is_markup:
        return is_markup
    return None


def render(source, renderer='rst', mentions=False, relative_urls=None,
           repo_name=None, active_pattern_entries=None, issues_container_callback=None):

    def maybe_convert_relative_links(html_source):
        if relative_urls:
            return relative_links(html_source, relative_urls)
        return html_source

    if renderer == 'plain':
        return literal(
            MarkupRenderer.plain(source, leading_newline=False))

    elif renderer == 'rst':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source, issues, errors = process_patterns(
                source, repo_name, link_format='rst',
                active_entries=active_pattern_entries)
            if issues_container_callback is not None:
                for issue in issues:
                    issues_container_callback(issue)

        rendered_block = maybe_convert_relative_links(
            MarkupRenderer.rst(source, mentions=mentions))

        return literal(f'<div class="rst-block">{rendered_block}</div>')

    elif renderer == 'markdown':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source, issues, errors = process_patterns(
                source, repo_name, link_format='markdown',
                active_entries=active_pattern_entries)
            if issues_container_callback is not None:
                for issue in issues:
                    issues_container_callback(issue)

        rendered_block = maybe_convert_relative_links(
            MarkupRenderer.markdown(source, flavored=True, mentions=mentions))
        return literal(f'<div class="markdown-block">{rendered_block}</div>')

    elif renderer == 'jupyter':
        rendered_block = maybe_convert_relative_links(
            MarkupRenderer.jupyter(source))
        return literal(f'<div class="ipynb">{rendered_block}</div>')

    # None means just show the file-source
    return None


def commit_status(repo, commit_id):
    return ChangesetStatusModel().get_status(repo, commit_id)


def commit_status_lbl(commit_status):
    return dict(ChangesetStatus.STATUSES).get(commit_status)


def commit_time(repo_name, commit_id):
    repo = Repository.get_by_repo_name(repo_name)
    commit = repo.get_commit(commit_id=commit_id)
    return commit.date


def get_permission_name(key):
    return dict(Permission.PERMS).get(key)


def journal_filter_help(request):
    _ = request.translate
    from rhodecode.lib.audit_logger import ACTIONS
    actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80))

    return _(
        'Example filter terms:\n' +
        ' repository:vcs\n' +
        ' username:marcin\n' +
        ' username:(NOT marcin)\n' +
        ' action:*push*\n' +
        ' ip:127.0.0.1\n' +
        ' date:20120101\n' +
        ' date:[20120101100000 TO 20120102]\n' +
        '\n' +
        'Actions: {actions}\n' +
        '\n' +
        'Generate wildcards using \'*\' character:\n' +
        ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
        ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
        '\n' +
        'Optional AND / OR operators in queries\n' +
        ' "repository:vcs OR repository:test"\n' +
        ' "username:test AND repository:test*"\n'
    ).format(actions=actions)


def not_mapped_error(repo_name):
    from rhodecode.translation import _
    flash(_('%s repository is not mapped to the db; perhaps'
            ' it was created or renamed on the filesystem.'
            ' Please run the application again'
            ' in order to rescan repositories') % repo_name, category='error')


def ip_range(ip_addr):
    from rhodecode.model.db import UserIpMap
    s, e = UserIpMap._get_ip_range(ip_addr)
    return '%s - %s' % (s, e)


def form(url, method='post', needs_csrf_token=True, **attrs):
    """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
    if method.lower() != 'get' and needs_csrf_token:
        raise Exception(
            'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
            'CSRF token. If the endpoint does not require such token you can ' +
            'explicitly set the parameter needs_csrf_token to false.')

    return insecure_form(url, method=method, **attrs)


1980 | def secure_form(form_url, method="POST", multipart=False, **attrs): |
|
1980 | def secure_form(form_url, method="POST", multipart=False, **attrs): | |
1981 | """Start a form tag that points the action to an url. This |
|
1981 | """Start a form tag that points the action to an url. This | |
1982 | form tag will also include the hidden field containing |
|
1982 | form tag will also include the hidden field containing | |
1983 | the auth token. |
|
1983 | the auth token. | |
1984 |
|
1984 | |||
1985 | The url options should be given either as a string, or as a |
|
1985 | The url options should be given either as a string, or as a | |
1986 | ``url()`` function. The method for the form defaults to POST. |
|
1986 | ``url()`` function. The method for the form defaults to POST. | |
1987 |
|
1987 | |||
1988 | Options: |
|
1988 | Options: | |
1989 |
|
1989 | |||
1990 | ``multipart`` |
|
1990 | ``multipart`` | |
1991 | If set to True, the enctype is set to "multipart/form-data". |
|
1991 | If set to True, the enctype is set to "multipart/form-data". | |
1992 | ``method`` |
|
1992 | ``method`` | |
1993 | The method to use when submitting the form, usually either |
|
1993 | The method to use when submitting the form, usually either | |
1994 | "GET" or "POST". If "PUT", "DELETE", or another verb is used, a |
|
1994 | "GET" or "POST". If "PUT", "DELETE", or another verb is used, a | |
1995 | hidden input with name _method is added to simulate the verb |
|
1995 | hidden input with name _method is added to simulate the verb | |
1996 | over POST. |
|
1996 | over POST. | |
1997 |
|
1997 | |||
1998 | """ |
|
1998 | """ | |
1999 |
|
1999 | |||
2000 | if 'request' in attrs: |
|
2000 | if 'request' in attrs: | |
2001 | session = attrs['request'].session |
|
2001 | session = attrs['request'].session | |
2002 | del attrs['request'] |
|
2002 | del attrs['request'] | |
2003 | else: |
|
2003 | else: | |
2004 | raise ValueError( |
|
2004 | raise ValueError( | |
2005 | 'Calling this form requires request= to be passed as argument') |
|
2005 | 'Calling this form requires request= to be passed as argument') | |
2006 |
|
2006 | |||
2007 | _form = insecure_form(form_url, method, multipart, **attrs) |
|
2007 | _form = insecure_form(form_url, method, multipart, **attrs) | |
2008 | token = literal( |
|
2008 | token = literal( | |
2009 | '<input type="hidden" name="{}" value="{}">'.format( |
|
2009 | '<input type="hidden" name="{}" value="{}">'.format( | |
2010 | csrf_token_key, get_csrf_token(session))) |
|
2010 | csrf_token_key, get_csrf_token(session))) | |
2011 |
|
2011 | |||
2012 | return literal("%s\n%s" % (_form, token)) |
|
2012 | return literal("%s\n%s" % (_form, token)) | |
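
Putting it together: callers must hand in the active request so the session-bound token can be embedded. A usage sketch (route name and rendered action URL are illustrative; the hidden-field name comes from csrf_token_key):

    html = secure_form(route_path('my_account'), method='POST', request=request)
    # -> <form action="/_admin/my_account" method="POST">
    #    <input type="hidden" name="csrf_token" value="...">
    secure_form(route_path('my_account'))  # ValueError: request= must be passed
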
2013 |
|
2013 | |||
2014 |
|
2014 | |||
2015 | def dropdownmenu(name, selected, options, enable_filter=False, **attrs): |
|
2015 | def dropdownmenu(name, selected, options, enable_filter=False, **attrs): | |
2016 | select_html = select(name, selected, options, **attrs) |
|
2016 | select_html = select(name, selected, options, **attrs) | |
2017 |
|
2017 | |||
2018 | select2 = """ |
|
2018 | select2 = """ | |
2019 | <script> |
|
2019 | <script> | |
2020 | $(document).ready(function() { |
|
2020 | $(document).ready(function() { | |
2021 | $('#%s').select2({ |
|
2021 | $('#%s').select2({ | |
2022 | containerCssClass: 'drop-menu %s', |
|
2022 | containerCssClass: 'drop-menu %s', | |
2023 | dropdownCssClass: 'drop-menu-dropdown', |
|
2023 | dropdownCssClass: 'drop-menu-dropdown', | |
2024 | dropdownAutoWidth: true%s |
|
2024 | dropdownAutoWidth: true%s | |
2025 | }); |
|
2025 | }); | |
2026 | }); |
|
2026 | }); | |
2027 | </script> |
|
2027 | </script> | |
2028 | """ |
|
2028 | """ | |
2029 |
|
2029 | |||
2030 | filter_option = """, |
|
2030 | filter_option = """, | |
2031 | minimumResultsForSearch: -1 |
|
2031 | minimumResultsForSearch: -1 | |
2032 | """ |
|
2032 | """ | |
2033 | input_id = attrs.get('id') or name |
|
2033 | input_id = attrs.get('id') or name | |
2034 | extra_classes = ' '.join(attrs.pop('extra_classes', [])) |
|
2034 | extra_classes = ' '.join(attrs.pop('extra_classes', [])) | |
2035 | filter_enabled = "" if enable_filter else filter_option |
|
2035 | filter_enabled = "" if enable_filter else filter_option | |
2036 | select_script = literal(select2 % (input_id, extra_classes, filter_enabled)) |
|
2036 | select_script = literal(select2 % (input_id, extra_classes, filter_enabled)) | |
2037 |
|
2037 | |||
2038 | return literal(select_html+select_script) |
|
2038 | return literal(select_html+select_script) | |
2039 |
|
2039 | |||
2040 |
|
2040 | |||
2041 | def get_visual_attr(tmpl_context_var, attr_name): |
|
2041 | def get_visual_attr(tmpl_context_var, attr_name): | |
2042 | """ |
|
2042 | """ | |
2043 | A safe way to get a variable from visual variable of template context |
|
2043 | A safe way to get a variable from visual variable of template context | |
2044 |
|
2044 | |||
2045 | :param tmpl_context_var: instance of tmpl_context, usually present as `c` |
|
2045 | :param tmpl_context_var: instance of tmpl_context, usually present as `c` | |
2046 | :param attr_name: name of the attribute we fetch from the c.visual |
|
2046 | :param attr_name: name of the attribute we fetch from the c.visual | |
2047 | """ |
|
2047 | """ | |
2048 | visual = getattr(tmpl_context_var, 'visual', None) |
|
2048 | visual = getattr(tmpl_context_var, 'visual', None) | |
2049 | if not visual: |
|
2049 | if not visual: | |
2050 | return |
|
2050 | return | |
2051 | else: |
|
2051 | else: | |
2052 | return getattr(visual, attr_name, None) |
|
2052 | return getattr(visual, attr_name, None) | |
2053 |
|
2053 | |||
2054 |
|
2054 | |||
2055 | def get_last_path_part(file_node): |
|
2055 | def get_last_path_part(file_node): | |
2056 | if not file_node.path: |
|
2056 | if not file_node.path: | |
2057 | return '/' |
|
2057 | return '/' | |
2058 |
|
2058 | |||
2059 | path = safe_str(file_node.path.split('/')[-1]) |
|
2059 | path = safe_str(file_node.path.split('/')[-1]) | |
2060 | return '../' + path |
|
2060 | return '../' + path | |
2061 |
|
2061 | |||
2062 |
|
2062 | |||
2063 | def route_url(*args, **kwargs): |
|
2063 | def route_url(*args, **kwargs): | |
2064 | """ |
|
2064 | """ | |
2065 | Wrapper around pyramids `route_url` (fully qualified url) function. |
|
2065 | Wrapper around pyramids `route_url` (fully qualified url) function. | |
2066 | """ |
|
2066 | """ | |
2067 | req = get_current_request() |
|
2067 | req = get_current_request() | |
2068 | return req.route_url(*args, **kwargs) |
|
2068 | return req.route_url(*args, **kwargs) | |
2069 |
|
2069 | |||
2070 |
|
2070 | |||
2071 | def route_path(*args, **kwargs): |
|
2071 | def route_path(*args, **kwargs): | |
2072 | """ |
|
2072 | """ | |
2073 | Wrapper around pyramids `route_path` function. |
|
2073 | Wrapper around pyramids `route_path` function. | |
2074 | """ |
|
2074 | """ | |
2075 | req = get_current_request() |
|
2075 | req = get_current_request() | |
2076 | return req.route_path(*args, **kwargs) |
|
2076 | return req.route_path(*args, **kwargs) | |
2077 |
|
2077 | |||
2078 |
|
2078 | |||
2079 | def route_path_or_none(*args, **kwargs): |
|
2079 | def route_path_or_none(*args, **kwargs): | |
2080 | try: |
|
2080 | try: | |
2081 | return route_path(*args, **kwargs) |
|
2081 | return route_path(*args, **kwargs) | |
2082 | except KeyError: |
|
2082 | except KeyError: | |
2083 | return None |
|
2083 | return None | |
2084 |
|
2084 | |||
2085 |
|
2085 | |||
2086 | def current_route_path(request, **kw): |
|
2086 | def current_route_path(request, **kw): | |
2087 | new_args = request.GET.mixed() |
|
2087 | new_args = request.GET.mixed() | |
2088 | new_args.update(kw) |
|
2088 | new_args.update(kw) | |
2089 | return request.current_route_path(_query=new_args) |
|
2089 | return request.current_route_path(_query=new_args) | |
2090 |
|
2090 | |||
2091 |
|
2091 | |||
2092 | def curl_api_example(method, args): |
|
2092 | def curl_api_example(method, args): | |
2093 | args_json = json.dumps(OrderedDict([ |
|
2093 | args_json = json.dumps(OrderedDict([ | |
2094 | ('id', 1), |
|
2094 | ('id', 1), | |
2095 | ('auth_token', 'SECRET'), |
|
2095 | ('auth_token', 'SECRET'), | |
2096 | ('method', method), |
|
2096 | ('method', method), | |
2097 | ('args', args) |
|
2097 | ('args', args) | |
2098 | ])) |
|
2098 | ])) | |
2099 |
|
2099 | |||
2100 | return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format( |
|
2100 | return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format( | |
2101 | api_url=route_url('apiv2'), |
|
2101 | api_url=route_url('apiv2'), | |
2102 | args_json=args_json |
|
2102 | args_json=args_json | |
2103 | ) |
|
2103 | ) | |
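
For a sense of the output, curl_api_example('get_repo', {'repoid': 'vcs'}) renders a one-liner along these lines (the API URL depends on the deployment; SECRET is a deliberate placeholder):

    curl https://code.example.com/_admin/api -X POST -H 'content-type:text/plain' \
      --data-binary '{"id": 1, "auth_token": "SECRET", "method": "get_repo", "args": {"repoid": "vcs"}}'
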
2104 |
|
2104 | |||
2105 |
|
2105 | |||
2106 | def api_call_example(method, args): |
|
2106 | def api_call_example(method, args): | |
2107 | """ |
|
2107 | """ | |
2108 | Generates an API call example via CURL |
|
2108 | Generates an API call example via CURL | |
2109 | """ |
|
2109 | """ | |
2110 | curl_call = curl_api_example(method, args) |
|
2110 | curl_call = curl_api_example(method, args) | |
2111 |
|
2111 | |||
2112 | return literal( |
|
2112 | return literal( | |
2113 | curl_call + |
|
2113 | curl_call + | |
2114 | "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, " |
|
2114 | "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, " | |
2115 | "and needs to be of `api calls` role." |
|
2115 | "and needs to be of `api calls` role." | |
2116 | .format(token_url=route_url('my_account_auth_tokens'))) |
|
2116 | .format(token_url=route_url('my_account_auth_tokens'))) | |
2117 |
|
2117 | |||
2118 |
|
2118 | |||
2119 | def notification_description(notification, request): |
|
2119 | def notification_description(notification, request): | |
2120 | """ |
|
2120 | """ | |
2121 | Generate notification human readable description based on notification type |
|
2121 | Generate notification human readable description based on notification type | |
2122 | """ |
|
2122 | """ | |
2123 | from rhodecode.model.notification import NotificationModel |
|
2123 | from rhodecode.model.notification import NotificationModel | |
2124 | return NotificationModel().make_description( |
|
2124 | return NotificationModel().make_description( | |
2125 | notification, translate=request.translate) |
|
2125 | notification, translate=request.translate) | |
2126 |
|
2126 | |||
2127 |
|
2127 | |||
2128 | def go_import_header(request, db_repo=None): |
|
2128 | def go_import_header(request, db_repo=None): | |
2129 | """ |
|
2129 | """ | |
2130 | Creates the go-import meta header consumed by the Go toolchain |
|
2130 | Creates the go-import meta header consumed by the Go toolchain | |
2131 | """ |
|
2131 | """ | |
2132 |
|
2132 | |||
2133 | if not db_repo: |
|
2133 | if not db_repo: | |
2134 | return |
|
2134 | return | |
2135 | if 'go-get' not in request.GET: |
|
2135 | if 'go-get' not in request.GET: | |
2136 | return |
|
2136 | return | |
2137 |
|
2137 | |||
2138 | clone_url = db_repo.clone_url() |
|
2138 | clone_url = db_repo.clone_url() | |
2139 | prefix = re.split(r'^https?:\/\/', clone_url)[-1] |
|
2139 | prefix = re.split(r'^https?:\/\/', clone_url)[-1] | |
2140 | # we have a repo and go-get flag, |
|
2140 | # we have a repo and go-get flag, | |
2141 | return literal('<meta name="go-import" content="{} {} {}">'.format( |
|
2141 | return literal('<meta name="go-import" content="{} {} {}">'.format( | |
2142 | prefix, db_repo.repo_type, clone_url)) |
|
2142 | prefix, db_repo.repo_type, clone_url)) | |
2143 |
|
2143 | |||
2144 |
|
2144 | |||
2145 | def reviewer_as_json(*args, **kwargs): |
|
2145 | def reviewer_as_json(*args, **kwargs): | |
2146 | from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json |
|
2146 | from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json | |
2147 | return _reviewer_as_json(*args, **kwargs) |
|
2147 | return _reviewer_as_json(*args, **kwargs) | |
2148 |
|
2148 | |||
2149 |
|
2149 | |||
2150 | def get_repo_view_type(request): |
|
2150 | def get_repo_view_type(request): | |
2151 | route_name = request.matched_route.name |
|
2151 | route_name = request.matched_route.name | |
2152 | route_to_view_type = { |
|
2152 | route_to_view_type = { | |
2153 | 'repo_changelog': 'commits', |
|
2153 | 'repo_changelog': 'commits', | |
2154 | 'repo_commits': 'commits', |
|
2154 | 'repo_commits': 'commits', | |
2155 | 'repo_files': 'files', |
|
2155 | 'repo_files': 'files', | |
2156 | 'repo_summary': 'summary', |
|
2156 | 'repo_summary': 'summary', | |
2157 | 'repo_commit': 'commit' |
|
2157 | 'repo_commit': 'commit' | |
2158 | } |
|
2158 | } | |
2159 |
|
2159 | |||
2160 | return route_to_view_type.get(route_name) |
|
2160 | return route_to_view_type.get(route_name) | |
2161 |
|
2161 | |||
2162 |
|
2162 | |||
2163 | def is_active(menu_entry, selected): |
|
2163 | def is_active(menu_entry, selected): | |
2164 | """ |
|
2164 | """ | |
2165 | Returns active class for selecting menus in templates |
|
2165 | Returns active class for selecting menus in templates | |
2166 | <li class=${h.is_active('settings', current_active)}></li> |
|
2166 | <li class=${h.is_active('settings', current_active)}></li> | |
2167 | """ |
|
2167 | """ | |
2168 | if not isinstance(menu_entry, list): |
|
2168 | if not isinstance(menu_entry, list): | |
2169 | menu_entry = [menu_entry] |
|
2169 | menu_entry = [menu_entry] | |
2170 |
|
2170 | |||
2171 | if selected in menu_entry: |
|
2171 | if selected in menu_entry: | |
2172 | return "active" |
|
2172 | return "active" | |
2173 |
|
2173 | |||
2174 |
|
2174 | |||
2175 | class IssuesRegistry(object): |
|
2175 | class IssuesRegistry(object): | |
2176 | """ |
|
2176 | """ | |
2177 | issues_registry = IssuesRegistry() |
|
2177 | issues_registry = IssuesRegistry() | |
2178 | some_func(issues_callback=issues_registry(...)) |
|
2178 | some_func(issues_callback=issues_registry(...)) | |
2179 | """ |
|
2179 | """ | |
2180 |
|
2180 | |||
2181 | def __init__(self): |
|
2181 | def __init__(self): | |
2182 | self.issues = [] |
|
2182 | self.issues = [] | |
2183 | self.unique_issues = collections.defaultdict(lambda: []) |
|
2183 | self.unique_issues = collections.defaultdict(lambda: []) | |
2184 |
|
2184 | |||
2185 | def __call__(self, commit_dict=None): |
|
2185 | def __call__(self, commit_dict=None): | |
2186 | def callback(issue): |
|
2186 | def callback(issue): | |
2187 | if commit_dict and issue: |
|
2187 | if commit_dict and issue: | |
2188 | issue['commit'] = commit_dict |
|
2188 | issue['commit'] = commit_dict | |
2189 | self.issues.append(issue) |
|
2189 | self.issues.append(issue) | |
2190 | self.unique_issues[issue['id']].append(issue) |
|
2190 | self.unique_issues[issue['id']].append(issue) | |
2191 | return callback |
|
2191 | return callback | |
2192 |
|
2192 | |||
2193 | def get_issues(self): |
|
2193 | def get_issues(self): | |
2194 | return self.issues |
|
2194 | return self.issues | |
2195 |
|
2195 | |||
2196 | @property |
|
2196 | @property | |
2197 | def issues_unique_count(self): |
|
2197 | def issues_unique_count(self): | |
2198 | return len(set(i['id'] for i in self.issues)) |
|
2198 | return len(set(i['id'] for i in self.issues)) | |
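
A short sketch of the intended flow (the issue dicts are illustrative; the class itself only relies on the 'id' key):

    registry = IssuesRegistry()
    record = registry(commit_dict={'commit_id': 'abc123'})
    record({'id': 'ISSUE-1', 'url': 'https://tracker.example.com/ISSUE-1'})
    record({'id': 'ISSUE-1', 'url': 'https://tracker.example.com/ISSUE-1'})
    len(registry.get_issues())     # 2, both tagged with the commit dict
    registry.issues_unique_count   # 1
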
|
2199 | ||||
|
2200 | ||||
|
2201 | def get_directory_statistics(start_path): | |||
|
2202 | """ | |||
|
2203 | total_files, total_size, directory_stats = get_directory_statistics(start_path) | |||
|
2204 | ||||
|
2205 | print(f"Directory statistics for: {start_path}\n") | |||
|
2206 | print(f"Total files: {total_files}") | |||
|
2207 | print(f"Total size: {format_size(total_size)}\n") | |||
|
2208 | ||||
|
2209 | :param start_path: root directory that is walked recursively | |||
|
2210 | :return: tuple of (total_files, total_size, directory_stats) | |||
|
2211 | """ | |||
|
2212 | ||||
|
2213 | total_files = 0 | |||
|
2214 | total_size = 0 | |||
|
2215 | directory_stats = {} | |||
|
2216 | ||||
|
2217 | for dir_path, dir_names, file_names in os.walk(start_path): | |||
|
2218 | dir_size = 0 | |||
|
2219 | file_count = len(file_names) | |||
|
2220 | ||||
|
2221 | for fname in file_names: | |||
|
2222 | filepath = os.path.join(dir_path, fname) | |||
|
2223 | file_size = os.path.getsize(filepath) | |||
|
2224 | dir_size += file_size | |||
|
2225 | ||||
|
2226 | directory_stats[dir_path] = {'file_count': file_count, 'size': dir_size} | |||
|
2227 | total_files += file_count | |||
|
2228 | total_size += dir_size | |||
|
2229 | ||||
|
2230 | return total_files, total_size, directory_stats |
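
Note that directory_stats records each directory's direct (non-recursive) file count and byte size keyed by path, complementing the grand totals; an illustrative shape:

    {'/storage/gists': {'file_count': 3, 'size': 4096},
     '/storage/gists/old': {'file_count': 1, 'size': 512}}
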
@@ -1,834 +1,827 b'' | |||||
1 | # Copyright (C) 2017-2023 RhodeCode GmbH |
|
1 | # Copyright (C) 2017-2023 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 |
|
19 | |||
20 | import os |
|
20 | import os | |
21 | import sys |
|
21 | import sys | |
22 | import time |
|
22 | import time | |
23 | import platform |
|
23 | import platform | |
24 | import collections |
|
24 | import collections | |
25 | import psutil |
|
25 | import psutil | |
26 | from functools import wraps |
|
26 | from functools import wraps | |
27 |
|
27 | |||
28 | import pkg_resources |
|
28 | import pkg_resources | |
29 | import logging |
|
29 | import logging | |
30 | import resource |
|
30 | import resource | |
31 |
|
31 | |||
32 | import configparser |
|
32 | import configparser | |
33 |
|
33 | |||
34 | from rc_license.models import LicenseModel |
|
34 | from rc_license.models import LicenseModel | |
35 | from rhodecode.lib.str_utils import safe_str |
|
35 | from rhodecode.lib.str_utils import safe_str | |
36 |
|
36 | |||
37 | log = logging.getLogger(__name__) |
|
37 | log = logging.getLogger(__name__) | |
38 |
|
38 | |||
39 |
|
39 | |||
40 | _NA = 'NOT AVAILABLE' |
|
40 | _NA = 'NOT AVAILABLE' | |
41 | _NA_FLOAT = 0.0 |
|
41 | _NA_FLOAT = 0.0 | |
42 |
|
42 | |||
43 | STATE_OK = 'ok' |
|
43 | STATE_OK = 'ok' | |
44 | STATE_ERR = 'error' |
|
44 | STATE_ERR = 'error' | |
45 | STATE_WARN = 'warning' |
|
45 | STATE_WARN = 'warning' | |
46 |
|
46 | |||
47 | STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK} |
|
47 | STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK} | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | registered_helpers = {} |
|
50 | registered_helpers = {} | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | def register_sysinfo(func): |
|
53 | def register_sysinfo(func): | |
54 | """ |
|
54 | """ | |
55 | @register_sysinfo |
|
55 | @register_sysinfo | |
56 | def db_check(): |
|
56 | def db_check(): | |
57 | pass |
|
57 | pass | |
58 |
|
58 | |||
59 | db_check == registered_helpers['db_check'] |
|
59 | db_check == registered_helpers['db_check'] | |
60 | """ |
|
60 | """ | |
61 | global registered_helpers |
|
61 | global registered_helpers | |
62 | registered_helpers[func.__name__] = func |
|
62 | registered_helpers[func.__name__] = func | |
63 |
|
63 | |||
64 | @wraps(func) |
|
64 | @wraps(func) | |
65 | def _wrapper(*args, **kwargs): |
|
65 | def _wrapper(*args, **kwargs): | |
66 | return func(*args, **kwargs) |
|
66 | return func(*args, **kwargs) | |
67 | return _wrapper |
|
67 | return _wrapper | |
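
Usage mirrors the docstring: decorate a zero-argument helper and it becomes discoverable by name. A sketch of a hypothetical consumer loop (not part of this module):

    @register_sysinfo
    def db_check():
        return SysInfoRes(value='ok')

    for name, helper in registered_helpers.items():
        print(name, SysInfo(helper)())  # validated, JSON-ready dict
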
68 |
|
68 | |||
69 |
|
69 | |||
70 | # HELPERS |
|
70 | # HELPERS | |
71 | def percentage(part: int | float, whole: int | float): |
|
71 | def percentage(part: int | float, whole: int | float): | |
72 | whole = float(whole) |
|
72 | whole = float(whole) | |
73 | if whole > 0: |
|
73 | if whole > 0: | |
74 | return round(100 * float(part) / whole, 1) |
|
74 | return round(100 * float(part) / whole, 1) | |
75 | return 0.0 |
|
75 | return 0.0 | |
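
Two quick checks pin down the behaviour: results are rounded to one decimal place, and a non-positive whole short-circuits to 0.0 instead of raising ZeroDivisionError:

    >>> percentage(512, 2048)
    25.0
    >>> percentage(5, 0)
    0.0
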
76 |
|
76 | |||
77 |
|
77 | |||
78 | def get_storage_size(storage_path): |
|
78 | def get_storage_size(storage_path): | |
79 | sizes = [] |
|
79 | sizes = [] | |
80 | for file_ in os.listdir(storage_path): |
|
80 | for file_ in os.listdir(storage_path): | |
81 | storage_file = os.path.join(storage_path, file_) |
|
81 | storage_file = os.path.join(storage_path, file_) | |
82 | if os.path.isfile(storage_file): |
|
82 | if os.path.isfile(storage_file): | |
83 | try: |
|
83 | try: | |
84 | sizes.append(os.path.getsize(storage_file)) |
|
84 | sizes.append(os.path.getsize(storage_file)) | |
85 | except OSError: |
|
85 | except OSError: | |
86 | log.exception('Failed to get size of storage file %s', storage_file) |
|
86 | log.exception('Failed to get size of storage file %s', storage_file) | |
87 | pass |
|
87 | pass | |
88 |
|
88 | |||
89 | return sum(sizes) |
|
89 | return sum(sizes) | |
90 |
|
90 | |||
91 |
|
91 | |||
92 | def get_resource(resource_type): |
|
92 | def get_resource(resource_type): | |
93 | try: |
|
93 | try: | |
94 | return resource.getrlimit(resource_type) |
|
94 | return resource.getrlimit(resource_type) | |
95 | except Exception: |
|
95 | except Exception: | |
96 | return 'NOT_SUPPORTED' |
|
96 | return 'NOT_SUPPORTED' | |
97 |
|
97 | |||
98 |
|
98 | |||
99 | def get_cert_path(ini_path): |
|
99 | def get_cert_path(ini_path): | |
100 | default = '/etc/ssl/certs/ca-certificates.crt' |
|
100 | default = '/etc/ssl/certs/ca-certificates.crt' | |
101 | control_ca_bundle = os.path.join( |
|
101 | control_ca_bundle = os.path.join( | |
102 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))), |
|
102 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))), | |
103 | '/etc/ssl/certs/ca-certificates.crt') |
|
103 | '/etc/ssl/certs/ca-certificates.crt') | |
104 | if os.path.isfile(control_ca_bundle): |
|
104 | if os.path.isfile(control_ca_bundle): | |
105 | default = control_ca_bundle |
|
105 | default = control_ca_bundle | |
106 |
|
106 | |||
107 | return default |
|
107 | return default | |
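
One subtlety worth flagging: because the second argument to os.path.join above is an absolute path, the join discards the prefix computed from ini_path, so control_ca_bundle always resolves to the system default bundle and the isfile check is effectively a no-op. If a profile-relative bundle was intended, the second argument would need to be a relative path.
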
108 |
|
108 | |||
109 |
|
109 | |||
110 | class SysInfoRes(object): |
|
110 | class SysInfoRes(object): | |
111 | def __init__(self, value, state=None, human_value=None): |
|
111 | def __init__(self, value, state=None, human_value=None): | |
112 | self.value = value |
|
112 | self.value = value | |
113 | self.state = state or STATE_OK_DEFAULT |
|
113 | self.state = state or STATE_OK_DEFAULT | |
114 | self.human_value = human_value or value |
|
114 | self.human_value = human_value or value | |
115 |
|
115 | |||
116 | def __json__(self): |
|
116 | def __json__(self): | |
117 | return { |
|
117 | return { | |
118 | 'value': self.value, |
|
118 | 'value': self.value, | |
119 | 'state': self.state, |
|
119 | 'state': self.state, | |
120 | 'human_value': self.human_value, |
|
120 | 'human_value': self.human_value, | |
121 | } |
|
121 | } | |
122 |
|
122 | |||
123 | def get_value(self): |
|
123 | def get_value(self): | |
124 | return self.__json__() |
|
124 | return self.__json__() | |
125 |
|
125 | |||
126 | def __str__(self): |
|
126 | def __str__(self): | |
127 | return f'<SysInfoRes({self.__json__()})>' |
|
127 | return f'<SysInfoRes({self.__json__()})>' | |
128 |
|
128 | |||
129 |
|
129 | |||
130 | class SysInfo(object): |
|
130 | class SysInfo(object): | |
131 |
|
131 | |||
132 | def __init__(self, func_name, **kwargs): |
|
132 | def __init__(self, func_name, **kwargs): | |
133 | self.function_name = func_name |
|
133 | self.function_name = func_name | |
134 | self.value = _NA |
|
134 | self.value = _NA | |
135 | self.state = None |
|
135 | self.state = None | |
136 | self.kwargs = kwargs or {} |
|
136 | self.kwargs = kwargs or {} | |
137 |
|
137 | |||
138 | def __call__(self): |
|
138 | def __call__(self): | |
139 | computed = self.compute(**self.kwargs) |
|
139 | computed = self.compute(**self.kwargs) | |
140 | if not isinstance(computed, SysInfoRes): |
|
140 | if not isinstance(computed, SysInfoRes): | |
141 | raise ValueError( |
|
141 | raise ValueError( | |
142 | 'computed value for {} is not instance of ' |
|
142 | 'computed value for {} is not instance of ' | |
143 | '{}, got {} instead'.format( |
|
143 | '{}, got {} instead'.format( | |
144 | self.function_name, SysInfoRes, type(computed))) |
|
144 | self.function_name, SysInfoRes, type(computed))) | |
145 | return computed.__json__() |
|
145 | return computed.__json__() | |
146 |
|
146 | |||
147 | def __str__(self): |
|
147 | def __str__(self): | |
148 | return f'<SysInfo({self.function_name})>' |
|
148 | return f'<SysInfo({self.function_name})>' | |
149 |
|
149 | |||
150 | def compute(self, **kwargs): |
|
150 | def compute(self, **kwargs): | |
151 | return self.function_name(**kwargs) |
|
151 | return self.function_name(**kwargs) | |
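
The two classes form a small contract: helpers return a SysInfoRes, and SysInfo wraps a helper into a callable that validates the result and hands back its JSON form. A sketch using python_info defined below (values illustrative):

    snapshot = SysInfo(python_info)()
    # {'value': {'version': '3.11.1:CPython', 'executable': '/usr/bin/python3'},
    #  'state': {'message': '', 'type': 'ok'},
    #  'human_value': {...}}
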
152 |
|
152 | |||
153 |
|
153 | |||
154 | # SysInfo functions |
|
154 | # SysInfo functions | |
155 | @register_sysinfo |
|
155 | @register_sysinfo | |
156 | def python_info(): |
|
156 | def python_info(): | |
157 | value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}', |
|
157 | value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}', | |
158 | executable=sys.executable) |
|
158 | executable=sys.executable) | |
159 | return SysInfoRes(value=value) |
|
159 | return SysInfoRes(value=value) | |
160 |
|
160 | |||
161 |
|
161 | |||
162 | @register_sysinfo |
|
162 | @register_sysinfo | |
163 | def py_modules(): |
|
163 | def py_modules(): | |
164 | mods = {p.project_name: {'version': p.version, 'location': p.location} |
|
164 | mods = {p.project_name: {'version': p.version, 'location': p.location} | |
165 | for p in pkg_resources.working_set} |
|
165 | for p in pkg_resources.working_set} | |
166 |
|
166 | |||
167 | value = sorted(mods.items(), key=lambda k: k[0].lower()) |
|
167 | value = sorted(mods.items(), key=lambda k: k[0].lower()) | |
168 | return SysInfoRes(value=value) |
|
168 | return SysInfoRes(value=value) | |
169 |
|
169 | |||
170 |
|
170 | |||
171 | @register_sysinfo |
|
171 | @register_sysinfo | |
172 | def platform_type(): |
|
172 | def platform_type(): | |
173 | from rhodecode.lib.utils import generate_platform_uuid |
|
173 | from rhodecode.lib.utils import generate_platform_uuid | |
174 |
|
174 | |||
175 | value = dict( |
|
175 | value = dict( | |
176 | name=safe_str(platform.platform()), |
|
176 | name=safe_str(platform.platform()), | |
177 | uuid=generate_platform_uuid() |
|
177 | uuid=generate_platform_uuid() | |
178 | ) |
|
178 | ) | |
179 | return SysInfoRes(value=value) |
|
179 | return SysInfoRes(value=value) | |
180 |
|
180 | |||
181 |
|
181 | |||
182 | @register_sysinfo |
|
182 | @register_sysinfo | |
183 | def locale_info(): |
|
183 | def locale_info(): | |
184 | import locale |
|
184 | import locale | |
185 |
|
185 | |||
186 | def safe_get_locale(locale_name): |
|
186 | def safe_get_locale(locale_name): | |
187 | try: |
|
187 | try: | |
188 | return locale.getlocale(locale_name) |
|
188 | return locale.getlocale(locale_name) | |
189 | except TypeError: |
|
189 | except TypeError: | |
190 | return f'FAILED_LOCALE_GET:{locale_name}' |
|
190 | return f'FAILED_LOCALE_GET:{locale_name}' | |
191 |
|
191 | |||
192 | value = dict( |
|
192 | value = dict( | |
193 | locale_default=locale.getlocale(), |
|
193 | locale_default=locale.getlocale(), | |
194 | locale_lc_all=safe_get_locale(locale.LC_ALL), |
|
194 | locale_lc_all=safe_get_locale(locale.LC_ALL), | |
195 | locale_lc_ctype=safe_get_locale(locale.LC_CTYPE), |
|
195 | locale_lc_ctype=safe_get_locale(locale.LC_CTYPE), | |
196 | lang_env=os.environ.get('LANG'), |
|
196 | lang_env=os.environ.get('LANG'), | |
197 | lc_all_env=os.environ.get('LC_ALL'), |
|
197 | lc_all_env=os.environ.get('LC_ALL'), | |
198 | local_archive_env=os.environ.get('LOCALE_ARCHIVE'), |
|
198 | local_archive_env=os.environ.get('LOCALE_ARCHIVE'), | |
199 | ) |
|
199 | ) | |
200 | human_value = ( |
|
200 | human_value = ( | |
201 | f"LANG: {value['lang_env']}, " |
|
201 | f"LANG: {value['lang_env']}, " | |
202 | f"locale LC_ALL: {value['locale_lc_all']}, " |
|
202 | f"locale LC_ALL: {value['locale_lc_all']}, " | |
203 | f"locale LC_CTYPE: {value['locale_lc_ctype']}, " |
|
203 | f"locale LC_CTYPE: {value['locale_lc_ctype']}, " | |
204 | f"Default locales: {value['locale_default']}") |
|
204 | f"Default locales: {value['locale_default']}") | |
205 |
|
205 | |||
206 | return SysInfoRes(value=value, human_value=human_value) |
|
206 | return SysInfoRes(value=value, human_value=human_value) | |
207 |
|
207 | |||
208 |
|
208 | |||
209 | @register_sysinfo |
|
209 | @register_sysinfo | |
210 | def ulimit_info(): |
|
210 | def ulimit_info(): | |
211 | data = collections.OrderedDict([ |
|
211 | data = collections.OrderedDict([ | |
212 | ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)), |
|
212 | ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)), | |
213 | ('file size', get_resource(resource.RLIMIT_FSIZE)), |
|
213 | ('file size', get_resource(resource.RLIMIT_FSIZE)), | |
214 | ('stack size', get_resource(resource.RLIMIT_STACK)), |
|
214 | ('stack size', get_resource(resource.RLIMIT_STACK)), | |
215 | ('core file size', get_resource(resource.RLIMIT_CORE)), |
|
215 | ('core file size', get_resource(resource.RLIMIT_CORE)), | |
216 | ('address space size', get_resource(resource.RLIMIT_AS)), |
|
216 | ('address space size', get_resource(resource.RLIMIT_AS)), | |
217 | ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)), |
|
217 | ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)), | |
218 | ('heap size', get_resource(resource.RLIMIT_DATA)), |
|
218 | ('heap size', get_resource(resource.RLIMIT_DATA)), | |
219 | ('rss size', get_resource(resource.RLIMIT_RSS)), |
|
219 | ('rss size', get_resource(resource.RLIMIT_RSS)), | |
220 | ('number of processes', get_resource(resource.RLIMIT_NPROC)), |
|
220 | ('number of processes', get_resource(resource.RLIMIT_NPROC)), | |
221 | ('open files', get_resource(resource.RLIMIT_NOFILE)), |
|
221 | ('open files', get_resource(resource.RLIMIT_NOFILE)), | |
222 | ]) |
|
222 | ]) | |
223 |
|
223 | |||
224 | text = ', '.join(f'{k}:{v}' for k, v in data.items()) |
|
224 | text = ', '.join(f'{k}:{v}' for k, v in data.items()) | |
225 |
|
225 | |||
226 | value = { |
|
226 | value = { | |
227 | 'limits': data, |
|
227 | 'limits': data, | |
228 | 'text': text, |
|
228 | 'text': text, | |
229 | } |
|
229 | } | |
230 | return SysInfoRes(value=value) |
|
230 | return SysInfoRes(value=value) | |
231 |
|
231 | |||
232 |
|
232 | |||
233 | @register_sysinfo |
|
233 | @register_sysinfo | |
234 | def uptime(): |
|
234 | def uptime(): | |
235 | from rhodecode.lib.helpers import age, time_to_datetime |
|
235 | from rhodecode.lib.helpers import age, time_to_datetime | |
236 | from rhodecode.translation import TranslationString |
|
236 | from rhodecode.translation import TranslationString | |
237 |
|
237 | |||
238 | value = dict(boot_time=0, uptime=0, text='') |
|
238 | value = dict(boot_time=0, uptime=0, text='') | |
239 | state = STATE_OK_DEFAULT |
|
239 | state = STATE_OK_DEFAULT | |
240 |
|
240 | |||
241 | boot_time = psutil.boot_time() |
|
241 | boot_time = psutil.boot_time() | |
242 | value['boot_time'] = boot_time |
|
242 | value['boot_time'] = boot_time | |
243 | value['uptime'] = time.time() - boot_time |
|
243 | value['uptime'] = time.time() - boot_time | |
244 |
|
244 | |||
245 | date_or_age = age(time_to_datetime(boot_time)) |
|
245 | date_or_age = age(time_to_datetime(boot_time)) | |
246 | if isinstance(date_or_age, TranslationString): |
|
246 | if isinstance(date_or_age, TranslationString): | |
247 | date_or_age = date_or_age.interpolate() |
|
247 | date_or_age = date_or_age.interpolate() | |
248 |
|
248 | |||
249 | human_value = value.copy() |
|
249 | human_value = value.copy() | |
250 | human_value['boot_time'] = time_to_datetime(boot_time) |
|
250 | human_value['boot_time'] = time_to_datetime(boot_time) | |
251 | human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False) |
|
251 | human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False) | |
252 |
|
252 | |||
253 | human_value['text'] = f'Server started {date_or_age}' |
|
253 | human_value['text'] = f'Server started {date_or_age}' | |
254 | return SysInfoRes(value=value, human_value=human_value) |
|
254 | return SysInfoRes(value=value, human_value=human_value) | |
255 |
|
255 | |||
256 |
|
256 | |||
257 | @register_sysinfo |
|
257 | @register_sysinfo | |
258 | def memory(): |
|
258 | def memory(): | |
259 | from rhodecode.lib.helpers import format_byte_size_binary |
|
259 | from rhodecode.lib.helpers import format_byte_size_binary | |
260 | value = dict(available=0, used=0, used_real=0, cached=0, percent=0, |
|
260 | value = dict(available=0, used=0, used_real=0, cached=0, percent=0, | |
261 | percent_used=0, free=0, inactive=0, active=0, shared=0, |
|
261 | percent_used=0, free=0, inactive=0, active=0, shared=0, | |
262 | total=0, buffers=0, text='') |
|
262 | total=0, buffers=0, text='') | |
263 |
|
263 | |||
264 | state = STATE_OK_DEFAULT |
|
264 | state = STATE_OK_DEFAULT | |
265 |
|
265 | |||
266 | value.update(dict(psutil.virtual_memory()._asdict())) |
|
266 | value.update(dict(psutil.virtual_memory()._asdict())) | |
267 | value['used_real'] = value['total'] - value['available'] |
|
267 | value['used_real'] = value['total'] - value['available'] | |
268 | value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1) |
|
268 | value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1) | |
269 |
|
269 | |||
270 | human_value = value.copy() |
|
270 | human_value = value.copy() | |
271 | human_value['text'] = '{}/{}, {}% used'.format( |
|
271 | human_value['text'] = '{}/{}, {}% used'.format( | |
272 | format_byte_size_binary(value['used_real']), |
|
272 | format_byte_size_binary(value['used_real']), | |
273 | format_byte_size_binary(value['total']), |
|
273 | format_byte_size_binary(value['total']), | |
274 | value['percent_used']) |
|
274 | value['percent_used']) | |
275 |
|
275 | |||
276 | keys = list(value.keys())[::] |
|
276 | keys = list(value.keys())[::] | |
277 | keys.pop(keys.index('percent')) |
|
277 | keys.pop(keys.index('percent')) | |
278 | keys.pop(keys.index('percent_used')) |
|
278 | keys.pop(keys.index('percent_used')) | |
279 | keys.pop(keys.index('text')) |
|
279 | keys.pop(keys.index('text')) | |
280 | for k in keys: |
|
280 | for k in keys: | |
281 | human_value[k] = format_byte_size_binary(value[k]) |
|
281 | human_value[k] = format_byte_size_binary(value[k]) | |
282 |
|
282 | |||
283 | if state['type'] == STATE_OK and value['percent_used'] > 90: |
|
283 | if state['type'] == STATE_OK and value['percent_used'] > 90: | |
284 | msg = 'Critical: your available RAM memory is very low.' |
|
284 | msg = 'Critical: your available RAM memory is very low.' | |
285 | state = {'message': msg, 'type': STATE_ERR} |
|
285 | state = {'message': msg, 'type': STATE_ERR} | |
286 |
|
286 | |||
287 | elif state['type'] == STATE_OK and value['percent_used'] > 70: |
|
287 | elif state['type'] == STATE_OK and value['percent_used'] > 70: | |
288 | msg = 'Warning: your available RAM memory is running low.' |
|
288 | msg = 'Warning: your available RAM memory is running low.' | |
289 | state = {'message': msg, 'type': STATE_WARN} |
|
289 | state = {'message': msg, 'type': STATE_WARN} | |
290 |
|
290 | |||
291 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
291 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
292 |
|
292 | |||
293 |
|
293 | |||
294 | @register_sysinfo |
|
294 | @register_sysinfo | |
295 | def machine_load(): |
|
295 | def machine_load(): | |
296 | value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''} |
|
296 | value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''} | |
297 | state = STATE_OK_DEFAULT |
|
297 | state = STATE_OK_DEFAULT | |
298 |
|
298 | |||
299 | # load averages |
|
299 | # load averages | |
300 | if hasattr(psutil.os, 'getloadavg'): |
|
300 | if hasattr(psutil.os, 'getloadavg'): | |
301 | value.update(dict( |
|
301 | value.update(dict( | |
302 | list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg())) |
|
302 | list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg())) | |
303 | )) |
|
303 | )) | |
304 |
|
304 | |||
305 | human_value = value.copy() |
|
305 | human_value = value.copy() | |
306 | human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format( |
|
306 | human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format( | |
307 | value['1_min'], value['5_min'], value['15_min']) |
|
307 | value['1_min'], value['5_min'], value['15_min']) | |
308 |
|
308 | |||
309 | if state['type'] == STATE_OK and value['15_min'] > 5.0: |
|
309 | if state['type'] == STATE_OK and value['15_min'] > 5.0: | |
310 | msg = 'Warning: your machine load is very high.' |
|
310 | msg = 'Warning: your machine load is very high.' | |
311 | state = {'message': msg, 'type': STATE_WARN} |
|
311 | state = {'message': msg, 'type': STATE_WARN} | |
312 |
|
312 | |||
313 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
313 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
314 |
|
314 | |||
315 |
|
315 | |||
316 | @register_sysinfo |
|
316 | @register_sysinfo | |
317 | def cpu(): |
|
317 | def cpu(): | |
318 | value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []} |
|
318 | value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []} | |
319 | state = STATE_OK_DEFAULT |
|
319 | state = STATE_OK_DEFAULT | |
320 |
|
320 | |||
321 | value['cpu'] = psutil.cpu_percent(0.5) |
|
321 | value['cpu'] = psutil.cpu_percent(0.5) | |
322 | value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True) |
|
322 | value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True) | |
323 | value['cpu_count'] = psutil.cpu_count() |
|
323 | value['cpu_count'] = psutil.cpu_count() | |
324 |
|
324 | |||
325 | human_value = value.copy() |
|
325 | human_value = value.copy() | |
326 | human_value['text'] = f'{value["cpu_count"]} cores at {value["cpu"]} %' |
|
326 | human_value['text'] = f'{value["cpu_count"]} cores at {value["cpu"]} %' | |
327 |
|
327 | |||
328 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
328 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
329 |
|
329 | |||
330 |
|
330 | |||
331 | @register_sysinfo |
|
331 | @register_sysinfo | |
332 | def storage(): |
|
332 | def storage(): | |
333 | from rhodecode.lib.helpers import format_byte_size_binary |
|
333 | from rhodecode.lib.helpers import format_byte_size_binary | |
334 | from rhodecode.lib.utils import get_rhodecode_repo_store_path |
|
334 | from rhodecode.lib.utils import get_rhodecode_repo_store_path | |
335 | path = get_rhodecode_repo_store_path() |
|
335 | path = get_rhodecode_repo_store_path() | |
336 |
|
336 | |||
337 | value = dict(percent=0, used=0, total=0, path=path, text='') |
|
337 | value = dict(percent=0, used=0, total=0, path=path, text='') | |
338 | state = STATE_OK_DEFAULT |
|
338 | state = STATE_OK_DEFAULT | |
339 |
|
339 | |||
340 | try: |
|
340 | try: | |
341 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
341 | value.update(dict(psutil.disk_usage(path)._asdict())) | |
342 | except Exception as e: |
|
342 | except Exception as e: | |
343 | log.exception('Failed to fetch disk info') |
|
343 | log.exception('Failed to fetch disk info') | |
344 | state = {'message': str(e), 'type': STATE_ERR} |
|
344 | state = {'message': str(e), 'type': STATE_ERR} | |
345 |
|
345 | |||
346 | human_value = value.copy() |
|
346 | human_value = value.copy() | |
347 | human_value['used'] = format_byte_size_binary(value['used']) |
|
347 | human_value['used'] = format_byte_size_binary(value['used']) | |
348 | human_value['total'] = format_byte_size_binary(value['total']) |
|
348 | human_value['total'] = format_byte_size_binary(value['total']) | |
349 | human_value['text'] = "{}/{}, {}% used".format( |
|
349 | human_value['text'] = "{}/{}, {}% used".format( | |
350 | format_byte_size_binary(value['used']), |
|
350 | format_byte_size_binary(value['used']), | |
351 | format_byte_size_binary(value['total']), |
|
351 | format_byte_size_binary(value['total']), | |
352 | value['percent']) |
|
352 | value['percent']) | |
353 |
|
353 | |||
354 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
354 | if state['type'] == STATE_OK and value['percent'] > 90: | |
355 | msg = 'Critical: your disk space is very low.' |
|
355 | msg = 'Critical: your disk space is very low.' | |
356 | state = {'message': msg, 'type': STATE_ERR} |
|
356 | state = {'message': msg, 'type': STATE_ERR} | |
357 |
|
357 | |||
358 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
358 | elif state['type'] == STATE_OK and value['percent'] > 70: | |
359 | msg = 'Warning: your disk space is running low.' |
|
359 | msg = 'Warning: your disk space is running low.' | |
360 | state = {'message': msg, 'type': STATE_WARN} |
|
360 | state = {'message': msg, 'type': STATE_WARN} | |
361 |
|
361 | |||
362 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
362 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
363 |
|
363 | |||
364 |
|
364 | |||
365 | @register_sysinfo |
|
365 | @register_sysinfo | |
366 | def storage_inodes(): |
|
366 | def storage_inodes(): | |
367 | from rhodecode.lib.utils import get_rhodecode_repo_store_path |
|
367 | from rhodecode.lib.utils import get_rhodecode_repo_store_path | |
368 | path = get_rhodecode_repo_store_path() |
|
368 | path = get_rhodecode_repo_store_path() | |
369 |
|
369 | |||
370 | value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='') |
|
370 | value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='') | |
371 | state = STATE_OK_DEFAULT |
|
371 | state = STATE_OK_DEFAULT | |
372 |
|
372 | |||
373 | try: |
|
373 | try: | |
374 | i_stat = os.statvfs(path) |
|
374 | i_stat = os.statvfs(path) | |
375 | value['free'] = i_stat.f_ffree |
|
375 | value['free'] = i_stat.f_ffree | |
376 | value['used'] = i_stat.f_files-i_stat.f_favail |
|
376 | value['used'] = i_stat.f_files-i_stat.f_favail | |
377 | value['total'] = i_stat.f_files |
|
377 | value['total'] = i_stat.f_files | |
378 | value['percent'] = percentage(value['used'], value['total']) |
|
378 | value['percent'] = percentage(value['used'], value['total']) | |
379 | except Exception as e: |
|
379 | except Exception as e: | |
380 | log.exception('Failed to fetch disk inodes info') |
|
380 | log.exception('Failed to fetch disk inodes info') | |
381 | state = {'message': str(e), 'type': STATE_ERR} |
|
381 | state = {'message': str(e), 'type': STATE_ERR} | |
382 |
|
382 | |||
383 | human_value = value.copy() |
|
383 | human_value = value.copy() | |
384 | human_value['text'] = "{}/{}, {}% used".format( |
|
384 | human_value['text'] = "{}/{}, {}% used".format( | |
385 | value['used'], value['total'], value['percent']) |
|
385 | value['used'], value['total'], value['percent']) | |
386 |
|
386 | |||
387 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
387 | if state['type'] == STATE_OK and value['percent'] > 90: | |
388 | msg = 'Critical: your disk free inodes are very low.' |
|
388 | msg = 'Critical: your disk free inodes are very low.' | |
389 | state = {'message': msg, 'type': STATE_ERR} |
|
389 | state = {'message': msg, 'type': STATE_ERR} | |
390 |
|
390 | |||
391 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
391 | elif state['type'] == STATE_OK and value['percent'] > 70: | |
392 | msg = 'Warning: your disk free inodes are running low.' |
|
392 | msg = 'Warning: your disk free inodes are running low.' | |
393 | state = {'message': msg, 'type': STATE_WARN} |
|
393 | state = {'message': msg, 'type': STATE_WARN} | |
394 |
|
394 | |||
395 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
395 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
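
The arithmetic above follows os.statvfs semantics: f_files is the filesystem's total inode count, f_ffree the free count, and f_favail the share available to unprivileged users, so used = f_files - f_favail deliberately counts root-reserved inodes as used. A quick interactive check:

    st = os.statvfs('/')
    used = st.f_files - st.f_favail
    print(used, st.f_files, percentage(used, st.f_files))
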
396 |
|
396 | |||
397 |
|
397 | |||
398 | @register_sysinfo |
|
398 | @register_sysinfo | |
399 | def storage_archives(): |
|
399 | def storage_archives(): | |
400 | import rhodecode |
|
400 | import rhodecode | |
401 | from rhodecode.lib.helpers import format_byte_size_binary |
|
401 | from rhodecode.lib.helpers import format_byte_size_binary | |
402 |
from rhodecode.lib. |
|
402 | from rhodecode.lib.archive_cache import get_archival_cache_store | |
403 |
|
403 | |||
404 | storage_type = rhodecode.ConfigGet().get_str('archive_cache.backend.type') |
|
404 | storage_type = rhodecode.ConfigGet().get_str('archive_cache.backend.type') | |
405 | storage_key = 'archive_cache.filesystem.store_dir' |
|
|||
406 |
|
405 | |||
407 | default_msg = 'Archive cache storage is controlled by '\ |
|
406 | value = dict(percent=0, used=0, total=0, items=0, path='', text='', type=storage_type) | |
408 | f'{storage_key}=/path/to/cache option in the .ini file' |
|
|||
409 | path = rhodecode.ConfigGet().get_str(storage_key, missing=default_msg) |
|
|||
410 |
|
||||
411 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='', type=storage_type) |
|
|||
412 | state = STATE_OK_DEFAULT |
|
407 | state = STATE_OK_DEFAULT | |
413 | try: |
|
408 | try: | |
414 | if storage_type != 'filesystem': |
|
409 | d_cache = get_archival_cache_store(config=rhodecode.CONFIG) | |
415 | # raise Exc to stop reporting on different type |
|
|||
416 | raise ValueError('Storage type must be "filesystem"') |
|
|||
417 |
|
410 | |||
418 |
total_files, total_size, _directory_stats = |
|
411 | total_files, total_size, _directory_stats = d_cache.get_statistics() | |
419 |
|
412 | |||
420 | value.update({ |
|
413 | value.update({ | |
421 | 'percent': 100, |
|
414 | 'percent': 100, | |
422 | 'used': total_size, |
|
415 | 'used': total_size, | |
423 | 'total': total_size, |
|
416 | 'total': total_size, | |
424 | 'items': total_files |
|
417 | 'items': total_files, | |
|
418 | 'path': d_cache.storage_path | |||
425 | }) |
|
419 | }) | |
426 |
|
420 | |||
427 | except Exception as e: |
|
421 | except Exception as e: | |
428 | log.exception('failed to fetch archive cache storage') |
|
422 | log.exception('failed to fetch archive cache storage') | |
429 | state = {'message': str(e), 'type': STATE_ERR} |
|
423 | state = {'message': str(e), 'type': STATE_ERR} | |
430 |
|
424 | |||
431 | human_value = value.copy() |
|
425 | human_value = value.copy() | |
432 | human_value['used'] = format_byte_size_binary(value['used']) |
|
426 | human_value['used'] = format_byte_size_binary(value['used']) | |
433 | human_value['total'] = format_byte_size_binary(value['total']) |
|
427 | human_value['total'] = format_byte_size_binary(value['total']) | |
434 | human_value['text'] = "{} ({} items)".format( |
|
428 | human_value['text'] = "{} ({} items)".format( | |
435 | human_value['used'], value['items']) |
|
429 | human_value['used'], value['items']) | |
436 |
|
430 | |||
437 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
431 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
438 |
|
432 | |||
439 |
|
433 | |||
440 | @register_sysinfo |
|
434 | @register_sysinfo | |
441 | def storage_gist(): |
|
435 | def storage_gist(): | |
442 | from rhodecode.model.gist import GIST_STORE_LOC |
|
436 | from rhodecode.model.gist import GIST_STORE_LOC | |
443 | from rhodecode.lib.utils import safe_str, get_rhodecode_repo_store_path |
|
437 | from rhodecode.lib.utils import safe_str, get_rhodecode_repo_store_path | |
444 | from rhodecode.lib.helpers import format_byte_size_binary |
|
438 | from rhodecode.lib.helpers import format_byte_size_binary, get_directory_statistics | |
445 | from rhodecode.lib.rc_cache.archive_cache.utils import get_directory_statistics |
|
|||
446 |
|
439 | |||
447 | path = safe_str(os.path.join( |
|
440 | path = safe_str(os.path.join( | |
448 | get_rhodecode_repo_store_path(), GIST_STORE_LOC)) |
|
441 | get_rhodecode_repo_store_path(), GIST_STORE_LOC)) | |
449 |
|
442 | |||
450 | # gist storage |
|
443 | # gist storage | |
451 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
444 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') | |
452 | state = STATE_OK_DEFAULT |
|
445 | state = STATE_OK_DEFAULT | |
453 |
|
446 | |||
454 | try: |
|
447 | try: | |
455 | total_files, total_size, _directory_stats = get_directory_statistics(path) |
|
448 | total_files, total_size, _directory_stats = get_directory_statistics(path) | |
456 | value.update({ |
|
449 | value.update({ | |
457 | 'percent': 100, |
|
450 | 'percent': 100, | |
458 | 'used': total_size, |
|
451 | 'used': total_size, | |
459 | 'total': total_size, |
|
452 | 'total': total_size, | |
460 | 'items': total_files |
|
453 | 'items': total_files | |
461 | }) |
|
454 | }) | |
462 | except Exception as e: |
|
455 | except Exception as e: | |
463 | log.exception('failed to fetch gist storage items') |
|
456 | log.exception('failed to fetch gist storage items') | |
464 | state = {'message': str(e), 'type': STATE_ERR} |
|
457 | state = {'message': str(e), 'type': STATE_ERR} | |
465 |
|
458 | |||
466 | human_value = value.copy() |
|
459 | human_value = value.copy() | |
467 | human_value['used'] = format_byte_size_binary(value['used']) |
|
460 | human_value['used'] = format_byte_size_binary(value['used']) | |
468 | human_value['total'] = format_byte_size_binary(value['total']) |
|
461 | human_value['total'] = format_byte_size_binary(value['total']) | |
469 | human_value['text'] = "{} ({} items)".format( |
|
462 | human_value['text'] = "{} ({} items)".format( | |
470 | human_value['used'], value['items']) |
|
463 | human_value['used'], value['items']) | |
471 |
|
464 | |||
472 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
465 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
473 |
|
466 | |||
474 |
|
467 | |||
475 | @register_sysinfo |
|
468 | @register_sysinfo | |
476 | def storage_temp(): |
|
469 | def storage_temp(): | |
477 | import tempfile |
|
470 | import tempfile | |
478 | from rhodecode.lib.helpers import format_byte_size_binary |
|
471 | from rhodecode.lib.helpers import format_byte_size_binary | |
479 |
|
472 | |||
480 | path = tempfile.gettempdir() |
|
473 | path = tempfile.gettempdir() | |
481 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
474 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') | |
482 | state = STATE_OK_DEFAULT |
|
475 | state = STATE_OK_DEFAULT | |
483 |
|
476 | |||
484 | if not psutil: |
|
477 | if not psutil: | |
485 | return SysInfoRes(value=value, state=state) |
|
478 | return SysInfoRes(value=value, state=state) | |
486 |
|
479 | |||
487 | try: |
|
480 | try: | |
488 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
481 | value.update(dict(psutil.disk_usage(path)._asdict())) | |
489 | except Exception as e: |
|
482 | except Exception as e: | |
490 | log.exception('Failed to fetch temp dir info') |
|
483 | log.exception('Failed to fetch temp dir info') | |
491 | state = {'message': str(e), 'type': STATE_ERR} |
|
484 | state = {'message': str(e), 'type': STATE_ERR} | |
492 |
|
485 | |||
493 | human_value = value.copy() |
|
486 | human_value = value.copy() | |
494 | human_value['used'] = format_byte_size_binary(value['used']) |
|
487 | human_value['used'] = format_byte_size_binary(value['used']) | |
495 | human_value['total'] = format_byte_size_binary(value['total']) |
|
488 | human_value['total'] = format_byte_size_binary(value['total']) | |
496 | human_value['text'] = "{}/{}, {}% used".format( |
|
489 | human_value['text'] = "{}/{}, {}% used".format( | |
497 | format_byte_size_binary(value['used']), |
|
490 | format_byte_size_binary(value['used']), | |
498 | format_byte_size_binary(value['total']), |
|
491 | format_byte_size_binary(value['total']), | |
499 | value['percent']) |
|
492 | value['percent']) | |
500 |
|
493 | |||
501 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
494 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
502 |
|
495 | |||
503 |
|
496 | |||
504 | @register_sysinfo |
|
497 | @register_sysinfo | |
505 | def search_info(): |
|
498 | def search_info(): | |
506 | import rhodecode |
|
499 | import rhodecode | |
507 | from rhodecode.lib.index import searcher_from_config |
|

    backend = rhodecode.CONFIG.get('search.module', '')
    location = rhodecode.CONFIG.get('search.location', '')

    try:
        searcher = searcher_from_config(rhodecode.CONFIG)
        searcher = searcher.__class__.__name__
    except Exception:
        searcher = None

    value = dict(
        backend=backend, searcher=searcher, location=location, text='')
    state = STATE_OK_DEFAULT

    human_value = value.copy()
    human_value['text'] = "backend:`{}`".format(human_value['backend'])

    return SysInfoRes(value=value, state=state, human_value=human_value)


@register_sysinfo
def git_info():
    from rhodecode.lib.vcs.backends import git
    state = STATE_OK_DEFAULT
    value = human_value = ''
    try:
        value = git.discover_git_version(raise_on_exc=True)
        human_value = f'version reported from VCSServer: {value}'
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}

    return SysInfoRes(value=value, state=state, human_value=human_value)


@register_sysinfo
def hg_info():
    from rhodecode.lib.vcs.backends import hg
    state = STATE_OK_DEFAULT
    value = human_value = ''
    try:
        value = hg.discover_hg_version(raise_on_exc=True)
        human_value = f'version reported from VCSServer: {value}'
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}
    return SysInfoRes(value=value, state=state, human_value=human_value)


@register_sysinfo
def svn_info():
    from rhodecode.lib.vcs.backends import svn
    state = STATE_OK_DEFAULT
    value = human_value = ''
    try:
        value = svn.discover_svn_version(raise_on_exc=True)
        human_value = f'version reported from VCSServer: {value}'
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}
    return SysInfoRes(value=value, state=state, human_value=human_value)


@register_sysinfo
def vcs_backends():
    import rhodecode
    value = rhodecode.CONFIG.get('vcs.backends')
    human_value = 'Enabled backends in order: {}'.format(','.join(value))
    return SysInfoRes(value=value, human_value=human_value)


@register_sysinfo
def vcs_server():
    import rhodecode
    from rhodecode.lib.vcs.backends import get_vcsserver_service_data

    server_url = rhodecode.CONFIG.get('vcs.server')
    enabled = rhodecode.CONFIG.get('vcs.server.enable')
    protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
    state = STATE_OK_DEFAULT
    version = None
    workers = 0

    try:
        data = get_vcsserver_service_data()
        if data and 'version' in data:
            version = data['version']

        if data and 'config' in data:
            conf = data['config']
            workers = conf.get('workers', 'NOT AVAILABLE')

        connection = 'connected'
    except Exception as e:
        connection = 'failed'
        state = {'message': str(e), 'type': STATE_ERR}

    value = dict(
        url=server_url,
        enabled=enabled,
        protocol=protocol,
        connection=connection,
        version=version,
        text='',
    )

    human_value = value.copy()
    human_value['text'] = \
        '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
            url=server_url, ver=version, workers=workers, mode=protocol,
            conn=connection)

    return SysInfoRes(value=value, state=state, human_value=human_value)


@register_sysinfo
def vcs_server_config():
    from rhodecode.lib.vcs.backends import get_vcsserver_service_data
    state = STATE_OK_DEFAULT

    value = {}
    try:
        data = get_vcsserver_service_data()
        value = data['app_config']
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}

    human_value = value.copy()
    human_value['text'] = 'VCS Server config'

    return SysInfoRes(value=value, state=state, human_value=human_value)


@register_sysinfo
def rhodecode_app_info():
    import rhodecode
    edition = rhodecode.CONFIG.get('rhodecode.edition')

    value = dict(
        rhodecode_version=rhodecode.__version__,
        rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
        text=''
    )
    human_value = value.copy()
    human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
        edition=edition, ver=value['rhodecode_version']
    )
    return SysInfoRes(value=value, human_value=human_value)


@register_sysinfo
def rhodecode_config():
    import rhodecode
    path = rhodecode.CONFIG.get('__file__')
    rhodecode_ini_safe = rhodecode.CONFIG.copy()
    cert_path = get_cert_path(path)

    try:
        config = configparser.ConfigParser()
        config.read(path)
        parsed_ini = config
        if parsed_ini.has_section('server:main'):
            parsed_ini = dict(parsed_ini.items('server:main'))
    except Exception:
        log.exception('Failed to read .ini file for display')
        parsed_ini = {}

    rhodecode_ini_safe['server:main'] = parsed_ini

    blacklist = [
        f'rhodecode_{LicenseModel.LICENSE_DB_KEY}',
        'routes.map',
        'sqlalchemy.db1.url',
        'channelstream.secret',
        'beaker.session.secret',
        'rhodecode.encrypted_values.secret',
        'rhodecode_auth_github_consumer_key',
        'rhodecode_auth_github_consumer_secret',
        'rhodecode_auth_google_consumer_key',
        'rhodecode_auth_google_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_key',
        'rhodecode_auth_twitter_consumer_secret',
        'rhodecode_auth_twitter_consumer_key',

        'rhodecode_auth_twitter_secret',
        'rhodecode_auth_github_secret',
        'rhodecode_auth_google_secret',
        'rhodecode_auth_bitbucket_secret',

        'appenlight.api_key',
        ('app_conf', 'sqlalchemy.db1.url')
    ]
    for k in blacklist:
        if isinstance(k, tuple):
            section, key = k
            if section in rhodecode_ini_safe:
                rhodecode_ini_safe[section] = '**OBFUSCATED**'
        else:
            rhodecode_ini_safe.pop(k, None)

    # TODO: maybe put some CONFIG checks here ?
    return SysInfoRes(value={'config': rhodecode_ini_safe,
                             'path': path, 'cert_path': cert_path})


@register_sysinfo
def database_info():
    import rhodecode
    from sqlalchemy.engine import url as engine_url
    from rhodecode.model import meta
    from rhodecode.model.meta import Session
    from rhodecode.model.db import DbMigrateVersion

    state = STATE_OK_DEFAULT

    db_migrate = DbMigrateVersion.query().filter(
        DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()

    db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])

    try:
        engine = meta.get_engine()
        db_server_info = engine.dialect._get_server_version_info(
            Session.connection(bind=engine))
        db_version = '.'.join(map(str, db_server_info))
    except Exception:
        log.exception('failed to fetch db version')
        db_version = 'UNKNOWN'

    db_info = dict(
        migrate_version=db_migrate.version,
        type=db_url_obj.get_backend_name(),
        version=db_version,
        url=repr(db_url_obj)
    )
    current_version = db_migrate.version
    expected_version = rhodecode.__dbversion__
    if state['type'] == STATE_OK and current_version != expected_version:
        msg = 'Critical: database schema mismatch, ' \
              'expected version {}, got {}. ' \
              'Please run migrations on your database.'.format(
                  expected_version, current_version)
        state = {'message': msg, 'type': STATE_ERR}

    human_value = db_info.copy()
    human_value['url'] = "{} @ migration version: {}".format(
        db_info['url'], db_info['migrate_version'])
    human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
    return SysInfoRes(value=db_info, state=state, human_value=human_value)


@register_sysinfo
def server_info(environ):
    import rhodecode
    from rhodecode.lib.base import get_server_ip_addr, get_server_port

    value = {
        'server_ip': '{}:{}'.format(
            get_server_ip_addr(environ, log_errors=False),
            get_server_port(environ)
        ),
        'server_id': rhodecode.CONFIG.get('instance_id'),
    }
    return SysInfoRes(value=value)


@register_sysinfo
def usage_info():
    from rhodecode.model.db import User, Repository, true
    value = {
        'users': User.query().count(),
        'users_active': User.query().filter(User.active == true()).count(),
        'repositories': Repository.query().count(),
        'repository_types': {
            'hg': Repository.query().filter(
                Repository.repo_type == 'hg').count(),
            'git': Repository.query().filter(
                Repository.repo_type == 'git').count(),
            'svn': Repository.query().filter(
                Repository.repo_type == 'svn').count(),
        },
    }
    return SysInfoRes(value=value)


def get_system_info(environ):
    environ = environ or {}
    return {
        'rhodecode_app': SysInfo(rhodecode_app_info)(),
        'rhodecode_config': SysInfo(rhodecode_config)(),
        'rhodecode_usage': SysInfo(usage_info)(),
        'python': SysInfo(python_info)(),
        'py_modules': SysInfo(py_modules)(),

        'platform': SysInfo(platform_type)(),
        'locale': SysInfo(locale_info)(),
        'server': SysInfo(server_info, environ=environ)(),
        'database': SysInfo(database_info)(),
        'ulimit': SysInfo(ulimit_info)(),
        'storage': SysInfo(storage)(),
        'storage_inodes': SysInfo(storage_inodes)(),
        'storage_archive': SysInfo(storage_archives)(),
        'storage_gist': SysInfo(storage_gist)(),
        'storage_temp': SysInfo(storage_temp)(),

        'search': SysInfo(search_info)(),

        'uptime': SysInfo(uptime)(),
        'load': SysInfo(machine_load)(),
        'cpu': SysInfo(cpu)(),
        'memory': SysInfo(memory)(),

        'vcs_backends': SysInfo(vcs_backends)(),
        'vcs_server': SysInfo(vcs_server)(),

        'vcs_server_config': SysInfo(vcs_server_config)(),

        'git': SysInfo(git_info)(),
        'hg': SysInfo(hg_info)(),
        'svn': SysInfo(svn_info)(),
    }


def load_system_info(key):
    """
    get_sys_info('vcs_server')
    get_sys_info('database')
    """
    return SysInfo(registered_helpers[key])()
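

# A usage sketch (illustrative, not part of the original module): any helper
# registered above can be computed on demand by key, mirroring the docstring
# of load_system_info:
#
#   vcs_state = load_system_info('vcs_server')
#   db_state = load_system_info('database')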
@@ -1,226 +1,226 b''

# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import pytest

from rhodecode.lib.config_utils import get_app_config
from rhodecode.tests.fixture import TestINI
from rhodecode.tests import TESTS_TMP_PATH
from rhodecode.tests.server_utils import RcVCSServer


@pytest.fixture(scope='session')
def vcsserver(request, vcsserver_port, vcsserver_factory):
    """
    Session scope VCSServer.

    Tests which need the VCSServer have to rely on this fixture in order
    to ensure it will be running.

    For specific needs, the fixture vcsserver_factory can be used. It allows
    adjusting the configuration file for the test run.

    Command line args:

    --without-vcsserver: Allows switching this fixture off. You have to
    manually start the server.

    --vcsserver-port: Will expect the VCSServer to listen on this port.
    """

    if not request.config.getoption('with_vcsserver'):
        return None

    return vcsserver_factory(
        request, vcsserver_port=vcsserver_port)
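
# A usage sketch (hypothetical test, not part of this module): tests simply
# request the fixture; it returns None when the run was started with
# --without-vcsserver, in which case the server must be started manually.
#
#   def test_needs_vcsserver(vcsserver):
#       if vcsserver is None:
#           pytest.skip('started with --without-vcsserver')
#       ...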


@pytest.fixture(scope='session')
def vcsserver_factory(tmpdir_factory):
    """
    Use this if you need a running vcsserver with a special configuration.
    """

    def factory(request, overrides=(), vcsserver_port=None,
                log_file=None, workers='2'):

        if vcsserver_port is None:
            vcsserver_port = get_available_port()

        overrides = list(overrides)
        overrides.append({'server:main': {'port': vcsserver_port}})

        option_name = 'vcsserver_config_http'
        override_option_name = 'vcsserver_config_override'
        config_file = get_config(
            request.config, option_name=option_name,
            override_option_name=override_option_name, overrides=overrides,
            basetemp=tmpdir_factory.getbasetemp().strpath,
            prefix='test_vcs_')

        server = RcVCSServer(config_file, log_file, workers)
        server.start()

        @request.addfinalizer
        def cleanup():
            server.shutdown()

        server.wait_until_ready()
        return server

    return factory
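
# A usage sketch (hypothetical override values, not part of this module):
# start an extra server with a custom worker count on top of the template
# resolved from the `vcsserver_config_http` option.
#
#   def test_custom_vcsserver(request, vcsserver_factory):
#       server = vcsserver_factory(
#           request,
#           overrides=[{'app:main': {'locale': 'en_US.UTF-8'}}],
#           workers='4')
#       # `server` is a started RcVCSServer; shutdown is registered as a
#       # pytest finalizer, so no manual cleanup is needed.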


def _use_log_level(config):
    level = config.getoption('test_loglevel') or 'critical'
    return level.upper()


@pytest.fixture(scope='session')
def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
    option_name = 'pyramid_config'
    log_level = _use_log_level(request.config)

    overrides = [
        {'server:main': {'port': rcserver_port}},
        {'app:main': {
-            'cache_dir': '%(here)s/rc_data',
+            'cache_dir': '%(here)s/rc-tests/rc_data',
            'vcs.server': f'localhost:{vcsserver_port}',
            # johbo: We will always start the VCSServer on our own based on the
            # fixtures of the test cases. For the test run it must always be
            # off in the INI file.
            'vcs.start_server': 'false',

            'vcs.server.protocol': 'http',
            'vcs.scm_app_implementation': 'http',
            'vcs.svn.proxy.enabled': 'true',
            'vcs.hooks.protocol': 'http',
            'vcs.hooks.host': '*',
            'repo_store.path': TESTS_TMP_PATH,
            'app.service_api.token': 'service_secret_token',
        }},

        {'handler_console': {
            'class': 'StreamHandler',
            'args': '(sys.stderr,)',
            'level': log_level,
        }},

    ]

    filename = get_config(
        request.config, option_name=option_name,
        override_option_name='{}_override'.format(option_name),
        overrides=overrides,
        basetemp=tmpdir_factory.getbasetemp().strpath,
        prefix='test_rce_')
    return filename


@pytest.fixture(scope='session')
def ini_settings(ini_config):
    ini_path = ini_config
    return get_app_config(ini_path)


def get_available_port(min_port=40000, max_port=55555):
    from rhodecode.lib.utils2 import get_available_port as _get_port
    return _get_port(min_port, max_port)


@pytest.fixture(scope='session')
def rcserver_port(request):
    port = get_available_port()
    print(f'Using rhodecode port {port}')
    return port


@pytest.fixture(scope='session')
def vcsserver_port(request):
    port = request.config.getoption('--vcsserver-port')
    if port is None:
        port = get_available_port()
        print(f'Using vcsserver port {port}')
    return port


@pytest.fixture(scope='session')
def available_port_factory():
    """
    Returns a callable which returns free port numbers.
    """
    return get_available_port


@pytest.fixture()
def available_port(available_port_factory):
    """
    Gives you one free port for the current test.

    Uses "available_port_factory" to retrieve the port.
    """
    return available_port_factory()


@pytest.fixture(scope='session')
def testini_factory(tmpdir_factory, ini_config):
    """
    Factory to create an INI file based on TestINI.

    It will make sure to place the INI file in the correct directory.
    """
    basetemp = tmpdir_factory.getbasetemp().strpath
    return TestIniFactory(basetemp, ini_config)


class TestIniFactory(object):

    def __init__(self, basetemp, template_ini):
        self._basetemp = basetemp
        self._template_ini = template_ini

    def __call__(self, ini_params, new_file_prefix='test'):
        ini_file = TestINI(
            self._template_ini, ini_params=ini_params,
            new_file_prefix=new_file_prefix, dir=self._basetemp)
        result = ini_file.create()
        return result
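
# A usage sketch (hypothetical params, not part of this module): generate a
# throw-away INI on top of the session-wide test config.
#
#   def test_with_custom_ini(testini_factory):
#       ini_path = testini_factory(
#           [{'app:main': {'gzip_responses': 'true'}}])
#       # ini_path points at a generated file under the pytest basetemp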


def get_config(
        config, option_name, override_option_name, overrides=None,
        basetemp=None, prefix='test'):
    """
    Find a configuration file and apply overrides for the given `prefix`.
    """
    config_file = (
        config.getoption(option_name) or config.getini(option_name))
    if not config_file:
        pytest.exit(
            "Configuration error, could not extract {}.".format(option_name))

    overrides = overrides or []
    config_override = config.getoption(override_option_name)
    if config_override:
        overrides.append(config_override)
    temp_ini_file = TestINI(
        config_file, ini_params=overrides, new_file_prefix=prefix,
        dir=basetemp)

    return temp_ini_file.create()
@@ -1,785 +1,827 b''

; #########################################
; RHODECODE COMMUNITY EDITION CONFIGURATION
; #########################################

[DEFAULT]
; Debug flag sets all loggers to debug, and enables request tracking
debug = true

; ########################################################################
; EMAIL CONFIGURATION
; These settings will be used by the RhodeCode mailing system
; ########################################################################

; prefix all email subjects with the given prefix, helps filtering out emails
#email_prefix = [RhodeCode]

; email FROM address from which all mails will be sent
#app_email_from = rhodecode-noreply@localhost

#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true

[server:main]
; COMMON HOST/IP CONFIG. This applies mostly to a develop setup;
; host and port for gunicorn are controlled by gunicorn_conf.py
host = 127.0.0.1
port = 10020


; ###########################
; GUNICORN APPLICATION SERVER
; ###########################

; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py

; Module to use, this setting shouldn't be changed
use = egg:gunicorn#main

; Prefix middleware for RhodeCode.
; Recommended when using a proxy setup;
; allows serving RhodeCode under a URL prefix on the server,
; eg https://server.com/custom_prefix. Enable the `filter-with =` option below as well,
; and set your prefix like: `prefix = /custom_prefix`.
; Be sure to also set beaker.session.cookie_path = /custom_prefix if you need
; to make your cookies only work on the prefix url.
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /
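
; Example (hypothetical prefix, not in the original file): to serve RhodeCode
; at https://server.com/rc, set `prefix = /rc` above, enable
; `filter-with = proxy-prefix` in the [app:main] section below, and set
; beaker.session.cookie_path = /rc so session cookies stay under the prefix.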

[app:main]
; The %(here)s variable will be replaced with the absolute path of the parent
; directory of this file.
; Each option in app:main can be overridden by an environment variable.
;
;To override an option:
;
;RC_<KeyName>
;Everything should be uppercase, . and - should be replaced by _.
;For example, if you have these configuration settings:
;rc_cache.repo_object.backend = foo
;can be overridden by
;export RC_CACHE_REPO_OBJECT_BACKEND=foo

use = egg:rhodecode-enterprise-ce

; enable proxy prefix middleware, defined above
#filter-with = proxy-prefix

; encryption key used to encrypt social plugin tokens,
; remote_urls with credentials etc, if not set it defaults to
; `beaker.session.secret`
#rhodecode.encrypted_values.secret =

; decryption strict mode (enabled by default). It controls if decryption raises
; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
#rhodecode.encrypted_values.strict = false

; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
; fernet is safer, and we strongly recommend switching to it.
; Due to backward compatibility aes is used as default.
#rhodecode.encrypted_values.algorithm = fernet

; Return gzipped responses from RhodeCode (static files/application)
gzip_responses = false

; Auto-generate javascript routes file on startup
generate_js_files = false

; System global default language.
; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

; Perform a full repository scan and import on each server start.
; Setting this to true could lead to very long startup time.
startup.import_repos = true

; URL at which the application is running. This is used for bootstrapping
; requests in context when no web request is available. Used in ishell, or
; SSH calls. Set this for events to receive proper url for SSH calls.
app.base_url = http://rhodecode.local

; Host at which the Service API is running.
app.service_api.host = http://rhodecode.local:10020

; Secret for Service API authentication.
app.service_api.token =

; Unique application ID. Should be a random unique string for security.
app_instance_uuid = rc-production

; Cut off limit for large diffs (size in bytes). If the overall diff size on
; a commit or pull request exceeds this limit, the diff will be displayed
; partially. E.g 512000 == 512Kb
cut_off_limit_diff = 1024000

; Cut off limit for large files inside diffs (size in bytes). Each individual
; file inside a diff which exceeds this limit will be displayed partially.
; E.g 128000 == 128Kb
cut_off_limit_file = 256000

; Use cached version of vcs repositories everywhere. Recommended to be `true`
vcs_full_cache = false

; Force https in RhodeCode, fixes https redirects, assumes it's always https.
; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
force_https = false

; use Strict-Transport-Security headers
use_htsts = false

; Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

; RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

; gist URL alias, used to create nicer urls for gists. This should be a
; url that does rewrites to _admin/gists/{gistid},
; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
gist_alias_url =

; List of views (using glob pattern syntax) that AUTH TOKENS can be
; used to access.
; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
; came from the logged-in user who owns this authentication token.
; Additionally, the @TOKEN syntax can be used to bind a view to a specific
; authentication token. Such a view is then only accessible when used together
; with this authentication token.
; The list of all views can be found under `/_admin/permissions/auth_token_access`.
; The list should be "," separated and on a single line.
; Most common views to enable:

# RepoCommitsView:repo_commit_download
# RepoCommitsView:repo_commit_patch
# RepoCommitsView:repo_commit_raw
# RepoCommitsView:repo_commit_raw@TOKEN
# RepoFilesView:repo_files_diff
# RepoFilesView:repo_archivefile
# RepoFilesView:repo_file_raw
# GistView:*
api_access_controllers_whitelist =
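; Example (hypothetical selection, views taken from the list above):
;   api_access_controllers_whitelist = RepoFilesView:repo_file_raw, RepoFilesView:repo_archivefile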
170 |
|
170 | |||
171 | ; Default encoding used to convert from and to unicode |
|
171 | ; Default encoding used to convert from and to unicode | |
172 | ; can be also a comma separated list of encoding in case of mixed encodings |
|
172 | ; can be also a comma separated list of encoding in case of mixed encodings | |
173 | default_encoding = UTF-8 |
|
173 | default_encoding = UTF-8 | |
174 |
|
174 | |||
175 | ; instance-id prefix |
|
175 | ; instance-id prefix | |
176 | ; a prefix key for this instance used for cache invalidation when running |
|
176 | ; a prefix key for this instance used for cache invalidation when running | |
177 | ; multiple instances of RhodeCode, make sure it's globally unique for |
|
177 | ; multiple instances of RhodeCode, make sure it's globally unique for | |
178 | ; all running RhodeCode instances. Leave empty if you don't use it |
|
178 | ; all running RhodeCode instances. Leave empty if you don't use it | |
179 | instance_id = |
|
179 | instance_id = | |
180 |
|
180 | |||
181 | ; Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
181 | ; Fallback authentication plugin. Set this to a plugin ID to force the usage | |
182 | ; of an authentication plugin also if it is disabled by it's settings. |
|
182 | ; of an authentication plugin also if it is disabled by it's settings. | |
183 | ; This could be useful if you are unable to log in to the system due to broken |
|
183 | ; This could be useful if you are unable to log in to the system due to broken | |
184 | ; authentication settings. Then you can enable e.g. the internal RhodeCode auth |
|
184 | ; authentication settings. Then you can enable e.g. the internal RhodeCode auth | |
185 | ; module to log in again and fix the settings. |
|
185 | ; module to log in again and fix the settings. | |
186 | ; Available builtin plugin IDs (hash is part of the ID): |
|
186 | ; Available builtin plugin IDs (hash is part of the ID): | |
187 | ; egg:rhodecode-enterprise-ce#rhodecode |
|
187 | ; egg:rhodecode-enterprise-ce#rhodecode | |
188 | ; egg:rhodecode-enterprise-ce#pam |
|
188 | ; egg:rhodecode-enterprise-ce#pam | |
189 | ; egg:rhodecode-enterprise-ce#ldap |
|
189 | ; egg:rhodecode-enterprise-ce#ldap | |
190 | ; egg:rhodecode-enterprise-ce#jasig_cas |
|
190 | ; egg:rhodecode-enterprise-ce#jasig_cas | |
191 | ; egg:rhodecode-enterprise-ce#headers |
|
191 | ; egg:rhodecode-enterprise-ce#headers | |
192 | ; egg:rhodecode-enterprise-ce#crowd |
|
192 | ; egg:rhodecode-enterprise-ce#crowd | |
193 |
|
193 | |||
194 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
194 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode | |
195 |
|
195 | |||
196 | ; Flag to control loading of legacy plugins in py:/path format |
|
196 | ; Flag to control loading of legacy plugins in py:/path format | |
197 | auth_plugin.import_legacy_plugins = true |
|
197 | auth_plugin.import_legacy_plugins = true | |
198 |
|
198 | |||
199 | ; alternative return HTTP header for failed authentication. Default HTTP |
|
199 | ; alternative return HTTP header for failed authentication. Default HTTP | |
200 | ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
200 | ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with | |
201 | ; handling that causing a series of failed authentication calls. |
|
201 | ; handling that causing a series of failed authentication calls. | |
202 | ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
202 | ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code | |
203 | ; This will be served instead of default 401 on bad authentication |
|
203 | ; This will be served instead of default 401 on bad authentication | |
204 | auth_ret_code = |
|
204 | auth_ret_code = | |
205 |
|
205 | |||
206 | ; use special detection method when serving auth_ret_code, instead of serving |
|
206 | ; use special detection method when serving auth_ret_code, instead of serving | |
207 | ; ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
207 | ; ret_code directly, use 401 initially (Which triggers credentials prompt) | |
208 | ; and then serve auth_ret_code to clients |
|
208 | ; and then serve auth_ret_code to clients | |
209 | auth_ret_code_detection = false |
|
209 | auth_ret_code_detection = false | |
210 |
|
210 | |||
211 | ; locking return code. When repository is locked return this HTTP code. 2XX |
|
211 | ; locking return code. When repository is locked return this HTTP code. 2XX | |
212 | ; codes don't break the transactions while 4XX codes do |
|
212 | ; codes don't break the transactions while 4XX codes do | |
213 | lock_ret_code = 423 |
|
213 | lock_ret_code = 423 | |
214 |
|
214 | |||
215 | ; Filesystem location were repositories should be stored |
|
215 | ; Filesystem location were repositories should be stored | |
216 | repo_store.path = /var/opt/rhodecode_repo_store |
|
216 | repo_store.path = /var/opt/rhodecode_repo_store | |
217 |
|
217 | |||
218 | ; allows to setup custom hooks in settings page |
|
218 | ; allows to setup custom hooks in settings page | |
219 | allow_custom_hooks_settings = true |
|
219 | allow_custom_hooks_settings = true | |
220 |
|
220 | |||
221 | ; Generated license token, required for an EE edition license.
|
221 | ; Generated license token, required for an EE edition license. | |
222 | ; A newly generated token value can be found on the Admin > settings > license page.
|
222 | ; A newly generated token value can be found on the Admin > settings > license page. | |
223 | license_token = abra-cada-bra1-rce3 |
|
223 | license_token = abra-cada-bra1-rce3 | |
224 |
|
224 | |||
225 | ; This flag hides sensitive information on the license page, such as the token and license data
|
225 | ; This flag hides sensitive information on the license page, such as the token and license data | |
226 | license.hide_license_info = false |
|
226 | license.hide_license_info = false | |
227 |
|
227 | |||
228 | ; supervisor connection uri, for managing supervisor and logs. |
|
228 | ; supervisor connection uri, for managing supervisor and logs. | |
229 | supervisor.uri = |
|
229 | supervisor.uri = | |
230 |
|
230 | |||
231 | ; supervisord group name/id that this RC instance should handle
|
231 | ; supervisord group name/id that this RC instance should handle | |
232 | supervisor.group_id = dev |
|
232 | supervisor.group_id = dev | |
233 |
|
233 | |||
234 | ; Display extended labs settings |
|
234 | ; Display extended labs settings | |
235 | labs_settings_active = true |
|
235 | labs_settings_active = true | |
236 |
|
236 | |||
237 | ; Custom exception store path, defaults to TMPDIR |
|
237 | ; Custom exception store path, defaults to TMPDIR | |
238 | ; This is used to store exceptions from RhodeCode in a shared directory
|
238 | ; This is used to store exceptions from RhodeCode in a shared directory | |
239 | #exception_tracker.store_path = |
|
239 | #exception_tracker.store_path = | |
240 |
|
240 | |||
241 | ; Send email with exception details when it happens |
|
241 | ; Send email with exception details when it happens | |
242 | #exception_tracker.send_email = false |
|
242 | #exception_tracker.send_email = false | |
243 |
|
243 | |||
244 | ; Comma separated list of recipients for exception emails, |
|
244 | ; Comma separated list of recipients for exception emails, | |
245 | ; e.g. admin@rhodecode.com,devops@rhodecode.com
|
245 | ; e.g. admin@rhodecode.com,devops@rhodecode.com | |
246 | ; Can be left empty, in which case emails will be sent to ALL super-admins
|
246 | ; Can be left empty, in which case emails will be sent to ALL super-admins | |
247 | #exception_tracker.send_email_recipients = |
|
247 | #exception_tracker.send_email_recipients = | |
248 |
|
248 | |||
249 | ; optional prefix to add to the email subject
|
249 | ; optional prefix to add to the email subject | |
250 | #exception_tracker.email_prefix = [RHODECODE ERROR] |
|
250 | #exception_tracker.email_prefix = [RHODECODE ERROR] | |
251 |
|
251 | |||
252 | ; File store configuration. This is used to store and serve uploaded files |
|
252 | ; File store configuration. This is used to store and serve uploaded files | |
253 | file_store.enabled = true |
|
253 | file_store.enabled = true | |
254 |
|
254 | |||
255 | ; Storage backend, available options are: local |
|
255 | ; Storage backend, available options are: local | |
256 | file_store.backend = local |
|
256 | file_store.backend = local | |
257 |
|
257 | |||
258 | ; path to store the uploaded binaries and artifacts |
|
258 | ; path to store the uploaded binaries and artifacts | |
259 | file_store.storage_path = /var/opt/rhodecode_data/file_store |
|
259 | file_store.storage_path = /var/opt/rhodecode_data/file_store | |
260 |
|
260 | |||
261 | ; Uncomment and set this path to control settings for archive download cache. |
|
261 | ||
|
262 | ; Redis URL used to acquire/check archive generation locks | |||
|
263 | archive_cache.locking.url = redis://redis:6379/1 | |||
|
264 | ||||
|
265 | ; Storage backend, only 'filesystem' and 'objectstore' are available now | |||
|
266 | archive_cache.backend.type = filesystem | |||
|
267 | ||||
|
268 | ; URL of an S3-compatible storage that allows uploading artifacts | |||
|
269 | ; e.g. http://minio:9000 | |||
|
270 | archive_cache.objectstore.url = http://s3-minio:9000 | |||
|
271 | ||||
|
272 | ; key for s3 auth | |||
|
273 | archive_cache.objectstore.key = key | |||
|
274 | ||||
|
275 | ; secret for s3 auth | |||
|
276 | archive_cache.objectstore.secret = secret | |||
|
277 | ||||
|
278 | ; number of sharded buckets to create to distribute archives across | |||
|
279 | ; default is 8 shards | |||
|
280 | archive_cache.objectstore.bucket_shards = 8 | |||
|
281 | ||||
|
282 | ; if true, this cache will retry up to retry_attempts=N times, waiting retry_backoff seconds between tries | |||
|
283 | archive_cache.objectstore.retry = false | |||
|
284 | ||||
|
285 | ; number of seconds to wait before the next retry attempt | |||
|
286 | archive_cache.objectstore.retry_backoff = 1 | |||
|
287 | ||||
|
288 | ; how many times to retry a fetch from this backend | |||
|
289 | archive_cache.objectstore.retry_attempts = 10 | |||
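The three objectstore retry settings combine roughly as in this illustrative sketch (a hypothetical helper showing the retry semantics, not the actual archive-cache internals):

    import time

    def fetch_with_retry(fetch, retry=False, retry_attempts=10, retry_backoff=1):
        # fetch is any zero-argument callable that reads from the backend;
        # retry_attempts is assumed to be >= 1.
        if not retry:
            return fetch()
        last_exc = None
        for _ in range(retry_attempts):
            try:
                return fetch()
            except OSError as exc:         # assume transient I/O/network errors
                last_exc = exc
                time.sleep(retry_backoff)  # wait before the next attempt
        raise last_exc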
|
290 | ||||
|
291 | ; Default is $cache_dir/archive_cache if not set | |||
262 | ; Generated repo archives will be cached at this location |
|
292 | ; Generated repo archives will be cached at this location | |
263 | ; and served from the cache during subsequent requests for the same archive of |
|
293 | ; and served from the cache during subsequent requests for the same archive of | |
264 | ; the repository. It is important that this path is shared across filesystems and
|
294 | ; the repository. It is important that this path is shared across filesystems and | |
265 | ; between RhodeCode and vcsserver
|
295 | ; between RhodeCode and vcsserver | |
266 |
|
296 | archive_cache.filesystem.store_dir = %(here)s/rc-tests/archive_cache | ||
267 | ; Default is $cache_dir/archive_cache if not set |
|
|||
268 | archive_cache.store_dir = /var/opt/rhodecode_data/tarballcache |
|
|||
269 |
|
297 | |||
270 | ; The limit in GB sets how much data we cache before recycling the least recently used entries, defaults to 10 GB
|
298 | ; The limit in GB sets how much data we cache before recycling the least recently used entries, defaults to 10 GB | |
271 | archive_cache.cache_size_gb =
|
299 | archive_cache.filesystem.cache_size_gb = 2 | |
|
300 | ||||
|
301 | ; Eviction policy used to clear out after cache_size_gb limit is reached | |||
|
302 | archive_cache.filesystem.eviction_policy = least-recently-stored | |||
272 |
|
303 | |||
273 | ; By default the cache uses a sharding technique; this specifies how many shards there are
|
304 | ; By default the cache uses a sharding technique; this specifies how many shards there are | |
274 | archive_cache.cache_shards = 10 |
|
305 | ; default is 8 shards | |
|
306 | archive_cache.filesystem.cache_shards = 8 | |||
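The sharding idea can be sketched as follows (a generic stable-hash scheme shown for illustration; the exact hash RhodeCode uses may differ):

    import hashlib

    def shard_for(archive_key: str, cache_shards: int = 8) -> int:
        # A stable hash of the archive key picks one of cache_shards
        # sub-directories, spreading cached files across directories.
        digest = hashlib.sha1(archive_key.encode('utf-8')).digest()
        return int.from_bytes(digest[:4], 'big') % cache_shards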
|
307 | ||||
|
308 | ; if true, this cache will retry up to retry_attempts=N times, waiting retry_backoff seconds between tries | |||
|
309 | archive_cache.filesystem.retry = false | |||
|
310 | ||||
|
311 | ; number of seconds to wait before the next retry attempt | |||
|
312 | archive_cache.filesystem.retry_backoff = 1 | |||
|
313 | ||||
|
314 | ; how many times to retry a fetch from this backend | |||
|
315 | archive_cache.filesystem.retry_attempts = 10 | |||
|
316 | ||||
275 |
|
317 | |||
276 | ; ############# |
|
318 | ; ############# | |
277 | ; CELERY CONFIG |
|
319 | ; CELERY CONFIG | |
278 | ; ############# |
|
320 | ; ############# | |
279 |
|
321 | |||
280 | ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini |
|
322 | ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini | |
281 |
|
323 | |||
282 | use_celery = false |
|
324 | use_celery = false | |
283 |
|
325 | |||
284 | ; path to store schedule database |
|
326 | ; path to store schedule database | |
285 | #celerybeat-schedule.path = |
|
327 | #celerybeat-schedule.path = | |
286 |
|
328 | |||
287 | ; connection url to the message broker (default redis) |
|
329 | ; connection url to the message broker (default redis) | |
288 | celery.broker_url = redis://redis:6379/8 |
|
330 | celery.broker_url = redis://redis:6379/8 | |
289 |
|
331 | |||
290 | ; results backend to get results for (default redis) |
|
332 | ; results backend to get results for (default redis) | |
291 | celery.result_backend = redis://redis:6379/8 |
|
333 | celery.result_backend = redis://redis:6379/8 | |
292 |
|
334 | |||
293 | ; rabbitmq example |
|
335 | ; rabbitmq example | |
294 | #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost |
|
336 | #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost | |
295 |
|
337 | |||
296 | ; maximum tasks to execute before worker restart |
|
338 | ; maximum tasks to execute before worker restart | |
297 | celery.max_tasks_per_child = 20 |
|
339 | celery.max_tasks_per_child = 20 | |
298 |
|
340 | |||
299 | ; tasks will never be sent to the queue, but executed locally instead. |
|
341 | ; tasks will never be sent to the queue, but executed locally instead. | |
300 | celery.task_always_eager = true |
|
342 | celery.task_always_eager = true | |
301 | celery.task_store_eager_result = true |
|
343 | celery.task_store_eager_result = true | |
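With task_always_eager enabled, Celery executes tasks in-process instead of dispatching them to the broker, which is what makes this setting suitable for tests. A standalone illustration with a generic Celery app (not RhodeCode's task definitions):

    from celery import Celery

    app = Celery(broker='redis://redis:6379/8', backend='redis://redis:6379/8')
    app.conf.task_always_eager = True        # mirrors celery.task_always_eager
    app.conf.task_store_eager_result = True  # mirrors celery.task_store_eager_result

    @app.task
    def add(x, y):
        return x + y

    result = add.delay(2, 3)  # runs synchronously, no broker round-trip
    print(result.get())       # 5, available immediately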
302 |
|
344 | |||
303 | ; ############# |
|
345 | ; ############# | |
304 | ; DOGPILE CACHE |
|
346 | ; DOGPILE CACHE | |
305 | ; ############# |
|
347 | ; ############# | |
306 |
|
348 | |||
307 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. |
|
349 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. | |
308 | ; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
|
350 | ; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space | |
309 | cache_dir = /var/opt/rhodecode_data |
|
351 | cache_dir = /var/opt/rhodecode_data | |
310 |
|
352 | |||
311 | ; ********************************************* |
|
353 | ; ********************************************* | |
312 | ; `sql_cache_short` cache for heavy SQL queries |
|
354 | ; `sql_cache_short` cache for heavy SQL queries | |
313 | ; Only supported backend is `memory_lru` |
|
355 | ; Only supported backend is `memory_lru` | |
314 | ; ********************************************* |
|
356 | ; ********************************************* | |
315 | rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru |
|
357 | rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru | |
316 | rc_cache.sql_cache_short.expiration_time = 0 |
|
358 | rc_cache.sql_cache_short.expiration_time = 0 | |
317 |
|
359 | |||
318 |
|
360 | |||
319 | ; ***************************************************** |
|
361 | ; ***************************************************** | |
320 | ; `cache_repo_longterm` cache for repo object instances |
|
362 | ; `cache_repo_longterm` cache for repo object instances | |
321 | ; Only supported backend is `memory_lru` |
|
363 | ; Only supported backend is `memory_lru` | |
322 | ; ***************************************************** |
|
364 | ; ***************************************************** | |
323 | rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru |
|
365 | rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru | |
324 | ; by default we use 30 Days, cache is still invalidated on push |
|
366 | ; by default we use 30 Days, cache is still invalidated on push | |
325 | rc_cache.cache_repo_longterm.expiration_time = 2592000 |
|
367 | rc_cache.cache_repo_longterm.expiration_time = 2592000 | |
326 | ; max items in LRU cache, set to smaller number to save memory, and expire last used caches |
|
368 | ; max items in LRU cache, set to smaller number to save memory, and expire last used caches | |
327 | rc_cache.cache_repo_longterm.max_size = 10000 |
|
369 | rc_cache.cache_repo_longterm.max_size = 10000 | |
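The max_size setting bounds a least-recently-used structure; the eviction semantics can be sketched with a plain OrderedDict (illustration only, not the memory_lru implementation):

    from collections import OrderedDict

    class LRUCache:
        def __init__(self, max_size=10000):
            self.max_size = max_size
            self._data = OrderedDict()

        def get(self, key, default=None):
            if key in self._data:
                self._data.move_to_end(key)  # mark as most recently used
                return self._data[key]
            return default

        def set(self, key, value):
            self._data[key] = value
            self._data.move_to_end(key)
            if len(self._data) > self.max_size:
                self._data.popitem(last=False)  # evict least recently used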
328 |
|
370 | |||
329 |
|
371 | |||
330 | ; ********************************************* |
|
372 | ; ********************************************* | |
331 | ; `cache_general` cache for general purpose use |
|
373 | ; `cache_general` cache for general purpose use | |
332 | ; for simplicity use rc.file_namespace backend, |
|
374 | ; for simplicity use rc.file_namespace backend, | |
333 | ; for performance and scale use rc.redis |
|
375 | ; for performance and scale use rc.redis | |
334 | ; ********************************************* |
|
376 | ; ********************************************* | |
335 | rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace |
|
377 | rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace | |
336 | rc_cache.cache_general.expiration_time = 43200 |
|
378 | rc_cache.cache_general.expiration_time = 43200 | |
337 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set |
|
379 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
338 | rc_cache.cache_general.arguments.filename = %(here)s/cache-backend/cache_general_db |
|
380 | rc_cache.cache_general.arguments.filename = %(here)s/rc-tests/cache-backend/cache_general_db | |
339 |
|
381 | |||
340 | ; alternative `cache_general` redis backend with distributed lock |
|
382 | ; alternative `cache_general` redis backend with distributed lock | |
341 | #rc_cache.cache_general.backend = dogpile.cache.rc.redis |
|
383 | #rc_cache.cache_general.backend = dogpile.cache.rc.redis | |
342 | #rc_cache.cache_general.expiration_time = 300 |
|
384 | #rc_cache.cache_general.expiration_time = 300 | |
343 |
|
385 | |||
344 | ; redis_expiration_time needs to be greater than expiration_time
|
386 | ; redis_expiration_time needs to be greater than expiration_time | |
345 | #rc_cache.cache_general.arguments.redis_expiration_time = 7200 |
|
387 | #rc_cache.cache_general.arguments.redis_expiration_time = 7200 | |
346 |
|
388 | |||
347 | #rc_cache.cache_general.arguments.host = localhost |
|
389 | #rc_cache.cache_general.arguments.host = localhost | |
348 | #rc_cache.cache_general.arguments.port = 6379 |
|
390 | #rc_cache.cache_general.arguments.port = 6379 | |
349 | #rc_cache.cache_general.arguments.db = 0 |
|
391 | #rc_cache.cache_general.arguments.db = 0 | |
350 | #rc_cache.cache_general.arguments.socket_timeout = 30 |
|
392 | #rc_cache.cache_general.arguments.socket_timeout = 30 | |
351 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends |
|
393 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
352 | #rc_cache.cache_general.arguments.distributed_lock = true |
|
394 | #rc_cache.cache_general.arguments.distributed_lock = true | |
353 |
|
395 | |||
354 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
396 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
355 | #rc_cache.cache_general.arguments.lock_auto_renewal = true |
|
397 | #rc_cache.cache_general.arguments.lock_auto_renewal = true | |
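For reference, the commented Redis settings above map onto a dogpile.cache region roughly like this (the stock dogpile.cache.redis backend is shown; RhodeCode's dogpile.cache.rc.redis wrapper layers its own behaviour on top):

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.redis',
        expiration_time=300,                # rc_cache.cache_general.expiration_time
        arguments={
            'host': 'localhost',
            'port': 6379,
            'db': 0,
            'socket_timeout': 30,
            'redis_expiration_time': 7200,  # must be greater than expiration_time
            'distributed_lock': True,
        },
    )

    @region.cache_on_arguments()
    def expensive_lookup(key):
        ...  # recomputed only after the cached value expires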
356 |
|
398 | |||
357 | ; ************************************************* |
|
399 | ; ************************************************* | |
358 | ; `cache_perms` cache for permission tree, auth TTL |
|
400 | ; `cache_perms` cache for permission tree, auth TTL | |
359 | ; for simplicity use rc.file_namespace backend, |
|
401 | ; for simplicity use rc.file_namespace backend, | |
360 | ; for performance and scale use rc.redis |
|
402 | ; for performance and scale use rc.redis | |
361 | ; ************************************************* |
|
403 | ; ************************************************* | |
362 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace |
|
404 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace | |
363 | rc_cache.cache_perms.expiration_time = 0 |
|
405 | rc_cache.cache_perms.expiration_time = 0 | |
364 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set |
|
406 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
365 | rc_cache.cache_perms.arguments.filename = %(here)s/cache-backend/cache_perms_db |
|
407 | rc_cache.cache_perms.arguments.filename = %(here)s/rc-tests/cache-backend/cache_perms_db | |
366 |
|
408 | |||
367 | ; alternative `cache_perms` redis backend with distributed lock |
|
409 | ; alternative `cache_perms` redis backend with distributed lock | |
368 | #rc_cache.cache_perms.backend = dogpile.cache.rc.redis |
|
410 | #rc_cache.cache_perms.backend = dogpile.cache.rc.redis | |
369 | #rc_cache.cache_perms.expiration_time = 300 |
|
411 | #rc_cache.cache_perms.expiration_time = 300 | |
370 |
|
412 | |||
371 | ; redis_expiration_time needs to be greater than expiration_time
|
413 | ; redis_expiration_time needs to be greater than expiration_time | |
372 | #rc_cache.cache_perms.arguments.redis_expiration_time = 7200 |
|
414 | #rc_cache.cache_perms.arguments.redis_expiration_time = 7200 | |
373 |
|
415 | |||
374 | #rc_cache.cache_perms.arguments.host = localhost |
|
416 | #rc_cache.cache_perms.arguments.host = localhost | |
375 | #rc_cache.cache_perms.arguments.port = 6379 |
|
417 | #rc_cache.cache_perms.arguments.port = 6379 | |
376 | #rc_cache.cache_perms.arguments.db = 0 |
|
418 | #rc_cache.cache_perms.arguments.db = 0 | |
377 | #rc_cache.cache_perms.arguments.socket_timeout = 30 |
|
419 | #rc_cache.cache_perms.arguments.socket_timeout = 30 | |
378 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends |
|
420 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
379 | #rc_cache.cache_perms.arguments.distributed_lock = true |
|
421 | #rc_cache.cache_perms.arguments.distributed_lock = true | |
380 |
|
422 | |||
381 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
423 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
382 | #rc_cache.cache_perms.arguments.lock_auto_renewal = true |
|
424 | #rc_cache.cache_perms.arguments.lock_auto_renewal = true | |
383 |
|
425 | |||
384 | ; *************************************************** |
|
426 | ; *************************************************** | |
385 | ; `cache_repo` cache for file tree, Readme, RSS FEEDS |
|
427 | ; `cache_repo` cache for file tree, Readme, RSS FEEDS | |
386 | ; for simplicity use rc.file_namespace backend, |
|
428 | ; for simplicity use rc.file_namespace backend, | |
387 | ; for performance and scale use rc.redis |
|
429 | ; for performance and scale use rc.redis | |
388 | ; *************************************************** |
|
430 | ; *************************************************** | |
389 | rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace |
|
431 | rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace | |
390 | rc_cache.cache_repo.expiration_time = 2592000 |
|
432 | rc_cache.cache_repo.expiration_time = 2592000 | |
391 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set |
|
433 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
392 | rc_cache.cache_repo.arguments.filename = %(here)s/cache-backend/cache_repo_db |
|
434 | rc_cache.cache_repo.arguments.filename = %(here)s/rc-tests/cache-backend/cache_repo_db | |
393 |
|
435 | |||
394 | ; alternative `cache_repo` redis backend with distributed lock |
|
436 | ; alternative `cache_repo` redis backend with distributed lock | |
395 | #rc_cache.cache_repo.backend = dogpile.cache.rc.redis |
|
437 | #rc_cache.cache_repo.backend = dogpile.cache.rc.redis | |
396 | #rc_cache.cache_repo.expiration_time = 2592000 |
|
438 | #rc_cache.cache_repo.expiration_time = 2592000 | |
397 |
|
439 | |||
398 | ; redis_expiration_time needs to be greater than expiration_time
|
440 | ; redis_expiration_time needs to be greater than expiration_time | |
399 | #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400 |
|
441 | #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400 | |
400 |
|
442 | |||
401 | #rc_cache.cache_repo.arguments.host = localhost |
|
443 | #rc_cache.cache_repo.arguments.host = localhost | |
402 | #rc_cache.cache_repo.arguments.port = 6379 |
|
444 | #rc_cache.cache_repo.arguments.port = 6379 | |
403 | #rc_cache.cache_repo.arguments.db = 1 |
|
445 | #rc_cache.cache_repo.arguments.db = 1 | |
404 | #rc_cache.cache_repo.arguments.socket_timeout = 30 |
|
446 | #rc_cache.cache_repo.arguments.socket_timeout = 30 | |
405 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends |
|
447 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
406 | #rc_cache.cache_repo.arguments.distributed_lock = true |
|
448 | #rc_cache.cache_repo.arguments.distributed_lock = true | |
407 |
|
449 | |||
408 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
450 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
409 | #rc_cache.cache_repo.arguments.lock_auto_renewal = true |
|
451 | #rc_cache.cache_repo.arguments.lock_auto_renewal = true | |
410 |
|
452 | |||
411 | ; ############## |
|
453 | ; ############## | |
412 | ; BEAKER SESSION |
|
454 | ; BEAKER SESSION | |
413 | ; ############## |
|
455 | ; ############## | |
414 |
|
456 | |||
415 | ; beaker.session.type is the type of storage used for logged-in user sessions. Currently allowed
|
457 | ; beaker.session.type is the type of storage used for logged-in user sessions. Currently allowed | |
416 | ; types are file, ext:redis, ext:database, ext:memcached |
|
458 | ; types are file, ext:redis, ext:database, ext:memcached | |
417 | ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session |
|
459 | ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session | |
418 | beaker.session.type = file |
|
460 | beaker.session.type = file | |
419 | beaker.session.data_dir = %(here)s/rc-tests/data/sessions |
|
461 | beaker.session.data_dir = %(here)s/rc-tests/data/sessions | |
420 |
|
462 | |||
421 | ; Redis based sessions |
|
463 | ; Redis based sessions | |
422 | #beaker.session.type = ext:redis |
|
464 | #beaker.session.type = ext:redis | |
423 | #beaker.session.url = redis://redis:6379/2 |
|
465 | #beaker.session.url = redis://redis:6379/2 | |
424 |
|
466 | |||
425 | ; DB based session, fast, and allows easy management of logged-in users
|
467 | ; DB based session, fast, and allows easy management of logged-in users | |
426 | #beaker.session.type = ext:database |
|
468 | #beaker.session.type = ext:database | |
427 | #beaker.session.table_name = db_session |
|
469 | #beaker.session.table_name = db_session | |
428 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
470 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode | |
429 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
471 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode | |
430 | #beaker.session.sa.pool_recycle = 3600 |
|
472 | #beaker.session.sa.pool_recycle = 3600 | |
431 | #beaker.session.sa.echo = false |
|
473 | #beaker.session.sa.echo = false | |
432 |
|
474 | |||
433 | beaker.session.key = rhodecode |
|
475 | beaker.session.key = rhodecode | |
434 | beaker.session.secret = test-rc-uytcxaz |
|
476 | beaker.session.secret = test-rc-uytcxaz | |
435 | beaker.session.lock_dir = %(here)s/data/sessions/lock |
|
477 | beaker.session.lock_dir = %(here)s/rc-tests/data/sessions/lock | |
436 |
|
478 | |||
437 | ; Secure encrypted cookie. Requires the AES python libraries
|
479 | ; Secure encrypted cookie. Requires the AES python libraries | |
438 | ; you must disable beaker.session.secret to use this |
|
480 | ; you must disable beaker.session.secret to use this | |
439 | #beaker.session.encrypt_key = key_for_encryption |
|
481 | #beaker.session.encrypt_key = key_for_encryption | |
440 | #beaker.session.validate_key = validation_key |
|
482 | #beaker.session.validate_key = validation_key | |
441 |
|
483 | |||
442 | ; Sets the session as invalid (also logging out the user) if it has not been
|
484 | ; Sets the session as invalid (also logging out the user) if it has not been | |
443 | ; accessed for a given amount of time in seconds
|
485 | ; accessed for a given amount of time in seconds | |
444 | beaker.session.timeout = 2592000 |
|
486 | beaker.session.timeout = 2592000 | |
445 | beaker.session.httponly = true |
|
487 | beaker.session.httponly = true | |
446 |
|
488 | |||
447 | ; Path to use for the cookie. Set to prefix if you use prefix middleware |
|
489 | ; Path to use for the cookie. Set to prefix if you use prefix middleware | |
448 | #beaker.session.cookie_path = /custom_prefix |
|
490 | #beaker.session.cookie_path = /custom_prefix | |
449 |
|
491 | |||
450 | ; Set https secure cookie |
|
492 | ; Set https secure cookie | |
451 | beaker.session.secure = false |
|
493 | beaker.session.secure = false | |
452 |
|
494 | |||
453 | ; default cookie expiration time in seconds, set to `true` to expire
|
495 | ; default cookie expiration time in seconds, set to `true` to expire | |
454 | ; at browser close |
|
496 | ; at browser close | |
455 | #beaker.session.cookie_expires = 3600 |
|
497 | #beaker.session.cookie_expires = 3600 | |
456 |
|
498 | |||
457 | ; ############################# |
|
499 | ; ############################# | |
458 | ; SEARCH INDEXING CONFIGURATION |
|
500 | ; SEARCH INDEXING CONFIGURATION | |
459 | ; ############################# |
|
501 | ; ############################# | |
460 |
|
502 | |||
461 | ; Full text search indexer is available in rhodecode-tools under |
|
503 | ; Full text search indexer is available in rhodecode-tools under | |
462 | ; `rhodecode-tools index` command |
|
504 | ; `rhodecode-tools index` command | |
463 |
|
505 | |||
464 | ; WHOOSH Backend, doesn't require additional services to run |
|
506 | ; WHOOSH Backend, doesn't require additional services to run | |
465 | ; it works well with a few dozen repos
|
507 | ; it works well with a few dozen repos | |
466 | search.module = rhodecode.lib.index.whoosh |
|
508 | search.module = rhodecode.lib.index.whoosh | |
467 | search.location = %(here)s/data/index |
|
509 | search.location = %(here)s/rc-tests/data/index | |
468 |
|
510 | |||
469 | ; #################### |
|
511 | ; #################### | |
470 | ; CHANNELSTREAM CONFIG |
|
512 | ; CHANNELSTREAM CONFIG | |
471 | ; #################### |
|
513 | ; #################### | |
472 |
|
514 | |||
473 | ; channelstream enables persistent connections and live notification |
|
515 | ; channelstream enables persistent connections and live notification | |
474 | ; in the system. It's also used by the chat system |
|
516 | ; in the system. It's also used by the chat system | |
475 |
|
517 | |||
476 | channelstream.enabled = false |
|
518 | channelstream.enabled = false | |
477 |
|
519 | |||
478 | ; server address for channelstream server on the backend |
|
520 | ; server address for channelstream server on the backend | |
479 | channelstream.server = channelstream:9800 |
|
521 | channelstream.server = channelstream:9800 | |
480 |
|
522 | |||
481 | ; location of the channelstream server as seen from the outside world
|
523 | ; location of the channelstream server as seen from the outside world | |
482 | ; use ws:// for http or wss:// for https. This address needs to be handled |
|
524 | ; use ws:// for http or wss:// for https. This address needs to be handled | |
483 | ; by external HTTP server such as Nginx or Apache |
|
525 | ; by external HTTP server such as Nginx or Apache | |
484 | ; see Nginx/Apache configuration examples in our docs |
|
526 | ; see Nginx/Apache configuration examples in our docs | |
485 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
527 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream | |
486 | channelstream.secret = ENV_GENERATED |
|
528 | channelstream.secret = ENV_GENERATED | |
487 | channelstream.history.location = %(here)s/channelstream_history |
|
529 | channelstream.history.location = %(here)s/rc-tests/channelstream_history | |
488 |
|
530 | |||
489 | ; Internal application path that Javascript uses to connect into. |
|
531 | ; Internal application path that Javascript uses to connect into. | |
490 | ; If you use proxy-prefix the prefix should be added before /_channelstream |
|
532 | ; If you use proxy-prefix the prefix should be added before /_channelstream | |
491 | channelstream.proxy_path = /_channelstream |
|
533 | channelstream.proxy_path = /_channelstream | |
492 |
|
534 | |||
493 |
|
535 | |||
494 | ; ############################## |
|
536 | ; ############################## | |
495 | ; MAIN RHODECODE DATABASE CONFIG |
|
537 | ; MAIN RHODECODE DATABASE CONFIG | |
496 | ; ############################## |
|
538 | ; ############################## | |
497 |
|
539 | |||
498 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
540 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 | |
499 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
541 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode | |
500 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 |
|
542 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 | |
501 | ; pymysql is an alternative driver for MySQL, use in case of problems with default one |
|
543 | ; pymysql is an alternative driver for MySQL, use in case of problems with default one | |
502 | #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode |
|
544 | #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode | |
503 |
|
545 | |||
504 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30 |
|
546 | sqlalchemy.db1.url = sqlite:///%(here)s/rc-tests/rhodecode_test.db?timeout=30 | |
505 |
|
547 | |||
506 | ; see sqlalchemy docs for other advanced settings |
|
548 | ; see sqlalchemy docs for other advanced settings | |
507 | ; print the sql statements to output |
|
549 | ; print the sql statements to output | |
508 | sqlalchemy.db1.echo = false |
|
550 | sqlalchemy.db1.echo = false | |
509 |
|
551 | |||
510 | ; recycle the connections after this amount of seconds |
|
552 | ; recycle the connections after this amount of seconds | |
511 | sqlalchemy.db1.pool_recycle = 3600 |
|
553 | sqlalchemy.db1.pool_recycle = 3600 | |
512 |
|
554 | |||
513 | ; the number of connections to keep open inside the connection pool. |
|
555 | ; the number of connections to keep open inside the connection pool. | |
514 | ; 0 indicates no limit |
|
556 | ; 0 indicates no limit | |
515 | ; the general calculation with gevent is:
|
557 | ; the general calculation with gevent is: | |
516 | ; if your system allows 500 concurrent greenlets (max_connections) that all do database access, |
|
558 | ; if your system allows 500 concurrent greenlets (max_connections) that all do database access, | |
517 | ; then increase pool size + max overflow so that they add up to 500. |
|
559 | ; then increase pool size + max overflow so that they add up to 500. | |
518 | #sqlalchemy.db1.pool_size = 5 |
|
560 | #sqlalchemy.db1.pool_size = 5 | |
519 |
|
561 | |||
520 | ; The number of connections to allow in connection pool "overflow", that is |
|
562 | ; The number of connections to allow in connection pool "overflow", that is | |
521 | ; connections that can be opened above and beyond the pool_size setting, |
|
563 | ; connections that can be opened above and beyond the pool_size setting, | |
522 | ; which defaults to five. |
|
564 | ; which defaults to five. | |
523 | #sqlalchemy.db1.max_overflow = 10 |
|
565 | #sqlalchemy.db1.max_overflow = 10 | |
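For example, a deployment sized for 500 concurrent greenlets (an assumed figure) could set sqlalchemy.db1.pool_size = 100 and sqlalchemy.db1.max_overflow = 400, so the pool plus its overflow add up to the 500-connection ceiling.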
524 |
|
566 | |||
525 | ; Connection check ping, used to detect broken database connections |
|
567 | ; Connection check ping, used to detect broken database connections | |
526 | ; can be enabled to better handle 'MySQL has gone away' errors
|
568 | ; can be enabled to better handle 'MySQL has gone away' errors | |
527 | #sqlalchemy.db1.ping_connection = true |
|
569 | #sqlalchemy.db1.ping_connection = true | |
528 |
|
570 | |||
529 | ; ########## |
|
571 | ; ########## | |
530 | ; VCS CONFIG |
|
572 | ; VCS CONFIG | |
531 | ; ########## |
|
573 | ; ########## | |
532 | vcs.server.enable = true |
|
574 | vcs.server.enable = true | |
533 | vcs.server = vcsserver:10010 |
|
575 | vcs.server = vcsserver:10010 | |
534 |
|
576 | |||
535 | ; Web server connectivity protocol, responsible for web based VCS operations |
|
577 | ; Web server connectivity protocol, responsible for web based VCS operations | |
536 | ; Available protocols are: |
|
578 | ; Available protocols are: | |
537 | ; `http` - use http-rpc backend (default) |
|
579 | ; `http` - use http-rpc backend (default) | |
538 | vcs.server.protocol = http |
|
580 | vcs.server.protocol = http | |
539 |
|
581 | |||
540 | ; Push/Pull operations protocol, available options are: |
|
582 | ; Push/Pull operations protocol, available options are: | |
541 | ; `http` - use http-rpc backend (default) |
|
583 | ; `http` - use http-rpc backend (default) | |
542 | vcs.scm_app_implementation = http |
|
584 | vcs.scm_app_implementation = http | |
543 |
|
585 | |||
544 | ; Push/Pull operations hooks protocol, available options are: |
|
586 | ; Push/Pull operations hooks protocol, available options are: | |
545 | ; `http` - use http-rpc backend (default) |
|
587 | ; `http` - use http-rpc backend (default) | |
546 | ; `celery` - use celery based hooks |
|
588 | ; `celery` - use celery based hooks | |
547 | vcs.hooks.protocol = http |
|
589 | vcs.hooks.protocol = http | |
548 |
|
590 | |||
549 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be |
|
591 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be | |
550 | ; accessible via network. |
|
592 | ; accessible via network. | |
551 | ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker) |
|
593 | ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker) | |
552 | vcs.hooks.host = * |
|
594 | vcs.hooks.host = * | |
553 |
|
595 | |||
554 | ; Start VCSServer with this instance as a subprocess, useful for development |
|
596 | ; Start VCSServer with this instance as a subprocess, useful for development | |
555 | vcs.start_server = false |
|
597 | vcs.start_server = false | |
556 |
|
598 | |||
557 | ; List of enabled VCS backends, available options are: |
|
599 | ; List of enabled VCS backends, available options are: | |
558 | ; `hg` - mercurial |
|
600 | ; `hg` - mercurial | |
559 | ; `git` - git |
|
601 | ; `git` - git | |
560 | ; `svn` - subversion |
|
602 | ; `svn` - subversion | |
561 | vcs.backends = hg, git, svn |
|
603 | vcs.backends = hg, git, svn | |
562 |
|
604 | |||
563 | ; Wait this number of seconds before killing connection to the vcsserver |
|
605 | ; Wait this number of seconds before killing connection to the vcsserver | |
564 | vcs.connection_timeout = 3600 |
|
606 | vcs.connection_timeout = 3600 | |
565 |
|
607 | |||
566 | ; Cache flag to cache vcsserver remote calls locally |
|
608 | ; Cache flag to cache vcsserver remote calls locally | |
567 | ; It uses cache_region `cache_repo` |
|
609 | ; It uses cache_region `cache_repo` | |
568 | vcs.methods.cache = false |
|
610 | vcs.methods.cache = false | |
569 |
|
611 | |||
570 | ; #################################################### |
|
612 | ; #################################################### | |
571 | ; Subversion proxy support (mod_dav_svn) |
|
613 | ; Subversion proxy support (mod_dav_svn) | |
572 | ; Maps RhodeCode repo groups into SVN paths for Apache |
|
614 | ; Maps RhodeCode repo groups into SVN paths for Apache | |
573 | ; #################################################### |
|
615 | ; #################################################### | |
574 |
|
616 | |||
575 | ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
617 | ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out. | |
576 | ; Set a numeric version for your current SVN, e.g. 1.8 or 1.12
|
618 | ; Set a numeric version for your current SVN, e.g. 1.8 or 1.12 | |
577 | ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible |
|
619 | ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible | |
578 | #vcs.svn.compatible_version = 1.8 |
|
620 | #vcs.svn.compatible_version = 1.8 | |
579 |
|
621 | |||
580 | ; Enable SVN proxy of requests over HTTP |
|
622 | ; Enable SVN proxy of requests over HTTP | |
581 | vcs.svn.proxy.enabled = true |
|
623 | vcs.svn.proxy.enabled = true | |
582 |
|
624 | |||
583 | ; host to connect to running SVN subsystem |
|
625 | ; host to connect to running SVN subsystem | |
584 | vcs.svn.proxy.host = http://svn:8090 |
|
626 | vcs.svn.proxy.host = http://svn:8090 | |
585 |
|
627 | |||
586 | ; Enable or disable the config file generation. |
|
628 | ; Enable or disable the config file generation. | |
587 | svn.proxy.generate_config = false |
|
629 | svn.proxy.generate_config = false | |
588 |
|
630 | |||
589 | ; Generate config file with `SVNListParentPath` set to `On`. |
|
631 | ; Generate config file with `SVNListParentPath` set to `On`. | |
590 | svn.proxy.list_parent_path = true |
|
632 | svn.proxy.list_parent_path = true | |
591 |
|
633 | |||
592 | ; Set location and file name of generated config file. |
|
634 | ; Set location and file name of generated config file. | |
593 | svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf |
|
635 | svn.proxy.config_file_path = %(here)s/rc-tests/mod_dav_svn.conf | |
594 |
|
636 | |||
595 | ; alternative mod_dav config template. This needs to be a valid mako template |
|
637 | ; alternative mod_dav config template. This needs to be a valid mako template | |
596 | ; Example template can be found in the source code: |
|
638 | ; Example template can be found in the source code: | |
597 | ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako |
|
639 | ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako | |
598 | #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako |
|
640 | #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako | |
599 |
|
641 | |||
600 | ; Used as a prefix to the `Location` block in the generated config file. |
|
642 | ; Used as a prefix to the `Location` block in the generated config file. | |
601 | ; In most cases it should be set to `/`. |
|
643 | ; In most cases it should be set to `/`. | |
602 | svn.proxy.location_root = / |
|
644 | svn.proxy.location_root = / | |
603 |
|
645 | |||
604 | ; Command to reload the mod dav svn configuration on change. |
|
646 | ; Command to reload the mod dav svn configuration on change. | |
605 | ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh |
|
647 | ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh | |
606 | ; Make sure the user who runs the RhodeCode process is allowed to reload Apache
|
648 | ; Make sure the user who runs the RhodeCode process is allowed to reload Apache | |
607 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload |
|
649 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload | |
608 |
|
650 | |||
609 | ; If the timeout expires before the reload command finishes, the command will |
|
651 | ; If the timeout expires before the reload command finishes, the command will | |
610 | ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds. |
|
652 | ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds. | |
611 | #svn.proxy.reload_timeout = 10 |
|
653 | #svn.proxy.reload_timeout = 10 | |
612 |
|
654 | |||
613 | ; #################### |
|
655 | ; #################### | |
614 | ; SSH Support Settings |
|
656 | ; SSH Support Settings | |
615 | ; #################### |
|
657 | ; #################### | |
616 |
|
658 | |||
617 | ; Defines if a custom authorized_keys file should be created and written on |
|
659 | ; Defines if a custom authorized_keys file should be created and written on | |
618 | ; any change of user SSH keys. Setting this to false also disables the possibility
|
660 | ; any change of user SSH keys. Setting this to false also disables the possibility | |
619 | ; of users adding SSH keys from the web interface. Super admins can still
|
661 | ; of users adding SSH keys from the web interface. Super admins can still | |
620 | ; manage SSH Keys. |
|
662 | ; manage SSH Keys. | |
621 | ssh.generate_authorized_keyfile = true |
|
663 | ssh.generate_authorized_keyfile = true | |
622 |
|
664 | |||
623 | ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` |
|
665 | ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` | |
624 | # ssh.authorized_keys_ssh_opts = |
|
666 | # ssh.authorized_keys_ssh_opts = | |
625 |
|
667 | |||
626 | ; Path to the authorized_keys file where the generated entries are placed.
|
668 | ; Path to the authorized_keys file where the generated entries are placed. | |
627 | ; It is possible to have multiple key files specified in `sshd_config` e.g. |
|
669 | ; It is possible to have multiple key files specified in `sshd_config` e.g. | |
628 | ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode |
|
670 | ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode | |
629 | ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode |
|
671 | ssh.authorized_keys_file_path = %(here)s/rc-tests/authorized_keys_rhodecode | |
630 |
|
672 | |||
631 | ; Command to execute the SSH wrapper. The binary is available in the |
|
673 | ; Command to execute the SSH wrapper. The binary is available in the | |
632 | ; RhodeCode installation directory. |
|
674 | ; RhodeCode installation directory. | |
633 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
675 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
634 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 |
|
676 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 | |
635 | ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
677 | ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
636 |
|
678 | |||
637 | ; Allow shell when executing the ssh-wrapper command |
|
679 | ; Allow shell when executing the ssh-wrapper command | |
638 | ssh.wrapper_cmd_allow_shell = false |
|
680 | ssh.wrapper_cmd_allow_shell = false | |
639 |
|
681 | |||
640 | ; Enables logging, and detailed output sent back to the client during SSH
|
682 | ; Enables logging, and detailed output sent back to the client during SSH | |
641 | ; operations. Useful for debugging, shouldn't be used in production. |
|
683 | ; operations. Useful for debugging, shouldn't be used in production. | |
642 | ssh.enable_debug_logging = false |
|
684 | ssh.enable_debug_logging = false | |
643 |
|
685 | |||
644 | ; Paths to binary executables; by default these are just the names, but we can
|
686 | ; Paths to binary executables; by default these are just the names, but we can | |
645 | ; override them if we want to use custom ones
|
687 | ; override them if we want to use custom ones | |
646 | ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg |
|
688 | ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg | |
647 | ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git |
|
689 | ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git | |
648 | ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve |
|
690 | ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve | |
649 |
|
691 | |||
650 | ; Enables SSH key generator web interface. Disabling this still allows users |
|
692 | ; Enables SSH key generator web interface. Disabling this still allows users | |
651 | ; to add their own keys. |
|
693 | ; to add their own keys. | |
652 | ssh.enable_ui_key_generator = true |
|
694 | ssh.enable_ui_key_generator = true | |
653 |
|
695 | |||
654 | ; Statsd client config, this is used to send metrics to statsd |
|
696 | ; Statsd client config, this is used to send metrics to statsd | |
655 | ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
|
697 | ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus | |
656 | #statsd.enabled = false |
|
698 | #statsd.enabled = false | |
657 | #statsd.statsd_host = 0.0.0.0 |
|
699 | #statsd.statsd_host = 0.0.0.0 | |
658 | #statsd.statsd_port = 8125 |
|
700 | #statsd.statsd_port = 8125 | |
659 | #statsd.statsd_prefix = |
|
701 | #statsd.statsd_prefix = | |
660 | #statsd.statsd_ipv6 = false |
|
702 | #statsd.statsd_ipv6 = false | |
661 |
|
703 | |||
662 | ; configure logging automatically at server startup; set to false
|
704 | ; configure logging automatically at server startup; set to false | |
663 | ; to use the custom logging config below.
|
705 | ; to use the custom logging config below. | |
664 | ; RC_LOGGING_FORMATTER
|
706 | ; RC_LOGGING_FORMATTER | |
665 | ; RC_LOGGING_LEVEL
|
707 | ; RC_LOGGING_LEVEL | |
666 | ; these env variables can control the logging settings in case of autoconfigure
|
708 | ; these env variables can control the logging settings in case of autoconfigure | |
667 |
|
709 | |||
668 | logging.autoconfigure = false |
|
710 | logging.autoconfigure = false | |
669 |
|
711 | |||
670 | ; specify your own custom logging config file to configure logging |
|
712 | ; specify your own custom logging config file to configure logging | |
671 | #logging.logging_conf_file = /path/to/custom_logging.ini |
|
713 | #logging.logging_conf_file = /path/to/custom_logging.ini | |
672 |
|
714 | |||
673 | ; Dummy marker to add new entries after. |
|
715 | ; Dummy marker to add new entries after. | |
674 | ; Add any custom entries below. Please don't remove this marker. |
|
716 | ; Add any custom entries below. Please don't remove this marker. | |
675 | custom.conf = 1 |
|
717 | custom.conf = 1 | |
676 |
|
718 | |||
677 |
|
719 | |||
678 | ; ##################### |
|
720 | ; ##################### | |
679 | ; LOGGING CONFIGURATION |
|
721 | ; LOGGING CONFIGURATION | |
680 | ; ##################### |
|
722 | ; ##################### | |
681 |
|
723 | |||
682 | [loggers] |
|
724 | [loggers] | |
683 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper, dogpile |
|
725 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper, dogpile | |
684 |
|
726 | |||
685 | [handlers] |
|
727 | [handlers] | |
686 | keys = console, console_sql |
|
728 | keys = console, console_sql | |
687 |
|
729 | |||
688 | [formatters] |
|
730 | [formatters] | |
689 | keys = generic, json, color_formatter, color_formatter_sql |
|
731 | keys = generic, json, color_formatter, color_formatter_sql | |
690 |
|
732 | |||
691 | ; ####### |
|
733 | ; ####### | |
692 | ; LOGGERS |
|
734 | ; LOGGERS | |
693 | ; ####### |
|
735 | ; ####### | |
694 | [logger_root] |
|
736 | [logger_root] | |
695 | level = NOTSET |
|
737 | level = NOTSET | |
696 | handlers = console |
|
738 | handlers = console | |
697 |
|
739 | |||
698 | [logger_routes] |
|
740 | [logger_routes] | |
699 | level = DEBUG |
|
741 | level = DEBUG | |
700 | handlers = |
|
742 | handlers = | |
701 | qualname = routes.middleware |
|
743 | qualname = routes.middleware | |
702 | ## "level = DEBUG" logs the route matched and routing variables. |
|
744 | ## "level = DEBUG" logs the route matched and routing variables. | |
703 | propagate = 1 |
|
745 | propagate = 1 | |
704 |
|
746 | |||
705 | [logger_sqlalchemy] |
|
747 | [logger_sqlalchemy] | |
706 | level = INFO |
|
748 | level = INFO | |
707 | handlers = console_sql |
|
749 | handlers = console_sql | |
708 | qualname = sqlalchemy.engine |
|
750 | qualname = sqlalchemy.engine | |
709 | propagate = 0 |
|
751 | propagate = 0 | |
710 |
|
752 | |||
711 | [logger_beaker] |
|
753 | [logger_beaker] | |
712 | level = DEBUG |
|
754 | level = DEBUG | |
713 | handlers = |
|
755 | handlers = | |
714 | qualname = beaker.container |
|
756 | qualname = beaker.container | |
715 | propagate = 1 |
|
757 | propagate = 1 | |
716 |
|
758 | |||
717 | [logger_dogpile] |
|
759 | [logger_dogpile] | |
718 | level = INFO |
|
760 | level = INFO | |
719 | handlers = console |
|
761 | handlers = console | |
720 | qualname = dogpile |
|
762 | qualname = dogpile | |
721 | propagate = 1 |
|
763 | propagate = 1 | |
722 |
|
764 | |||
723 | [logger_rhodecode] |
|
765 | [logger_rhodecode] | |
724 | level = DEBUG |
|
766 | level = DEBUG | |
725 | handlers = |
|
767 | handlers = | |
726 | qualname = rhodecode |
|
768 | qualname = rhodecode | |
727 | propagate = 1 |
|
769 | propagate = 1 | |
728 |
|
770 | |||
729 | [logger_ssh_wrapper] |
|
771 | [logger_ssh_wrapper] | |
730 | level = DEBUG |
|
772 | level = DEBUG | |
731 | handlers = |
|
773 | handlers = | |
732 | qualname = ssh_wrapper |
|
774 | qualname = ssh_wrapper | |
733 | propagate = 1 |
|
775 | propagate = 1 | |
734 |
|
776 | |||
735 | [logger_celery] |
|
777 | [logger_celery] | |
736 | level = DEBUG |
|
778 | level = DEBUG | |
737 | handlers = |
|
779 | handlers = | |
738 | qualname = celery |
|
780 | qualname = celery | |
739 |
|
781 | |||
740 |
|
782 | |||
741 | ; ######## |
|
783 | ; ######## | |
742 | ; HANDLERS |
|
784 | ; HANDLERS | |
743 | ; ######## |
|
785 | ; ######## | |
744 |
|
786 | |||
745 | [handler_console] |
|
787 | [handler_console] | |
746 | class = StreamHandler |
|
788 | class = StreamHandler | |
747 | args = (sys.stderr, ) |
|
789 | args = (sys.stderr, ) | |
748 | level = DEBUG |
|
790 | level = DEBUG | |
749 | ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json' |
|
791 | ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json' | |
750 | ; This allows sending properly formatted logs to grafana loki or elasticsearch |
|
792 | ; This allows sending properly formatted logs to grafana loki or elasticsearch | |
751 | formatter = generic |
|
793 | formatter = generic | |
752 |
|
794 | |||
753 | [handler_console_sql] |
|
795 | [handler_console_sql] | |
754 | ; "level = DEBUG" logs SQL queries and results. |
|
796 | ; "level = DEBUG" logs SQL queries and results. | |
755 | ; "level = INFO" logs SQL queries. |
|
797 | ; "level = INFO" logs SQL queries. | |
756 | ; "level = WARN" logs neither. (Recommended for production systems.) |
|
798 | ; "level = WARN" logs neither. (Recommended for production systems.) | |
757 | class = StreamHandler |
|
799 | class = StreamHandler | |
758 | args = (sys.stderr, ) |
|
800 | args = (sys.stderr, ) | |
759 | level = WARN |
|
801 | level = WARN | |
760 | ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json' |
|
802 | ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json' | |
761 | ; This allows sending properly formatted logs to grafana loki or elasticsearch |
|
803 | ; This allows sending properly formatted logs to grafana loki or elasticsearch | |
762 | formatter = generic |
|
804 | formatter = generic | |
763 |
|
805 | |||
764 | ; ########## |
|
806 | ; ########## | |
765 | ; FORMATTERS |
|
807 | ; FORMATTERS | |
766 | ; ########## |
|
808 | ; ########## | |
767 |
|
809 | |||
768 | [formatter_generic] |
|
810 | [formatter_generic] | |
769 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
811 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter | |
770 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
812 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
771 | datefmt = %Y-%m-%d %H:%M:%S |
|
813 | datefmt = %Y-%m-%d %H:%M:%S | |
772 |
|
814 | |||
773 | [formatter_color_formatter] |
|
815 | [formatter_color_formatter] | |
774 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
816 | class = rhodecode.lib.logging_formatter.ColorFormatter | |
775 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
817 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
776 | datefmt = %Y-%m-%d %H:%M:%S |
|
818 | datefmt = %Y-%m-%d %H:%M:%S | |
777 |
|
819 | |||
778 | [formatter_color_formatter_sql] |
|
820 | [formatter_color_formatter_sql] | |
779 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
821 | class = rhodecode.lib.logging_formatter.ColorFormatterSql | |
780 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
822 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
781 | datefmt = %Y-%m-%d %H:%M:%S |
|
823 | datefmt = %Y-%m-%d %H:%M:%S | |
782 |
|
824 | |||
783 | [formatter_json] |
|
825 | [formatter_json] | |
784 | format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s |
|
826 | format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s | |
785 | class = rhodecode.lib._vendor.jsonlogger.JsonFormatter |
|
827 | class = rhodecode.lib._vendor.jsonlogger.JsonFormatter |
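The [loggers]/[handlers]/[formatters] sections above follow the standard Python logging file format, so with logging.autoconfigure disabled they can be consumed directly by the stdlib; a minimal sketch (assuming the config is saved as rhodecode.ini):

    import logging
    import logging.config

    logging.config.fileConfig(
        'rhodecode.ini',
        disable_existing_loggers=False,  # keep loggers created before this call
    )
    logging.getLogger('rhodecode').debug('logging configured from ini')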
@@ -1,198 +1,198 b''
1 |
2 | # Copyright (C) 2010-2023 RhodeCode GmbH
3 | #
4 | # This program is free software: you can redistribute it and/or modify
5 | # it under the terms of the GNU Affero General Public License, version 3
6 | # (only), as published by the Free Software Foundation.
7 | #
8 | # This program is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU Affero General Public License
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 | #
16 | # This program is dual-licensed. If you wish to learn more about the
17 | # RhodeCode Enterprise Edition, including its added features, Support services,
18 | # and proprietary license terms, please see https://rhodecode.com/licenses/
19 |
20 | import datetime
21 | import os
22 | import shutil
23 | import tarfile
24 | import zipfile
25 | import io
26 |
27 | import mock
28 | import pytest
29 |
30 | import rhodecode
31 | from rhodecode.lib.archive_cache import get_archival_config
32 | from rhodecode.lib.str_utils import ascii_bytes
33 | from rhodecode.lib.vcs.backends import base
34 | from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError, VCSError
35 | from rhodecode.lib.vcs.nodes import FileNode
36 | from rhodecode.tests.vcs.conftest import BackendTestMixin
37 |
38 |
39 | @pytest.fixture()
40 | def d_cache_config():
41 |     return get_archival_config(config=rhodecode.CONFIG)
42 |
43 |
44 | @pytest.mark.usefixtures("vcs_repository_support")
45 | class TestArchives(BackendTestMixin):
46 |
47 |     @classmethod
48 |     def _get_commits(cls):
49 |         start_date = datetime.datetime(2010, 1, 1, 20)
50 |         yield {
51 |             'message': 'Initial Commit',
52 |             'author': 'Joe Doe <joe.doe@example.com>',
53 |             'date': start_date + datetime.timedelta(hours=12),
54 |             'added': [
55 |                 FileNode(b'executable_0o100755', b'mode_755', mode=0o100755),
56 |                 FileNode(b'executable_0o100500', b'mode_500', mode=0o100500),
57 |                 FileNode(b'not_executable', b'mode_644', mode=0o100644),
58 |             ],
59 |         }
60 |         for x in range(5):
61 |             yield {
62 |                 'message': 'Commit %d' % x,
63 |                 'author': 'Joe Doe <joe.doe@example.com>',
64 |                 'date': start_date + datetime.timedelta(hours=12 * x),
65 |                 'added': [
66 |                     FileNode(b'%d/file_%d.txt' % (x, x), content=b'Foobar %d' % x),
67 |                 ],
68 |             }
69 |
70 |     @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
71 |     def test_archive_tar(self, compressor, tmpdir, tmp_path, d_cache_config):
72 |
73 |         archive_node = tmp_path / 'archive-node'
74 |         archive_node.touch()
75 |
76 |         archive_lnk = self.tip.archive_repo(
77 |             str(archive_node), kind=f't{compressor}', archive_dir_name='repo', cache_config=d_cache_config)
78 |
79 |         out_dir = tmpdir
80 |         out_file = tarfile.open(str(archive_lnk), f'r|{compressor}')
81 |         out_file.extractall(out_dir)
82 |         out_file.close()
83 |
84 |         for x in range(5):
85 |             node_path = '%d/file_%d.txt' % (x, x)
86 |             with open(os.path.join(out_dir, 'repo/' + node_path), 'rb') as f:
87 |                 file_content = f.read()
88 |             assert file_content == self.tip.get_node(node_path).content
89 |
90 |         shutil.rmtree(out_dir)
91 |
92 |     @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
93 |     def test_archive_tar_symlink(self, compressor):
94 |         pytest.skip('Not supported')
95 |
96 |     @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
97 |     def test_archive_tar_file_modes(self, compressor, tmpdir, tmp_path, d_cache_config):
98 |         archive_node = tmp_path / 'archive-node'
99 |         archive_node.touch()
100 |
101 |         archive_lnk = self.tip.archive_repo(
102 |             str(archive_node), kind='t{}'.format(compressor), archive_dir_name='repo', cache_config=d_cache_config)
103 |
104 |         out_dir = tmpdir
105 |         out_file = tarfile.open(str(archive_lnk), 'r|{}'.format(compressor))
106 |         out_file.extractall(out_dir)
107 |         out_file.close()
108 |
109 |         def dest(inp):
110 |             return os.path.join(out_dir, "repo/" + inp)
111 |
112 |         assert oct(os.stat(dest('not_executable')).st_mode) == '0o100644'
113 |
114 |     def test_archive_zip(self, tmp_path, d_cache_config):
115 |         archive_node = tmp_path / 'archive-node'
116 |         archive_node.touch()
117 |
118 |         archive_lnk = self.tip.archive_repo(str(archive_node), kind='zip',
119 |                                             archive_dir_name='repo', cache_config=d_cache_config)
120 |         zip_file = zipfile.ZipFile(str(archive_lnk))
121 |
122 |         for x in range(5):
123 |             node_path = '%d/file_%d.txt' % (x, x)
124 |             data = zip_file.read(f'repo/{node_path}')
125 |
126 |             decompressed = io.BytesIO()
127 |             decompressed.write(data)
128 |             assert decompressed.getvalue() == \
129 |                 self.tip.get_node(node_path).content
130 |             decompressed.close()
131 |
132 |     def test_archive_zip_with_metadata(self, tmp_path, d_cache_config):
133 |         archive_node = tmp_path / 'archive-node'
134 |         archive_node.touch()
135 |
136 |         archive_lnk = self.tip.archive_repo(
137 |             str(archive_node), kind='zip',
138 |             archive_dir_name='repo', write_metadata=True, cache_config=d_cache_config)
139 |
140 |         zip_file = zipfile.ZipFile(str(archive_lnk))
141 |         metafile = zip_file.read('repo/.archival.txt')
142 |
143 |         raw_id = ascii_bytes(self.tip.raw_id)
144 |         assert b'commit_id:%b' % raw_id in metafile
145 |
146 |         for x in range(5):
147 |             node_path = '%d/file_%d.txt' % (x, x)
148 |             data = zip_file.read(f'repo/{node_path}')
149 |             decompressed = io.BytesIO()
150 |             decompressed.write(data)
151 |             assert decompressed.getvalue() == \
152 |                 self.tip.get_node(node_path).content
153 |             decompressed.close()
154 |
155 |     def test_archive_wrong_kind(self, tmp_path, d_cache_config):
156 |         archive_node = tmp_path / 'archive-node'
157 |         archive_node.touch()
158 |
159 |         with pytest.raises(ImproperArchiveTypeError):
160 |             self.tip.archive_repo(str(archive_node), kind='wrong kind', cache_config=d_cache_config)
161 |
162 |
163 | @pytest.fixture()
164 | def base_commit():
165 |     """
166 |     Prepare a `base.BaseCommit` just enough for `_validate_archive_prefix`.
167 |     """
168 |     commit = base.BaseCommit()
169 |     commit.repository = mock.Mock()
170 |     commit.repository.name = 'fake_repo'
171 |     commit.short_id = 'fake_id'
172 |     return commit
173 |
174 |
175 | def test_validate_archive_prefix_enforces_non_ascii_as_prefix(base_commit):
176 |     with pytest.raises(VCSError):
177 |         base_commit._validate_archive_prefix("Ünïcödë")
178 |
179 |
180 | def test_validate_archive_prefix_empty_prefix(base_commit):
181 |     # TODO: johbo: Should raise a ValueError here.
182 |     with pytest.raises(VCSError):
183 |         base_commit._validate_archive_prefix('')
184 |
185 |
186 | def test_validate_archive_prefix_with_leading_slash(base_commit):
187 |     # TODO: johbo: Should raise a ValueError here.
188 |     with pytest.raises(VCSError):
189 |         base_commit._validate_archive_prefix('/any')
190 |
191 |
192 | def test_validate_archive_prefix_falls_back_to_repository_name(base_commit):
193 |     prefix = base_commit._validate_archive_prefix(None)
194 |     expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(
195 |         repo_name='fake_repo',
196 |         short_id='fake_id')
197 |     assert isinstance(prefix, str)
198 |     assert prefix == expected_prefix
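The tar tests above all follow the same create-extract-compare round trip. As a minimal, stdlib-only sketch of that pattern (the file names and contents below are illustrative, not taken from the changeset):

import io
import os
import tarfile
import tempfile

# Round-trip pattern used by test_archive_tar: write files into a tar.gz,
# extract it elsewhere, and compare the extracted bytes with the originals.
with tempfile.TemporaryDirectory() as work_dir:
    archive_path = os.path.join(work_dir, 'repo.tgz')
    payload = {'0/file_0.txt': b'Foobar 0', '1/file_1.txt': b'Foobar 1'}

    # Stream-write the archive, placing everything under a 'repo/' prefix
    # the way archive_dir_name='repo' does in the tests above.
    with tarfile.open(archive_path, 'w|gz') as tar:
        for name, content in payload.items():
            info = tarfile.TarInfo(name='repo/' + name)
            info.size = len(content)
            tar.addfile(info, io.BytesIO(content))

    out_dir = os.path.join(work_dir, 'out')
    with tarfile.open(archive_path, 'r|gz') as tar:
        tar.extractall(out_dir)

    # Verify every file survived the round trip byte-for-byte.
    for name, content in payload.items():
        with open(os.path.join(out_dir, 'repo', name), 'rb') as f:
            assert f.read() == content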