@@ -0,0 +1,161 b'' | |||||
|
1 | .. _config-saml-azure-ref: | |||
|
2 | ||||
|
3 | ||||
|
4 | SAML 2.0 with Azure Entra ID | |||
|
5 | ---------------------------- | |||
|
6 | ||||
|
7 | **This plugin is available only in the EE edition.** | |||
|
8 | ||||
|
9 | |RCE| supports SAML 2.0 authentication with the Azure Entra ID provider. This allows | |||
|
10 | users to log in to RhodeCode via the SSO mechanism of an external identity provider | |||
|
11 | such as Azure AD. The login can be triggered either by the external IdP, or internally | |||
|
12 | by clicking the specific authentication button on the log-in page. | |||
|
13 | ||||
|
14 | ||||
|
15 | Configuration steps | |||
|
16 | ^^^^^^^^^^^^^^^^^^^ | |||
|
17 | ||||
|
18 | To configure Azure Entra ID SAML authentication, use the following steps: | |||
|
19 | ||||
|
20 | 1. From the |RCE| interface, select | |||
|
21 | :menuselection:`Admin --> Authentication` | |||
|
22 | 2. Activate the `Azure Entra ID` plugin and select :guilabel:`Save` | |||
|
23 | 3. Go to the newly available menu option called `Azure Entra ID` on the left side. | |||
|
24 | 4. Check the `enabled` checkbox in the plugin configuration section, | |||
|
25 | fill in the required SAML information, and select :guilabel:`Save`. For more details, | |||
|
26 | see :ref:`config-saml-azure` | |||
|
27 | ||||
|
28 | ||||
|
29 | .. _config-saml-azure: | |||
|
30 | ||||
|
31 | ||||
|
32 | Example SAML Azure Entra ID configuration | |||
|
33 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | |||
|
34 | ||||
|
35 | Example configuration for SAML 2.0 with Azure Entra ID provider | |||
|
36 | ||||
|
37 | ||||
|
38 | Enabled | |||
|
39 | `True`: | |||
|
40 | ||||
|
41 | .. note:: | |||
|
42 | Enable or disable this authentication plugin. | |||
|
43 | ||||
|
44 | ||||
|
45 | Auth Cache TTL | |||
|
46 | `30`: | |||
|
47 | ||||
|
48 | .. note:: | |||
|
49 | Number of seconds to cache the authentication and permissions check response call for this plugin. | |||
|
50 | Useful for expensive calls like LDAP to improve the performance of the system (0 means disabled). | |||
|
51 | ||||
|
52 | Debug | |||
|
53 | `True`: | |||
|
54 | ||||
|
55 | .. note:: | |||
|
56 | Enable or disable debug mode that shows SAML errors in the RhodeCode logs. | |||
|
57 | ||||
|
58 | ||||
|
59 | Auth button name | |||
|
60 | `Azure Entra ID`: | |||
|
61 | ||||
|
62 | .. note:: | |||
|
63 | Alternative authentication display name, e.g. AzureAuth, CorporateID, etc. | |||
|
64 | ||||
|
65 | ||||
|
66 | Entity ID | |||
|
67 | `https://sts.windows.net/APP_ID/`: | |||
|
68 | ||||
|
69 | .. note:: | |||
|
70 | Identity Provider entity/metadata URI, known as the "Microsoft Entra Identifier". | |||
|
71 | E.g. https://sts.windows.net/abcd-c655-dcee-aab7-abcd/ | |||
|
72 | ||||
|
73 | SSO URL | |||
|
74 | `https://login.microsoftonline.com/APP_ID/saml2`: | |||
|
75 | ||||
|
76 | .. note:: | |||
|
77 | SSO (Single Sign-On) endpoint URL of the IdP. This can be used to initiate login. Also known as the Login URL. | |||
|
78 | E.g. https://login.microsoftonline.com/abcd-c655-dcee-aab7-abcd/saml2 | |||
|
79 | ||||
|
80 | SLO URL | |||
|
81 | `https://login.microsoftonline.com/APP_ID/saml2`: | |||
|
82 | ||||
|
83 | .. note:: | |||
|
84 | SLO (Single Logout) endpoint URL of the IdP. Also known as the Logout URL. | |||
|
85 | E.g. https://login.microsoftonline.com/abcd-c655-dcee-aab7-abcd/saml2 | |||
|
86 | ||||
|
87 | x509cert | |||
|
88 | `<CERTIFICATE_STRING>`: | |||
|
89 | ||||
|
90 | .. note:: | |||
|
91 | Identity provider public x509 certificate. It will be converted to single-line format without headers. | |||
|
92 | Download the raw base64 encoded certificate from the Identity provider and paste it here. | |||
|
93 | ||||
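A minimal sketch of that normalization, assuming a standard PEM input (the helper name is illustrative, not part of the plugin API)::

    def pem_to_single_line(pem_text: str) -> str:
        # drop the BEGIN/END header lines and join the base64 body
        lines = [
            line.strip() for line in pem_text.splitlines()
            if line.strip() and '-----' not in line
        ]
        return ''.join(lines)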
|
94 | SAML Signature | |||
|
95 | `sha-256`: | |||
|
96 | ||||
|
97 | .. note:: | |||
|
98 | Type of algorithm to use for verification of the SAML signature on the identity provider side. | |||
|
99 | ||||
|
100 | SAML Digest | |||
|
101 | `sha-256`: | |||
|
102 | ||||
|
103 | .. note:: | |||
|
104 | Type of algorithm to use for verification of the SAML digest on the identity provider side. | |||
|
105 | ||||
|
106 | Service Provider Cert Dir | |||
|
107 | `/etc/rhodecode/conf/saml_ssl/`: | |||
|
108 | ||||
|
109 | .. note:: | |||
|
110 | Optional directory to store service provider certificate and private keys. | |||
|
111 | Expected certs for the SP should be stored in this folder as: | |||
|
112 | ||||
|
113 | * sp.key Private Key | |||
|
114 | * sp.crt Public cert | |||
|
115 | * sp_new.crt Future Public cert | |||
|
116 | ||||
|
117 | You can also use another cert to sign the metadata of the SP using: | |||
|
118 | ||||
|
119 | * metadata.key | |||
|
120 | * metadata.crt | |||
|
121 | ||||
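To sanity-check the folder layout described above, here is a hedged sketch using the example directory (file names per the lists above; this script is illustrative, not part of the plugin)::

    import os

    cert_dir = '/etc/rhodecode/conf/saml_ssl/'
    required = ['sp.key', 'sp.crt']
    optional = ['sp_new.crt', 'metadata.key', 'metadata.crt']

    for name in required:
        assert os.path.isfile(os.path.join(cert_dir, name)), f'missing {name}'
    for name in optional:
        if not os.path.isfile(os.path.join(cert_dir, name)):
            print(f'optional file not present: {name}')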
|
122 | Expected NameID Format | |||
|
123 | `nameid-format:emailAddress`: | |||
|
124 | ||||
|
125 | .. note:: | |||
|
126 | The format that specifies how the NameID is sent to the service provider. | |||
|
127 | ||||
|
128 | User ID Attribute | |||
|
129 | `user.email`: | |||
|
130 | ||||
|
131 | .. note:: | |||
|
132 | User ID Attribute name. This defines which attribute in the SAML response will be used to link accounts via a unique id. | |||
|
133 | Ensure this attribute is returned by Azure Entra ID, for example via the user.email claim. | |||
|
134 | ||||
|
135 | Username Attribute | |||
|
136 | `user.username`: | |||
|
137 | ||||
|
138 | .. note:: | |||
|
139 | Username Attribute name. This defines which attribute in the SAML response will map to a username. | |||
|
140 | ||||
|
141 | Email Attribute | |||
|
142 | `user.email`: | |||
|
143 | ||||
|
144 | .. note:: | |||
|
145 | Email Attribute name. This defines which attribute in the SAML response will map to an email address. | |||
|
146 | ||||
|
147 | ||||
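To illustrate how the three attribute settings above are consumed, here is a hedged sketch of the extraction step (the function and payload shape are hypothetical; the attribute names match the example config)::

    def extract_saml_user(attributes: dict) -> dict:
        # SAML attribute values typically arrive as lists; take the first entry
        def first(name):
            values = attributes.get(name) or []
            return values[0] if values else None

        return {
            'external_id': first('user.email'),  # User ID Attribute
            'username': first('user.username'),  # Username Attribute
            'email': first('user.email'),        # Email Attribute
        }

    user = extract_saml_user({'user.username': ['jane'], 'user.email': ['jane@example.com']})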
|
148 | ||||
|
149 | Below is an example setup from the Azure administration page that can be used with the above config. | |||
|
150 | ||||
|
151 | .. image:: ../images/saml-azure-service-provider-example.png | |||
|
152 | :alt: Azure SAML setup example | |||
|
153 | :scale: 50 % | |||
|
154 | ||||
|
155 | ||||
|
156 | Below is an example attribute mapping set on the IdP side, as required by the above config. | |||
|
157 | ||||
|
158 | ||||
|
159 | .. image:: ../images/saml-azure-attributes-example.png | |||
|
160 | :alt: Azure SAML setup example | |||
|
161 | :scale: 50 %
1 | NO CONTENT: new file 100644, binary diff hidden
1 | NO CONTENT: new file 100644, binary diff hidden
@@ -0,0 +1,40 b'' | |||||
|
1 | |RCE| 5.1.1 |RNS| | |||
|
2 | ----------------- | |||
|
3 | ||||
|
4 | Release Date | |||
|
5 | ^^^^^^^^^^^^ | |||
|
6 | ||||
|
7 | - 2024-07-23 | |||
|
8 | ||||
|
9 | ||||
|
10 | New Features | |||
|
11 | ^^^^^^^^^^^^ | |||
|
12 | ||||
|
13 | ||||
|
14 | ||||
|
15 | General | |||
|
16 | ^^^^^^^ | |||
|
17 | ||||
|
18 | ||||
|
19 | ||||
|
20 | Security | |||
|
21 | ^^^^^^^^ | |||
|
22 | ||||
|
23 | ||||
|
24 | ||||
|
25 | Performance | |||
|
26 | ^^^^^^^^^^^ | |||
|
27 | ||||
|
28 | ||||
|
29 | ||||
|
30 | ||||
|
31 | Fixes | |||
|
32 | ^^^^^ | |||
|
33 | ||||
|
34 | - Fixed problems with JS static files build | |||
|
35 | ||||
|
36 | ||||
|
37 | Upgrade notes | |||
|
38 | ^^^^^^^^^^^^^ | |||
|
39 | ||||
|
40 | - RhodeCode 5.1.1 is an unscheduled bugfix release to address some build issues with the 5.1 images
@@ -0,0 +1,41 b'' | |||||
|
1 | |RCE| 5.1.2 |RNS| | |||
|
2 | ----------------- | |||
|
3 | ||||
|
4 | Release Date | |||
|
5 | ^^^^^^^^^^^^ | |||
|
6 | ||||
|
7 | - 2024-09-12 | |||
|
8 | ||||
|
9 | ||||
|
10 | New Features | |||
|
11 | ^^^^^^^^^^^^ | |||
|
12 | ||||
|
13 | ||||
|
14 | ||||
|
15 | General | |||
|
16 | ^^^^^^^ | |||
|
17 | ||||
|
18 | ||||
|
19 | ||||
|
20 | Security | |||
|
21 | ^^^^^^^^ | |||
|
22 | ||||
|
23 | ||||
|
24 | ||||
|
25 | Performance | |||
|
26 | ^^^^^^^^^^^ | |||
|
27 | ||||
|
28 | ||||
|
29 | ||||
|
30 | ||||
|
31 | Fixes | |||
|
32 | ^^^^^ | |||
|
33 | ||||
|
34 | - Fixed problems with Mercurial authentication after enabling httppostargs. | |||
|
35 | Currently this protocol will be disabled until a proper fix is in place | |||
|
36 | ||||
|
37 | ||||
|
38 | Upgrade notes | |||
|
39 | ^^^^^^^^^^^^^ | |||
|
40 | ||||
|
41 | - RhodeCode 5.1.2 is an unscheduled bugfix release to address some build issues with the 5.1 images
@@ -0,0 +1,55 b'' | |||||
|
1 | |RCE| 5.2.0 |RNS| | |||
|
2 | ----------------- | |||
|
3 | ||||
|
4 | Release Date | |||
|
5 | ^^^^^^^^^^^^ | |||
|
6 | ||||
|
7 | - 2024-10-09 | |||
|
8 | ||||
|
9 | ||||
|
10 | New Features | |||
|
11 | ^^^^^^^^^^^^ | |||
|
12 | ||||
|
13 | - New artifact storage engines allowing s3-based uploads | |||
|
14 | - Enterprise version only: Added a security tab to the admin interface and the possibility to whitelist specific vcs client versions. Some older client versions have known security vulnerabilities; now you can disallow them. | |||
|
15 | - Enterprise version only: Implemented support for Azure SAML authentication | |||
|
16 | ||||
|
17 | ||||
|
18 | General | |||
|
19 | ^^^^^^^ | |||
|
20 | - Bumped version of packaging, gunicorn, orjson, zope.interface and some other requirements | |||
|
21 | - A few tweaks and changes to saml plugins to allow easier setup | |||
|
22 | - Configs: allow json log format for gunicorn | |||
|
23 | - Configs: deprecated the old ssh wrapper command and made the v2 the default one | |||
|
24 | - Make sure commit-caches propagate to parent repo groups | |||
|
25 | - Configs: Moved git lfs path and path of hg large files to ini file | |||
|
26 | ||||
|
27 | Security | |||
|
28 | ^^^^^^^^ | |||
|
29 | ||||
|
30 | ||||
|
31 | ||||
|
32 | Performance | |||
|
33 | ^^^^^^^^^^^ | |||
|
34 | ||||
|
35 | - Added a description escaper for better performance | |||
|
36 | ||||
|
37 | Fixes | |||
|
38 | ^^^^^ | |||
|
39 | ||||
|
40 | - Fixed email notifications not working properly | |||
|
41 | - Removed waitress as a default runner | |||
|
42 | - Fixed issue with branch permissions | |||
|
43 | - Ldap: fixed nested groups extraction logic | |||
|
44 | - Fixed possible db corruption in case of filesystem problems | |||
|
45 | - Cleanup and improvements to documentation | |||
|
46 | - Added Kubernetes deployment section to the documentation | |||
|
47 | - Added default value to celery result and broker | |||
|
48 | - Fixed broken backends function after python3 migration | |||
|
49 | - Explicitly disable the mercurial web_push ssl flag to prevent errors about ssl being required | |||
|
50 | - VCS: fixed problems with locked repos and with branch permissions reporting | |||
|
51 | ||||
|
52 | Upgrade notes | |||
|
53 | ^^^^^^^^^^^^^ | |||
|
54 | ||||
|
55 | - RhodeCode 5.2.0 is a planned major release featuring Azure SAML, whitelist for client versions, s3 artifacts backend and more! |
@@ -0,0 +1,46 b'' | |||||
|
1 | # Copyright (C) 2010-2024 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | ||||
|
19 | import logging | |||
|
20 | ||||
|
21 | from rhodecode.apps._base import BaseAppView | |||
|
22 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator | |||
|
23 | ||||
|
24 | log = logging.getLogger(__name__) | |||
|
25 | ||||
|
26 | ||||
|
27 | class AdminSecurityView(BaseAppView): | |||
|
28 | ||||
|
29 | def load_default_context(self): | |||
|
30 | c = self._get_local_tmpl_context() | |||
|
31 | return c | |||
|
32 | ||||
|
33 | @LoginRequired() | |||
|
34 | @HasPermissionAllDecorator('hg.admin') | |||
|
35 | def security(self): | |||
|
36 | c = self.load_default_context() | |||
|
37 | c.active = 'security' | |||
|
38 | return self._get_template_context(c) | |||
|
39 | ||||
|
40 | ||||
|
41 | @LoginRequired() | |||
|
42 | @HasPermissionAllDecorator('hg.admin') | |||
|
43 | def admin_security_modify_allowed_vcs_client_versions(self): | |||
|
44 | c = self.load_default_context() | |||
|
45 | c.active = 'security' | |||
|
46 | return self._get_template_context(c) |
@@ -0,0 +1,269 b'' | |||||
|
1 | # Copyright (C) 2016-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | ||||
|
19 | import os | |||
|
20 | import fsspec # noqa | |||
|
21 | import logging | |||
|
22 | ||||
|
23 | from rhodecode.lib.ext_json import json | |||
|
24 | ||||
|
25 | from rhodecode.apps.file_store.utils import sha256_safe, ShardFileReader, get_uid_filename | |||
|
26 | from rhodecode.apps.file_store.extensions import resolve_extensions | |||
|
27 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException # noqa: F401 | |||
|
28 | ||||
|
29 | log = logging.getLogger(__name__) | |||
|
30 | ||||
|
31 | ||||
|
32 | class BaseShard: | |||
|
33 | ||||
|
34 | metadata_suffix: str = '.metadata' | |||
|
35 | storage_type: str = '' | |||
|
36 | fs = None | |||
|
37 | ||||
|
38 | @property | |||
|
39 | def storage_medium(self): | |||
|
40 | if not self.storage_type: | |||
|
41 | raise ValueError('No storage type set for this shard storage_type=""') | |||
|
42 | return getattr(self, self.storage_type) | |||
|
43 | ||||
|
44 | def __contains__(self, key): | |||
|
45 | full_path = self.store_path(key) | |||
|
46 | return self.fs.exists(full_path) | |||
|
47 | ||||
|
48 | def metadata_convert(self, uid_filename, metadata): | |||
|
49 | return metadata | |||
|
50 | ||||
|
51 | def get_metadata_filename(self, uid_filename) -> tuple[str, str]: | |||
|
52 | metadata_file: str = f'{uid_filename}{self.metadata_suffix}' | |||
|
53 | return metadata_file, self.store_path(metadata_file) | |||
|
54 | ||||
|
55 | def get_metadata(self, uid_filename, ignore_missing=False) -> dict: | |||
|
56 | _metadata_file, metadata_file_path = self.get_metadata_filename(uid_filename) | |||
|
57 | if ignore_missing and not self.fs.exists(metadata_file_path): | |||
|
58 | return {} | |||
|
59 | ||||
|
60 | with self.fs.open(metadata_file_path, 'rb') as f: | |||
|
61 | metadata = json.loads(f.read()) | |||
|
62 | ||||
|
63 | metadata = self.metadata_convert(uid_filename, metadata) | |||
|
64 | return metadata | |||
|
65 | ||||
|
66 | def _store(self, key: str, uid_key: str, value_reader, max_filesize: int | None = None, metadata: dict | None = None, **kwargs): | |||
|
67 | raise NotImplementedError | |||
|
68 | ||||
|
69 | def store(self, key: str, uid_key: str, value_reader, max_filesize: int | None = None, metadata: dict | None = None, **kwargs): | |||
|
70 | return self._store(key, uid_key, value_reader, max_filesize, metadata, **kwargs) | |||
|
71 | ||||
|
72 | def _fetch(self, key, presigned_url_expires: int = 0): | |||
|
73 | raise NotImplementedError | |||
|
74 | ||||
|
75 | def fetch(self, key, **kwargs) -> tuple[ShardFileReader, dict]: | |||
|
76 | return self._fetch(key, **kwargs) | |||
|
77 | ||||
|
78 | def _delete(self, key): | |||
|
79 | if key not in self: | |||
|
80 | log.exception(f'requested key={key} not found in {self}') | |||
|
81 | raise KeyError(key) | |||
|
82 | ||||
|
83 | metadata = self.get_metadata(key) | |||
|
84 | _metadata_file, metadata_file_path = self.get_metadata_filename(key) | |||
|
85 | artifact_file_path = metadata['filename_uid_path'] | |||
|
86 | self.fs.rm(artifact_file_path) | |||
|
87 | self.fs.rm(metadata_file_path) | |||
|
88 | ||||
|
89 | return 1 | |||
|
90 | ||||
|
91 | def delete(self, key): | |||
|
92 | raise NotImplementedError | |||
|
93 | ||||
|
94 | def store_path(self, uid_filename): | |||
|
95 | raise NotImplementedError | |||
|
96 | ||||
|
97 | ||||
|
98 | class BaseFileStoreBackend: | |||
|
99 | _shards = tuple() | |||
|
100 | _shard_cls = BaseShard | |||
|
101 | _config: dict | None = None | |||
|
102 | _storage_path: str = '' | |||
|
103 | ||||
|
104 | def __init__(self, settings, extension_groups=None): | |||
|
105 | self._config = settings | |||
|
106 | extension_groups = extension_groups or ['any'] | |||
|
107 | self.extensions = resolve_extensions([], groups=extension_groups) | |||
|
108 | ||||
|
109 | def __contains__(self, key): | |||
|
110 | return self.filename_exists(key) | |||
|
111 | ||||
|
112 | def __repr__(self): | |||
|
113 | return f'<{self.__class__.__name__}(storage={self.storage_path})>' | |||
|
114 | ||||
|
115 | @property | |||
|
116 | def storage_path(self): | |||
|
117 | return self._storage_path | |||
|
118 | ||||
|
119 | @classmethod | |||
|
120 | def get_shard_index(cls, filename: str, num_shards) -> int: | |||
|
121 | # Generate a hash value from the filename | |||
|
122 | hash_value = sha256_safe(filename) | |||
|
123 | ||||
|
124 | # Convert the hash value to an integer | |||
|
125 | hash_int = int(hash_value, 16) | |||
|
126 | ||||
|
127 | # Map the hash integer to a shard number between 0 and num_shards - 1 | |||
|
128 | shard_number = (hash_int % num_shards) | |||
|
129 | ||||
|
130 | return shard_number | |||
|
131 | ||||
|
132 | @classmethod | |||
|
133 | def apply_counter(cls, counter: int, filename: str) -> str: | |||
|
134 | """ | |||
|
135 | Apply a counter to the filename. | |||
|
136 | ||||
|
137 | :param counter: The counter value to apply. | |||
|
138 | :param filename: The original filename. | |||
|
139 | :return: The modified filename with the counter. | |||
|
140 | """ | |||
|
141 | name_counted = f'{counter:d}-{filename}' | |||
|
142 | return name_counted | |||
|
143 | ||||
|
144 | def _get_shard(self, key) -> _shard_cls: | |||
|
145 | index = self.get_shard_index(key, len(self._shards)) | |||
|
146 | shard = self._shards[index] | |||
|
147 | return shard | |||
|
148 | ||||
|
149 | def get_conf(self, key, pop=False): | |||
|
150 | if key not in self._config: | |||
|
151 | raise ValueError( | |||
|
152 | f"No configuration key '{key}', please make sure it exists in filestore config") | |||
|
153 | val = self._config[key] | |||
|
154 | if pop: | |||
|
155 | del self._config[key] | |||
|
156 | return val | |||
|
157 | ||||
|
158 | def filename_allowed(self, filename, extensions=None): | |||
|
159 | """Checks if a filename has an allowed extension | |||
|
160 | ||||
|
161 | :param filename: base name of file | |||
|
162 | :param extensions: iterable of extensions (or self.extensions) | |||
|
163 | """ | |||
|
164 | _, ext = os.path.splitext(filename) | |||
|
165 | return self.extension_allowed(ext, extensions) | |||
|
166 | ||||
|
167 | def extension_allowed(self, ext, extensions=None): | |||
|
168 | """ | |||
|
169 | Checks if an extension is permitted. Both e.g. ".jpg" and | |||
|
170 | "jpg" can be passed in. Extension lookup is case-insensitive. | |||
|
171 | ||||
|
172 | :param ext: extension to check | |||
|
173 | :param extensions: iterable of extensions to validate against (or self.extensions) | |||
|
174 | """ | |||
|
175 | def normalize_ext(_ext): | |||
|
176 | if _ext.startswith('.'): | |||
|
177 | _ext = _ext[1:] | |||
|
178 | return _ext.lower() | |||
|
179 | ||||
|
180 | extensions = extensions or self.extensions | |||
|
181 | if not extensions: | |||
|
182 | return True | |||
|
183 | ||||
|
184 | ext = normalize_ext(ext) | |||
|
185 | ||||
|
186 | return ext in [normalize_ext(x) for x in extensions] | |||
|
187 | ||||
|
188 | def filename_exists(self, uid_filename): | |||
|
189 | shard = self._get_shard(uid_filename) | |||
|
190 | return uid_filename in shard | |||
|
191 | ||||
|
192 | def store_path(self, uid_filename): | |||
|
193 | """ | |||
|
194 | Returns absolute file path of the uid_filename | |||
|
195 | """ | |||
|
196 | shard = self._get_shard(uid_filename) | |||
|
197 | return shard.store_path(uid_filename) | |||
|
198 | ||||
|
199 | def store_metadata(self, uid_filename): | |||
|
200 | shard = self._get_shard(uid_filename) | |||
|
201 | return shard.get_metadata_filename(uid_filename) | |||
|
202 | ||||
|
203 | def store(self, filename, value_reader, extensions=None, metadata=None, max_filesize=None, randomized_name=True, **kwargs): | |||
|
204 | extensions = extensions or self.extensions | |||
|
205 | ||||
|
206 | if not self.filename_allowed(filename, extensions): | |||
|
207 | msg = f'filename {filename} has an extension not allowed by {extensions}' | |||
|
208 | raise FileNotAllowedException(msg) | |||
|
209 | ||||
|
210 | # # TODO: check why we need this setting ? it looks stupid... | |||
|
211 | # no_body_seek is used in stream mode importer somehow | |||
|
212 | # no_body_seek = kwargs.pop('no_body_seek', False) | |||
|
213 | # if no_body_seek: | |||
|
214 | # pass | |||
|
215 | # else: | |||
|
216 | # value_reader.seek(0) | |||
|
217 | ||||
|
218 | uid_filename = kwargs.pop('uid_filename', None) | |||
|
219 | if uid_filename is None: | |||
|
220 | uid_filename = get_uid_filename(filename, randomized=randomized_name) | |||
|
221 | ||||
|
222 | shard = self._get_shard(uid_filename) | |||
|
223 | ||||
|
224 | return shard.store(filename, uid_filename, value_reader, max_filesize, metadata, **kwargs) | |||
|
225 | ||||
|
226 | def import_to_store(self, value_reader, org_filename, uid_filename, metadata, **kwargs): | |||
|
227 | shard = self._get_shard(uid_filename) | |||
|
228 | max_filesize = None | |||
|
229 | return shard.store(org_filename, uid_filename, value_reader, max_filesize, metadata, import_mode=True) | |||
|
230 | ||||
|
231 | def delete(self, uid_filename): | |||
|
232 | shard = self._get_shard(uid_filename) | |||
|
233 | return shard.delete(uid_filename) | |||
|
234 | ||||
|
235 | def fetch(self, uid_filename) -> tuple[ShardFileReader, dict]: | |||
|
236 | shard = self._get_shard(uid_filename) | |||
|
237 | return shard.fetch(uid_filename) | |||
|
238 | ||||
|
239 | def get_metadata(self, uid_filename, ignore_missing=False) -> dict: | |||
|
240 | shard = self._get_shard(uid_filename) | |||
|
241 | return shard.get_metadata(uid_filename, ignore_missing=ignore_missing) | |||
|
242 | ||||
|
243 | def iter_keys(self): | |||
|
244 | for shard in self._shards: | |||
|
245 | if shard.fs.exists(shard.storage_medium): | |||
|
246 | for path, _dirs, _files in shard.fs.walk(shard.storage_medium): | |||
|
247 | for key_file_path in _files: | |||
|
248 | if key_file_path.endswith(shard.metadata_suffix): | |||
|
249 | yield shard, key_file_path | |||
|
250 | ||||
|
251 | def iter_artifacts(self): | |||
|
252 | for shard, key_file in self.iter_keys(): | |||
|
253 | json_key = f"{shard.storage_medium}/{key_file}" | |||
|
254 | with shard.fs.open(json_key, 'rb') as f: | |||
|
255 | yield shard, json.loads(f.read())['filename_uid'] | |||
|
256 | ||||
|
257 | def get_statistics(self): | |||
|
258 | total_files = 0 | |||
|
259 | total_size = 0 | |||
|
260 | meta = {} | |||
|
261 | ||||
|
262 | for shard, key_file in self.iter_keys(): | |||
|
263 | json_key = f"{shard.storage_medium}/{key_file}" | |||
|
264 | with shard.fs.open(json_key, 'rb') as f: | |||
|
265 | total_files += 1 | |||
|
266 | metadata = json.loads(f.read()) | |||
|
267 | total_size += metadata['size'] | |||
|
268 | ||||
|
269 | return total_files, total_size, meta |
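The sharding above is deterministic: ``get_shard_index`` hashes the filename with sha256 and reduces it modulo the shard count, so a given uid_filename always resolves to the same shard. A hedged standalone sketch that mirrors the logic (assuming ``sha256_safe`` is a plain sha256 hexdigest)::

    import hashlib

    def shard_index(filename: str, num_shards: int) -> int:
        hash_int = int(hashlib.sha256(filename.encode()).hexdigest(), 16)
        return hash_int % num_shards  # stable value in [0, num_shards)

    # the same name maps to the same shard on every call
    assert shard_index('artifact.bin', 4) == shard_index('artifact.bin', 4)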
@@ -0,0 +1,183 b'' | |||||
|
1 | # Copyright (C) 2016-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | ||||
|
19 | import os | |||
|
20 | import hashlib | |||
|
21 | import functools | |||
|
22 | import time | |||
|
23 | import logging | |||
|
24 | ||||
|
25 | from .. import config_keys | |||
|
26 | from ..exceptions import FileOverSizeException | |||
|
27 | from ..backends.base import BaseFileStoreBackend, fsspec, BaseShard, ShardFileReader | |||
|
28 | ||||
|
29 | from ....lib.ext_json import json | |||
|
30 | ||||
|
31 | log = logging.getLogger(__name__) | |||
|
32 | ||||
|
33 | ||||
|
34 | class FileSystemShard(BaseShard): | |||
|
35 | METADATA_VER = 'v2' | |||
|
36 | BACKEND_TYPE = config_keys.backend_filesystem | |||
|
37 | storage_type: str = 'directory' | |||
|
38 | ||||
|
39 | def __init__(self, index, directory, directory_folder, fs, **settings): | |||
|
40 | self._index: int = index | |||
|
41 | self._directory: str = directory | |||
|
42 | self._directory_folder: str = directory_folder | |||
|
43 | self.fs = fs | |||
|
44 | ||||
|
45 | @property | |||
|
46 | def directory(self) -> str: | |||
|
47 | """Cache directory final path.""" | |||
|
48 | return os.path.join(self._directory, self._directory_folder) | |||
|
49 | ||||
|
50 | def _write_file(self, full_path, iterator, max_filesize, mode='wb'): | |||
|
51 | ||||
|
52 | # ensure dir exists | |||
|
53 | destination, _ = os.path.split(full_path) | |||
|
54 | if not self.fs.exists(destination): | |||
|
55 | self.fs.makedirs(destination) | |||
|
56 | ||||
|
57 | writer = self.fs.open(full_path, mode) | |||
|
58 | ||||
|
59 | digest = hashlib.sha256() | |||
|
60 | oversize_cleanup = False | |||
|
61 | with writer: | |||
|
62 | size = 0 | |||
|
63 | for chunk in iterator: | |||
|
64 | size += len(chunk) | |||
|
65 | digest.update(chunk) | |||
|
66 | writer.write(chunk) | |||
|
67 | ||||
|
68 | if max_filesize and size > max_filesize: | |||
|
69 | oversize_cleanup = True | |||
|
70 | # free up the copied file, and raise exc | |||
|
71 | break | |||
|
72 | ||||
|
73 | writer.flush() | |||
|
74 | # Get the file descriptor | |||
|
75 | fd = writer.fileno() | |||
|
76 | ||||
|
77 | # Sync the file descriptor to disk, helps with NFS cases... | |||
|
78 | os.fsync(fd) | |||
|
79 | ||||
|
80 | if oversize_cleanup: | |||
|
81 | self.fs.rm(full_path) | |||
|
82 | raise FileOverSizeException(f'given file is over size limit ({max_filesize}): {full_path}') | |||
|
83 | ||||
|
84 | sha256 = digest.hexdigest() | |||
|
85 | log.debug('written new artifact under %s, sha256: %s', full_path, sha256) | |||
|
86 | return size, sha256 | |||
|
87 | ||||
|
88 | def _store(self, key: str, uid_key, value_reader, max_filesize: int | None = None, metadata: dict | None = None, **kwargs): | |||
|
89 | ||||
|
90 | filename = key | |||
|
91 | uid_filename = uid_key | |||
|
92 | full_path = self.store_path(uid_filename) | |||
|
93 | ||||
|
94 | # STORE METADATA | |||
|
95 | _metadata = { | |||
|
96 | "version": self.METADATA_VER, | |||
|
97 | "store_type": self.BACKEND_TYPE, | |||
|
98 | ||||
|
99 | "filename": filename, | |||
|
100 | "filename_uid_path": full_path, | |||
|
101 | "filename_uid": uid_filename, | |||
|
102 | "sha256": "", # NOTE: filled in by reader iteration | |||
|
103 | ||||
|
104 | "store_time": time.time(), | |||
|
105 | ||||
|
106 | "size": 0 | |||
|
107 | } | |||
|
108 | ||||
|
109 | if metadata: | |||
|
110 | if kwargs.pop('import_mode', False): | |||
|
111 | # in import mode, we don't need to compute metadata, we just take the old version | |||
|
112 | _metadata["import_mode"] = True | |||
|
113 | else: | |||
|
114 | _metadata.update(metadata) | |||
|
115 | ||||
|
116 | read_iterator = iter(functools.partial(value_reader.read, 2**22), b'') | |||
|
117 | size, sha256 = self._write_file(full_path, read_iterator, max_filesize) | |||
|
118 | _metadata['size'] = size | |||
|
119 | _metadata['sha256'] = sha256 | |||
|
120 | ||||
|
121 | # after storing the artifacts, we write the metadata present | |||
|
122 | _metadata_file, metadata_file_path = self.get_metadata_filename(uid_key) | |||
|
123 | ||||
|
124 | with self.fs.open(metadata_file_path, 'wb') as f: | |||
|
125 | f.write(json.dumps(_metadata)) | |||
|
126 | ||||
|
127 | return uid_filename, _metadata | |||
|
128 | ||||
|
129 | def store_path(self, uid_filename): | |||
|
130 | """ | |||
|
131 | Returns absolute file path of the uid_filename | |||
|
132 | """ | |||
|
133 | return os.path.join(self._directory, self._directory_folder, uid_filename) | |||
|
134 | ||||
|
135 | def _fetch(self, key, presigned_url_expires: int = 0): | |||
|
136 | if key not in self: | |||
|
137 | log.exception(f'requested key={key} not found in {self}') | |||
|
138 | raise KeyError(key) | |||
|
139 | ||||
|
140 | metadata = self.get_metadata(key) | |||
|
141 | ||||
|
142 | file_path = metadata['filename_uid_path'] | |||
|
143 | if presigned_url_expires and presigned_url_expires > 0: | |||
|
144 | metadata['url'] = self.fs.url(file_path, expires=presigned_url_expires) | |||
|
145 | ||||
|
146 | return ShardFileReader(self.fs.open(file_path, 'rb')), metadata | |||
|
147 | ||||
|
148 | def delete(self, key): | |||
|
149 | return self._delete(key) | |||
|
150 | ||||
|
151 | ||||
|
152 | class FileSystemBackend(BaseFileStoreBackend): | |||
|
153 | shard_name: str = 'shard_{:03d}' | |||
|
154 | _shard_cls = FileSystemShard | |||
|
155 | ||||
|
156 | def __init__(self, settings): | |||
|
157 | super().__init__(settings) | |||
|
158 | ||||
|
159 | store_dir = self.get_conf(config_keys.filesystem_storage_path) | |||
|
160 | directory = os.path.expanduser(store_dir) | |||
|
161 | ||||
|
162 | self._directory = directory | |||
|
163 | self._storage_path = directory # common path for all from BaseCache | |||
|
164 | self._shard_count = int(self.get_conf(config_keys.filesystem_shards, pop=True)) | |||
|
165 | if self._shard_count < 1: | |||
|
166 | raise ValueError(f'{config_keys.filesystem_shards} must be 1 or more') | |||
|
167 | ||||
|
168 | log.debug('Initializing %s file_store instance', self) | |||
|
169 | fs = fsspec.filesystem('file') | |||
|
170 | ||||
|
171 | if not fs.exists(self._directory): | |||
|
172 | fs.makedirs(self._directory, exist_ok=True) | |||
|
173 | ||||
|
174 | self._shards = tuple( | |||
|
175 | self._shard_cls( | |||
|
176 | index=num, | |||
|
177 | directory=directory, | |||
|
178 | directory_folder=self.shard_name.format(num), | |||
|
179 | fs=fs, | |||
|
180 | **settings, | |||
|
181 | ) | |||
|
182 | for num in range(self._shard_count) | |||
|
183 | ) |
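``_write_file`` above consumes the upload through a fixed-size chunk iterator. A hedged standalone illustration of that idiom (4 MiB chunks, as in the code)::

    import functools
    import io

    value_reader = io.BytesIO(b'x' * 10_000_000)
    # iter(callable, sentinel) calls read(2**22) until it returns b''
    read_iterator = iter(functools.partial(value_reader.read, 2**22), b'')

    total = 0
    for chunk in read_iterator:
        total += len(chunk)  # size/hash accounting happens per chunk
    assert total == 10_000_000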
@@ -0,0 +1,278 b'' | |||||
|
1 | # Copyright (C) 2016-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | import errno | |||
|
19 | import os | |||
|
20 | import hashlib | |||
|
21 | import functools | |||
|
22 | import time | |||
|
23 | import logging | |||
|
24 | ||||
|
25 | from .. import config_keys | |||
|
26 | from ..exceptions import FileOverSizeException | |||
|
27 | from ..backends.base import BaseFileStoreBackend, fsspec, BaseShard, ShardFileReader | |||
|
28 | ||||
|
29 | from ....lib.ext_json import json | |||
|
30 | ||||
|
31 | log = logging.getLogger(__name__) | |||
|
32 | ||||
|
33 | ||||
|
34 | class LegacyFileSystemShard(BaseShard): | |||
|
35 | # legacy ver | |||
|
36 | METADATA_VER = 'v2' | |||
|
37 | BACKEND_TYPE = config_keys.backend_legacy_filesystem | |||
|
38 | storage_type: str = 'dir_struct' | |||
|
39 | ||||
|
40 | # legacy suffix | |||
|
41 | metadata_suffix: str = '.meta' | |||
|
42 | ||||
|
43 | @classmethod | |||
|
44 | def _sub_store_from_filename(cls, filename): | |||
|
45 | return filename[:2] | |||
|
46 | ||||
|
47 | @classmethod | |||
|
48 | def apply_counter(cls, counter, filename): | |||
|
49 | name_counted = '%d-%s' % (counter, filename) | |||
|
50 | return name_counted | |||
|
51 | ||||
|
52 | @classmethod | |||
|
53 | def safe_make_dirs(cls, dir_path): | |||
|
54 | if not os.path.exists(dir_path): | |||
|
55 | try: | |||
|
56 | os.makedirs(dir_path) | |||
|
57 | except OSError as e: | |||
|
58 | if e.errno != errno.EEXIST: | |||
|
59 | raise | |||
|
60 | return | |||
|
61 | ||||
|
62 | @classmethod | |||
|
63 | def resolve_name(cls, name, directory): | |||
|
64 | """ | |||
|
65 | Resolves a unique name and the correct path. If a filename | |||
|
66 | for that path already exists then a numeric prefix with values > 0 will be | |||
|
67 | added, for example test.jpg -> 1-test.jpg etc. Initially the file gets a 0 prefix. | |||
|
68 | ||||
|
69 | :param name: base name of file | |||
|
70 | :param directory: absolute directory path | |||
|
71 | """ | |||
|
72 | ||||
|
73 | counter = 0 | |||
|
74 | while True: | |||
|
75 | name_counted = cls.apply_counter(counter, name) | |||
|
76 | ||||
|
77 | # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file | |||
|
78 | sub_store: str = cls._sub_store_from_filename(name_counted) | |||
|
79 | sub_store_path: str = os.path.join(directory, sub_store) | |||
|
80 | cls.safe_make_dirs(sub_store_path) | |||
|
81 | ||||
|
82 | path = os.path.join(sub_store_path, name_counted) | |||
|
83 | if not os.path.exists(path): | |||
|
84 | return name_counted, path | |||
|
85 | counter += 1 | |||
|
86 | ||||
|
87 | def __init__(self, index, directory, directory_folder, fs, **settings): | |||
|
88 | self._index: int = index | |||
|
89 | self._directory: str = directory | |||
|
90 | self._directory_folder: str = directory_folder | |||
|
91 | self.fs = fs | |||
|
92 | ||||
|
93 | @property | |||
|
94 | def dir_struct(self) -> str: | |||
|
95 | """Cache directory final path.""" | |||
|
96 | return os.path.join(self._directory, '0-') | |||
|
97 | ||||
|
98 | def _write_file(self, full_path, iterator, max_filesize, mode='wb'): | |||
|
99 | ||||
|
100 | # ensure dir exists | |||
|
101 | destination, _ = os.path.split(full_path) | |||
|
102 | if not self.fs.exists(destination): | |||
|
103 | self.fs.makedirs(destination) | |||
|
104 | ||||
|
105 | writer = self.fs.open(full_path, mode) | |||
|
106 | ||||
|
107 | digest = hashlib.sha256() | |||
|
108 | oversize_cleanup = False | |||
|
109 | with writer: | |||
|
110 | size = 0 | |||
|
111 | for chunk in iterator: | |||
|
112 | size += len(chunk) | |||
|
113 | digest.update(chunk) | |||
|
114 | writer.write(chunk) | |||
|
115 | ||||
|
116 | if max_filesize and size > max_filesize: | |||
|
117 | # free up the copied file, and raise exc | |||
|
118 | oversize_cleanup = True | |||
|
119 | break | |||
|
120 | ||||
|
121 | writer.flush() | |||
|
122 | # Get the file descriptor | |||
|
123 | fd = writer.fileno() | |||
|
124 | ||||
|
125 | # Sync the file descriptor to disk, helps with NFS cases... | |||
|
126 | os.fsync(fd) | |||
|
127 | ||||
|
128 | if oversize_cleanup: | |||
|
129 | self.fs.rm(full_path) | |||
|
130 | raise FileOverSizeException(f'given file is over size limit ({max_filesize}): {full_path}') | |||
|
131 | ||||
|
132 | sha256 = digest.hexdigest() | |||
|
133 | log.debug('written new artifact under %s, sha256: %s', full_path, sha256) | |||
|
134 | return size, sha256 | |||
|
135 | ||||
|
136 | def _store(self, key: str, uid_key, value_reader, max_filesize: int | None = None, metadata: dict | None = None, **kwargs): | |||
|
137 | ||||
|
138 | filename = key | |||
|
139 | uid_filename = uid_key | |||
|
140 | ||||
|
141 | # NOTE: also apply the N- counter... | |||
|
142 | uid_filename, full_path = self.resolve_name(uid_filename, self._directory) | |||
|
143 | ||||
|
144 | # STORE METADATA | |||
|
145 | # TODO: make it compatible, and backward proof | |||
|
146 | _metadata = { | |||
|
147 | "version": self.METADATA_VER, | |||
|
148 | ||||
|
149 | "filename": filename, | |||
|
150 | "filename_uid_path": full_path, | |||
|
151 | "filename_uid": uid_filename, | |||
|
152 | "sha256": "", # NOTE: filled in by reader iteration | |||
|
153 | ||||
|
154 | "store_time": time.time(), | |||
|
155 | ||||
|
156 | "size": 0 | |||
|
157 | } | |||
|
158 | if metadata: | |||
|
159 | _metadata.update(metadata) | |||
|
160 | ||||
|
161 | read_iterator = iter(functools.partial(value_reader.read, 2**22), b'') | |||
|
162 | size, sha256 = self._write_file(full_path, read_iterator, max_filesize) | |||
|
163 | _metadata['size'] = size | |||
|
164 | _metadata['sha256'] = sha256 | |||
|
165 | ||||
|
166 | # after storing the artifacts, we write the metadata present | |||
|
167 | _metadata_file, metadata_file_path = self.get_metadata_filename(uid_filename) | |||
|
168 | ||||
|
169 | with self.fs.open(metadata_file_path, 'wb') as f: | |||
|
170 | f.write(json.dumps(_metadata)) | |||
|
171 | ||||
|
172 | return uid_filename, _metadata | |||
|
173 | ||||
|
174 | def store_path(self, uid_filename): | |||
|
175 | """ | |||
|
176 | Returns absolute file path of the uid_filename | |||
|
177 | """ | |||
|
178 | prefix_dir = '' | |||
|
179 | if '/' in uid_filename: | |||
|
180 | prefix_dir, filename = uid_filename.split('/') | |||
|
181 | sub_store = self._sub_store_from_filename(filename) | |||
|
182 | else: | |||
|
183 | sub_store = self._sub_store_from_filename(uid_filename) | |||
|
184 | ||||
|
185 | return os.path.join(self._directory, prefix_dir, sub_store, uid_filename) | |||
|
186 | ||||
|
187 | def metadata_convert(self, uid_filename, metadata): | |||
|
188 | # NOTE: backward compat mode here... this is for files created on a PRE 5.2 system | |||
|
189 | if 'meta_ver' in metadata: | |||
|
190 | full_path = self.store_path(uid_filename) | |||
|
191 | metadata = { | |||
|
192 | "_converted": True, | |||
|
193 | "_org": metadata, | |||
|
194 | "version": self.METADATA_VER, | |||
|
195 | "store_type": self.BACKEND_TYPE, | |||
|
196 | ||||
|
197 | "filename": metadata['filename'], | |||
|
198 | "filename_uid_path": full_path, | |||
|
199 | "filename_uid": uid_filename, | |||
|
200 | "sha256": metadata['sha256'], | |||
|
201 | ||||
|
202 | "store_time": metadata['time'], | |||
|
203 | ||||
|
204 | "size": metadata['size'] | |||
|
205 | } | |||
|
206 | return metadata | |||
|
207 | ||||
|
208 | def _fetch(self, key, presigned_url_expires: int = 0): | |||
|
209 | if key not in self: | |||
|
210 | log.exception(f'requested key={key} not found in {self}') | |||
|
211 | raise KeyError(key) | |||
|
212 | ||||
|
213 | metadata = self.get_metadata(key) | |||
|
214 | ||||
|
215 | file_path = metadata['filename_uid_path'] | |||
|
216 | if presigned_url_expires and presigned_url_expires > 0: | |||
|
217 | metadata['url'] = self.fs.url(file_path, expires=presigned_url_expires) | |||
|
218 | ||||
|
219 | return ShardFileReader(self.fs.open(file_path, 'rb')), metadata | |||
|
220 | ||||
|
221 | def delete(self, key): | |||
|
222 | return self._delete(key) | |||
|
223 | ||||
|
224 | def _delete(self, key): | |||
|
225 | if key not in self: | |||
|
226 | log.exception(f'requested key={key} not found in {self}') | |||
|
227 | raise KeyError(key) | |||
|
228 | ||||
|
229 | metadata = self.get_metadata(key) | |||
|
230 | metadata_file, metadata_file_path = self.get_metadata_filename(key) | |||
|
231 | artifact_file_path = metadata['filename_uid_path'] | |||
|
232 | self.fs.rm(artifact_file_path) | |||
|
233 | self.fs.rm(metadata_file_path) | |||
|
234 | ||||
|
235 | def get_metadata_filename(self, uid_filename) -> tuple[str, str]: | |||
|
236 | ||||
|
237 | metadata_file: str = f'{uid_filename}{self.metadata_suffix}' | |||
|
238 | uid_path_in_store = self.store_path(uid_filename) | |||
|
239 | ||||
|
240 | metadata_file_path = f'{uid_path_in_store}{self.metadata_suffix}' | |||
|
241 | return metadata_file, metadata_file_path | |||
|
242 | ||||
|
243 | ||||
|
244 | class LegacyFileSystemBackend(BaseFileStoreBackend): | |||
|
245 | _shard_cls = LegacyFileSystemShard | |||
|
246 | ||||
|
247 | def __init__(self, settings): | |||
|
248 | super().__init__(settings) | |||
|
249 | ||||
|
250 | store_dir = self.get_conf(config_keys.legacy_filesystem_storage_path) | |||
|
251 | directory = os.path.expanduser(store_dir) | |||
|
252 | ||||
|
253 | self._directory = directory | |||
|
254 | self._storage_path = directory # common path for all from BaseCache | |||
|
255 | ||||
|
256 | log.debug('Initializing %s file_store instance', self) | |||
|
257 | fs = fsspec.filesystem('file') | |||
|
258 | ||||
|
259 | if not fs.exists(self._directory): | |||
|
260 | fs.makedirs(self._directory, exist_ok=True) | |||
|
261 | ||||
|
262 | # legacy system uses single shard | |||
|
263 | self._shards = tuple( | |||
|
264 | [ | |||
|
265 | self._shard_cls( | |||
|
266 | index=0, | |||
|
267 | directory=directory, | |||
|
268 | directory_folder='', | |||
|
269 | fs=fs, | |||
|
270 | **settings, | |||
|
271 | ) | |||
|
272 | ] | |||
|
273 | ) | |||
|
274 | ||||
|
275 | @classmethod | |||
|
276 | def get_shard_index(cls, filename: str, num_shards) -> int: | |||
|
277 | # legacy filesystem doesn't use shards, and always uses single shard | |||
|
278 | return 0 |
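For context, the legacy scheme combines the counter prefix with a two-character sub-store directory derived from the counted name. A hedged, runnable illustration of the resulting path shape (values are examples only)::

    def legacy_path(directory: str, counter: int, filename: str) -> str:
        name_counted = f'{counter}-{filename}'
        sub_store = name_counted[:2]  # two-character sub-store prefix
        return f'{directory}/{sub_store}/{name_counted}'

    # e.g. legacy_path('/store', 0, 'report.pdf') -> '/store/0-/0-report.pdf'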
@@ -0,0 +1,184 b'' | |||||
|
1 | # Copyright (C) 2016-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | ||||
|
19 | import os | |||
|
20 | import hashlib | |||
|
21 | import functools | |||
|
22 | import time | |||
|
23 | import logging | |||
|
24 | ||||
|
25 | from .. import config_keys | |||
|
26 | from ..exceptions import FileOverSizeException | |||
|
27 | from ..backends.base import BaseFileStoreBackend, fsspec, BaseShard, ShardFileReader | |||
|
28 | ||||
|
29 | from ....lib.ext_json import json | |||
|
30 | ||||
|
31 | log = logging.getLogger(__name__) | |||
|
32 | ||||
|
33 | ||||
|
34 | class S3Shard(BaseShard): | |||
|
35 | METADATA_VER = 'v2' | |||
|
36 | BACKEND_TYPE = config_keys.backend_objectstore | |||
|
37 | storage_type: str = 'bucket' | |||
|
38 | ||||
|
39 | def __init__(self, index, bucket, bucket_folder, fs, **settings): | |||
|
40 | self._index: int = index | |||
|
41 | self._bucket_main: str = bucket | |||
|
42 | self._bucket_folder: str = bucket_folder | |||
|
43 | ||||
|
44 | self.fs = fs | |||
|
45 | ||||
|
46 | @property | |||
|
47 | def bucket(self) -> str: | |||
|
48 | """Cache bucket final path.""" | |||
|
49 | return os.path.join(self._bucket_main, self._bucket_folder) | |||
|
50 | ||||
|
51 | def _write_file(self, full_path, iterator, max_filesize, mode='wb'): | |||
|
52 | ||||
|
53 | # ensure dir exists | |||
|
54 | destination, _ = os.path.split(full_path) | |||
|
55 | if not self.fs.exists(destination): | |||
|
56 | self.fs.makedirs(destination) | |||
|
57 | ||||
|
58 | writer = self.fs.open(full_path, mode) | |||
|
59 | ||||
|
60 | digest = hashlib.sha256() | |||
|
61 | oversize_cleanup = False | |||
|
62 | with writer: | |||
|
63 | size = 0 | |||
|
64 | for chunk in iterator: | |||
|
65 | size += len(chunk) | |||
|
66 | digest.update(chunk) | |||
|
67 | writer.write(chunk) | |||
|
68 | ||||
|
69 | if max_filesize and size > max_filesize: | |||
|
70 | oversize_cleanup = True | |||
|
71 | # free up the copied file, and raise exc | |||
|
72 | break | |||
|
73 | ||||
|
74 | if oversize_cleanup: | |||
|
75 | self.fs.rm(full_path) | |||
|
76 | raise FileOverSizeException(f'given file is over size limit ({max_filesize}): {full_path}') | |||
|
77 | ||||
|
78 | sha256 = digest.hexdigest() | |||
|
79 | log.debug('written new artifact under %s, sha256: %s', full_path, sha256) | |||
|
80 | return size, sha256 | |||
|
81 | ||||
|
82 | def _store(self, key: str, uid_key, value_reader, max_filesize: int | None = None, metadata: dict | None = None, **kwargs): | |||
|
83 | ||||
|
84 | filename = key | |||
|
85 | uid_filename = uid_key | |||
|
86 | full_path = self.store_path(uid_filename) | |||
|
87 | ||||
|
88 | # STORE METADATA | |||
|
89 | _metadata = { | |||
|
90 | "version": self.METADATA_VER, | |||
|
91 | "store_type": self.BACKEND_TYPE, | |||
|
92 | ||||
|
93 | "filename": filename, | |||
|
94 | "filename_uid_path": full_path, | |||
|
95 | "filename_uid": uid_filename, | |||
|
96 | "sha256": "", # NOTE: filled in by reader iteration | |||
|
97 | ||||
|
98 | "store_time": time.time(), | |||
|
99 | ||||
|
100 | "size": 0 | |||
|
101 | } | |||
|
102 | ||||
|
103 | if metadata: | |||
|
104 | if kwargs.pop('import_mode', False): | |||
|
105 | # in import mode, we don't need to compute metadata, we just take the old version | |||
|
106 | _metadata["import_mode"] = True | |||
|
107 | else: | |||
|
108 | _metadata.update(metadata) | |||
|
109 | ||||
|
110 | read_iterator = iter(functools.partial(value_reader.read, 2**22), b'') | |||
|
111 | size, sha256 = self._write_file(full_path, read_iterator, max_filesize) | |||
|
112 | _metadata['size'] = size | |||
|
113 | _metadata['sha256'] = sha256 | |||
|
114 | ||||
|
115 | # after storing the artifacts, we write the metadata present | |||
|
116 | metadata_file, metadata_file_path = self.get_metadata_filename(uid_key) | |||
|
117 | ||||
|
118 | with self.fs.open(metadata_file_path, 'wb') as f: | |||
|
119 | f.write(json.dumps(_metadata)) | |||
|
120 | ||||
|
121 | return uid_filename, _metadata | |||
|
122 | ||||
|
123 | def store_path(self, uid_filename): | |||
|
124 | """ | |||
|
125 | Returns absolute file path of the uid_filename | |||
|
126 | """ | |||
|
127 | return os.path.join(self._bucket_main, self._bucket_folder, uid_filename) | |||
|
128 | ||||
|
129 | def _fetch(self, key, presigned_url_expires: int = 0): | |||
|
130 | if key not in self: | |||
|
131 | log.exception(f'requested key={key} not found in {self}') | |||
|
132 | raise KeyError(key) | |||
|
133 | ||||
|
134 | metadata_file, metadata_file_path = self.get_metadata_filename(key) | |||
|
135 | with self.fs.open(metadata_file_path, 'rb') as f: | |||
|
136 | metadata = json.loads(f.read()) | |||
|
137 | ||||
|
138 | file_path = metadata['filename_uid_path'] | |||
|
139 | if presigned_url_expires and presigned_url_expires > 0: | |||
|
140 | metadata['url'] = self.fs.url(file_path, expires=presigned_url_expires) | |||
|
141 | ||||
|
142 | return ShardFileReader(self.fs.open(file_path, 'rb')), metadata | |||
|
143 | ||||
|
144 | def delete(self, key): | |||
|
145 | return self._delete(key) | |||
|
146 | ||||
|
147 | ||||
|
148 | class ObjectStoreBackend(BaseFileStoreBackend): | |||
|
149 | shard_name: str = 'shard-{:03d}' | |||
|
150 | _shard_cls = S3Shard | |||
|
151 | ||||
|
152 | def __init__(self, settings): | |||
|
153 | super().__init__(settings) | |||
|
154 | ||||
|
155 | self._shard_count = int(self.get_conf(config_keys.objectstore_bucket_shards, pop=True)) | |||
|
156 | if self._shard_count < 1: | |||
|
157 | raise ValueError(f'{config_keys.objectstore_bucket_shards} must be 1 or more') | |||
|
158 | ||||
|
159 | self._bucket = settings.pop(config_keys.objectstore_bucket) | |||
|
160 | if not self._bucket: | |||
|
161 | raise ValueError(f'{config_keys.objectstore_bucket} needs to have a value') | |||
|
162 | ||||
|
163 | objectstore_url = self.get_conf(config_keys.objectstore_url) | |||
|
164 | key = settings.pop(config_keys.objectstore_key) | |||
|
165 | secret = settings.pop(config_keys.objectstore_secret) | |||
|
166 | ||||
|
167 | self._storage_path = objectstore_url # common path for all from BaseCache | |||
|
168 | log.debug('Initializing %s file_store instance', self) | |||
|
169 | fs = fsspec.filesystem('s3', anon=False, endpoint_url=objectstore_url, key=key, secret=secret) | |||
|
170 | ||||
|
171 | # init main bucket | |||
|
172 | if not fs.exists(self._bucket): | |||
|
173 | fs.mkdir(self._bucket) | |||
|
174 | ||||
|
175 | self._shards = tuple( | |||
|
176 | self._shard_cls( | |||
|
177 | index=num, | |||
|
178 | bucket=self._bucket, | |||
|
179 | bucket_folder=self.shard_name.format(num), | |||
|
180 | fs=fs, | |||
|
181 | **settings, | |||
|
182 | ) | |||
|
183 | for num in range(self._shard_count) | |||
|
184 | ) |
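A hedged end-to-end sketch of exercising a backend instance (it mirrors the test suite below; the ``config`` dict contents are deployment-specific assumptions)::

    from rhodecode.apps.file_store import utils as store_utils
    from rhodecode.apps.file_store import config_keys

    config = {...}  # ini-derived settings dict; exact keys are deployment-specific
    config[config_keys.backend_type] = config_keys.backend_objectstore
    f_store = store_utils.get_filestore_backend(config=config, always_init=True)

    with open('artifact.bin', 'rb') as value_reader:
        store_fid, metadata = f_store.store('artifact.bin', value_reader)

    reader, metadata = f_store.fetch(store_fid)
    data = reader.read()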
@@ -0,0 +1,128 b'' | |||||
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | import pytest | |||
|
19 | ||||
|
20 | from rhodecode.apps import file_store | |||
|
21 | from rhodecode.apps.file_store import config_keys | |||
|
22 | from rhodecode.apps.file_store.backends.filesystem_legacy import LegacyFileSystemBackend | |||
|
23 | from rhodecode.apps.file_store.backends.filesystem import FileSystemBackend | |||
|
24 | from rhodecode.apps.file_store.backends.objectstore import ObjectStoreBackend | |||
|
25 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException | |||
|
26 | ||||
|
27 | from rhodecode.apps.file_store import utils as store_utils | |||
|
28 | from rhodecode.apps.file_store.tests import random_binary_file, file_store_instance | |||
|
29 | ||||
|
30 | ||||
|
31 | class TestFileStoreBackends: | |||
|
32 | ||||
|
33 | @pytest.mark.parametrize('backend_type, expected_instance', [ | |||
|
34 | (config_keys.backend_legacy_filesystem, LegacyFileSystemBackend), | |||
|
35 | (config_keys.backend_filesystem, FileSystemBackend), | |||
|
36 | (config_keys.backend_objectstore, ObjectStoreBackend), | |||
|
37 | ]) | |||
|
38 | def test_get_backend(self, backend_type, expected_instance, ini_settings): | |||
|
39 | config = ini_settings | |||
|
40 | config[config_keys.backend_type] = backend_type | |||
|
41 | f_store = store_utils.get_filestore_backend(config=config, always_init=True) | |||
|
42 | assert isinstance(f_store, expected_instance) | |||
|
43 | ||||
|
44 | @pytest.mark.parametrize('backend_type, expected_instance', [ | |||
|
45 | (config_keys.backend_legacy_filesystem, LegacyFileSystemBackend), | |||
|
46 | (config_keys.backend_filesystem, FileSystemBackend), | |||
|
47 | (config_keys.backend_objectstore, ObjectStoreBackend), | |||
|
48 | ]) | |||
|
49 | def test_store_and_read(self, backend_type, expected_instance, ini_settings, random_binary_file): | |||
|
50 | filename, temp_file = random_binary_file | |||
|
51 | config = ini_settings | |||
|
52 | config[config_keys.backend_type] = backend_type | |||
|
53 | f_store = store_utils.get_filestore_backend(config=config, always_init=True) | |||
|
54 | metadata = { | |||
|
55 | 'user_uploaded': { | |||
|
56 | 'username': 'user1', | |||
|
57 | 'user_id': 10, | |||
|
58 | 'ip': '10.20.30.40' | |||
|
59 | } | |||
|
60 | } | |||
|
61 | store_fid, metadata = f_store.store(filename, temp_file, extra_metadata=metadata) | |||
|
62 | assert store_fid | |||
|
63 | assert metadata | |||
|
64 | ||||
|
65 | # read-after-write check | |||
|
66 | reader, metadata2 = f_store.fetch(store_fid) | |||
|
67 | assert reader | |||
|
68 | assert metadata2['filename'] == filename | |||
|
69 | ||||
|
70 | @pytest.mark.parametrize('backend_type, expected_instance', [ | |||
|
71 | (config_keys.backend_legacy_filesystem, LegacyFileSystemBackend), | |||
|
72 | (config_keys.backend_filesystem, FileSystemBackend), | |||
|
73 | (config_keys.backend_objectstore, ObjectStoreBackend), | |||
|
74 | ]) | |||
|
75 | def test_store_file_not_allowed(self, backend_type, expected_instance, ini_settings, random_binary_file): | |||
|
76 | filename, temp_file = random_binary_file | |||
|
77 | config = ini_settings | |||
|
78 | config[config_keys.backend_type] = backend_type | |||
|
79 | f_store = store_utils.get_filestore_backend(config=config, always_init=True) | |||
|
80 | with pytest.raises(FileNotAllowedException): | |||
|
81 | f_store.store('notallowed.exe', temp_file, extensions=['.txt']) | |||
|
82 | ||||
|
83 | @pytest.mark.parametrize('backend_type, expected_instance', [ | |||
|
84 | (config_keys.backend_legacy_filesystem, LegacyFileSystemBackend), | |||
|
85 | (config_keys.backend_filesystem, FileSystemBackend), | |||
|
86 | (config_keys.backend_objectstore, ObjectStoreBackend), | |||
|
87 | ]) | |||
|
88 | def test_store_file_over_size(self, backend_type, expected_instance, ini_settings, random_binary_file): | |||
|
89 | filename, temp_file = random_binary_file | |||
|
90 | config = ini_settings | |||
|
91 | config[config_keys.backend_type] = backend_type | |||
|
92 | f_store = store_utils.get_filestore_backend(config=config, always_init=True) | |||
|
93 | with pytest.raises(FileOverSizeException): | |||
|
94 | f_store.store('toobig.exe', temp_file, extensions=['.exe'], max_filesize=124) | |||
|
95 | ||||
|
96 | @pytest.mark.parametrize('backend_type, expected_instance, extra_conf', [ | |||
|
97 | (config_keys.backend_legacy_filesystem, LegacyFileSystemBackend, {}), | |||
|
98 | (config_keys.backend_filesystem, FileSystemBackend, {config_keys.filesystem_storage_path: '/tmp/test-fs-store'}), | |||
|
99 | (config_keys.backend_objectstore, ObjectStoreBackend, {config_keys.objectstore_bucket: 'test-bucket'}), | |||
|
100 | ]) | |||
|
101 | def test_store_stats_and_keys(self, backend_type, expected_instance, extra_conf, ini_settings, random_binary_file): | |||
|
102 | config = ini_settings | |||
|
103 | config[config_keys.backend_type] = backend_type | |||
|
104 | config.update(extra_conf) | |||
|
105 | ||||
|
106 | f_store = store_utils.get_filestore_backend(config=config, always_init=True) | |||
|
107 | ||||
|
108 | # purge storage before running | |||
|
109 | for shard, k in f_store.iter_artifacts(): | |||
|
110 | f_store.delete(k) | |||
|
111 | ||||
|
112 | for i in range(10): | |||
|
113 | filename, temp_file = random_binary_file | |||
|
114 | ||||
|
115 | metadata = { | |||
|
116 | 'user_uploaded': { | |||
|
117 | 'username': 'user1', | |||
|
118 | 'user_id': 10, | |||
|
119 | 'ip': '10.20.30.40' | |||
|
120 | } | |||
|
121 | } | |||
|
122 | store_fid, metadata = f_store.store(filename, temp_file, extra_metadata=metadata) | |||
|
123 | assert store_fid | |||
|
124 | assert metadata | |||
|
125 | ||||
|
126 | cnt, size, meta = f_store.get_statistics() | |||
|
127 | assert cnt == 10 | |||
|
128 | assert 10 == len(list(f_store.iter_keys())) |
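The suite above doubles as a usage reference for the file-store API. A minimal store/fetch round-trip, sketched under the assumption that a plain settings dict is enough (in the tests the `ini_settings` fixture supplies the remaining defaults, e.g. storage paths)::

    from rhodecode.apps.file_store import utils as store_utils
    from rhodecode.apps.file_store import config_keys

    # select a backend; further keys may be required depending on the backend
    config = {config_keys.backend_type: config_keys.backend_filesystem}
    f_store = store_utils.get_filestore_backend(config=config, always_init=True)

    # store() returns a unique file id plus the recorded metadata
    with open('report.txt', 'rb') as fh:
        store_fid, metadata = f_store.store(
            'report.txt', fh,
            extra_metadata={'user_uploaded': {'username': 'user1', 'user_id': 10}})

    # fetch() returns a reader and the metadata saved at store time
    reader, metadata2 = f_store.fetch(store_fid)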
@@ -0,0 +1,52 b'' | |||||
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | import pytest | |||
|
19 | ||||
|
20 | from rhodecode.apps.file_store import utils as store_utils | |||
|
21 | from rhodecode.apps.file_store import config_keys | |||
|
22 | from rhodecode.apps.file_store.tests import generate_random_filename | |||
|
23 | ||||
|
24 | ||||
|
25 | @pytest.fixture() | |||
|
26 | def file_store_filesystem_instance(ini_settings): | |||
|
27 | config = ini_settings | |||
|
28 | config[config_keys.backend_type] = config_keys.backend_filesystem | |||
|
29 | f_store = store_utils.get_filestore_backend(config=config, always_init=True) | |||
|
30 | return f_store | |||
|
31 | ||||
|
32 | ||||
|
33 | class TestFileStoreFileSystemBackend: | |||
|
34 | ||||
|
35 | @pytest.mark.parametrize('filename', [generate_random_filename() for _ in range(10)]) | |||
|
36 | def test_get_shard_number(self, filename, file_store_filesystem_instance): | |||
|
37 | shard_number = file_store_filesystem_instance.get_shard_index(filename, len(file_store_filesystem_instance._shards)) | |||
|
38 | # Check that the shard number is in the valid range [0, number of shards) | |||
|
39 | assert 0 <= shard_number < len(file_store_filesystem_instance._shards) | |||
|
40 | ||||
|
41 | @pytest.mark.parametrize('filename, expected_shard_num', [ | |||
|
42 | ('my-name-1', 3), | |||
|
43 | ('my-name-2', 2), | |||
|
44 | ('my-name-3', 4), | |||
|
45 | ('my-name-4', 1), | |||
|
46 | ||||
|
47 | ('rhodecode-enterprise-ce', 5), | |||
|
48 | ('rhodecode-enterprise-ee', 6), | |||
|
49 | ]) | |||
|
50 | def test_get_shard_number_consistency(self, filename, expected_shard_num, file_store_filesystem_instance): | |||
|
51 | shard_number = file_store_filesystem_instance.get_shard_index(filename, len(file_store_filesystem_instance._shards)) | |||
|
52 | assert expected_shard_num == shard_number |
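The consistency cases above pin exact shard assignments, which implies `get_shard_index` is a pure function of the filename. A hedged sketch of one scheme with that property (stable digest modulo shard count); the real implementation may differ::

    import hashlib

    def get_shard_index(filename: str, num_shards: int) -> int:
        # a stable digest keeps the mapping deterministic across processes and runs
        digest = hashlib.sha256(filename.encode('utf-8')).hexdigest()
        return int(digest, 16) % num_shards

    # the same name always lands on the same shard
    assert get_shard_index('my-name-1', 8) == get_shard_index('my-name-1', 8)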
@@ -0,0 +1,17 b'' | |||||
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ No newline at end of file |
@@ -0,0 +1,52 b'' | |||||
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | import pytest | |||
|
19 | ||||
|
20 | from rhodecode.apps.file_store import utils as store_utils | |||
|
21 | from rhodecode.apps.file_store import config_keys | |||
|
22 | from rhodecode.apps.file_store.tests import generate_random_filename | |||
|
23 | ||||
|
24 | ||||
|
25 | @pytest.fixture() | |||
|
26 | def file_store_legacy_instance(ini_settings): | |||
|
27 | config = ini_settings | |||
|
28 | config[config_keys.backend_type] = config_keys.backend_legacy_filesystem | |||
|
29 | f_store = store_utils.get_filestore_backend(config=config, always_init=True) | |||
|
30 | return f_store | |||
|
31 | ||||
|
32 | ||||
|
33 | class TestFileStoreLegacyBackend: | |||
|
34 | ||||
|
35 | @pytest.mark.parametrize('filename', [generate_random_filename() for _ in range(10)]) | |||
|
36 | def test_get_shard_number(self, filename, file_store_legacy_instance): | |||
|
37 | shard_number = file_store_legacy_instance.get_shard_index(filename, len(file_store_legacy_instance._shards)) | |||
|
38 | # Check that the shard number is always 0; the legacy filesystem store does not use shards | |||
|
39 | assert shard_number == 0 | |||
|
40 | ||||
|
41 | @pytest.mark.parametrize('filename, expected_shard_num', [ | |||
|
42 | ('my-name-1', 0), | |||
|
43 | ('my-name-2', 0), | |||
|
44 | ('my-name-3', 0), | |||
|
45 | ('my-name-4', 0), | |||
|
46 | ||||
|
47 | ('rhodecode-enterprise-ce', 0), | |||
|
48 | ('rhodecode-enterprise-ee', 0), | |||
|
49 | ]) | |||
|
50 | def test_get_shard_number_consistency(self, filename, expected_shard_num, file_store_legacy_instance): | |||
|
51 | shard_number = file_store_legacy_instance.get_shard_index(filename, len(file_store_legacy_instance._shards)) | |||
|
52 | assert expected_shard_num == shard_number |
@@ -0,0 +1,52 b'' | |||||
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | import pytest | |||
|
19 | ||||
|
20 | from rhodecode.apps.file_store import utils as store_utils | |||
|
21 | from rhodecode.apps.file_store import config_keys | |||
|
22 | from rhodecode.apps.file_store.tests import generate_random_filename | |||
|
23 | ||||
|
24 | ||||
|
25 | @pytest.fixture() | |||
|
26 | def file_store_objectstore_instance(ini_settings): | |||
|
27 | config = ini_settings | |||
|
28 | config[config_keys.backend_type] = config_keys.backend_objectstore | |||
|
29 | f_store = store_utils.get_filestore_backend(config=config, always_init=True) | |||
|
30 | return f_store | |||
|
31 | ||||
|
32 | ||||
|
33 | class TestFileStoreObjectStoreBackend: | |||
|
34 | ||||
|
35 | @pytest.mark.parametrize('filename', [generate_random_filename() for _ in range(10)]) | |||
|
36 | def test_get_shard_number(self, filename, file_store_objectstore_instance): | |||
|
37 | shard_number = file_store_objectstore_instance.get_shard_index(filename, len(file_store_objectstore_instance._shards)) | |||
|
38 | # Check that the shard number is in the valid range [0, number of shards) | |||
|
39 | assert 0 <= shard_number < len(file_store_objectstore_instance._shards) | |||
|
40 | ||||
|
41 | @pytest.mark.parametrize('filename, expected_shard_num', [ | |||
|
42 | ('my-name-1', 3), | |||
|
43 | ('my-name-2', 2), | |||
|
44 | ('my-name-3', 4), | |||
|
45 | ('my-name-4', 1), | |||
|
46 | ||||
|
47 | ('rhodecode-enterprise-ce', 5), | |||
|
48 | ('rhodecode-enterprise-ee', 6), | |||
|
49 | ]) | |||
|
50 | def test_get_shard_number_consistency(self, filename, expected_shard_num, file_store_objectstore_instance): | |||
|
51 | shard_number = file_store_objectstore_instance.get_shard_index(filename, len(file_store_objectstore_instance._shards)) | |||
|
52 | assert expected_shard_num == shard_number |
@@ -0,0 +1,122 b'' | |||||
|
1 | # Copyright (C) 2016-2023 RhodeCode GmbH | |||
|
2 | # | |||
|
3 | # This program is free software: you can redistribute it and/or modify | |||
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
5 | # (only), as published by the Free Software Foundation. | |||
|
6 | # | |||
|
7 | # This program is distributed in the hope that it will be useful, | |||
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
10 | # GNU General Public License for more details. | |||
|
11 | # | |||
|
12 | # You should have received a copy of the GNU Affero General Public License | |||
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
14 | # | |||
|
15 | # This program is dual-licensed. If you wish to learn more about the | |||
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
18 | ||||
|
19 | import sys | |||
|
20 | import logging | |||
|
21 | ||||
|
22 | import click | |||
|
23 | ||||
|
24 | from rhodecode.lib.pyramid_utils import bootstrap | |||
|
25 | from rhodecode.lib.ext_json import json | |||
|
26 | from rhodecode.model.db import FileStore | |||
|
27 | from rhodecode.apps.file_store import utils as store_utils | |||
|
28 | ||||
|
29 | log = logging.getLogger(__name__) | |||
|
30 | ||||
|
31 | ||||
|
32 | @click.command() | |||
|
33 | @click.argument('ini_path', type=click.Path(exists=True)) | |||
|
34 | @click.argument('file_uid') | |||
|
35 | @click.option( | |||
|
36 | '--source-backend-conf', | |||
|
37 | type=click.Path(exists=True, dir_okay=False, readable=True), | |||
|
38 | help='Source backend config file path in a json format' | |||
|
39 | ) | |||
|
40 | @click.option( | |||
|
41 | '--dest-backend-conf', | |||
|
42 | type=click.Path(exists=True, dir_okay=False, readable=True), | |||
|
43 | help='Destination backend config file path in a json format' | |||
|
44 | ) | |||
|
45 | def main(ini_path, file_uid, source_backend_conf, dest_backend_conf): | |||
|
46 | return command(ini_path, file_uid, source_backend_conf, dest_backend_conf) | |||
|
47 | ||||
|
48 | ||||
|
49 | _source_settings = {} | |||
|
50 | ||||
|
51 | _dest_settings = {} | |||
|
52 | ||||
|
53 | ||||
|
54 | def command(ini_path, file_uid, source_backend_conf, dest_backend_conf): | |||
|
55 | with bootstrap(ini_path, env={'RC_CMD_SETUP_RC': '1'}) as env: | |||
|
56 | migrate_func(env, file_uid, source_backend_conf, dest_backend_conf) | |||
|
57 | ||||
|
58 | ||||
|
59 | def migrate_func(env, file_uid, source_backend_conf=None, dest_backend_conf=None): | |||
|
60 | """ | |||
|
61 | Migrate a single file-store artifact from a source backend to a destination backend. | |||
|
62 | Example usage:: | |||
|
63 | ||||
|
64 | from rhodecode.lib.rc_commands import migrate_artifact | |||
|
65 | migrate_artifact._source_settings = { | |||
|
66 | 'file_store.backend.type': 'filesystem_v1', | |||
|
67 | 'file_store.filesystem_v1.storage_path': '/var/opt/rhodecode_data/file_store', | |||
|
68 | } | |||
|
69 | migrate_artifact._dest_settings = { | |||
|
70 | 'file_store.backend.type': 'objectstore', | |||
|
71 | 'file_store.objectstore.url': 'http://s3-minio:9000', | |||
|
72 | 'file_store.objectstore.bucket': 'rhodecode-file-store', | |||
|
73 | 'file_store.objectstore.key': 's3admin', | |||
|
74 | 'file_store.objectstore.secret': 's3secret4', | |||
|
75 | 'file_store.objectstore.region': 'eu-central-1', | |||
|
76 | } | |||
|
77 | for db_obj in FileStore.query().all(): | |||
|
78 | migrate_artifact.migrate_func({}, db_obj.file_uid) | |||
|
79 | ||||
|
80 | """ | |||
|
81 | ||||
|
82 | try: | |||
|
83 | from rc_ee.api.views.store_api import _store_file | |||
|
84 | except ImportError: | |||
|
85 | click.secho('ERROR: Unable to import store_api. ' | |||
|
86 | 'store_api is only available in EE edition of RhodeCode', | |||
|
87 | fg='red') | |||
|
88 | sys.exit(-1) | |||
|
89 | ||||
|
90 | source_settings = _source_settings | |||
|
91 | if source_backend_conf: | |||
|
92 | source_settings = json.loads(open(source_backend_conf).read()) | |||
|
93 | dest_settings = _dest_settings | |||
|
94 | if dest_backend_conf: | |||
|
95 | dest_settings = json.loads(open(dest_backend_conf).read()) | |||
|
96 | ||||
|
97 | if file_uid.isnumeric(): | |||
|
98 | file_store_db_obj = FileStore.query() \ | |||
|
99 | .filter(FileStore.file_store_id == file_uid) \ | |||
|
100 | .scalar() | |||
|
101 | else: | |||
|
102 | file_store_db_obj = FileStore.query() \ | |||
|
103 | .filter(FileStore.file_uid == file_uid) \ | |||
|
104 | .scalar() | |||
|
105 | if not file_store_db_obj: | |||
|
106 | click.secho(f'ERROR: Unable to fetch artifact from database file_uid={file_uid}', | |||
|
107 | fg='red') | |||
|
108 | sys.exit(-1) | |||
|
109 | ||||
|
110 | uid_filename = file_store_db_obj.file_uid | |||
|
111 | org_filename = file_store_db_obj.file_display_name | |||
|
112 | click.secho(f'Attempting to migrate artifact {uid_filename}, filename: {org_filename}', fg='green') | |||
|
113 | ||||
|
114 | # get old version of f_store based on the data. | |||
|
115 | ||||
|
116 | origin_f_store = store_utils.get_filestore_backend(source_settings, always_init=True) | |||
|
117 | reader, metadata = origin_f_store.fetch(uid_filename) | |||
|
118 | ||||
|
119 | target_f_store = store_utils.get_filestore_backend(dest_settings, always_init=True) | |||
|
120 | target_f_store.import_to_store(reader, org_filename, uid_filename, metadata) | |||
|
121 | ||||
|
122 | click.secho(f'Migrated artifact {uid_filename}, filename: {org_filename} into {target_f_store} storage', fg='green') |
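When run through the CLI instead of ishell, `--source-backend-conf` and `--dest-backend-conf` point at JSON files whose keys mirror the settings shown in the docstring; the values below are illustrative placeholders, not defaults.

source-backend.json::

    {
        "file_store.backend.type": "filesystem_v1",
        "file_store.filesystem_v1.storage_path": "/var/opt/rhodecode_data/file_store"
    }

dest-backend.json::

    {
        "file_store.backend.type": "objectstore",
        "file_store.objectstore.url": "http://s3-minio:9000",
        "file_store.objectstore.bucket": "rhodecode-file-store",
        "file_store.objectstore.key": "s3admin",
        "file_store.objectstore.secret": "s3secret4",
        "file_store.objectstore.region": "eu-central-1"
    }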
@@ -0,0 +1,17 b'' | |||||
|
1 | ||||
|
2 | def apply_license(*args, **kwargs): | |||
|
3 | pass | |||
|
4 | ||||
|
5 | try: | |||
|
6 | from rc_license.models import apply_license | |||
|
7 | except ImportError: | |||
|
8 | pass | |||
|
9 | ||||
|
10 | ||||
|
11 | def apply_license_from_file(*args, **kwargs): | |||
|
12 | pass | |||
|
13 | ||||
|
14 | try: | |||
|
15 | from rc_license.models import apply_license_from_file | |||
|
16 | except ImportError: | |||
|
17 | pass |
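The shim gives CE a no-op fallback that EE transparently replaces: the module-level definition binds first, then the try/import rebinds the same name when `rc_license` is importable. Callers can therefore invoke it unconditionally in both editions; a one-line sketch with an illustrative path::

    # no-op on CE, real implementation on EE; the path is illustrative only
    apply_license_from_file('/etc/rhodecode/rhodecode_enterprise.license')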
@@ -0,0 +1,50 b'' | |||||
|
1 | <%inherit file="/base/base.mako"/> | |||
|
2 | ||||
|
3 | <%def name="title()"> | |||
|
4 | ${_('Security Admin')} | |||
|
5 | %if c.rhodecode_name: | |||
|
6 | · ${h.branding(c.rhodecode_name)} | |||
|
7 | %endif | |||
|
8 | </%def> | |||
|
9 | ||||
|
10 | <%def name="breadcrumbs_links()"></%def> | |||
|
11 | ||||
|
12 | <%def name="menu_bar_nav()"> | |||
|
13 | ${self.menu_items(active='admin')} | |||
|
14 | </%def> | |||
|
15 | ||||
|
16 | <%def name="menu_bar_subnav()"> | |||
|
17 | ${self.admin_menu(active='security')} | |||
|
18 | </%def> | |||
|
19 | ||||
|
20 | <%def name="main()"> | |||
|
21 | ||||
|
22 | <div class="box"> | |||
|
23 | ||||
|
24 | <!-- <div class="panel panel-default">--> | |||
|
25 | <!-- <div class="panel-heading">--> | |||
|
26 | <!-- <h3 class="panel-title">${_('Security Audit')}</h3>--> | |||
|
27 | <!-- </div>--> | |||
|
28 | <!-- <div class="panel-body">--> | |||
|
29 | <!-- <h4>${_('This feature is available in RhodeCode EE edition only. Contact {sales_email} to obtain a trial license.').format(sales_email='<a href="mailto:sales@rhodecode.com">sales@rhodecode.com</a>')|n}</h4>--> | |||
|
30 | <!-- <p>--> | |||
|
31 | <!-- ${_('You can scan your repositories for exposed secrets, passwords, etc')}--> | |||
|
32 | <!-- </p>--> | |||
|
33 | <!-- </div>--> | |||
|
34 | <!-- </div>--> | |||
|
35 | ||||
|
36 | <div class="panel panel-default"> | |||
|
37 | <div class="panel-heading"> | |||
|
38 | <h3 class="panel-title">${_('Allowed client versions')}</h3> | |||
|
39 | </div> | |||
|
40 | <div class="panel-body"> | |||
|
41 | <h4>${_('This feature is available in RhodeCode EE edition only. Contact {sales_email} to obtain a trial license.').format(sales_email='<a href="mailto:sales@rhodecode.com">sales@rhodecode.com</a>')|n}</h4> | |||
|
42 | <p> | |||
|
43 | ${_('Some outdated client versions may have security vulnerabilities. This section has rules for whitelisting client versions for Git, Mercurial and SVN.')} | |||
|
44 | </p> | |||
|
45 | </div> | |||
|
46 | ||||
|
47 | ||||
|
48 | </div> | |||
|
49 | ||||
|
50 | </%def> |
@@ -1,5 +1,5 b'' | |||||
1 | [bumpversion] |
|
1 | [bumpversion] | |
2 |
current_version = 5. |
|
2 | current_version = 5.2.0 | |
3 | message = release: Bump version {current_version} to {new_version} |
|
3 | message = release: Bump version {current_version} to {new_version} | |
4 |
|
4 | |||
5 | [bumpversion:file:rhodecode/VERSION] |
|
5 | [bumpversion:file:rhodecode/VERSION] |
@@ -54,7 +54,7 b' syntax: regexp' | |||||
54 | ^rhodecode\.log$ |
|
54 | ^rhodecode\.log$ | |
55 | ^rhodecode_dev\.log$ |
|
55 | ^rhodecode_dev\.log$ | |
56 | ^test\.db$ |
|
56 | ^test\.db$ | |
57 |
|
57 | ^venv/ | ||
58 |
|
58 | |||
59 | # ac-tests |
|
59 | # ac-tests | |
60 | ^acceptance_tests/\.cache.*$ |
|
60 | ^acceptance_tests/\.cache.*$ |
@@ -1,12 +1,49 b'' | |||||
|
1 | .DEFAULT_GOAL := help | |||
|
2 | ||||
|
3 | # Pretty print values cf. https://misc.flogisoft.com/bash/tip_colors_and_formatting | |||
|
4 | RESET := \033[0m # Reset all formatting | |||
|
5 | GREEN := \033[0;32m # Resets before setting 16b colour (32 -- green) | |||
|
6 | YELLOW := \033[0;33m | |||
|
7 | ORANGE := \033[0;38;5;208m # Reset then set 256b colour (208 -- orange) | |||
|
8 | PEACH := \033[0;38;5;216m | |||
|
9 | ||||
|
10 | ||||
|
11 | ## ---------------------------------------------------------------------------------- ## | |||
|
12 | ## ------------------------- Help usage builder ------------------------------------- ## | |||
|
13 | ## ---------------------------------------------------------------------------------- ## | |||
|
14 | # use '# >>> Build commands' to create section | |||
|
15 | # use '# target: target description' to add help text for a target | |||
|
16 | .PHONY: help | |||
|
17 | help: | |||
|
18 | @echo "Usage:" | |||
|
19 | @cat $(MAKEFILE_LIST) | grep -E '^# >>>|^# [A-Za-z0-9_.-]+:' | sed -E 's/^# //' | awk ' \ | |||
|
20 | BEGIN { \ | |||
|
21 | green="\033[32m"; \ | |||
|
22 | yellow="\033[33m"; \ | |||
|
23 | reset="\033[0m"; \ | |||
|
24 | section=""; \ | |||
|
25 | } \ | |||
|
26 | /^>>>/ { \ | |||
|
27 | section=substr($$0, 5); \ | |||
|
28 | printf "\n" green ">>> %s" reset "\n", section; \ | |||
|
29 | next; \ | |||
|
30 | } \ | |||
|
31 | /^([A-Za-z0-9_.-]+):/ { \ | |||
|
32 | target=$$1; \ | |||
|
33 | gsub(/:$$/, "", target); \ | |||
|
34 | description=substr($$0, index($$0, ":") + 2); \ | |||
|
35 | if (description == "") { description="-"; } \ | |||
|
36 | printf " - " yellow "%-35s" reset " %s\n", target, description; \ | |||
|
37 | } \ | |||
|
38 | ' | |||
|
39 | ||||
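The help builder keys purely off comment shape, so documenting a new target takes two comment lines; a hypothetical sketch following the convention above::

    # >>> Example commands
    # greet: print a friendly greeting
    greet:
    	@echo "hello"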
1 | # required for pushd to work.. |
|
40 | # required for pushd to work.. | |
2 | SHELL = /bin/bash |
|
41 | SHELL = /bin/bash | |
3 |
|
42 | |||
4 |
|
43 | # >>> Tests commands | ||
5 | # set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py |
|
|||
6 | OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES} |
|
|||
7 |
|
44 | |||
8 | .PHONY: clean |
|
45 | .PHONY: clean | |
9 |
# |
|
46 | # clean: Cleanup compiled and cache py files | |
10 | clean: |
|
47 | clean: | |
11 | make test-clean |
|
48 | make test-clean | |
12 | find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';' |
|
49 | find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';' | |
@@ -14,14 +51,14 b' clean:' | |||||
14 |
|
51 | |||
15 |
|
52 | |||
16 | .PHONY: test |
|
53 | .PHONY: test | |
17 |
# |
|
54 | # test: run test-clean and tests | |
18 | test: |
|
55 | test: | |
19 | make test-clean |
|
56 | make test-clean | |
20 | make test-only |
|
57 | unset RC_SQLALCHEMY_DB1_URL && unset RC_DB_URL && make test-only | |
21 |
|
58 | |||
22 |
|
59 | |||
23 | .PHONY: test-clean |
|
60 | .PHONY: test-clean | |
24 |
# |
|
61 | # test-clean: remove coverage reports, caches and other test artifacts | |
25 | test-clean: |
|
62 | test-clean: | |
26 | rm -rf coverage.xml htmlcov junit.xml pylint.log result |
|
63 | rm -rf coverage.xml htmlcov junit.xml pylint.log result | |
27 | find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';' |
|
64 | find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';' | |
@@ -29,34 +66,36 b' test-clean:' | |||||
29 |
|
66 | |||
30 |
|
67 | |||
31 | .PHONY: test-only |
|
68 | .PHONY: test-only | |
32 |
# |
|
69 | # test-only: Run tests only without cleanup | |
33 | test-only: |
|
70 | test-only: | |
34 | PYTHONHASHSEED=random \ |
|
71 | PYTHONHASHSEED=random \ | |
35 | py.test -x -vv -r xw -p no:sugar \ |
|
72 | py.test -x -vv -r xw -p no:sugar \ | |
36 | --cov-report=term-missing --cov-report=html \ |
|
73 | --cov-report=term-missing --cov-report=html \ | |
37 | --cov=rhodecode rhodecode |
|
74 | --cov=rhodecode rhodecode | |
38 |
|
75 | |||
|
76 | # >>> Docs commands | |||
39 |
|
77 | |||
40 | .PHONY: docs |
|
78 | .PHONY: docs | |
41 |
# |
|
79 | # docs: build docs | |
42 | docs: |
|
80 | docs: | |
43 | (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean html SPHINXOPTS="-W") |
|
81 | (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean html SPHINXOPTS="-W") | |
44 |
|
82 | |||
45 |
|
83 | |||
46 | .PHONY: docs-clean |
|
84 | .PHONY: docs-clean | |
47 |
# |
|
85 | # docs-clean: Cleanup docs | |
48 | docs-clean: |
|
86 | docs-clean: | |
49 | (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean) |
|
87 | (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make clean) | |
50 |
|
88 | |||
51 |
|
89 | |||
52 | .PHONY: docs-cleanup |
|
90 | .PHONY: docs-cleanup | |
53 |
# |
|
91 | # docs-cleanup: Cleanup docs | |
54 | docs-cleanup: |
|
92 | docs-cleanup: | |
55 | (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make cleanup) |
|
93 | (cd docs; docker run --rm -v $(PWD):/project --workdir=/project/docs sphinx-doc-build-rc make cleanup) | |
56 |
|
94 | |||
|
95 | # >>> Dev commands | |||
57 |
|
96 | |||
58 | .PHONY: web-build |
|
97 | .PHONY: web-build | |
59 |
# |
|
98 | # web-build: Build JS packages static/js | |
60 | web-build: |
|
99 | web-build: | |
61 | rm -rf node_modules |
|
100 | rm -rf node_modules | |
62 | docker run -it --rm -v $(PWD):/project --workdir=/project rhodecode/static-files-build:16 -c "npm install && /project/node_modules/.bin/grunt" |
|
101 | docker run -it --rm -v $(PWD):/project --workdir=/project rhodecode/static-files-build:16 -c "npm install && /project/node_modules/.bin/grunt" | |
@@ -64,25 +103,9 b' web-build:' | |||||
64 | ./rhodecode/tests/scripts/static-file-check.sh rhodecode/public/ |
|
103 | ./rhodecode/tests/scripts/static-file-check.sh rhodecode/public/ | |
65 | rm -rf node_modules |
|
104 | rm -rf node_modules | |
66 |
|
105 | |||
67 | .PHONY: ruff-check |
|
|||
68 | ## run a ruff analysis |
|
|||
69 | ruff-check: |
|
|||
70 | ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev . |
|
|||
71 |
|
||||
72 | .PHONY: pip-packages |
|
|||
73 | ## Show outdated packages |
|
|||
74 | pip-packages: |
|
|||
75 | python ${OUTDATED_PACKAGES} |
|
|||
76 |
|
||||
77 |
|
||||
78 | .PHONY: build |
|
|||
79 | ## Build sdist/egg |
|
|||
80 | build: |
|
|||
81 | python -m build |
|
|||
82 |
|
||||
83 |
|
106 | |||
84 | .PHONY: dev-sh |
|
107 | .PHONY: dev-sh | |
85 |
# |
|
108 | # dev-sh: set up and enter an interactive development shell | |
86 | dev-sh: |
|
109 | dev-sh: | |
87 | sudo echo "deb [trusted=yes] https://apt.fury.io/rsteube/ /" | sudo tee -a "/etc/apt/sources.list.d/fury.list" |
|
110 | sudo echo "deb [trusted=yes] https://apt.fury.io/rsteube/ /" | sudo tee -a "/etc/apt/sources.list.d/fury.list" | |
88 | sudo apt-get update |
|
111 | sudo apt-get update | |
@@ -95,14 +118,14 b' dev-sh:' | |||||
95 |
|
118 | |||
96 |
|
119 | |||
97 | .PHONY: dev-cleanup |
|
120 | .PHONY: dev-cleanup | |
98 |
# |
|
121 | # dev-cleanup: uninstall all non-editable pip packages and clear /tmp | |
99 | dev-cleanup: |
|
122 | dev-cleanup: | |
100 | pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y |
|
123 | pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y | |
101 | rm -rf /tmp/* |
|
124 | rm -rf /tmp/* | |
102 |
|
125 | |||
103 |
|
126 | |||
104 | .PHONY: dev-env |
|
127 | .PHONY: dev-env | |
105 |
# |
|
128 | # dev-env: create the dev environment from the requirements files and install packages in develop mode | |
106 | ## Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y |
|
129 | ## Cleanup: pip freeze | grep -v "^-e" | grep -v "@" | xargs pip uninstall -y | |
107 | dev-env: |
|
130 | dev-env: | |
108 | sudo -u root chown rhodecode:rhodecode /home/rhodecode/.cache/pip/ |
|
131 | sudo -u root chown rhodecode:rhodecode /home/rhodecode/.cache/pip/ | |
@@ -114,7 +137,7 b' dev-env:' | |||||
114 |
|
137 | |||
115 |
|
138 | |||
116 | .PHONY: sh |
|
139 | .PHONY: sh | |
117 |
# |
|
140 | # sh: shortcut for make dev-sh dev-env | |
118 | sh: |
|
141 | sh: | |
119 | make dev-env |
|
142 | make dev-env | |
120 | make dev-sh |
|
143 | make dev-sh | |
@@ -124,49 +147,12 b' sh:' | |||||
124 | workers?=1 |
|
147 | workers?=1 | |
125 |
|
148 | |||
126 | .PHONY: dev-srv |
|
149 | .PHONY: dev-srv | |
127 |
# |
|
150 | # dev-srv: run the gunicorn web server with reloader; use workers=N to set multi-worker mode | |
128 | dev-srv: |
|
151 | dev-srv: | |
129 | gunicorn --paste=.dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120 --reload --workers=$(workers) |
|
152 | gunicorn --paste=.dev/dev.ini --bind=0.0.0.0:10020 --config=.dev/gunicorn_config.py --timeout=120 --reload --workers=$(workers) | |
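Since `workers?=1` only sets a default, the worker count can be overridden per invocation, e.g. `make dev-srv workers=4` starts gunicorn with four reloading workers.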
130 |
|
153 | |||
131 |
|
154 | .PHONY: ruff-check | ||
132 | # Default command on calling make |
|
155 | # ruff-check: run a ruff analysis | |
133 | .DEFAULT_GOAL := show-help |
|
156 | ruff-check: | |
|
157 | ruff check --ignore F401 --ignore I001 --ignore E402 --ignore E501 --ignore F841 --exclude rhodecode/lib/dbmigrate --exclude .eggs --exclude .dev . | |||
134 |
|
|
158 | ||
135 | .PHONY: show-help |
|
|||
136 | show-help: |
|
|||
137 | @echo "$$(tput bold)Available rules:$$(tput sgr0)" |
|
|||
138 | @echo |
|
|||
139 | @sed -n -e "/^## / { \ |
|
|||
140 | h; \ |
|
|||
141 | s/.*//; \ |
|
|||
142 | :doc" \ |
|
|||
143 | -e "H; \ |
|
|||
144 | n; \ |
|
|||
145 | s/^## //; \ |
|
|||
146 | t doc" \ |
|
|||
147 | -e "s/:.*//; \ |
|
|||
148 | G; \ |
|
|||
149 | s/\\n## /---/; \ |
|
|||
150 | s/\\n/ /g; \ |
|
|||
151 | p; \ |
|
|||
152 | }" ${MAKEFILE_LIST} \ |
|
|||
153 | | LC_ALL='C' sort --ignore-case \ |
|
|||
154 | | awk -F '---' \ |
|
|||
155 | -v ncol=$$(tput cols) \ |
|
|||
156 | -v indent=19 \ |
|
|||
157 | -v col_on="$$(tput setaf 6)" \ |
|
|||
158 | -v col_off="$$(tput sgr0)" \ |
|
|||
159 | '{ \ |
|
|||
160 | printf "%s%*s%s ", col_on, -indent, $$1, col_off; \ |
|
|||
161 | n = split($$2, words, " "); \ |
|
|||
162 | line_length = ncol - indent; \ |
|
|||
163 | for (i = 1; i <= n; i++) { \ |
|
|||
164 | line_length -= length(words[i]) + 1; \ |
|
|||
165 | if (line_length <= 0) { \ |
|
|||
166 | line_length = ncol - indent - length(words[i]) - 1; \ |
|
|||
167 | printf "\n%*s ", -indent, " "; \ |
|
|||
168 | } \ |
|
|||
169 | printf "%s ", words[i]; \ |
|
|||
170 | } \ |
|
|||
171 | printf "\n"; \ |
|
|||
172 | }' |
|
@@ -257,6 +257,13 b' license_token =' | |||||
257 | ; This flag hides sensitive information on the license page such as token, and license data |
|
257 | ; This flag hides sensitive information on the license page such as token, and license data | |
258 | license.hide_license_info = false |
|
258 | license.hide_license_info = false | |
259 |
|
259 | |||
|
260 | ; Import EE license from this license path | |||
|
261 | #license.import_path = %(here)s/rhodecode_enterprise.license | |||
|
262 | ||||
|
263 | ; license import mode: 'if-missing' or 'force' (always override) | |||
|
264 | ; 'if-missing' applies the license only when none exists; 'force' always overrides it | |||
|
265 | license.import_path_mode = if-missing | |||
|
266 | ||||
260 | ; supervisor connection uri, for managing supervisor and logs. |
|
267 | ; supervisor connection uri, for managing supervisor and logs. | |
261 | supervisor.uri = |
|
268 | supervisor.uri = | |
262 |
|
269 | |||
@@ -281,15 +288,56 b' labs_settings_active = true' | |||||
281 | ; optional prefix to Add to email Subject |
|
288 | ; optional prefix to Add to email Subject | |
282 | #exception_tracker.email_prefix = [RHODECODE ERROR] |
|
289 | #exception_tracker.email_prefix = [RHODECODE ERROR] | |
283 |
|
290 | |||
284 | ; File store configuration. This is used to store and serve uploaded files |
|
291 | ; NOTE: this setting IS DEPRECATED: | |
285 | file_store.enabled = true |
|
292 | ; file_store backend is always enabled | |
|
293 | #file_store.enabled = true | |||
286 |
|
294 | |||
|
295 | ; NOTE: this setting IS DEPRECATED: | |||
|
296 | ; file_store.backend = X -> use `file_store.backend.type = filesystem_v2` instead | |||
287 | ; Storage backend, available options are: local |
|
297 | ; Storage backend, available options are: local | |
288 | file_store.backend = local |
|
298 | #file_store.backend = local | |
289 |
|
299 | |||
|
300 | ; NOTE: this setting IS DEPRECATED: | |||
|
301 | ; file_store.storage_path = X -> use `file_store.filesystem_v2.storage_path = X` instead | |||
290 | ; path to store the uploaded binaries and artifacts |
|
302 | ; path to store the uploaded binaries and artifacts | |
291 | file_store.storage_path = /var/opt/rhodecode_data/file_store |
|
303 | #file_store.storage_path = /var/opt/rhodecode_data/file_store | |
|
304 | ||||
|
305 | ; Artifacts file-store is used to store comment attachments and artifact uploads. | |||
|
306 | ; file_store backend type: filesystem_v1, filesystem_v2 or objectstore (s3-based) are available as options | |||
|
307 | ; filesystem_v1 is backwards compat with pre 5.1 storage changes | |||
|
308 | ; new installations should choose filesystem_v2 or objectstore (s3-based); pick filesystem_v1 when migrating from | |||
|
309 | ; previous installations to keep the existing artifacts without needing a migration | |||
|
310 | #file_store.backend.type = filesystem_v2 | |||
|
311 | ||||
|
312 | ; filesystem options... | |||
|
313 | #file_store.filesystem_v1.storage_path = /var/opt/rhodecode_data/artifacts_file_store | |||
|
314 | ||||
|
315 | ; filesystem_v2 options... | |||
|
316 | #file_store.filesystem_v2.storage_path = /var/opt/rhodecode_data/artifacts_file_store | |||
|
317 | #file_store.filesystem_v2.shards = 8 | |||
292 |
|
318 | |||
|
319 | ; objectstore options... | |||
|
320 | ; url for s3 compatible storage that allows to upload artifacts | |||
|
321 | ; e.g. http://minio:9000 | |||
|
322 | #file_store.backend.type = objectstore | |||
|
323 | #file_store.objectstore.url = http://s3-minio:9000 | |||
|
324 | ||||
|
325 | ; a top-level bucket to put all other shards in | |||
|
326 | ; objects will be stored in rhodecode-file-store/shard-N based on the bucket_shards number | |||
|
327 | #file_store.objectstore.bucket = rhodecode-file-store | |||
|
328 | ||||
|
329 | ; number of sharded buckets to create to distribute archives across | |||
|
330 | ; default is 8 shards | |||
|
331 | #file_store.objectstore.bucket_shards = 8 | |||
|
332 | ||||
|
333 | ; key for s3 auth | |||
|
334 | #file_store.objectstore.key = s3admin | |||
|
335 | ||||
|
336 | ; secret for s3 auth | |||
|
337 | #file_store.objectstore.secret = s3secret4 | |||
|
338 | ||||
|
339 | ; region for s3 storage | |||
|
340 | #file_store.objectstore.region = eu-central-1 | |||
293 |
|
341 | |||
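Put together, a minimal uncommented objectstore setup would look like the block below; the values simply mirror the commented examples above, so substitute your own S3 endpoint and credentials::

    file_store.backend.type = objectstore
    file_store.objectstore.url = http://s3-minio:9000
    file_store.objectstore.bucket = rhodecode-file-store
    file_store.objectstore.key = s3admin
    file_store.objectstore.secret = s3secret4
    file_store.objectstore.region = eu-central-1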
294 | ; Redis url to acquire/check generation of archives locks |
|
342 | ; Redis url to acquire/check generation of archives locks | |
295 | archive_cache.locking.url = redis://redis:6379/1 |
|
343 | archive_cache.locking.url = redis://redis:6379/1 | |
@@ -624,7 +672,8 b' vcs.scm_app_implementation = http' | |||||
624 | ; Push/Pull operations hooks protocol, available options are: |
|
672 | ; Push/Pull operations hooks protocol, available options are: | |
625 | ; `http` - use http-rpc backend (default) |
|
673 | ; `http` - use http-rpc backend (default) | |
626 | ; `celery` - use celery based hooks |
|
674 | ; `celery` - use celery based hooks | |
627 | vcs.hooks.protocol = http |
|
675 | #DEPRECATED:vcs.hooks.protocol = http | |
|
676 | vcs.hooks.protocol.v2 = celery | |||
628 |
|
677 | |||
629 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be |
|
678 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be | |
630 | ; accessible via network. |
|
679 | ; accessible via network. | |
@@ -647,6 +696,12 b' vcs.connection_timeout = 3600' | |||||
647 | ; It uses cache_region `cache_repo` |
|
696 | ; It uses cache_region `cache_repo` | |
648 | vcs.methods.cache = true |
|
697 | vcs.methods.cache = true | |
649 |
|
698 | |||
|
699 | ; Filesystem location where Git lfs objects should be stored | |||
|
700 | vcs.git.lfs.storage_location = /var/opt/rhodecode_repo_store/.cache/git_lfs_store | |||
|
701 | ||||
|
702 | ; Filesystem location where Mercurial largefile objects should be stored | |||
|
703 | vcs.hg.largefiles.storage_location = /var/opt/rhodecode_repo_store/.cache/hg_largefiles_store | |||
|
704 | ||||
650 | ; #################################################### |
|
705 | ; #################################################### | |
651 | ; Subversion proxy support (mod_dav_svn) |
|
706 | ; Subversion proxy support (mod_dav_svn) | |
652 | ; Maps RhodeCode repo groups into SVN paths for Apache |
|
707 | ; Maps RhodeCode repo groups into SVN paths for Apache | |
@@ -716,7 +771,8 b' ssh.authorized_keys_file_path = /etc/rho' | |||||
716 | ; RhodeCode installation directory. |
|
771 | ; RhodeCode installation directory. | |
717 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
772 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
718 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 |
|
773 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 | |
719 | ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
774 | #DEPRECATED: ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
|
775 | ssh.wrapper_cmd.v2 = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 | |||
720 |
|
776 | |||
721 | ; Allow shell when executing the ssh-wrapper command |
|
777 | ; Allow shell when executing the ssh-wrapper command | |
722 | ssh.wrapper_cmd_allow_shell = false |
|
778 | ssh.wrapper_cmd_allow_shell = false |
@@ -13,6 +13,7 b' import traceback' | |||||
13 | import random |
|
13 | import random | |
14 | import socket |
|
14 | import socket | |
15 | import dataclasses |
|
15 | import dataclasses | |
|
16 | import json | |||
16 | from gunicorn.glogging import Logger |
|
17 | from gunicorn.glogging import Logger | |
17 |
|
18 | |||
18 |
|
19 | |||
@@ -37,17 +38,41 b" accesslog = '-'" | |||||
37 | worker_tmp_dir = None |
|
38 | worker_tmp_dir = None | |
38 | tmp_upload_dir = None |
|
39 | tmp_upload_dir = None | |
39 |
|
40 | |||
40 | # use re-use port logic |
|
41 | # use reuse_port (SO_REUSEPORT) to let the Linux kernel load-balance requests across workers. |
41 |
|
|
42 | reuse_port = True | |
42 |
|
43 | |||
43 | # Custom log format |
|
44 | # Custom log format | |
44 | #access_log_format = ( |
|
45 | #access_log_format = ( | |
45 | # '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"') |
|
46 | # '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"') | |
46 |
|
47 | |||
47 | # loki format for easier parsing in grafana |
|
48 | # loki format for easier parsing in grafana | |
48 | access_log_format = ( |
|
49 | loki_access_log_format = ( | |
49 | 'time="%(t)s" pid=%(p)s level="INFO" type="[GNCRN]" ip="%(h)-15s" rqt="%(L)s" response_code="%(s)s" response_bytes="%(b)-6s" uri="%(m)s:%(U)s %(q)s" user=":%(u)s" user_agent="%(a)s"') |
|
50 | 'time="%(t)s" pid=%(p)s level="INFO" type="[GNCRN]" ip="%(h)-15s" rqt="%(L)s" response_code="%(s)s" response_bytes="%(b)-6s" uri="%(m)s:%(U)s %(q)s" user=":%(u)s" user_agent="%(a)s"') | |
50 |
|
51 | |||
|
52 | # JSON format | |||
|
53 | json_access_log_format = json.dumps({ | |||
|
54 | 'time': r'%(t)s', | |||
|
55 | 'pid': r'%(p)s', | |||
|
56 | 'level': 'INFO', | |||
|
57 | 'ip': r'%(h)s', | |||
|
58 | 'request_time': r'%(L)s', | |||
|
59 | 'remote_address': r'%(h)s', | |||
|
60 | 'user_name': r'%(u)s', | |||
|
61 | 'status': r'%(s)s', | |||
|
62 | 'method': r'%(m)s', | |||
|
63 | 'url_path': r'%(U)s', | |||
|
64 | 'query_string': r'%(q)s', | |||
|
65 | 'protocol': r'%(H)s', | |||
|
66 | 'response_length': r'%(B)s', | |||
|
67 | 'referer': r'%(f)s', | |||
|
68 | 'user_agent': r'%(a)s', | |||
|
69 | ||||
|
70 | }) | |||
|
71 | ||||
|
72 | access_log_format = loki_access_log_format | |||
|
73 | if os.environ.get('RC_LOGGING_FORMATTER') == 'json': | |||
|
74 | access_log_format = json_access_log_format | |||
|
75 | ||||
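Because the formatter is chosen at import time, the switch has to happen in the environment that launches gunicorn; for example `RC_LOGGING_FORMATTER=json gunicorn --paste=.dev/dev.ini --config=.dev/gunicorn_config.py` (mirroring the `dev-srv` Makefile target) emits one JSON object per access-log line.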
51 | # self adjust workers based on CPU count, to use maximum of CPU and not overquota the resources |
|
76 | # self adjust workers based on CPU count, to use maximum of CPU and not overquota the resources | |
52 | # workers = get_workers() |
|
77 | # workers = get_workers() | |
53 |
|
78 |
@@ -225,6 +225,13 b' license_token =' | |||||
225 | ; This flag hides sensitive information on the license page such as token, and license data |
|
225 | ; This flag hides sensitive information on the license page such as token, and license data | |
226 | license.hide_license_info = false |
|
226 | license.hide_license_info = false | |
227 |
|
227 | |||
|
228 | ; Import EE license from this license path | |||
|
229 | #license.import_path = %(here)s/rhodecode_enterprise.license | |||
|
230 | ||||
|
231 | ; license import mode: 'if-missing' or 'force' (always override) | |||
|
232 | ; 'if-missing' applies the license only when none exists; 'force' always overrides it | |||
|
233 | license.import_path_mode = if-missing | |||
|
234 | ||||
228 | ; supervisor connection uri, for managing supervisor and logs. |
|
235 | ; supervisor connection uri, for managing supervisor and logs. | |
229 | supervisor.uri = |
|
236 | supervisor.uri = | |
230 |
|
237 | |||
@@ -249,15 +256,56 b' labs_settings_active = true' | |||||
249 | ; optional prefix to Add to email Subject |
|
256 | ; optional prefix to Add to email Subject | |
250 | #exception_tracker.email_prefix = [RHODECODE ERROR] |
|
257 | #exception_tracker.email_prefix = [RHODECODE ERROR] | |
251 |
|
258 | |||
252 | ; File store configuration. This is used to store and serve uploaded files |
|
259 | ; NOTE: this setting IS DEPRECATED: | |
253 | file_store.enabled = true |
|
260 | ; file_store backend is always enabled | |
|
261 | #file_store.enabled = true | |||
254 |
|
262 | |||
|
263 | ; NOTE: this setting IS DEPRECATED: | |||
|
264 | ; file_store.backend = X -> use `file_store.backend.type = filesystem_v2` instead | |||
255 | ; Storage backend, available options are: local |
|
265 | ; Storage backend, available options are: local | |
256 | file_store.backend = local |
|
266 | #file_store.backend = local | |
257 |
|
267 | |||
|
268 | ; NOTE: this setting IS DEPRECATED: | |||
|
269 | ; file_store.storage_path = X -> use `file_store.filesystem_v2.storage_path = X` instead | |||
258 | ; path to store the uploaded binaries and artifacts |
|
270 | ; path to store the uploaded binaries and artifacts | |
259 | file_store.storage_path = /var/opt/rhodecode_data/file_store |
|
271 | #file_store.storage_path = /var/opt/rhodecode_data/file_store | |
|
272 | ||||
|
273 | ; Artifacts file-store is used to store comment attachments and artifact uploads. | |||
|
274 | ; file_store backend type: filesystem_v1, filesystem_v2 or objectstore (s3-based) are available as options | |||
|
275 | ; filesystem_v1 is backwards compat with pre 5.1 storage changes | |||
|
276 | ; new installations should choose filesystem_v2 or objectstore (s3-based); pick filesystem_v1 when migrating from | |||
|
277 | ; previous installations to keep the existing artifacts without needing a migration | |||
|
278 | #file_store.backend.type = filesystem_v2 | |||
|
279 | ||||
|
280 | ; filesystem options... | |||
|
281 | #file_store.filesystem_v1.storage_path = /var/opt/rhodecode_data/artifacts_file_store | |||
|
282 | ||||
|
283 | ; filesystem_v2 options... | |||
|
284 | #file_store.filesystem_v2.storage_path = /var/opt/rhodecode_data/artifacts_file_store | |||
|
285 | #file_store.filesystem_v2.shards = 8 | |||
260 |
|
286 | |||
|
287 | ; objectstore options... | |||
|
288 | ; url for s3 compatible storage that allows to upload artifacts | |||
|
289 | ; e.g. http://minio:9000 | |||
|
290 | #file_store.backend.type = objectstore | |||
|
291 | #file_store.objectstore.url = http://s3-minio:9000 | |||
|
292 | ||||
|
293 | ; a top-level bucket to put all other shards in | |||
|
294 | ; objects will be stored in rhodecode-file-store/shard-N based on the bucket_shards number | |||
|
295 | #file_store.objectstore.bucket = rhodecode-file-store | |||
|
296 | ||||
|
297 | ; number of sharded buckets to create to distribute archives across | |||
|
298 | ; default is 8 shards | |||
|
299 | #file_store.objectstore.bucket_shards = 8 | |||
|
300 | ||||
|
301 | ; key for s3 auth | |||
|
302 | #file_store.objectstore.key = s3admin | |||
|
303 | ||||
|
304 | ; secret for s3 auth | |||
|
305 | #file_store.objectstore.secret = s3secret4 | |||
|
306 | ||||
|
307 | ; region for s3 storage | |||
|
308 | #file_store.objectstore.region = eu-central-1 | |||
261 |
|
309 | |||
262 | ; Redis url to acquire/check generation of archives locks |
|
310 | ; Redis url to acquire/check generation of archives locks | |
263 | archive_cache.locking.url = redis://redis:6379/1 |
|
311 | archive_cache.locking.url = redis://redis:6379/1 | |
@@ -592,7 +640,8 b' vcs.scm_app_implementation = http' | |||||
592 | ; Push/Pull operations hooks protocol, available options are: |
|
640 | ; Push/Pull operations hooks protocol, available options are: | |
593 | ; `http` - use http-rpc backend (default) |
|
641 | ; `http` - use http-rpc backend (default) | |
594 | ; `celery` - use celery based hooks |
|
642 | ; `celery` - use celery based hooks | |
595 | vcs.hooks.protocol = http |
|
643 | #DEPRECATED:vcs.hooks.protocol = http | |
|
644 | vcs.hooks.protocol.v2 = celery | |||
596 |
|
645 | |||
597 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be |
|
646 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be | |
598 | ; accessible via network. |
|
647 | ; accessible via network. | |
@@ -615,6 +664,12 b' vcs.connection_timeout = 3600' | |||||
615 | ; It uses cache_region `cache_repo` |
|
664 | ; It uses cache_region `cache_repo` | |
616 | vcs.methods.cache = true |
|
665 | vcs.methods.cache = true | |
617 |
|
666 | |||
|
667 | ; Filesystem location where Git lfs objects should be stored | |||
|
668 | vcs.git.lfs.storage_location = /var/opt/rhodecode_repo_store/.cache/git_lfs_store | |||
|
669 | ||||
|
670 | ; Filesystem location where Mercurial largefile objects should be stored | |||
|
671 | vcs.hg.largefiles.storage_location = /var/opt/rhodecode_repo_store/.cache/hg_largefiles_store | |||
|
672 | ||||
618 | ; #################################################### |
|
673 | ; #################################################### | |
619 | ; Subversion proxy support (mod_dav_svn) |
|
674 | ; Subversion proxy support (mod_dav_svn) | |
620 | ; Maps RhodeCode repo groups into SVN paths for Apache |
|
675 | ; Maps RhodeCode repo groups into SVN paths for Apache | |
@@ -684,7 +739,8 b' ssh.authorized_keys_file_path = /etc/rho' | |||||
684 | ; RhodeCode installation directory. |
|
739 | ; RhodeCode installation directory. | |
685 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
740 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
686 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 |
|
741 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 | |
687 | ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
742 | #DEPRECATED: ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
|
743 | ssh.wrapper_cmd.v2 = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 | |||
688 |
|
744 | |||
689 | ; Allow shell when executing the ssh-wrapper command |
|
745 | ; Allow shell when executing the ssh-wrapper command | |
690 | ssh.wrapper_cmd_allow_shell = false |
|
746 | ssh.wrapper_cmd_allow_shell = false |
@@ -22,6 +22,12 b' RUN apt-get update \\' | |||||
22 | && apt-get clean \ |
|
22 | && apt-get clean \ | |
23 | && rm -rf /var/lib/apt/lists/* |
|
23 | && rm -rf /var/lib/apt/lists/* | |
24 |
|
24 | |||
|
25 | RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \ | |||
|
26 | unzip awscliv2.zip && \ | |||
|
27 | ./aws/install && \ | |||
|
28 | rm -rf ./aws && \ | |||
|
29 | rm awscliv2.zip | |||
|
30 | ||||
25 | RUN \ |
|
31 | RUN \ | |
26 | python3 -m pip install --no-cache-dir --upgrade pip && \ |
|
32 | python3 -m pip install --no-cache-dir --upgrade pip && \ | |
27 | python3 -m pip install --no-cache-dir Sphinx Pillow |
|
33 | python3 -m pip install --no-cache-dir Sphinx Pillow |
@@ -147,10 +147,6 b' Peer-to-peer Failover Support' | |||||
147 |
|
147 | |||
148 | * Yes |
|
148 | * Yes | |
149 |
|
149 | |||
150 | Additional Binaries |
|
|||
151 | ------------------- |
|
|||
152 |
|
||||
153 | * Yes, see :ref:`rhodecode-nix-ref` for full details. |
|
|||
154 |
|
150 | |||
155 | Remote Connectivity |
|
151 | Remote Connectivity | |
156 | ------------------- |
|
152 | ------------------- |
@@ -13,7 +13,7 b' This method simply enables SAML authenti' | |||||
13 | On the server where RhodeCode Enterprise runs, start ishell on the instance to which we |
|
13 | On the server where RhodeCode Enterprise runs, start ishell on the instance to which we | |
14 | want to apply the SAML migration:: |
|
14 | want to apply the SAML migration:: | |
15 |
|
15 | |||
16 | rccontrol ishell enterprise-1 |
|
16 | ./rcstack cli ishell | |
17 |
|
17 | |||
18 | Follow these steps to enable SAML authentication for multiple users. |
|
18 | Follow these steps to enable SAML authentication for multiple users. | |
19 |
|
19 | |||
@@ -46,6 +46,8 b' From available options pick only one and' | |||||
46 |
|
46 | |||
47 | # for Duo Security |
|
47 | # for Duo Security | |
48 | In [2]: from rc_auth_plugins.auth_duo_security import RhodeCodeAuthPlugin |
|
48 | In [2]: from rc_auth_plugins.auth_duo_security import RhodeCodeAuthPlugin | |
|
49 | # for Azure Entra | |||
|
50 | In [2]: from rc_auth_plugins.auth_azure import RhodeCodeAuthPlugin | |||
49 | # for OneLogin |
|
51 | # for OneLogin | |
50 | In [2]: from rc_auth_plugins.auth_onelogin import RhodeCodeAuthPlugin |
|
52 | In [2]: from rc_auth_plugins.auth_onelogin import RhodeCodeAuthPlugin | |
51 | # generic SAML plugin |
|
53 | # generic SAML plugin | |
@@ -62,13 +64,13 b' Enter in the ishell prompt' | |||||
62 | ...: attrs = saml2user.get(user.user_id) |
|
64 | ...: attrs = saml2user.get(user.user_id) | |
63 | ...: provider = RhodeCodeAuthPlugin.uid |
|
65 | ...: provider = RhodeCodeAuthPlugin.uid | |
64 | ...: if existing_identity: |
|
66 | ...: if existing_identity: | |
65 |
...: print('Identity for user `{ |
|
67 | ...: print(f'Identity for user `{user.username}` already exists, skipping') | |
66 | ...: continue |
|
68 | ...: continue | |
67 | ...: if attrs: |
|
69 | ...: if attrs: | |
68 | ...: external_id = attrs['id'] |
|
70 | ...: external_id = attrs['id'] | |
69 | ...: new_external_identity = ExternalIdentity() |
|
71 | ...: new_external_identity = ExternalIdentity() | |
70 | ...: new_external_identity.external_id = external_id |
|
72 | ...: new_external_identity.external_id = external_id | |
71 |
...: new_external_identity.external_username = '{ |
|
73 | ...: new_external_identity.external_username = f'{user.username}-saml-{user.user_id}' | |
72 | ...: new_external_identity.provider_name = provider |
|
74 | ...: new_external_identity.provider_name = provider | |
73 | ...: new_external_identity.local_user_id = user.user_id |
|
75 | ...: new_external_identity.local_user_id = user.user_id | |
74 | ...: new_external_identity.access_token = '' |
|
76 | ...: new_external_identity.access_token = '' | |
@@ -76,7 +78,7 b' Enter in the ishell prompt' | |||||
76 | ...: new_external_identity.alt_token = '' |
|
78 | ...: new_external_identity.alt_token = '' | |
77 | ...: Session().add(ex_identity) |
|
79 | ...: Session().add(new_external_identity) | |
78 | ...: Session().commit() |
|
80 | ...: Session().commit() | |
79 |
...: print('Set user `{ |
|
81 | ...: print(f'Set user `{user.username}` external identity bound to ExternalID:{external_id}') | |
80 |
|
82 | |||
81 | .. note:: |
|
83 | .. note:: | |
82 |
|
84 |
@@ -32,62 +32,118 b' 4. Check the `enabled` check box in the ' | |||||
32 | Example SAML Duo Security configuration |
|
32 | Example SAML Duo Security configuration | |
33 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |
|
33 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | |
34 |
|
34 | |||
35 |
Example configuration for SAML 2.0 with Duo Security provider |
|
35 | Example configuration for SAML 2.0 with Duo Security provider | |
|
36 | ||||
|
37 | ||||
|
38 | Enabled | |||
|
39 | `True`: | |||
36 |
|
40 | |||
37 | *option*: `enabled` => `True` |
|
41 | .. note:: | |
38 |
|
|
42 | Enable or disable this authentication plugin. | |
|
43 | ||||
|
44 | ||||
|
45 | Auth Cache TTL | |||
|
46 | `30`: | |||
39 |
|
47 | |||
40 | *option*: `cache_ttl` => `0` |
|
48 | .. note:: | |
41 |
|
|
49 | Amount of seconds to cache the authentication and permissions check response call for this plugin. | |
42 |
|
|
50 | Useful for expensive calls like LDAP to improve the performance of the system (0 means disabled). | |
|
51 | ||||
|
52 | Debug | |||
|
53 | `True`: | |||
43 |
|
54 | |||
44 | *option*: `debug` => `True` |
|
55 | .. note:: | |
45 |
|
|
56 | Enable or disable debug mode that shows SAML errors in the RhodeCode logs. | |
|
57 | ||||
|
58 | ||||
|
59 | Auth button name | |||
|
60 | `Duo Security`: | |||
46 |
|
61 | |||
47 | *option*: `entity_id` => `http://rc-app.com/dag/saml2/idp/metadata.php` |
|
62 | .. note:: | |
48 | # Identity Provider entity/metadata URI. |
|
63 | Alternative authentication display name. E.g. DuoAuth, CorporateID etc. | |
49 | # E.g. https://duo-gateway.com/dag/saml2/idp/metadata.php |
|
64 | ||
|
65 | ||||
|
66 | Entity ID | |||
|
67 | `https://my-duo-gateway.com/dag/saml2/idp/metadata.php`: | |||
|
68 | ||||
|
69 | .. note:: | |||
|
70 | Identity Provider entity/metadata URI. | |||
|
71 | E.g. https://duo-gateway.com/dag/saml2/idp/metadata.php | |||
|
72 | ||||
|
73 | SSO URL | |||
|
74 | `https://duo-gateway.com/dag/saml2/idp/SSOService.php?spentityid=<metadata_entity_id>`: | |||
50 |
|
75 | |||
51 | *option*: `sso_service_url` => `http://rc-app.com/dag/saml2/idp/SSOService.php?spentityid=http://rc.local.pl/_admin/auth/duosecurity/saml-metadata` |
|
76 | .. note:: | |
52 |
|
|
77 | SSO (SingleSignOn) endpoint URL of the IdP. This can be used to initiate login. Also known as Login URL. | |
53 |
|
|
78 | E.g. http://rc-app.com/dag/saml2/idp/SSOService.php?spentityid=https://docker-dev/_admin/auth/duosecurity/saml-metadata | |
|
79 | ||||
|
80 | SLO URL | |||
|
81 | `https://duo-gateway.com/dag/saml2/idp/SingleLogoutService.php?ReturnTo=<return_url>`: | |||
54 |
|
82 | |||
55 | *option*: `slo_service_url` => `http://rc-app.com/dag/saml2/idp/SingleLogoutService.php?ReturnTo=http://rc-app.com/dag/module.php/duosecurity/logout.php` |
|
83 | .. note:: | |
56 |
|
|
84 | SLO (SingleLogout) endpoint URL of the IdP. Also known as Logout URL. | |
57 |
|
|
85 | E.g. http://rc-app.com/dag/saml2/idp/SingleLogoutService.php?ReturnTo=https://docker-dev/_admin/auth/duosecurity/saml-sign-out-endpoint | |
58 |
|
86 | |||
59 | *option*: `x509cert` => `<CERTIFICATE_STRING>` |
|
87 | x509cert | |
60 | # Identity provider public x509 certificate. It will be converted to single-line format without headers |
|
88 | `<CERTIFICATE_STRING>`: | |
61 |
|
89 | |||
62 | *option*: `name_id_format` => `sha-1` |
|
90 | .. note:: | |
63 | # The format that specifies how the NameID is sent to the service provider. |
|
91 | Identity provider public x509 certificate. It will be converted to single-line format without headers. | |
|
92 | Download the raw base64 encoded certificate from the Identity provider and paste it here. | |||
|
93 | ||||
|
94 | SAML Signature | |||
|
95 | `sha-256`: | |||
|
96 | ||||
|
97 | .. note:: | |||
|
98 | Type of Algorithm to use for verification of SAML signature on Identity provider side. | |||
|
99 | ||||
|
100 | SAML Digest | |||
|
101 | `sha-256`: | |||
64 |
|
102 | |||
65 | *option*: `signature_algo` => `sha-256` |
|
103 | .. note:: | |
66 |
|
|
104 | Type of Algorithm to use for verification of SAML digest on Identity provider side. | |
|
105 | ||||
|
106 | Service Provider Cert Dir | |||
|
107 | `/etc/rhodecode/conf/saml_ssl/`: | |||
67 |
|
108 | |||
68 | *option*: `digest_algo` => `sha-256` |
|
109 | .. note:: | |
69 | # Type of Algorithm to use for verification of SAML digest on Identity provider side |
|
110 | Optional directory to store service provider certificate and private keys. | |
|
111 | Expected certs for the SP should be stored in this folder as: | |||
|
112 | ||||
|
113 | * sp.key Private Key | |||
|
114 | * sp.crt Public cert | |||
|
115 | * sp_new.crt Future Public cert | |||
|
116 | ||||
|
117 | Also you can use other cert to sign the metadata of the SP using the: | |||
70 |
|
118 | |||
71 | *option*: `cert_dir` => `/etc/saml/` |
|
119 | * metadata.key | |
72 | # Optional directory to store service provider certificate and private keys. |
|
120 | * metadata.crt | |
73 | # Expected certs for the SP should be stored in this folder as: |
|
121 | ||
74 | # * sp.key Private Key |
|
122 | Expected NameID Format | |
75 | # * sp.crt Public cert |
|
123 | `nameid-format:emailAddress`: | |
76 | # * sp_new.crt Future Public cert |
|
124 | ||
77 | # |
|
125 | .. note:: | |
78 | # Also you can use other cert to sign the metadata of the SP using the: |
|
126 | The format that specifies how the NameID is sent to the service provider. | |
79 | # * metadata.key |
|
127 | ||
80 | # * metadata.crt |
|
128 | User ID Attribute | |
|
129 | `PersonImmutableID`: | |||
81 |
|
130 | |||
82 | *option*: `user_id_attribute` => `PersonImmutableID` |
|
131 | .. note:: | |
83 |
|
|
132 | User ID Attribute name. This defines which attribute in SAML response will be used to link accounts via unique id. | |
84 |
|
|
133 | Ensure this is returned from DuoSecurity for example via duo_username. | |
|
134 | ||||
|
135 | Username Attribute | |||
|
136 | `User.username`: | |||
85 |
|
137 | |||
86 | *option*: `username_attribute` => `User.username` |
|
138 | .. note:: | |
87 |
|
|
139 | Username Attribute name. This defines which attribute in SAML response will map to a username. | |
88 |
|
140 | |||
89 | *option*: `email_attribute` => `User.email` |
|
141 | Email Attribute | |
90 | # Email Attribute name. This defines which attribute in SAML response will map to an email address. |
|
142 | `User.email`: | |
|
143 | ||||
|
144 | .. note:: | |||
|
145 | Email Attribute name. This defines which attribute in SAML response will map to an email address. | |||
|
146 | ||||
91 |
|
147 | |||
92 |
|
148 | |||
93 | Below is example setup from DUO Administration page that can be used with above config. |
|
149 | Below is example setup from DUO Administration page that can be used with above config. |
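The x509cert value must be the certificate body on a single line. RhodeCode performs this
conversion for you, but if you want to prepare the value yourself, a small Python sketch such
as the following strips the PEM headers and line breaks (the file name ``idp_cert.pem`` is
illustrative)::

    # flatten a PEM certificate for the `x509cert` field
    def pem_to_single_line(path: str) -> str:
        with open(path) as f:
            lines = [ln.strip() for ln in f]
        # drop the -----BEGIN/END CERTIFICATE----- headers, join the base64 body
        body = [ln for ln in lines if ln and not ln.startswith('-----')]
        return ''.join(body)

    if __name__ == '__main__':
        print(pem_to_single_line('idp_cert.pem'))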
@@ -15,5 +15,6 @@ Please check for reference two example p

   auth-saml-duosecurity
   auth-saml-onelogin
   auth-saml-azure
   auth-saml-bulk-enroll-users
@@ -32,62 +32,117 @@ 4. Check the `enabled` check box in the

Example SAML OneLogin configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Example configuration for SAML 2.0 with OneLogin provider


Enabled
`True`:

.. note::
   Enable or disable this authentication plugin.


Auth Cache TTL
`30`:

.. note::
   Amount of seconds to cache the authentication and permissions check response call for this plugin.
   Useful for expensive calls like LDAP to improve the performance of the system (0 means disabled).

Debug
`True`:

.. note::
   Enable or disable debug mode that shows SAML errors in the RhodeCode logs.


Auth button name
`OneLogin`:

.. note::
   Alternative authentication display name. E.g. AzureAuth, CorporateID etc.


Entity ID
`https://app.onelogin.com/saml/metadata/<onelogin_connector_id>`:

.. note::
   Identity Provider entity/metadata URI.
   E.g. https://app.onelogin.com/saml/metadata/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

SSO URL
`https://app.onelogin.com/trust/saml2/http-post/sso/<onelogin_connector_id>`:

.. note::
   SSO (SingleSignOn) endpoint URL of the IdP. This can be used to initialize login. Also known as Login URL.
   E.g. https://app.onelogin.com/trust/saml2/http-post/sso/<onelogin_connector_id>

SLO URL
`https://app.onelogin.com/trust/saml2/http-redirect/slo/<onelogin_connector_id>`:

.. note::
   SLO (SingleLogout) endpoint URL of the IdP. Also known as Logout URL.
   E.g. https://app.onelogin.com/trust/saml2/http-redirect/slo/<onelogin_connector_id>

x509cert
`<CERTIFICATE_STRING>`:

.. note::
   Identity provider public x509 certificate. It will be converted to single-line format without headers.
   Download the raw base64 encoded certificate from the Identity provider and paste it here.

SAML Signature
`sha-256`:

.. note::
   Type of algorithm to use for verification of the SAML signature on the Identity provider side.

SAML Digest
`sha-256`:

.. note::
   Type of algorithm to use for verification of the SAML digest on the Identity provider side.

Service Provider Cert Dir
`/etc/rhodecode/conf/saml_ssl/`:

.. note::
   Optional directory to store the service provider certificate and private keys.
   Expected certs for the SP should be stored in this folder as:

   * sp.key      Private Key
   * sp.crt      Public cert
   * sp_new.crt  Future Public cert

   You can also use another cert to sign the metadata of the SP using:

   * metadata.key
   * metadata.crt

Expected NameID Format
`nameid-format:emailAddress`:

.. note::
   The format that specifies how the NameID is sent to the service provider.

User ID Attribute
`PersonImmutableID`:

.. note::
   User ID Attribute name. This defines which attribute in the SAML response will be used to link accounts via unique id.
   Ensure this attribute is actually returned in the SAML response from the Identity Provider.

Username Attribute
`User.username`:

.. note::
   Username Attribute name. This defines which attribute in the SAML response will map to a username.

Email Attribute
`User.email`:

.. note::
   Email Attribute name. This defines which attribute in the SAML response will map to an email address.
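For orientation, the options above map directly onto the IdP section of a python3-saml
settings dictionary (python3-saml 1.16.0 is the SAML toolkit pinned in this release's
requirements). The sketch below only illustrates that mapping with the OneLogin placeholder
values; it is not RhodeCode's internal configuration format::

    # hypothetical python3-saml settings fragment built from the plugin fields
    idp_settings = {
        "idp": {
            # "Entity ID"
            "entityId": "https://app.onelogin.com/saml/metadata/<onelogin_connector_id>",
            # "SSO URL" (login)
            "singleSignOnService": {
                "url": "https://app.onelogin.com/trust/saml2/http-post/sso/<onelogin_connector_id>",
            },
            # "SLO URL" (logout)
            "singleLogoutService": {
                "url": "https://app.onelogin.com/trust/saml2/http-redirect/slo/<onelogin_connector_id>",
            },
            # "x509cert": single-line base64 body without PEM headers
            "x509cert": "<CERTIFICATE_STRING>",
        },
    }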
@@ -29,6 +29,7 @@ administrator greater control over how u

   auth-saml-generic
   auth-saml-onelogin
   auth-saml-duosecurity
   auth-saml-azure
   auth-crowd
   auth-pam
   ssh-connection
@@ -4,237 +4,8 @@

Development setup
===================

-RhodeCode Enterprise runs inside a Nix managed environment. This ensures build
-environment dependencies are correctly declared and installed during setup.
-It also enables atomic upgrades, rollbacks, and multiple instances of RhodeCode
-Enterprise running with isolation.
-
-To set up RhodeCode Enterprise inside the Nix environment, use the following steps:
-
-
-Setup Nix Package Manager
--------------------------
-
-To install the Nix Package Manager, please run::
-
-   $ curl https://releases.nixos.org/nix/nix-2.3.4/install | sh
-
-or go to https://nixos.org/nix/ and follow the installation instructions.
-Once this is correctly set up on your system, you should be able to use the
-following commands:
-
-* `nix-env`
-
-* `nix-shell`
-
-
-.. tip::
-
-   Update your channels frequently by running ``nix-channel --update``.
-
-.. note::
-
-   To uninstall nix run the following:
-
-   remove the . "$HOME/.nix-profile/etc/profile.d/nix.sh" line in your ~/.profile or ~/.bash_profile
-   rm -rf $HOME/{.nix-channels,.nix-defexpr,.nix-profile,.config/nixpkgs}
-   sudo rm -rf /nix
-
-Switch nix to the latest STABLE channel
----------------------------------------
-
-run::
-
-   nix-channel --add https://nixos.org/channels/nixos-20.03 nixpkgs
-
-Followed by::
-
-   nix-channel --update
-   nix-env -i nix-2.3.4
-
-
-Install required binaries
--------------------------
-
-We need some handy tools first.
-
-run::
-
-   nix-env -i nix-prefetch-hg
-   nix-env -i nix-prefetch-git
-
-
-Speed up JS build by installing PhantomJS
------------------------------------------
-
-PhantomJS will be downloaded each time nix-shell is invoked. To speed this up by
-using an already downloaded version, do this::
-
-   nix-env -i phantomjs-2.1.1
-
-   # and set nix bin path
-   export PATH=$PATH:~/.nix-profile/bin
-
-
-Clone the required repositories
--------------------------------
-
-After Nix is set up, clone the RhodeCode Enterprise Community Edition and
-RhodeCode VCSServer repositories into the same directory.
-RhodeCode currently is using Mercurial Version Control System, please make sure
-you have it installed before continuing.
-
-To obtain the required sources, use the following commands::
-
-   mkdir rhodecode-develop && cd rhodecode-develop
-   hg clone -u default https://code.rhodecode.com/rhodecode-enterprise-ce
-   hg clone -u default https://code.rhodecode.com/rhodecode-vcsserver
-
-.. note::
-
-   If you cannot clone the repository, please contact us via support@rhodecode.com
-
-
-Install some required libraries
--------------------------------
-
-There are some required drivers and dev libraries that we need to install to
-test RhodeCode under different types of databases. For example in Ubuntu we
-need to install the following.
-
-required libraries::
-
-   # svn related
-   sudo apt-get install libapr1-dev libaprutil1-dev
-   sudo apt-get install libsvn-dev
-   # libcurl required too
-   sudo apt-get install libcurl4-openssl-dev
-   # mysql/pg server for development, optional
-   sudo apt-get install mysql-server libmysqlclient-dev
-   sudo apt-get install postgresql postgresql-contrib libpq-dev
-
-
-Enter the Development Shell
----------------------------
-
-The final step is to start the development shells. To do this, run the
-following command from inside the cloned repository::
-
-   # first, the vcsserver
-   cd ~/rhodecode-vcsserver
-   nix-shell
-
-   # then enterprise sources
-   cd ~/rhodecode-enterprise-ce
-   nix-shell
-
-.. note::
-
-   On the first run, this will take a while to download and optionally compile
-   a few things. The following runs will be faster. The development shell works
-   fine on both MacOS and Linux platforms.
-
-
-Create config.nix for development
----------------------------------
-
-In order to run proper tests and setup linking across projects, a config.nix
-file needs to be set up::
-
-   # create config
-   mkdir -p ~/.nixpkgs
-   touch ~/.nixpkgs/config.nix
-
-   # put the below content into the ~/.nixpkgs/config.nix file
-   # adjust the paths to where you cloned your repositories
-
-   {
-     rc = {
-       sources = {
-         rhodecode-vcsserver = "/home/dev/rhodecode-vcsserver";
-         rhodecode-enterprise-ce = "/home/dev/rhodecode-enterprise-ce";
-         rhodecode-enterprise-ee = "/home/dev/rhodecode-enterprise-ee";
-       };
-     };
-   }
-
-
-Creating a Development Configuration
-------------------------------------
-
-To create a development environment for RhodeCode Enterprise,
-use the following steps:
-
-1. Create a copy of the vcsserver config:
-   `cp ~/rhodecode-vcsserver/configs/development.ini ~/rhodecode-vcsserver/configs/dev.ini`
-2. Create a copy of the rhodecode config:
-   `cp ~/rhodecode-enterprise-ce/configs/development.ini ~/rhodecode-enterprise-ce/configs/dev.ini`
-3. Adjust the configuration settings to your needs if needed.
-
-.. note::
-
-   It is recommended to use the name `dev.ini` since it's included in the .hgignore file.
-
-
-Setup the Development Database
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-To create a development database, use the following example. This is a one
-time operation executed from the nix-shell of rhodecode-enterprise-ce sources::
-
-   rc-setup-app dev.ini \
-     --user=admin --password=secret \
-     --email=admin@example.com \
-     --repos=~/my_dev_repos
-
-
-Compile CSS and JavaScript
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-To use the application's frontend and prepare it for production deployment,
-you will need to compile the CSS and JavaScript with Grunt.
-This is easily done from within the nix-shell using the following command::
-
-   make web-build
-
-When developing new features you will need to recompile following any
-changes made to the CSS or JavaScript files::
-
-   grunt watch
-
-This prepares the development (with comments/whitespace) versions of files.
-
-
-Start the Development Servers
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-From the rhodecode-vcsserver directory, start the development server in another
-nix-shell, using the following command::
-
-   pserve configs/dev.ini
-
-In the adjacent nix-shell which you created for your development server, you may
-now start CE with the following command::
-
-   pserve --reload configs/dev.ini
-
-.. note::
-
-   The `--reload` flag will automatically reload the server when a source file changes.
-
-
-Run the Environment Tests
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Please make sure that the tests are passing to verify that your environment is
-set up correctly. RhodeCode uses py.test to run tests.
-While your instance is running, start a new nix-shell and simply run
-``make test`` to run the basic test suite.
+Please refer to the RCstack installation documentation for instructions on setting up a
+dev environment:
+https://docs.rhodecode.com/rcstack/dev/dev-setup.html

Need Help?
^^^^^^^^^^
@@ -37,6 +37,18 @@ and commit files and |repos| while manag

.. toctree::
   :maxdepth: 1
   :caption: Documentation directory

   Back to documentation directory <https://docs.rhodecode.com/>

.. toctree::
   :maxdepth: 1
   :caption: RhodeCode RCstack Documentation

   RhodeCode RCstack Installer <https://docs.rhodecode.com/rcstack/>

.. toctree::
   :maxdepth: 1
   :caption: Admin Documentation

   install/quick-start
@@ -66,6 +66,7 @@ Output should look similar to this:

   fb77fb6496c6   channelstream/channelstream:0.7.1   Up 2 hours (healthy)   rc_cluster_services-channelstream-1   8000/tcp
   cb6c5c022f5b   postgres:14.6                       Up 2 hours (healthy)   rc_cluster_services-database-1        5432/tcp

At this point you should be able to access:

- RhodeCode instance at your domain entered, e.g. http://rhodecode.local, the default access

@@ -76,6 +77,7 @@ At this point you should be able to acce

   RHODECODE_USER_PASS=super-secret-password

.. note::

   Recommended post quick start install instructions:

@@ -85,7 +87,6 @@ At this point you should be able to acce

   * Set up :ref:`indexing-ref`
   * Familiarise yourself with the :ref:`rhodecode-admin-ref` section.

-.. _rhodecode.com/download/: https://rhodecode.com/download/
.. _rhodecode.com: https://rhodecode.com/
.. _rhodecode.com/register: https://rhodecode.com/register/
.. _rhodecode.com/download: https://rhodecode.com/download/
@@ -1,23 +1,12 @@

.. _install-sqlite-database:

SQLite (Deprecated)
-------------------

.. important::

   As of 5.x, SQLite is no longer supported; we advise migrating to MySQL or PostgreSQL.

-   We do not recommend using SQLite in a large development environment
-   as it has an internal locking mechanism which can become a performance
-   bottleneck when there are more than 5 concurrent users.
-
-|RCE| installs SQLite as the default database if you do not specify another
-during installation. SQLite is suitable for small teams,
-projects with a low load, and evaluation purposes since it is built into
-|RCE| and does not require any additional database server.
-
-Using MySQL or PostgreSQL in a large setup gives you much greater
-performance, and while migration tools exist to move from one database type
-to another, it is better to get it right first time and to immediately use
-MySQL or PostgreSQL when you deploy |RCE| in a production environment.

Migrating From SQLite to PostgreSQL
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -42,6 +42,7 @@ Newer Operating system locales

   the local-archive format, which is now incompatible with our used glibc 2.26.

Mostly affected are:

- Fedora 23+
- Ubuntu 18.04
- CentOS / RHEL 8
@@ -93,3 +94,24 @@ example to pass the correct locale infor

   [Install]
   WantedBy=multi-user.target


Merge stuck in "merging" status
-------------------------------

Similar issues:

- Pull Request duplicated and/or stuck in "creating" status.

Mostly affected are:

- Kubernetes AWS EKS setup with NFS as shared storage
- AWS EFS as shared storage

Workaround:

1. Manually clear the repo cache via the UI:
   :menuselection:`Repository Settings --> Caches --> Invalidate repository cache`

2. Open the problematic PR and reset its status to "created".

Now you can merge the PR normally.
@@ -10,20 +10,20 @@ Release Date

New Features
^^^^^^^^^^^^

- We've introduced 2FA for users. Now alongside the external auth 2FA support, RhodeCode allows enabling 2FA for users.
  2FA options will be available for each user individually, or enforced via authentication plugins like LDAP, or internal.
- Email-based log-in. RhodeCode now allows logging in using email as well as username for the main authentication type.
- Ability to replace a file using the web UI. Now one can replace an existing file from the web UI.
- GIT LFS Sync automation. Remote push/pull commands now can also sync GIT LFS objects.
- Added ability to remove or close branches from the web UI.
- Added ability to delete a branch automatically after merging a PR for git repositories.
- Added support for an S3-based archive_cache that allows storing cached archives in an S3-compatible object store.


General
^^^^^^^

- Upgraded all dependency libraries to their latest available versions.
- Repository storage is no longer controlled via DB settings, but the .ini file. This allows easier automated deployments.
- Bumped mercurial to 6.7.4
- Mercurial: enable httppostarguments for better support of large repositories with lots of heads.

@@ -39,21 +39,20 @@ Performance

Performance
^^^^^^^^^^^

- Introduced a full rewrite of the ssh backend for performance. The result is a 2-5x speed improvement for operations over ssh.
  Enable the new ssh wrapper by setting: `ssh.wrapper_cmd = /home/rhodecode/venv/bin/rc-ssh-wrapper-v2`
- Introduced a new hooks subsystem that is more scalable and faster; enable it by setting: `vcs.hooks.protocol = celery`


Fixes
^^^^^

- Archives: Zip archive download breaks when a gitmodules file is present.
- Branch permissions: fixed bug preventing specifying own rules from a 4.X install.
- SVN: refactored svn events, thus fixing support for it in a dockerized environment.
- Fixed empty server url in PR link after push from cli.


Upgrade notes
^^^^^^^^^^^^^

- RhodeCode 5.1.0 is a major feature release after the big 5.0.0 python3 migration. Happy to ship a first feature-rich release.
@@ -9,7 +9,9 @@ Release Notes

.. toctree::
   :maxdepth: 1

   release-notes-5.2.0.rst
   release-notes-5.1.2.rst
   release-notes-5.1.1.rst
   release-notes-5.1.0.rst
   release-notes-5.0.3.rst
   release-notes-5.0.2.rst
@@ -4,7 +4,7 @@ furo==2023.9.10

sphinx-press-theme==0.8.0
sphinx-rtd-theme==1.3.0

pygments==2.18.0

docutils<0.19
markupsafe==2.1.3
@@ -5,7 +5,7 @@ alembic==1.13.1

markupsafe==2.1.2
sqlalchemy==1.4.52
greenlet==3.0.3
typing_extensions==4.12.2
async-timeout==4.0.3
babel==2.12.1
beaker==1.12.1

@@ -18,8 +18,8 @@ celery==5.3.6

click==8.1.3
click-repl==0.2.0
click==8.1.3
prompt_toolkit==3.0.47
wcwidth==0.2.13
six==1.16.0
kombu==5.3.5
amqp==5.2.0

@@ -33,7 +33,7 @@ channelstream==0.7.1

gevent==24.2.1
greenlet==3.0.3
zope.event==5.0.0
zope.interface==7.0.3
itsdangerous==1.1.0
marshmallow==2.18.0
pyramid==2.0.2

@@ -46,7 +46,7 @@ channelstream==0.7.1

venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
zope.interface==7.0.3
pyramid-jinja2==2.10
jinja2==3.1.2
markupsafe==2.1.2

@@ -61,7 +61,7 @@ channelstream==0.7.1

venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
zope.interface==7.0.3
zope.deprecation==5.0.0
python-dateutil==2.8.2
six==1.16.0

@@ -87,32 +87,31 @@ dogpile.cache==1.3.3

pbr==5.11.1
formencode==2.1.0
six==1.16.0
fsspec==2024.9.0
gunicorn==23.0.0
packaging==24.1
gevent==24.2.1
greenlet==3.0.3
zope.event==5.0.0
zope.interface==7.0.3
ipython==8.26.0
-backcall==0.2.0
decorator==5.1.1
jedi==0.19.1
parso==0.8.4
matplotlib-inline==0.1.7
traitlets==5.14.3
pexpect==4.9.0
ptyprocess==0.7.0
-pickleshare==0.7.5
prompt_toolkit==3.0.47
wcwidth==0.2.13
pygments==2.18.0
stack-data==0.6.3
asttokens==2.4.1
six==1.16.0
executing==2.0.1
pure_eval==0.2.3
traitlets==5.14.3
typing_extensions==4.12.2
markdown==3.4.3
msgpack==1.0.8
mysqlclient==2.1.1

@@ -127,7 +126,7 @@ nbconvert==7.7.3

markupsafe==2.1.2
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.14.3
jupyterlab-pygments==0.2.2
markupsafe==2.1.2
mistune==2.0.5

@@ -135,15 +134,15 @@ nbconvert==7.7.3

jupyter_client==8.3.0
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.14.3
python-dateutil==2.8.2
six==1.16.0
pyzmq==25.0.0
tornado==6.2
traitlets==5.14.3
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.14.3
nbformat==5.9.2
fastjsonschema==2.18.0
jsonschema==4.18.6

@@ -151,9 +150,9 @@ nbconvert==7.7.3

pyrsistent==0.19.3
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.14.3
traitlets==5.14.3
traitlets==5.14.3
nbformat==5.9.2
fastjsonschema==2.18.0
jsonschema==4.18.6

@@ -161,20 +160,20 @@ nbconvert==7.7.3

pyrsistent==0.19.3
jupyter_core==5.3.1
platformdirs==3.10.0
traitlets==5.14.3
traitlets==5.14.3
pandocfilters==1.5.0
pygments==2.18.0
tinycss2==1.2.1
webencodings==0.5.1
traitlets==5.14.3
orjson==3.10.7
paste==3.10.1
premailer==3.10.0
cachetools==5.3.3
cssselect==1.2.0
cssutils==2.6.0
lxml==5.3.0
requests==2.28.2
certifi==2022.12.7
charset-normalizer==3.1.0

@@ -191,33 +190,6 @@ pycurl==7.45.3

pymysql==1.0.3
pyotp==2.8.0
pyparsing==3.1.1
-pyramid-debugtoolbar==4.12.1
-pygments==2.15.1
-pyramid==2.0.2
-hupper==1.12
-plaster==1.1.2
-plaster-pastedeploy==1.0.1
-pastedeploy==3.1.0
-plaster==1.1.2
-translationstring==1.4
-venusian==3.0.0
-webob==1.8.7
-zope.deprecation==5.0.0
-zope.interface==6.3.0
-pyramid-mako==1.1.0
-mako==1.2.4
-markupsafe==2.1.2
-pyramid==2.0.2
-hupper==1.12
-plaster==1.1.2
-plaster-pastedeploy==1.0.1
-pastedeploy==3.1.0
-plaster==1.1.2
-translationstring==1.4
-venusian==3.0.0
-webob==1.8.7
-zope.deprecation==5.0.0
-zope.interface==6.3.0
pyramid-mailer==0.15.1
pyramid==2.0.2
hupper==1.12

@@ -229,13 +201,27 @@ pyramid-mailer==0.15.1

venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
zope.interface==7.0.3
repoze.sendmail==4.4.1
transaction==5.0.0
zope.interface==7.0.3
zope.interface==7.0.3
transaction==5.0.0
zope.interface==7.0.3
pyramid-mako==1.1.0
mako==1.2.4
markupsafe==2.1.2
pyramid==2.0.2
hupper==1.12
plaster==1.1.2
plaster-pastedeploy==1.0.1
pastedeploy==3.1.0
plaster==1.1.2
translationstring==1.4
venusian==3.0.0
webob==1.8.7
zope.deprecation==5.0.0
zope.interface==7.0.3
python-ldap==3.4.3
pyasn1==0.4.8
pyasn1-modules==0.2.8

@@ -243,20 +229,20 @@ python-ldap==3.4.3

python-memcached==1.59
six==1.16.0
python-pam==2.0.2
python3-saml==1.16.0
isodate==0.6.1
six==1.16.0
lxml==5.3.0
xmlsec==1.3.14
lxml==5.3.0
pyyaml==6.0.1
redis==5.1.0
async-timeout==4.0.3
regex==2022.10.31
routes==2.5.1
repoze.lru==0.7
six==1.16.0
s3fs==2024.9.0
aiobotocore==2.13.0
aiohttp==3.9.5
aiosignal==1.3.1

@@ -283,7 +269,7 @@ s3fs==2024.6.0

yarl==1.9.4
idna==3.4
multidict==6.0.5
fsspec==2024.9.0
simplejson==3.19.2
sshpubkeys==3.3.1
cryptography==40.0.2

@@ -293,7 +279,7 @@ sshpubkeys==3.3.1

six==1.16.0
sqlalchemy==1.4.52
greenlet==3.0.3
typing_extensions==4.12.2
supervisor==4.2.5
tzlocal==4.3
pytz-deprecation-shim==0.1.0.post0
@@ -9,6 +9,7 @@ pympler

ipdb
ipython
rich
pyramid-debugtoolbar

# format
flake8
@@ -4,38 +4,38 @@ pytest-cov==4.1.0

coverage==7.4.3
pytest==8.1.1
iniconfig==2.0.0
packaging==24.1
pluggy==1.4.0
pytest-env==1.1.3
pytest==8.1.1
iniconfig==2.0.0
packaging==24.1
pluggy==1.4.0
pytest-profiling==1.7.0
gprof2dot==2022.7.29
pytest==8.1.1
iniconfig==2.0.0
packaging==24.1
pluggy==1.4.0
six==1.16.0
pytest-rerunfailures==13.0
packaging==24.1
pytest==8.1.1
iniconfig==2.0.0
packaging==24.1
pluggy==1.4.0
pytest-runner==6.0.1
pytest-sugar==1.0.0
packaging==24.1
pytest==8.1.1
iniconfig==2.0.0
packaging==24.1
pluggy==1.4.0
termcolor==2.4.0
pytest-timeout==2.3.1
pytest==8.1.1
iniconfig==2.0.0
packaging==24.1
pluggy==1.4.0
webtest==3.0.0
beautifulsoup4==4.12.3
@@ -40,6 +40,7 @@ from rhodecode.lib import ext_json

    from rhodecode.lib.utils2 import safe_str
    from rhodecode.lib.plugins.utils import get_plugin_settings
    from rhodecode.model.db import User, UserApiKeys
    from rhodecode.config.patches import inspect_getargspec

    log = logging.getLogger(__name__)

@@ -186,7 +187,6 @@ def request_view(request):

        exposed method
        """
        # cython compatible inspect
-       from rhodecode.config.patches import inspect_getargspec
        inspect = inspect_getargspec()

        # check if we can find this session using api_key, get_by_auth_token
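For context, the ``inspect_getargspec`` patch exists because newer Python 3 releases removed
``inspect.getargspec``. A compatibility shim along these lines restores the attribute so
legacy callers keep working; this is only a sketch of the idea, not the exact
``rhodecode.config.patches`` implementation::

    import inspect

    def inspect_getargspec():
        """Return the inspect module with a usable getargspec attribute.

        Python 3.11 dropped inspect.getargspec; mapping it onto
        getfullargspec keeps old call sites working (sketch only).
        """
        if not hasattr(inspect, 'getargspec'):
            inspect.getargspec = inspect.getfullargspec
        return inspect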
@@ -33,7 +33,7 @@ from rhodecode.lib.ext_json import json

    from rhodecode.lib.utils2 import safe_int
    from rhodecode.model.db import UserIpMap
    from rhodecode.model.scm import ScmModel
    from rhodecode.apps.file_store import utils as store_utils
    from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
        FileOverSizeException
@@ -328,8 +328,8 @@ def get_method(request, apiuser, pattern

              ]
            error : null
        """
        from rhodecode.config import patches
        inspect = patches.inspect_getargspec()

        if not has_superadmin_permission(apiuser):
            raise JSONRPCForbidden()
@@ -43,7 +43,38 @@ def admin_routes(config):

    from rhodecode.apps.admin.views.system_info import AdminSystemInfoSettingsView
    from rhodecode.apps.admin.views.user_groups import AdminUserGroupsView
    from rhodecode.apps.admin.views.users import AdminUsersView, UsersView
    from rhodecode.apps.admin.views.security import AdminSecurityView

    # Security EE feature

    config.add_route(
        'admin_security',
        pattern='/security')
    config.add_view(
        AdminSecurityView,
        attr='security',
        route_name='admin_security', request_method='GET',
        renderer='rhodecode:templates/admin/security/security.mako')

    config.add_route(
        name='admin_security_update',
        pattern='/security/update')
    config.add_view(
        AdminSecurityView,
        attr='security_update',
        route_name='admin_security_update', request_method='POST',
        renderer='rhodecode:templates/admin/security/security.mako')

    config.add_route(
        name='admin_security_modify_allowed_vcs_client_versions',
        pattern=ADMIN_PREFIX + '/security/modify/allowed_vcs_client_versions')
    config.add_view(
        AdminSecurityView,
        attr='vcs_whitelisted_client_versions_edit',
        route_name='admin_security_modify_allowed_vcs_client_versions', request_method=('GET', 'POST'),
        renderer='rhodecode:templates/admin/security/edit_allowed_vcs_client_versions.mako')


    config.add_route(
        name='admin_audit_logs',
        pattern='/audit_logs')
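Once registered, links to these pages are generated with Pyramid's standard URL helpers; for
example (illustrative only, the concrete URL depends on the prefix under which
``admin_routes`` is mounted)::

    def example_view(request):
        # resolves the pattern registered for 'admin_security' above
        security_url = request.route_path('admin_security')
        return {'security_url': security_url}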
@@ -75,14 +75,21 @@ class AdminSettingsView(BaseAppView):

        if not ret:
            raise Exception('Could not get application ui settings !')
        settings = {
            # legacy param that needs to be kept
            'web_push_ssl': False
        }
        for each in ret:
            k = each.ui_key
            v = each.ui_value
            # skip some options if they are defined
            if k in ['push_ssl']:
                continue

            if k == '/':
                k = 'root_path'

            if k in ['publish', 'enabled']:
                v = str2bool(v)

            if k.find('.') != -1:

@@ -92,6 +99,7 @@ class AdminSettingsView(BaseAppView):

                v = each.ui_active

            settings[each.ui_section + '_' + k] = v

        return settings

    @classmethod
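To make the flattening concrete: each ui row ends up keyed as ``<ui_section>_<ui_key>``.
A rough sketch of that transformation, with a stand-in object instead of the real DB row
and a simplified stand-in for ``str2bool``::

    from types import SimpleNamespace

    # stand-in for a settings row (illustrative values)
    row = SimpleNamespace(ui_section='vcs_git_lfs', ui_key='enabled', ui_value='True')

    settings = {'web_push_ssl': False}  # legacy param kept, as above
    k, v = row.ui_key, row.ui_value
    if k in ['publish', 'enabled']:
        v = v.lower() in ('true', '1', 'yes', 'on')  # roughly what str2bool does
    settings[row.ui_section + '_' + k] = v
    # -> {'web_push_ssl': False, 'vcs_git_lfs_enabled': True}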
@@ -164,7 +172,6 @@ class AdminSettingsView(BaseAppView):

            return Response(html)

        try:
-           model.update_global_ssl_setting(form_result['web_push_ssl'])
            model.update_global_hook_settings(form_result)

            model.create_or_update_global_svn_settings(form_result)
@@ -171,11 +171,17 @@ class AdminSystemInfoSettingsView(BaseAp

            (_('Gist storage info'), val('storage_gist')['text'], state('storage_gist')),
            ('', '', ''),  # spacer

            (_('Artifacts storage backend'), val('storage_artifacts')['type'], state('storage_artifacts')),
            (_('Artifacts storage location'), val('storage_artifacts')['path'], state('storage_artifacts')),
            (_('Artifacts info'), val('storage_artifacts')['text'], state('storage_artifacts')),
            ('', '', ''),  # spacer

            (_('Archive cache storage backend'), val('storage_archive')['type'], state('storage_archive')),
            (_('Archive cache storage location'), val('storage_archive')['path'], state('storage_archive')),
            (_('Archive cache info'), val('storage_archive')['text'], state('storage_archive')),
            ('', '', ''),  # spacer

            (_('Temp storage location'), val('storage_temp')['path'], state('storage_temp')),
            (_('Temp storage info'), val('storage_temp')['text'], state('storage_temp')),
            ('', '', ''),  # spacer
@@ -16,7 +16,8 @@

# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import os
-from rhodecode.apps.file_store import config_keys


from rhodecode.config.settings_maker import SettingsMaker


@@ -24,18 +25,48 @@ def _sanitize_settings_and_apply_default

    """
    Set defaults, convert to python types and validate settings.
    """
    from rhodecode.apps.file_store import config_keys

    # translate "legacy" params into new config
    settings.pop(config_keys.deprecated_enabled, True)
    if config_keys.deprecated_backend in settings:
        # if legacy backend key is detected we use "legacy" backward compat setting
        settings.pop(config_keys.deprecated_backend)
        settings[config_keys.backend_type] = config_keys.backend_legacy_filesystem

    if config_keys.deprecated_store_path in settings:
        store_path = settings.pop(config_keys.deprecated_store_path)
        settings[config_keys.legacy_filesystem_storage_path] = store_path

    settings_maker = SettingsMaker(settings)

    default_cache_dir = settings['cache_dir']
    default_store_dir = os.path.join(default_cache_dir, 'artifacts_filestore')

    # set default backend
    settings_maker.make_setting(config_keys.backend_type, config_keys.backend_legacy_filesystem)

    # legacy filesystem defaults
    settings_maker.make_setting(config_keys.legacy_filesystem_storage_path, default_store_dir, default_when_empty=True,)

    # filesystem defaults
    settings_maker.make_setting(config_keys.filesystem_storage_path, default_store_dir, default_when_empty=True,)
    settings_maker.make_setting(config_keys.filesystem_shards, 8, parser='int')

    # objectstore defaults
    settings_maker.make_setting(config_keys.objectstore_url, 'http://s3-minio:9000')
    settings_maker.make_setting(config_keys.objectstore_bucket, 'rhodecode-artifacts-filestore')
    settings_maker.make_setting(config_keys.objectstore_bucket_shards, 8, parser='int')

    settings_maker.make_setting(config_keys.objectstore_region, '')
    settings_maker.make_setting(config_keys.objectstore_key, '')
    settings_maker.make_setting(config_keys.objectstore_secret, '')

    settings_maker.env_expand()


def includeme(config):

    from rhodecode.apps.file_store.views import FileStoreView

    settings = config.registry.settings
@@ -20,6 +20,38 @@
 # Definition of setting keys used to configure this module. Defined here to
 # avoid repetition of keys throughout the module.
 
-enabled = 'file_store.enabled'
-
-store_path = 'file_store.storage_path'
+# OLD and deprecated keys not used anymore
+deprecated_enabled = 'file_store.enabled'
+deprecated_backend = 'file_store.backend'
+deprecated_store_path = 'file_store.storage_path'
+
+
+backend_type = 'file_store.backend.type'
+
+backend_legacy_filesystem = 'filesystem_v1'
+backend_filesystem = 'filesystem_v2'
+backend_objectstore = 'objectstore'
+
+backend_types = [
+    backend_legacy_filesystem,
+    backend_filesystem,
+    backend_objectstore,
+]
+
+# filesystem_v1 legacy
+legacy_filesystem_storage_path = 'file_store.filesystem_v1.storage_path'
+
+
+# filesystem_v2 new option
+filesystem_storage_path = 'file_store.filesystem_v2.storage_path'
+filesystem_shards = 'file_store.filesystem_v2.shards'
+
+# objectstore
+objectstore_url = 'file_store.objectstore.url'
+objectstore_bucket = 'file_store.objectstore.bucket'
+objectstore_bucket_shards = 'file_store.objectstore.bucket_shards'
+
+objectstore_region = 'file_store.objectstore.region'
+objectstore_key = 'file_store.objectstore.key'
+objectstore_secret = 'file_store.objectstore.secret'
+
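Taken together, the new keys describe three mutually exclusive backends selected via `file_store.backend.type`. A minimal sketch (not part of the changeset) of the settings dicts an .ini file would produce for the two new backends, using only key names defined above; the storage path is an assumed example::

    filesystem_v2_settings = {
        'file_store.backend.type': 'filesystem_v2',
        'file_store.filesystem_v2.storage_path': '/var/opt/rhodecode_data/artifacts_filestore',  # assumed path
        'file_store.filesystem_v2.shards': 8,
    }

    objectstore_settings = {
        'file_store.backend.type': 'objectstore',
        'file_store.objectstore.url': 'http://s3-minio:9000',
        'file_store.objectstore.bucket': 'rhodecode-artifacts-filestore',
        'file_store.objectstore.bucket_shards': 8,
        'file_store.objectstore.key': '',     # credentials intentionally left empty here
        'file_store.objectstore.secret': '',
    }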
@@ -16,3 +16,42 @@
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
+import os
+import random
+import tempfile
+import string
+
+import pytest
+
+from rhodecode.apps.file_store import utils as store_utils
+
+
+@pytest.fixture()
+def file_store_instance(ini_settings):
+    config = ini_settings
+    f_store = store_utils.get_filestore_backend(config=config, always_init=True)
+    return f_store
+
+
+@pytest.fixture
+def random_binary_file():
+    # Generate random binary data
+    data = bytearray(random.getrandbits(8) for _ in range(1024 * 512))  # 512 KB of random data
+
+    # Create a temporary file
+    temp_file = tempfile.NamedTemporaryFile(delete=False)
+    filename = temp_file.name
+
+    try:
+        # Write the random binary data to the file
+        temp_file.write(data)
+        temp_file.seek(0)  # Rewind the file pointer to the beginning
+        yield filename, temp_file
+    finally:
+        # Close and delete the temporary file after the test
+        temp_file.close()
+        os.remove(filename)
+
+
+def generate_random_filename(length=10):
+    return ''.join(random.choices(string.ascii_letters + string.digits, k=length))
\ No newline at end of file
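For context, a sketch (not in the changeset) of how these fixtures compose in a test; `file_store_instance` and `random_binary_file` are the fixtures above, and the `store()` signature mirrors the calls used later in this changeset::

    def test_store_random_binary_file(file_store_instance, random_binary_file):
        filename, temp_file = random_binary_file
        store_uid, metadata = file_store_instance.store(
            generate_random_filename(), temp_file, metadata={'filename': filename})
        # the fixture writes exactly 512 KB, which the backend reports back
        assert metadata['size'] == 1024 * 512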
@@ -15,13 +15,16 @@
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
+
 import os
+
 import pytest
 
 from rhodecode.lib.ext_json import json
 from rhodecode.model.auth_token import AuthTokenModel
 from rhodecode.model.db import Session, FileStore, Repository, User
-from rhodecode.apps.file_store import utils
+from rhodecode.apps.file_store import utils as store_utils
+from rhodecode.apps.file_store import config_keys
 
 from rhodecode.tests import TestController
 from rhodecode.tests.routes import route_path
@@ -29,27 +32,61 @@ from rhodecode.tests.routes import route
 
 class TestFileStoreViews(TestController):
 
+    @pytest.fixture()
+    def create_artifact_factory(self, tmpdir, ini_settings):
+
+        def factory(user_id, content, f_name='example.txt'):
+
+            config = ini_settings
+            config[config_keys.backend_type] = config_keys.backend_legacy_filesystem
+
+            f_store = store_utils.get_filestore_backend(config)
+
+            filesystem_file = os.path.join(str(tmpdir), f_name)
+            with open(filesystem_file, 'wt') as f:
+                f.write(content)
+
+            with open(filesystem_file, 'rb') as f:
+                store_uid, metadata = f_store.store(f_name, f, metadata={'filename': f_name})
+            os.remove(filesystem_file)
+
+            entry = FileStore.create(
+                file_uid=store_uid, filename=metadata["filename"],
+                file_hash=metadata["sha256"], file_size=metadata["size"],
+                file_display_name='file_display_name',
+                file_description='repo artifact `{}`'.format(metadata["filename"]),
+                check_acl=True, user_id=user_id,
+            )
+            Session().add(entry)
+            Session().commit()
+            return entry
+        return factory
+
     @pytest.mark.parametrize("fid, content, exists", [
         ('abcde-0.jpg', "xxxxx", True),
         ('abcde-0.exe', "1234567", True),
         ('abcde-0.jpg', "xxxxx", False),
     ])
-    def test_get_files_from_store(self, fid, content, exists, tmpdir, user_util):
+    def test_get_files_from_store(self, fid, content, exists, tmpdir, user_util, ini_settings):
         user = self.log_user()
         user_id = user['user_id']
         repo_id = user_util.create_repo().repo_id
-        store_path = self.app._pyramid_settings[config_keys.store_path]
+
+        config = ini_settings
+        config[config_keys.backend_type] = config_keys.backend_legacy_filesystem
+
         store_uid = fid
 
         if exists:
             status = 200
-            store = utils.get_file_storage({config_keys.store_path: store_path})
+            f_store = store_utils.get_filestore_backend(config)
             filesystem_file = os.path.join(str(tmpdir), fid)
             with open(filesystem_file, 'wt') as f:
                 f.write(content)
 
             with open(filesystem_file, 'rb') as f:
-                store_uid, metadata = store.save_file(f, fid, extra_metadata={'filename': fid})
+                store_uid, metadata = f_store.store(fid, f, metadata={'filename': fid})
+            os.remove(filesystem_file)
 
             entry = FileStore.create(
                 file_uid=store_uid, filename=metadata["filename"],
@@ -69,14 +106,10 @@ class TestFileStoreViews(TestController)
 
         if exists:
             assert response.text == content
-            file_store_path = os.path.dirname(store.resolve_name(store_uid, store_path)[1])
-            metadata_file = os.path.join(file_store_path, store_uid + '.meta')
-            assert os.path.exists(metadata_file)
-            with open(metadata_file, 'rb') as f:
-                json_data = json.loads(f.read())
 
-            assert json_data
-            assert 'size' in json_data
+            metadata = f_store.get_metadata(store_uid)
+
+            assert 'size' in metadata
 
     def test_upload_files_without_content_to_store(self):
         self.log_user()
@@ -112,32 +145,6 @@ class TestFileStoreViews(TestController)
 
         assert response.json['store_fid']
 
-    @pytest.fixture()
-    def create_artifact_factory(self, tmpdir):
-        def factory(user_id, content):
-            store_path = self.app._pyramid_settings[config_keys.store_path]
-            store = utils.get_file_storage({config_keys.store_path: store_path})
-            fid = 'example.txt'
-
-            filesystem_file = os.path.join(str(tmpdir), fid)
-            with open(filesystem_file, 'wt') as f:
-                f.write(content)
-
-            with open(filesystem_file, 'rb') as f:
-                store_uid, metadata = store.save_file(f, fid, extra_metadata={'filename': fid})
-
-            entry = FileStore.create(
-                file_uid=store_uid, filename=metadata["filename"],
-                file_hash=metadata["sha256"], file_size=metadata["size"],
-                file_display_name='file_display_name',
-                file_description='repo artifact `{}`'.format(metadata["filename"]),
-                check_acl=True, user_id=user_id,
-            )
-            Session().add(entry)
-            Session().commit()
-            return entry
-        return factory
-
     def test_download_file_non_scoped(self, user_util, create_artifact_factory):
         user = self.log_user()
         user_id = user['user_id']
@@ -19,21 +19,84 @@
 import io
 import uuid
 import pathlib
+import s3fs
+
+from rhodecode.lib.hash_utils import sha256_safe
+from rhodecode.apps.file_store import config_keys
+
+
+file_store_meta = None
+
+
+def get_filestore_config(config) -> dict:
+
+    final_config = {}
+
+    for k, v in config.items():
+        if k.startswith('file_store'):
+            final_config[k] = v
+
+    return final_config
 
 
-def get_file_storage(settings):
-    from rhodecode.apps.file_store.backends.local_store import LocalFileStorage
-    from rhodecode.apps.file_store import config_keys
-    store_path = settings.get(config_keys.store_path)
-    return LocalFileStorage(base_path=store_path)
+def get_filestore_backend(config, always_init=False):
+    """
+
+    usage::
+        from rhodecode.apps.file_store import get_filestore_backend
+        f_store = get_filestore_backend(config=CONFIG)
+
+    :param config:
+    :param always_init:
+    :return:
+    """
+
+    global file_store_meta
+    if file_store_meta is not None and not always_init:
+        return file_store_meta
+
+    config = get_filestore_config(config)
+    backend = config[config_keys.backend_type]
+
+    match backend:
+        case config_keys.backend_legacy_filesystem:
+            # Legacy backward compatible storage
+            from rhodecode.apps.file_store.backends.filesystem_legacy import LegacyFileSystemBackend
+            d_cache = LegacyFileSystemBackend(
+                settings=config
+            )
+        case config_keys.backend_filesystem:
+            from rhodecode.apps.file_store.backends.filesystem import FileSystemBackend
+            d_cache = FileSystemBackend(
+                settings=config
+            )
+        case config_keys.backend_objectstore:
+            from rhodecode.apps.file_store.backends.objectstore import ObjectStoreBackend
+            d_cache = ObjectStoreBackend(
+                settings=config
+            )
+        case _:
+            raise ValueError(
+                f'file_store.backend.type only supports "{config_keys.backend_types}" got {backend}'
+            )
+
+    cache_meta = d_cache
+    return cache_meta
 
 
 def splitext(filename):
-    ext = ''.join(pathlib.Path(filename).suffixes)
+    final_ext = []
+    for suffix in pathlib.Path(filename).suffixes:
+        if not suffix.isascii():
+            continue
+
+        suffix = " ".join(suffix.split()).replace(" ", "")
+        final_ext.append(suffix)
+    ext = ''.join(final_ext)
     return filename, ext
 
 
-def uid_filename(filename, randomized=True):
+def get_uid_filename(filename, randomized=True):
     """
     Generates a randomized or stable (uuid) filename,
     preserving the original extension.
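The factory memoizes the chosen backend in the module-level `file_store_meta`, so repeated calls are cheap unless `always_init=True` forces a rebuild (as the test fixture earlier in this changeset does). A hypothetical usage sketch with a made-up storage path::

    from rhodecode.apps.file_store import utils as store_utils
    from rhodecode.apps.file_store import config_keys

    config = {
        config_keys.backend_type: config_keys.backend_filesystem,
        config_keys.filesystem_storage_path: '/tmp/artifacts',  # assumed location
        config_keys.filesystem_shards: 8,
    }
    f_store = store_utils.get_filestore_backend(config, always_init=True)

    with open('report.pdf', 'rb') as f:
        store_uid, metadata = f_store.store('report.pdf', f, metadata={'filename': 'report.pdf'})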
@@ -46,10 +109,37 @@ def uid_filename(filename, randomized=Tr
     if randomized:
         uid = uuid.uuid4()
     else:
-        hash_key = '{}.{}'.format(filename, 'store')
+        store_suffix = "store"
+        hash_key = f'{filename}.{store_suffix}'
         uid = uuid.uuid5(uuid.NAMESPACE_URL, hash_key)
     return str(uid) + ext.lower()
 
 
 def bytes_to_file_obj(bytes_data):
-    return io.StringIO(bytes_data)
+    return io.BytesIO(bytes_data)
+
+
+class ShardFileReader:
+
+    def __init__(self, file_like_reader):
+        self._file_like_reader = file_like_reader
+
+    def __getattr__(self, item):
+        if isinstance(self._file_like_reader, s3fs.core.S3File):
+            match item:
+                case 'name':
+                    # S3 FileWrapper doesn't support name attribute, and we use it
+                    return self._file_like_reader.full_name
+                case _:
+                    return getattr(self._file_like_reader, item)
+        else:
+            return getattr(self._file_like_reader, item)
+
+
+def archive_iterator(_reader, block_size: int = 4096 * 512):
+    # 4096 * 512 = 2MB
+    while 1:
+        data = _reader.read(block_size)
+        if not data:
+            break
+        yield data
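The default block size is 4096 * 512 bytes, i.e. 2 MB per chunk (the stale "64KB" comment in the submitted code is corrected above). The chunking behaviour, sketched standalone and renamed to keep it distinct from the changeset code::

    import io

    def iter_blocks(reader, block_size=4096 * 512):  # 2 MB, same default as archive_iterator
        while data := reader.read(block_size):
            yield data

    # a payload of two full blocks plus a 10-byte tail yields three chunks
    payload = io.BytesIO(b'x' * (4096 * 512 * 2 + 10))
    assert sum(len(chunk) for chunk in iter_blocks(payload)) == 4096 * 512 * 2 + 10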
@@ -17,12 +17,11 @@
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 import logging
 
-
-from pyramid.response import FileResponse
+from pyramid.response import Response
 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
 
 from rhodecode.apps._base import BaseAppView
-from rhodecode.apps.file_store import utils
+from rhodecode.apps.file_store import utils as store_utils
 from rhodecode.apps.file_store.exceptions import (
     FileNotAllowedException, FileOverSizeException)
 
@@ -31,6 +30,7 @@ from rhodecode.lib import audit_logger
 from rhodecode.lib.auth import (
     CSRFRequired, NotAnonymous, HasRepoPermissionAny, HasRepoGroupPermissionAny,
     LoginRequired)
+from rhodecode.lib.str_utils import header_safe_str
 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
 from rhodecode.model.db import Session, FileStore, UserApiKeys
 
@@ -42,7 +42,7 @@ class FileStoreView(BaseAppView):
 
     def load_default_context(self):
         c = self._get_local_tmpl_context()
-        self.storage = utils.get_file_storage(self.request.registry.settings)
+        self.f_store = store_utils.get_filestore_backend(self.request.registry.settings)
         return c
 
     def _guess_type(self, file_name):
@@ -55,10 +55,10 @@ class FileStoreView(BaseAppView):
         return _content_type, _encoding
 
     def _serve_file(self, file_uid):
-        if not self.storage.exists(file_uid):
-            store_path = self.storage.store_path(file_uid)
-            log.debug('File with FID:%s not found in the store under `%s`',
+        if not self.f_store.filename_exists(file_uid):
+            store_path = self.f_store.store_path(file_uid)
+            log.warning('File with FID:%s not found in the store under `%s`',
                       file_uid, store_path)
             raise HTTPNotFound()
 
         db_obj = FileStore.get_by_store_uid(file_uid, safe=True)
@@ -98,28 +98,25 @@ class FileStoreView(BaseAppView):
 
         FileStore.bump_access_counter(file_uid)
 
-        file_path = self.storage.store_path(file_uid)
+        file_name = db_obj.file_display_name
         content_type = 'application/octet-stream'
-        content_encoding = None
 
-        _content_type, _encoding = self._guess_type(file_path)
+        _content_type, _encoding = self._guess_type(file_name)
         if _content_type:
             content_type = _content_type
 
         # For file store we don't submit any session data, this logic tells the
         # Session lib to skip it
         setattr(self.request, '_file_response', True)
-        response = FileResponse(
-            file_path, request=self.request,
-            content_type=content_type, content_encoding=content_encoding)
+        reader, _meta = self.f_store.fetch(file_uid)
 
-        file_name = db_obj.file_display_name
+        response = Response(app_iter=store_utils.archive_iterator(reader))
 
-        response.headers["Content-Disposition"] = (
-            f'attachment; filename="{file_name}"'
-        )
+        response.content_type = str(content_type)
+        response.content_disposition = f'attachment; filename="{header_safe_str(file_name)}"'
+
         response.headers["X-RC-Artifact-Id"] = str(db_obj.file_store_id)
-        response.headers["X-RC-Artifact-Desc"] = str(db_obj.file_description)
+        response.headers["X-RC-Artifact-Desc"] = header_safe_str(db_obj.file_description)
         response.headers["X-RC-Artifact-Sha256"] = str(db_obj.file_hash)
         return response
 
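The key change here: `FileResponse` requires a local filesystem path, while the new code streams whatever file-like reader the backend's `fetch()` hands back, which is what makes the objectstore backend servable at all. The pattern, sketched standalone (names here are illustrative, not the changeset's)::

    import io
    from pyramid.response import Response

    def make_streaming_response(reader, content_type='application/octet-stream'):
        def blocks(block_size=4096 * 512):
            # drain the reader in 2 MB chunks; nothing is buffered in full
            while data := reader.read(block_size):
                yield data
        response = Response(app_iter=blocks())
        response.content_type = content_type
        return response

    resp = make_streaming_response(io.BytesIO(b'artifact payload'))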
@@ -147,8 +144,8 @@ class FileStoreView(BaseAppView):
             'user_id': self._rhodecode_user.user_id,
             'ip': self._rhodecode_user.ip_addr}}
         try:
-            store_uid, metadata = self.storage.save_file(
-                file_obj.file, filename, extra_metadata=metadata)
+            store_uid, metadata = self.f_store.store(
+                filename, file_obj.file, extra_metadata=metadata)
         except FileNotAllowedException:
             return {'store_fid': None,
                     'access_path': None,
@@ -182,7 +179,7 @@ class FileStoreView(BaseAppView):
     def download_file(self):
         self.load_default_context()
         file_uid = self.request.matchdict['fid']
-        log.debug('Requesting FID:%s from store %s', file_uid, self.storage)
+        log.debug('Requesting FID:%s from store %s', file_uid, self.f_store)
         return self._serve_file(file_uid)
 
     # in addition to @LoginRequired ACL is checked by scopes
@@ -601,26 +601,26 @@ class RepoCommitsView(RepoAppView):
         max_file_size = 10 * 1024 * 1024  # 10MB, also validated via dropzone.js
 
         try:
-            storage = store_utils.get_file_storage(self.request.registry.settings)
-            store_uid, metadata = storage.save_file(
-                file_obj.file, filename, extra_metadata=metadata,
+            f_store = store_utils.get_filestore_backend(self.request.registry.settings)
+            store_uid, metadata = f_store.store(
+                filename, file_obj.file, metadata=metadata,
                 extensions=allowed_extensions, max_filesize=max_file_size)
         except FileNotAllowedException:
             self.request.response.status = 400
             permitted_extensions = ', '.join(allowed_extensions)
-            error_msg = 'File `{}` is not allowed. ' \
-                        'Only following extensions are permitted: {}'.format(
-                filename, permitted_extensions)
+            error_msg = f'File `{filename}` is not allowed. ' \
+                        f'Only following extensions are permitted: {permitted_extensions}'
             return {'store_fid': None,
                     'access_path': None,
                     'error': error_msg}
         except FileOverSizeException:
             self.request.response.status = 400
             limit_mb = h.format_byte_size_binary(max_file_size)
+            error_msg = f'File {filename} is exceeding allowed limit of {limit_mb}.'
             return {'store_fid': None,
                     'access_path': None,
-                    'error': 'File {} is exceeding allowed limit of {}.'.format(
-                        filename, limit_mb)}
+                    'error': error_msg}
 
         try:
             entry = FileStore.create(
@@ -48,7 +48,7 @@ from rhodecode.lib.codeblocks import (
     filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
 from rhodecode.lib.utils2 import convert_line_endings, detect_mode
 from rhodecode.lib.type_utils import str2bool
-from rhodecode.lib.str_utils import safe_str, safe_int
+from rhodecode.lib.str_utils import safe_str, safe_int, header_safe_str
 from rhodecode.lib.auth import (
     LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
 from rhodecode.lib.vcs import path as vcspath
@@ -820,7 +820,7 @@ class RepoFilesView(RepoAppView):
                   "filename=\"{}\"; " \
                   "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
 
-        return safe_bytes(headers).decode('latin-1', errors='replace')
+        return header_safe_str(headers)
 
     @LoginRequired()
     @HasRepoPermissionAnyDecorator(
@@ -29,7 +29,7 @@ from rhodecode.lib import audit_logger
 from rhodecode.lib.auth import (
     LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired,
     HasRepoPermissionAny)
-from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
+from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError, AttachedArtifactsError
 from rhodecode.lib.utils2 import safe_int
 from rhodecode.lib.vcs import RepositoryError
 from rhodecode.model.db import Session, UserFollowing, User, Repository
@@ -136,6 +136,9 @@ class RepoSettingsAdvancedView(RepoAppVi
         elif handle_forks == 'delete_forks':
             handle_forks = 'delete'
 
+        repo_advanced_url = h.route_path(
+            'edit_repo_advanced', repo_name=self.db_repo_name,
+            _anchor='advanced-delete')
         try:
             old_data = self.db_repo.get_api_data()
             RepoModel().delete(self.db_repo, forks=handle_forks)
@@ -158,9 +161,6 @@ class RepoSettingsAdvancedView(RepoAppVi
                     category='success')
             Session().commit()
         except AttachedForksError:
-            repo_advanced_url = h.route_path(
-                'edit_repo_advanced', repo_name=self.db_repo_name,
-                _anchor='advanced-delete')
             delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url)
             h.flash(_('Cannot delete `{repo}` it still contains attached forks. '
                       'Try using {delete_or_detach} option.')
@@ -171,9 +171,6 @@ class RepoSettingsAdvancedView(RepoAppVi
             raise HTTPFound(repo_advanced_url)
 
         except AttachedPullRequestsError:
-            repo_advanced_url = h.route_path(
-                'edit_repo_advanced', repo_name=self.db_repo_name,
-                _anchor='advanced-delete')
             attached_prs = len(self.db_repo.pull_requests_source +
                                self.db_repo.pull_requests_target)
             h.flash(
@@ -184,6 +181,16 @@ class RepoSettingsAdvancedView(RepoAppVi
             # redirect to advanced for forks handle action ?
             raise HTTPFound(repo_advanced_url)
 
+        except AttachedArtifactsError:
+
+            attached_artifacts = len(self.db_repo.artifacts)
+            h.flash(
+                _('Cannot delete `{repo}` it still contains {num} attached artifacts. '
+                  'Consider archiving the repository instead.').format(
+                    repo=self.db_repo_name, num=attached_artifacts), category='warning')
+
+            # redirect to advanced for forks handle action ?
+            raise HTTPFound(repo_advanced_url)
         except Exception:
             log.exception("Exception during deletion of repository")
             h.flash(_('An error occurred during deletion of `%s`')
@@ -37,7 +37,7 @@ def _sanitize_settings_and_apply_default
     settings_maker.make_setting(config_keys.ssh_key_generator_enabled, True, parser='bool')
 
     settings_maker.make_setting(config_keys.authorized_keys_file_path, '~/.ssh/authorized_keys_rhodecode')
-    settings_maker.make_setting(config_keys.wrapper_cmd, '')
+    settings_maker.make_setting(config_keys.wrapper_cmd, '/usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2')
    settings_maker.make_setting(config_keys.authorized_keys_line_ssh_opts, '')
 
     settings_maker.make_setting(config_keys.ssh_hg_bin, '/usr/local/bin/rhodecode_bin/vcs_bin/hg')
@@ -23,7 +23,7 @@ generate_authorized_keyfile = 'ssh.gener
 authorized_keys_file_path = 'ssh.authorized_keys_file_path'
 authorized_keys_line_ssh_opts = 'ssh.authorized_keys_ssh_opts'
 ssh_key_generator_enabled = 'ssh.enable_ui_key_generator'
-wrapper_cmd = 'ssh.wrapper_cmd'
+wrapper_cmd = 'ssh.wrapper_cmd.v2'
 wrapper_allow_shell = 'ssh.wrapper_cmd_allow_shell'
 enable_debug_logging = 'ssh.enable_debug_logging'
 
@@ -157,7 +157,7 @@ class SshVcsServer(object):
         return exit_code, action == "push"
 
     def run(self, tunnel_extras=None):
-        self.hooks_protocol = self.settings['vcs.hooks.protocol']
+        self.hooks_protocol = self.settings['vcs.hooks.protocol.v2']
         tunnel_extras = tunnel_extras or {}
         extras = {}
         extras.update(tunnel_extras)
@@ -32,7 +32,7 @@ class GitServerCreator(object):
     config_data = {
         'app:main': {
             'ssh.executable.git': git_path,
-            'vcs.hooks.protocol': 'http',
+            'vcs.hooks.protocol.v2': 'celery',
         }
     }
     repo_name = 'test_git'
@@ -31,7 +31,7 @@ class MercurialServerCreator(object):
     config_data = {
         'app:main': {
             'ssh.executable.hg': hg_path,
-            'vcs.hooks.protocol': 'http',
+            'vcs.hooks.protocol.v2': 'celery',
         }
     }
     repo_name = 'test_hg'
@@ -29,7 +29,7 @@ class SubversionServerCreator(object):
     config_data = {
         'app:main': {
             'ssh.executable.svn': svn_path,
-            'vcs.hooks.protocol': 'http',
+            'vcs.hooks.protocol.v2': 'celery',
         }
     }
     repo_name = 'test-svn'
@@ -52,6 +52,7 @@ class AuthnRootResource(AuthnResourceBas
     """
     This is the root traversal resource object for the authentication settings.
     """
+    is_root = True
 
     def __init__(self):
         self._store = collections.OrderedDict()
@@ -52,7 +52,8 @@ def sanitize_settings_and_apply_defaults
         default=False,
         parser='bool')
 
-    logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
+    ini_loc = os.path.dirname(global_config.get('__file__'))
+    logging_conf = jn(ini_loc, 'logging.ini')
     settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
 
     # Default includes, possible to change as a user
@@ -95,6 +96,11 @@ def sanitize_settings_and_apply_defaults
     settings_maker.make_setting('gzip_responses', False, parser='bool')
     settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
 
+    # License settings.
+    settings_maker.make_setting('license.hide_license_info', False, parser='bool')
+    settings_maker.make_setting('license.import_path', '')
+    settings_maker.make_setting('license.import_path_mode', 'if-missing')
+
     # statsd
     settings_maker.make_setting('statsd.enabled', False, parser='bool')
     settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
@@ -106,7 +112,7 @@ def sanitize_settings_and_apply_defaults
     settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0')
     settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
     settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
-    settings_maker.make_setting('vcs.hooks.protocol', 'http')
+    settings_maker.make_setting('vcs.hooks.protocol.v2', 'celery')
     settings_maker.make_setting('vcs.hooks.host', '*')
     settings_maker.make_setting('vcs.scm_app_implementation', 'http')
     settings_maker.make_setting('vcs.server', '')
@@ -116,6 +122,9 @@ def sanitize_settings_and_apply_defaults
     settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
     settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
     settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
+    settings_maker.make_setting('vcs.git.lfs.storage_location', '/var/opt/rhodecode_repo_store/.cache/git_lfs_store')
+    settings_maker.make_setting('vcs.hg.largefiles.storage_location',
+                                '/var/opt/rhodecode_repo_store/.cache/hg_largefiles_store')
 
     settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
 
@@ -152,6 +161,10 @@ def sanitize_settings_and_apply_defaults
         parser='file:ensured'
     )
 
+    # celery
+    broker_url = settings_maker.make_setting('celery.broker_url', 'redis://redis:6379/8')
+    settings_maker.make_setting('celery.result_backend', broker_url)
+
     settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
     settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
 
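Note that `make_setting` evidently returns the effective value here, so the Celery result backend defaults to whatever the broker URL resolved to. Roughly equivalent logic, sketched with a plain dict::

    settings = {}  # pretend the .ini provided no overrides
    broker_url = settings.setdefault('celery.broker_url', 'redis://redis:6379/8')
    result_backend = settings.setdefault('celery.result_backend', broker_url)
    assert result_backend == 'redis://redis:6379/8'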
@@ -202,7 +215,7 @@ def sanitize_settings_and_apply_defaults
     settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int')
     settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int')
 
-    settings_maker.make_setting('archive_cache.objectstore.url',
+    settings_maker.make_setting('archive_cache.objectstore.url', 'http://s3-minio:9000', default_when_empty=True,)
     settings_maker.make_setting('archive_cache.objectstore.key', '')
     settings_maker.make_setting('archive_cache.objectstore.secret', '')
     settings_maker.make_setting('archive_cache.objectstore.region', 'eu-central-1')
@@ -16,8 +16,8 @@
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
-import os
 import logging
+
 import rhodecode
 import collections
 
@@ -30,6 +30,21 @@ from rhodecode.lib.vcs import connect_vc
 log = logging.getLogger(__name__)
 
 
+def propagate_rhodecode_config(global_config, settings, config):
+    # Store the settings to make them available to other modules.
+    settings_merged = global_config.copy()
+    settings_merged.update(settings)
+    if config:
+        settings_merged.update(config)
+
+    rhodecode.PYRAMID_SETTINGS = settings_merged
+    rhodecode.CONFIG = settings_merged
+
+    if 'default_user_id' not in rhodecode.CONFIG:
+        rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
+    log.debug('set rhodecode.CONFIG data')
+
+
 def load_pyramid_environment(global_config, settings):
     # Some parts of the code expect a merge of global and app settings.
     settings_merged = global_config.copy()
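The merge order matters: later dicts win, so the registry settings (when present) override the app settings, which in turn override the global/[DEFAULT] .ini values. A toy illustration of that precedence::

    global_config = {'here': '/etc/rhodecode', 'debug': 'false'}
    settings = {'debug': 'true', 'vcs.server': 'vcsserver:10010'}
    registry_settings = {'vcs.server': 'vcsserver:9900'}

    merged = global_config.copy()
    merged.update(settings)
    merged.update(registry_settings)
    assert merged == {'here': '/etc/rhodecode', 'debug': 'true', 'vcs.server': 'vcsserver:9900'}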
@@ -75,11 +90,8 @@ def load_pyramid_environment(global_conf
 
     utils.configure_vcs(settings)
 
-    # Store the settings to make them available to other modules.
-
-    rhodecode.PYRAMID_SETTINGS = settings_merged
-    rhodecode.CONFIG = settings_merged
-    rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
+    # first run, to store data...
+    propagate_rhodecode_config(global_config, settings, {})
 
     if vcs_server_enabled:
         connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
@@ -35,7 +35,7 @@ from pyramid.renderers import render_to_
 from rhodecode.model import meta
 from rhodecode.config import patches
 
-from rhodecode.config.environment import load_pyramid_environment
+from rhodecode.config.environment import load_pyramid_environment, propagate_rhodecode_config
 
 import rhodecode.events
 from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
@@ -50,7 +50,7 @@ from rhodecode.lib.utils2 import Attribu
 from rhodecode.lib.exc_tracking import store_exception, format_exc
 from rhodecode.subscribers import (
     scan_repositories_if_enabled, write_js_routes_if_enabled,
-    write_metadata_if_needed, write_usage_data)
+    write_metadata_if_needed, write_usage_data, import_license_if_present)
 from rhodecode.lib.statsd_client import StatsdClient
 
 log = logging.getLogger(__name__)
@@ -99,6 +99,7 @@ def make_pyramid_app(global_config, **se
 
     # Apply compatibility patches
     patches.inspect_getargspec()
+    patches.repoze_sendmail_lf_fix()
 
     load_pyramid_environment(global_config, settings)
 
@@ -114,6 +115,9 @@ def make_pyramid_app(global_config, **se
     celery_settings = get_celery_config(settings)
     config.configure_celery(celery_settings)
 
+    # final config set...
+    propagate_rhodecode_config(global_config, settings, config.registry.settings)
+
     # creating the app uses a connection - return it after we are done
     meta.Session.remove()
 
@@ -396,7 +400,8 @@ def includeme(config, auth_resources=Non
                           pyramid.events.ApplicationCreated)
     config.add_subscriber(write_js_routes_if_enabled,
                           pyramid.events.ApplicationCreated)
-
+    config.add_subscriber(import_license_if_present,
+                          pyramid.events.ApplicationCreated)
 
     # Set the default renderer for HTML templates to mako.
     config.add_mako_renderer('.html')
@@ -158,3 +158,10 @@ def inspect_getargspec():
     inspect.getargspec = inspect.getfullargspec
 
     return inspect
+
+
+def repoze_sendmail_lf_fix():
+    from repoze.sendmail import encoding
+    from email.policy import SMTP
+
+    encoding.encode_message = lambda message, *args, **kwargs: message.as_bytes(policy=SMTP)
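The point of the patch is the policy: serializing with `email.policy.SMTP` emits the CRLF line endings SMTP requires on the wire, while the default policy keeps bare LF. Shown standalone, independent of repoze.sendmail::

    from email.message import EmailMessage
    from email.policy import SMTP

    msg = EmailMessage()
    msg['Subject'] = 'test'
    msg.set_content('line one\nline two\n')

    # SMTP policy normalizes to CRLF; the default policy does not
    assert b'line one\r\nline two' in msg.as_bytes(policy=SMTP)
    assert b'line one\nline two' in msg.as_bytes()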
@@ -35,7 +35,7 @@ def configure_vcs(config):
         'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
     }
 
-    conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol']
+    conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol.v2']
     conf.settings.HOOKS_HOST = config['vcs.hooks.host']
     conf.settings.DEFAULT_ENCODINGS = config['default_encoding']
     conf.settings.ALIASES[:] = config['vcs.backends']
@@ -31,9 +31,11 @@ cache_meta = None
 
 
 def includeme(config):
+    return  # don't init cache currently for faster startup time
+
     # init our cache at start
-    settings = config.get_settings()
-    get_archival_cache_store(settings)
+    # settings = config.get_settings()
+    # get_archival_cache_store(settings)
 
 
 def get_archival_config(config):
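The early `return` trades eager construction at startup for lazy construction on first use; the archive cache is presumably then built by `get_archival_cache_store` when first requested. The general pattern, sketched with a hypothetical `build_cache` standing in for the real constructor::

    def build_cache(settings):
        return dict(settings)  # stand-in for the real archive-cache constructor

    _cache = None

    def get_cache(settings):
        global _cache
        if _cache is None:
            _cache = build_cache(settings)  # built once, on first request
        return _cache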
@@ -58,7 +58,7 @@ class S3Shard(BaseShard):
         # ensure folder in bucket exists
         destination = self.bucket
         if not self.fs.exists(destination):
-            self.fs.mkdir(destination, s3_additional_kwargs={})
+            self.fs.mkdir(destination)
 
         writer = self._get_writer(full_path, mode)
 
@@ -27,10 +27,11 @@ Celery loader, run with::
     --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
     --loglevel DEBUG --ini=.dev/dev.ini
 """
-from rhodecode.config.patches import inspect_getargspec, inspect_formatargspec
-inspect_getargspec()
-inspect_formatargspec()
+from rhodecode.config import patches
+patches.inspect_getargspec()
+patches.inspect_formatargspec()
 # python3.11 inspect patches for backward compat on `paste` code
+patches.repoze_sendmail_lf_fix()
 
 import sys
 import logging
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-202
+# Copyright (C) 2012-2024 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
@@ -64,8 +64,9 @@ def send_email(recipients, subject, body
               "Make sure that `smtp_server` variable is configured "
               "inside the .ini file")
         return False
-
-    subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
+    conf_prefix = email_config.get('email_prefix', None)
+    prefix = f'{conf_prefix} ' if conf_prefix else ''
+    subject = f"{prefix}{subject}"
 
     if recipients:
         if isinstance(recipients, str):
86 | email_conf = dict( |
|
87 | email_conf = dict( | |
87 | host=mail_server, |
|
88 | host=mail_server, | |
88 | port=email_config.get('smtp_port', 25), |
|
89 | port=email_config.get('smtp_port', 25), | |
89 | username=email_config.get('smtp_username'), |
|
90 | username=email_config.get('smtp_username', None), | |
90 | password=email_config.get('smtp_password'), |
|
91 | password=email_config.get('smtp_password', None), | |
91 |
|
92 | |||
92 | tls=str2bool(email_config.get('smtp_use_tls')), |
|
93 | tls=str2bool(email_config.get('smtp_use_tls')), | |
93 | ssl=str2bool(email_config.get('smtp_use_ssl')), |
|
94 | ssl=str2bool(email_config.get('smtp_use_ssl')), | |
@@ -207,7 +208,7 b' def create_repo(form_data, cur_user):' | |||||
207 | hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) |
|
208 | hooks_base.create_repository(created_by=owner.username, **repo.get_dict()) | |
208 |
|
209 | |||
209 | # update repo commit caches initially |
|
210 | # update repo commit caches initially | |
210 | repo.update_commit_cache() |
|
211 | repo.update_commit_cache(recursive=False) | |
211 |
|
212 | |||
212 | # set new created state |
|
213 | # set new created state | |
213 | repo.set_state(Repository.STATE_CREATED) |
|
214 | repo.set_state(Repository.STATE_CREATED) | |
@@ -298,7 +299,7 @@ def create_repo_fork(form_data, cur_user
     # update repo commit caches initially
     config = repo._config
     config.set('extensions', 'largefiles', '')
-    repo.update_commit_cache(config=config)
+    repo.update_commit_cache(config=config, recursive=False)
 
     # set new created state
     repo.set_state(Repository.STATE_CREATED)
@@ -390,7 +391,7 @@ def sync_last_update_for_objects(*args, 
         .order_by(Repository.group_id.asc())
 
     for repo in repos:
-        repo.update_commit_cache()
+        repo.update_commit_cache(recursive=False)
 
     skip_groups = kwargs.get('skip_groups')
     if not skip_groups:
@@ -570,7 +570,6 @@ class DbManage(object):
         self.create_ui_settings(path)
 
         ui_config = [
-            ('web', 'push_ssl', 'False'),
            ('web', 'allow_archive', 'gz zip bz2'),
             ('web', 'allow_push', '*'),
             ('web', 'baseurl', '/'),
@@ -35,16 +35,19 @@ def downgrade(migrate_engine):
 
 
 def fixups(models, _SESSION):
+
     for db_repo in _SESSION.query(models.Repository).all():
 
-        config = db_repo._config
-        config.set('extensions', 'largefiles', '')
-
         try:
-            scm = db_repo.scm_instance(cache=False, config=config)
+            config = db_repo._config
+            config.set('extensions', 'largefiles', '')
+
+            scm = db_repo.scm_instance(cache=False, config=config, vcs_full_cache=False)
             if scm:
                 print(f'installing hook for repo: {db_repo}')
                 scm.install_hooks(force=True)
+            del scm  # force GC
+            del config
         except Exception as e:
             print(e)
             print('continue...')
@@ -80,6 +80,10 @@ class AttachedPullRequestsError(Exceptio
     pass
 
 
+class AttachedArtifactsError(Exception):
+    pass
+
+
 class RepoGroupAssignmentError(Exception):
     pass
 
@@ -98,6 +102,11 @@ class HTTPRequirementError(HTTPClientErr
         self.args = (message, )
 
 
+class ClientNotSupportedError(HTTPRequirementError):
+    title = explanation = 'Client Not Supported'
+    reason = None
+
+
 class HTTPLockedRC(HTTPClientError):
     """
     Special Exception For locked Repos in RhodeCode, the return code can
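`AttachedArtifactsError` mirrors the existing `AttachedForksError` flow: the repo-settings view earlier in this changeset catches it and flashes a warning, so presumably `RepoModel().delete` raises it when artifacts are still attached. A toy sketch of that guard pattern (names here are illustrative)::

    class AttachedArtifactsError(Exception):
        pass

    def delete_repo(repo):
        if repo.get('artifacts'):
            raise AttachedArtifactsError()
        return 'deleted'

    try:
        delete_repo({'artifacts': ['dist.tar.gz']})
    except AttachedArtifactsError:
        print('warn: repository still contains attached artifacts')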
@@ -81,7 +81,7 @@ from rhodecode.lib.action_parser import 
 from rhodecode.lib.html_filters import sanitize_html
 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
 from rhodecode.lib import ext_json
-from rhodecode.lib.ext_json import json
+from rhodecode.lib.ext_json import json, formatted_str_json
 from rhodecode.lib.str_utils import safe_bytes, convert_special_chars, base64_to_str
 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
 from rhodecode.lib.str_utils import safe_str
@@ -1416,62 +1416,14 @@ class InitialsGravatar(object):
         return "data:image/svg+xml;base64,{}".format(img_data)
 
 
-def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False):
+def initials_gravatar(request, email_address, first_name, last_name, size=30):
 
     svg_type = None
     if email_address == User.DEFAULT_USER_EMAIL:
         svg_type = 'default_user'
 
     klass = InitialsGravatar(email_address, first_name, last_name, size)
-
-    if store_on_disk:
-        from rhodecode.apps.file_store import utils as store_utils
-        from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
-            FileOverSizeException
-        from rhodecode.model.db import Session
-
-        image_key = md5_safe(email_address.lower()
-                             + first_name.lower() + last_name.lower())
-
-        storage = store_utils.get_file_storage(request.registry.settings)
-        filename = '{}.svg'.format(image_key)
-        subdir = 'gravatars'
-        # since final name has a counter, we apply the 0
-        uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False))
-        store_uid = os.path.join(subdir, uid)
-
-        db_entry = FileStore.get_by_store_uid(store_uid)
-        if db_entry:
-            return request.route_path('download_file', fid=store_uid)
-
-        img_data = klass.get_img_data(svg_type=svg_type)
-        img_file = store_utils.bytes_to_file_obj(img_data)
-
-        try:
-            store_uid, metadata = storage.save_file(
-                img_file, filename, directory=subdir,
-                extensions=['.svg'], randomized_name=False)
-        except (FileNotAllowedException, FileOverSizeException):
-            raise
-
-        try:
-            entry = FileStore.create(
-                file_uid=store_uid, filename=metadata["filename"],
-                file_hash=metadata["sha256"], file_size=metadata["size"],
-                file_display_name=filename,
-                file_description=f'user gravatar `{safe_str(filename)}`',
-                hidden=True, check_acl=False, user_id=1
-            )
-            Session().add(entry)
-            Session().commit()
-            log.debug('Stored upload in DB as %s', entry)
-        except Exception:
-            raise
-
-        return request.route_path('download_file', fid=store_uid)
-
-    else:
-        return klass.generate_svg(svg_type=svg_type)
+    return klass.generate_svg(svg_type=svg_type)
 
 
 def gravatar_external(request, gravatar_url_tmpl, email_address, size=30):
@@ -66,12 +66,12 @@ class Hooks(object):
             result = hook(extras)
             if result is None:
                 raise Exception(f'Failed to obtain hook result from func: {hook}')
-        except HTTPBranchProtected as handled_error:
+        except HTTPBranchProtected as error:
             # Those special cases don't need error reporting. It's a case of
             # locked repo or protected branch
             result = AttributeDict({
-                'status': handled_error.code,
-                'output': handled_error.explanation
+                'status': error.code,
+                'output': error.explanation
             })
         except (HTTPLockedRC, Exception) as error:
             # locked needs different handling since we need to also
@@ -30,7 +30,7 @@ from rhodecode.lib import helpers as h
 from rhodecode.lib import audit_logger
 from rhodecode.lib.utils2 import safe_str, user_agent_normalizer
 from rhodecode.lib.exceptions import (
-    HTTPLockedRC, HTTPBranchProtected, UserCreationError)
+    HTTPLockedRC, HTTPBranchProtected, UserCreationError, ClientNotSupportedError)
 from rhodecode.model.db import Repository, User
 from rhodecode.lib.statsd_client import StatsdClient
 
@@ -64,6 +64,18 @@ def is_shadow_repo(extras):
     return extras['is_shadow_repo']
 
 
+def check_vcs_client(extras):
+    """
+    Checks if vcs client is allowed (Only works in enterprise edition)
+    """
+    try:
+        from rc_ee.lib.security.utils import is_vcs_client_whitelisted
+    except ModuleNotFoundError:
+        is_vcs_client_whitelisted = lambda *x: True
+    backend = extras.get('scm')
+    if not is_vcs_client_whitelisted(extras.get('user_agent'), backend):
+        raise ClientNotSupportedError(f"Your {backend} client is forbidden")
+
 def _get_scm_size(alias, root_path):
 
     if not alias.startswith('.'):
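The new check_vcs_client() degrades to allow-everything on Community Edition installs, where the rc_ee package does not exist. The import-or-stub pattern it relies on, reduced to a runnable sketch (the module name below is deliberately fictional):

    try:
        from fictional_ee_module import is_vcs_client_whitelisted  # absent in CE
    except ModuleNotFoundError:
        is_vcs_client_whitelisted = lambda *x: True  # CE fallback: accept every client

    print(is_vcs_client_whitelisted('mercurial/proto-1.0 (Mercurial 6.7)', 'hg'))  # True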
@@ -108,6 +120,7 @@ def pre_push(extras):
     It bans pushing when the repository is locked.
     """
 
+    check_vcs_client(extras)
     user = User.get_by_username(extras.username)
     output = ''
     if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
@@ -129,6 +142,8 @@ def pre_push(extras):
     if extras.commit_ids and extras.check_branch_perms:
         auth_user = user.AuthUser()
         repo = Repository.get_by_repo_name(extras.repository)
+        if not repo:
+            raise ValueError(f'Repo for {extras.repository} not found')
         affected_branches = []
         if repo.repo_type == 'hg':
             for entry in extras.commit_ids:
@@ -180,6 +195,7 @@ def pre_pull(extras):
     It bans pulling when the repository is locked.
     """
 
+    check_vcs_client(extras)
     output = ''
     if extras.locked_by[0]:
         locked_by = User.get(extras.locked_by[0]).username
@@ -46,7 +46,7 @@ class RequestWrapperTween(object):
 
     def __call__(self, request):
         start = time.time()
-        log.debug('Starting request')
+        log.debug('Starting request processing')
         response = None
         request.req_wrapper_start = start
 
@@ -63,7 +63,7 @@ class RequestWrapperTween(object):
 
         total = time.time() - start
         log.info(
-            'req[%4s] %s %s Request to %s time: %.4fs [%s], RhodeCode %s',
+            'Finished request processing: req[%4s] %s %s Request to %s time: %.4fs [%s], RhodeCode %s',
             count, _auth_user, request.environ.get('REQUEST_METHOD'),
             _path, total, get_user_agent(request.environ), _ver_,
             extra={"time": total, "ver": _ver_, "ip": ip,
@@ -53,25 +53,31 @@ class SimpleHg(simplevcs.SimpleVCS):
         return repo_name.rstrip('/')
 
     _ACTION_MAPPING = {
+        'between': 'pull',
+        'branches': 'pull',
+        'branchmap': 'pull',
+        'capabilities': 'pull',
         'changegroup': 'pull',
         'changegroupsubset': 'pull',
+        'changesetdata': 'pull',
+        'clonebundles': 'pull',
+        'clonebundles_manifest': 'pull',
+        'debugwireargs': 'pull',
+        'filedata': 'pull',
         'getbundle': 'pull',
-        'stream_out': 'pull',
-        'listkeys': 'pull',
-        'between': 'pull',
-        'branchmap': 'pull',
-        'branches': 'pull',
-        'clonebundles': 'pull',
-        'capabilities': 'pull',
-        'debugwireargs': 'pull',
         'heads': 'pull',
-        'lookup': 'pull',
         'hello': 'pull',
         'known': 'pull',
+        'listkeys': 'pull',
+        'lookup': 'pull',
+        'manifestdata': 'pull',
+        'narrow_widen': 'pull',
+        'protocaps': 'pull',
+        'stream_out': 'pull',
 
         # largefiles
+        'getlfile': 'pull',
         'putlfile': 'push',
-        'getlfile': 'pull',
         'statlfile': 'pull',
         'lheads': 'pull',
 
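The mapping above classifies each Mercurial wire-protocol command as a pull or push for permission checking; the new entries (changesetdata, filedata, manifestdata, narrow_widen, protocaps, clonebundles_manifest) cover commands issued by newer hg clients. How such a table is typically consumed is an assumption here, since the dispatch code is outside this hunk, but a common shape defaults unknown commands to the stricter 'push':

    _ACTION_MAPPING = {
        'getbundle': 'pull',
        'unbundle': 'push',
    }

    def required_action(wire_command):
        # default to 'push' so unrecognised commands require write permission
        return _ACTION_MAPPING.get(wire_command, 'push')

    print(required_action('getbundle'))    # pull
    print(required_action('unbundle'))     # push
    print(required_action('mystery-cmd'))  # push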
@@ -293,7 +293,7 @@ class SimpleVCS(object):
         def compute_perm_vcs(
                 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
 
-            log.debug('auth: calculating permission access now...')
+            log.debug('auth: calculating permission access now for vcs operation: %s', action)
             # check IP
             inherit = user.inherit_default_permissions
             ip_allowed = AuthUser.check_ip_allowed(
@@ -339,21 +339,6 @@ class SimpleVCS(object):
             log.exception('Failed to read http scheme')
             return 'http'
 
-    def _check_ssl(self, environ, start_response):
-        """
-        Checks the SSL check flag and returns False if SSL is not present
-        and required True otherwise
-        """
-        org_proto = environ['wsgi._org_proto']
-        # check if we have SSL required ! if not it's a bad request !
-        require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
-        if require_ssl and org_proto == 'http':
-            log.debug(
-                'Bad request: detected protocol is `%s` and '
-                'SSL/HTTPS is required.', org_proto)
-            return False
-        return True
-
     def _get_default_cache_ttl(self):
         # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
         plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
@@ -373,12 +358,6 @@ class SimpleVCS(object):
         meta.Session.remove()
 
     def _handle_request(self, environ, start_response):
-        if not self._check_ssl(environ, start_response):
-            reason = ('SSL required, while RhodeCode was unable '
-                      'to detect this as SSL request')
-            log.debug('User not allowed to proceed, %s', reason)
-            return HTTPNotAcceptable(reason)(environ, start_response)
-
         if not self.url_repo_name:
             log.warning('Repository name is empty: %s', self.url_repo_name)
             # failed to get repo name, we fail now
@@ -159,11 +159,18 @@ def detect_vcs_request(environ, backends
         # favicon often requested by browsers
         'favicon.ico',
 
+        # static files no detection
+        '_static++',
+
+        # debug-toolbar
+        '_debug_toolbar++',
+
         # e.g /_file_store/download
         '_file_store++',
 
         # login
-        "{}/login".format(ADMIN_PREFIX),
+        f"{ADMIN_PREFIX}/login",
+        f"{ADMIN_PREFIX}/logout",
 
         # 2fa
         f"{ADMIN_PREFIX}/check_2fa",
@@ -178,12 +185,6 @@ def detect_vcs_request(environ, backends
         # _admin/my_account is safe too
         f'{ADMIN_PREFIX}/my_account++',
 
-        # static files no detection
-        '_static++',
-
-        # debug-toolbar
-        '_debug_toolbar++',
-
         # skip ops ping, status
         f'{ADMIN_PREFIX}/ops/ping',
         f'{ADMIN_PREFIX}/ops/status',
@@ -193,11 +194,14 @@ def detect_vcs_request(environ, backends
 
         '++/repo_creating_check'
     ]
+
     path_info = get_path_info(environ)
     path_url = path_info.lstrip('/')
     req_method = environ.get('REQUEST_METHOD')
 
     for item in white_list:
+        item = item.lstrip('/')
+
         if item.endswith('++') and path_url.startswith(item[:-2]):
             log.debug('path `%s` in whitelist (match:%s), skipping...', path_url, item)
             return handler
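The new item.lstrip('/') makes whitelist entries written with or without a leading slash compare equal to the already-stripped request path; entries ending in '++' act as prefix wildcards. A self-contained sketch of the matching rule:

    def in_white_list(path_url, white_list):
        for item in white_list:
            item = item.lstrip('/')
            if item.endswith('++') and path_url.startswith(item[:-2]):
                return True  # prefix wildcard match
            if item == path_url:
                return True  # exact match
        return False

    print(in_white_list('_static/js/app.js', ['_static++']))  # True
    print(in_white_list('_admin/login', ['/_admin/login']))   # True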
@@ -38,9 +38,9 @@ from dogpile.cache.backends import redis
 from dogpile.cache.backends.file import FileLock
 from dogpile.cache.util import memoized_property
 
-from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
-from rhodecode.lib.str_utils import safe_bytes, safe_str
-from rhodecode.lib.type_utils import str2bool
+from ...lib.memory_lru_dict import LRUDict, LRUDictDebug
+from ...lib.str_utils import safe_bytes, safe_str
+from ...lib.type_utils import str2bool
 
 _default_max_size = 1024
 
@@ -198,6 +198,13 @@ class FileNamespaceBackend(PickleSeriali
     def get_store(self):
         return self.filename
 
+    def cleanup_store(self):
+        for ext in ("db", "dat", "pag", "dir"):
+            final_filename = self.filename + os.extsep + ext
+            if os.path.exists(final_filename):
+                os.remove(final_filename)
+                log.warning('Removed dbm file %s', final_filename)
+
 
 class BaseRedisBackend(redis_backend.RedisBackend):
     key_prefix = ''
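cleanup_store() has to try several extensions because Python's dbm module is a facade: depending on which implementation backs it, a single logical store may live in name.db, or be split across name.dir, name.dat and name.pag. A reduced, standalone version of the same sweep:

    import os

    def cleanup_store(filename):
        # remove whichever companion files the dbm backend created
        for ext in ("db", "dat", "pag", "dir"):
            candidate = filename + os.extsep + ext
            if os.path.exists(candidate):
                os.remove(candidate)
                print(f'Removed dbm file {candidate}')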
@@ -289,7 +296,7 @@ class RedisMsgPackBackend(MsgPackSeriali
 
 
 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
-    from rhodecode.lib._vendor import redis_lock
+    from ...lib._vendor import redis_lock
 
     class _RedisLockWrapper:
         """LockWrapper for redis_lock"""
@@ -26,9 +26,9 @@ import decorator
 from dogpile.cache import CacheRegion
 
 import rhodecode
-from rhodecode.lib.hash_utils import sha1
-from rhodecode.lib.str_utils import safe_bytes
-from rhodecode.lib.type_utils import str2bool  # noqa :required by imports from .utils
+from ...lib.hash_utils import sha1
+from ...lib.str_utils import safe_bytes
+from ...lib.type_utils import str2bool  # noqa :required by imports from .utils
 
 from . import region_meta
 
@@ -91,15 +91,14 @@ def command(ini_path, filename, file_pat
 
     auth_user = db_user.AuthUser(ip_addr='127.0.0.1')
 
-    storage = store_utils.get_file_storage(request.registry.settings)
+    f_store = store_utils.get_filestore_backend(request.registry.settings)
 
     with open(file_path, 'rb') as f:
         click.secho(f'Adding new artifact from path: `{file_path}`',
                     fg='green')
 
         file_data = _store_file(
-            storage, auth_user, filename, content=None, check_acl=True,
+            f_store, auth_user, filename, content=None, check_acl=True,
             file_obj=f, description=description,
             scope_repo_id=repo.repo_id)
-        click.secho(f'File Data: {file_data}',
-                    fg='green')
+        click.secho(f'File Data: {file_data}', fg='green')
@@ -108,11 +108,10 @@ def command(ini_path, force_yes, user, e
     dbmanage.create_permissions()
     dbmanage.populate_default_permissions()
     if apply_license_key:
-        try:
-            from rc_license.models import apply_trial_license_if_missing
-            apply_trial_license_if_missing(force=True)
-        except ImportError:
-            pass
+        from rhodecode.model.license import apply_license_from_file
+        license_file_path = config.get('license.import_path')
+        if license_file_path:
+            apply_license_from_file(license_file_path, force=True)
 
     Session().commit()
 
@@ -181,3 +181,7 @@ def splitnewlines(text: bytes):
     else:
         lines[-1] = lines[-1][:-1]
     return lines
+
+
+def header_safe_str(val):
+    return safe_bytes(val).decode('latin-1', errors='replace')
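header_safe_str() targets the WSGI rule that header values must be representable in ISO-8859-1. Since latin-1 maps every possible byte, the decode can never raise; non-latin-1 input degrades to mojibake instead of a server error. A standalone sketch (safe_bytes below is a stand-in for the module's helper of the same name):

    def safe_bytes(val):
        return val if isinstance(val, bytes) else str(val).encode('utf-8')

    def header_safe_str(val):
        # latin-1 decodes any byte sequence, so this is total; errors='replace'
        # is belt-and-braces for exotic str subclasses
        return safe_bytes(val).decode('latin-1', errors='replace')

    print(header_safe_str('attachment; filename=naïve.txt'))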
@@ -396,17 +396,18 @@ def storage_inodes():
 
 
 @register_sysinfo
-def storage_archives():
+def storage_artifacts():
     import rhodecode
     from rhodecode.lib.helpers import format_byte_size_binary
     from rhodecode.lib.archive_cache import get_archival_cache_store
 
-    storage_type = rhodecode.ConfigGet().get_str('archive_cache.backend.type')
+    backend_type = rhodecode.ConfigGet().get_str('archive_cache.backend.type')
 
-    value = dict(percent=0, used=0, total=0, items=0, path='', text='', type=storage_type)
+    value = dict(percent=0, used=0, total=0, items=0, path='', text='', type=backend_type)
     state = STATE_OK_DEFAULT
     try:
         d_cache = get_archival_cache_store(config=rhodecode.CONFIG)
+        backend_type = str(d_cache)
 
         total_files, total_size, _directory_stats = d_cache.get_statistics()
 
@@ -415,7 +416,8 @@ def storage_archives():
             'used': total_size,
             'total': total_size,
             'items': total_files,
-            'path': d_cache.storage_path
+            'path': d_cache.storage_path,
+            'type': backend_type
         })
 
     except Exception as e:
@@ -425,8 +427,44 @@ def storage_archives():
     human_value = value.copy()
     human_value['used'] = format_byte_size_binary(value['used'])
     human_value['total'] = format_byte_size_binary(value['total'])
-    human_value['text'] = "{} ({} items)".format(
-        human_value['used'], value['items'])
+    human_value['text'] = f"{human_value['used']} ({value['items']} items)"
+
+    return SysInfoRes(value=value, state=state, human_value=human_value)
+
+
+@register_sysinfo
+def storage_archives():
+    import rhodecode
+    from rhodecode.lib.helpers import format_byte_size_binary
+    import rhodecode.apps.file_store.utils as store_utils
+    from rhodecode import CONFIG
+
+    backend_type = rhodecode.ConfigGet().get_str(store_utils.config_keys.backend_type)
+
+    value = dict(percent=0, used=0, total=0, items=0, path='', text='', type=backend_type)
+    state = STATE_OK_DEFAULT
+    try:
+        f_store = store_utils.get_filestore_backend(config=CONFIG)
+        backend_type = str(f_store)
+        total_files, total_size, _directory_stats = f_store.get_statistics()
+
+        value.update({
+            'percent': 100,
+            'used': total_size,
+            'total': total_size,
+            'items': total_files,
+            'path': f_store.storage_path,
+            'type': backend_type
+        })
+
+    except Exception as e:
+        log.exception('failed to fetch archive cache storage')
+        state = {'message': str(e), 'type': STATE_ERR}
+
+    human_value = value.copy()
+    human_value['used'] = format_byte_size_binary(value['used'])
+    human_value['total'] = format_byte_size_binary(value['total'])
+    human_value['text'] = f"{human_value['used']} ({value['items']} items)"
 
     return SysInfoRes(value=value, state=state, human_value=human_value)
 
@@ -798,6 +836,7 @@ def get_system_info(environ):
         'storage': SysInfo(storage)(),
         'storage_inodes': SysInfo(storage_inodes)(),
         'storage_archive': SysInfo(storage_archives)(),
+        'storage_artifacts': SysInfo(storage_artifacts)(),
         'storage_gist': SysInfo(storage_gist)(),
         'storage_temp': SysInfo(storage_temp)(),
 
@@ -42,6 +42,8 @@ from webhelpers2.text import collapse, s
 
 from mako import exceptions
 
+from rhodecode import ConfigGet
+from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRC
 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
 from rhodecode.lib.type_utils import AttributeDict
 from rhodecode.lib.str_utils import safe_bytes, safe_str
@@ -84,8 +86,39 @@ def adopt_for_celery(func):
     @wraps(func)
     def wrapper(extras):
         extras = AttributeDict(extras)
-        # HooksResponse implements to_json method which must be used there.
-        return func(extras).to_json()
+        try:
+            # HooksResponse implements to_json method which must be used there.
+            return func(extras).to_json()
+        except HTTPBranchProtected as error:
+            # Those special cases don't need error reporting. It's a case of
+            # locked repo or protected branch
+            error_args = error.args
+            return {
+                'status': error.code,
+                'output': error.explanation,
+                'exception': type(error).__name__,
+                'exception_args': error_args,
+                'exception_traceback': '',
+            }
+        except HTTPLockedRC as error:
+            # Those special cases don't need error reporting. It's a case of
+            # locked repo or protected branch
+            error_args = error.args
+            return {
+                'status': error.code,
+                'output': error.explanation,
+                'exception': type(error).__name__,
+                'exception_args': error_args,
+                'exception_traceback': '',
+            }
+        except Exception as e:
+            return {
+                'status': 128,
+                'output': '',
+                'exception': type(e).__name__,
+                'exception_args': e.args,
+                'exception_traceback': '',
+            }
     return wrapper
 
 
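With this change the celery wrapper never lets an exception escape; every outcome becomes a plain dict the result backend can serialise. The envelope shape from the hunk above, extracted into a runnable sketch:

    def error_envelope(exc, status=128, output=''):
        # JSON-safe summary of a failure, mirroring the wrapper above
        return {
            'status': status,
            'output': output,
            'exception': type(exc).__name__,
            'exception_args': exc.args,
            'exception_traceback': '',
        }

    print(error_envelope(ValueError('bad ref')))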
@@ -361,32 +394,39 @@ ui_sections = [
     'ui', 'web', ]
 
 
-def config_data_from_db(clear_session=True, repo=None):
+def prepare_config_data(clear_session=True, repo=None):
     """
-    Read the configuration data from the database and return configuration
+    Read the configuration data from the database, *.ini files and return configuration
     tuples.
     """
     from rhodecode.model.settings import VcsSettingsModel
 
-    config = []
-
     sa = meta.Session()
     settings_model = VcsSettingsModel(repo=repo, sa=sa)
 
     ui_settings = settings_model.get_ui_settings()
 
     ui_data = []
+    config = [
+        ('web', 'push_ssl', 'false'),
+    ]
     for setting in ui_settings:
+        # Todo: remove this section once transition to *.ini files will be completed
+        if setting.section in ('largefiles', 'vcs_git_lfs'):
+            if setting.key != 'enabled':
+                continue
         if setting.active:
             ui_data.append((setting.section, setting.key, setting.value))
             config.append((
                 safe_str(setting.section), safe_str(setting.key),
                 safe_str(setting.value)))
             if setting.key == 'push_ssl':
-                # force set push_ssl requirement to False, rhodecode
-                # handles that
+                # force set push_ssl requirement to False this is deprecated, and we must force it to False
                 config.append((
                     safe_str(setting.section), safe_str(setting.key), False))
+    config_getter = ConfigGet()
+    config.append(('vcs_git_lfs', 'store_location', config_getter.get_str('vcs.git.lfs.storage_location')))
+    config.append(('largefiles', 'usercache', config_getter.get_str('vcs.hg.largefiles.storage_location')))
     log.debug(
         'settings ui from db@repo[%s]: %s',
         repo,
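prepare_config_data() now seeds the deprecated ('web', 'push_ssl') setting to 'false' unconditionally and appends the largefiles/LFS storage locations read from the *.ini file rather than the database. A purely illustrative shape of the returned tuples (the paths are invented):

    expected_shape = [
        ('web', 'push_ssl', 'false'),                     # forced, deprecated
        ('extensions', 'largefiles', ''),                 # still database-driven
        ('vcs_git_lfs', 'store_location', '/data/lfs'),   # from vcs.git.lfs.storage_location
        ('largefiles', 'usercache', '/data/largefiles'),  # from vcs.hg.largefiles.storage_location
    ]
    for section, key, value in expected_shape:
        print(f'[{section}] {key} = {value!r}')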
@@ -415,7 +455,7 @@ def make_db_config(clear_session=True, r
     Create a :class:`Config` instance based on the values in the database.
     """
     config = Config()
-    config_data = config_data_from_db(clear_session=clear_session, repo=repo)
+    config_data = prepare_config_data(clear_session=clear_session, repo=repo)
     for section, option, value in config_data:
         config.set(section, option, value)
     return config
@@ -582,7 +622,7 @@ def repo2db_mapper(initial_repo_list, re
         log.debug('Running update server info')
         git_repo._update_server_info(force=True)
 
-    db_repo.update_commit_cache()
+    db_repo.update_commit_cache(recursive=False)
 
     config = db_repo._config
     config.set('extensions', 'largefiles', '')
@@ -2568,10 +2568,10 @@ class Repository(Base, BaseModel):
         return commit
 
     def flush_commit_cache(self):
-        self.update_commit_cache(cs_cache={'raw_id':'0'})
+        self.update_commit_cache(cs_cache={'raw_id': '0'})
         self.update_commit_cache()
 
-    def update_commit_cache(self, cs_cache=None, config=None):
+    def update_commit_cache(self, cs_cache=None, config=None, recursive=True):
         """
         Update cache of last commit for repository
         cache_keys should be::
2610 | if isinstance(cs_cache, BaseCommit): |
|
2610 | if isinstance(cs_cache, BaseCommit): | |
2611 | cs_cache = cs_cache.__json__() |
|
2611 | cs_cache = cs_cache.__json__() | |
2612 |
|
2612 | |||
|
2613 | def maybe_update_recursive(instance, _config, _recursive, _cs_cache, _last_change): | |||
|
2614 | if _recursive: | |||
|
2615 | repo_id = instance.repo_id | |||
|
2616 | _cs_cache['source_repo_id'] = repo_id | |||
|
2617 | for gr in instance.groups_with_parents: | |||
|
2618 | gr.changeset_cache = _cs_cache | |||
|
2619 | gr.updated_on = _last_change | |||
|
2620 | ||||
2613 | def is_outdated(new_cs_cache): |
|
2621 | def is_outdated(new_cs_cache): | |
2614 | if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or |
|
2622 | if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or | |
2615 | new_cs_cache['revision'] != self.changeset_cache['revision']): |
|
2623 | new_cs_cache['revision'] != self.changeset_cache['revision']): | |
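maybe_update_recursive() stamps the originating repository id into the cache dict and copies it onto every parent repository group, so group pages can show the latest commit without recomputing it; the recursive=False passed by repo2db_mapper above skips exactly this fan-out during bulk scans. A simplified, runnable illustration (the objects are reduced stand-ins):

    class Group:
        def __init__(self, name):
            self.name = name
            self.changeset_cache = None
            self.updated_on = None

    def maybe_update_recursive(repo_id, groups, recursive, cs_cache, last_change):
        if recursive:
            cs_cache['source_repo_id'] = repo_id
            for gr in groups:
                gr.changeset_cache = cs_cache
                gr.updated_on = last_change

    groups = [Group('lib'), Group('lib/sub')]
    maybe_update_recursive(42, groups, True, {'raw_id': 'abc'}, '2024-01-01')
    print(groups[0].changeset_cache)  # {'raw_id': 'abc', 'source_repo_id': 42}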
@@ -2636,6 +2644,7 @@ class Repository(Base, BaseModel):
             self.changeset_cache = cs_cache
             self.updated_on = last_change
             Session().add(self)
+            maybe_update_recursive(self, config, recursive, cs_cache, last_change)
             Session().commit()
 
         else:
@@ -2650,6 +2659,7 @@ class Repository(Base, BaseModel):
             self.changeset_cache = cs_cache
             self.updated_on = _date_latest
             Session().add(self)
+            maybe_update_recursive(self, config, recursive, cs_cache, _date_latest)
             Session().commit()
 
         log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
@@ -5839,8 +5849,7 @@ class FileStore(Base, BaseModel):
             .filter(FileStoreMetadata.file_store_meta_key == key) \
             .scalar()
         if has_key:
-            msg = 'key `{}` already defined under section `{}` for this file.'\
-                .format(key, section)
+            msg = f'key `{key}` already defined under section `{section}` for this file.'
             raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
 
         # NOTE(marcink): raises ArtifactMetadataBadValueType
@@ -5939,7 +5948,7 @@ class FileStoreMetadata(Base, BaseModel)
     def valid_value_type(cls, value):
         if value.split('.')[0] not in cls.SETTINGS_TYPES:
             raise ArtifactMetadataBadValueType(
-                'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
+                f'value_type must be one of {cls.SETTINGS_TYPES.keys()} got {value}')
 
     @hybrid_property
     def file_store_meta_section(self):
@@ -129,6 +129,20 @@ def TOTPForm(localizer, user, allow_reco
     return _TOTPForm
 
 
+def WhitelistedVcsClientsForm(localizer):
+    _ = localizer
+
+    class _WhitelistedVcsClientsForm(formencode.Schema):
+        regexp = r'^(?:\s*[<>=~^!]*\s*\d{1,2}\.\d{1,2}(?:\.\d{1,2})?\s*|\*)\s*(?:,\s*[<>=~^!]*\s*\d{1,2}\.\d{1,2}(?:\.\d{1,2})?\s*|\s*\*\s*)*$'
+        allow_extra_fields = True
+        filter_extra_fields = True
+        git = v.Regex(regexp)
+        hg = v.Regex(regexp)
+        svn = v.Regex(regexp)
+
+    return _WhitelistedVcsClientsForm
+
+
 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
     old_data = old_data or {}
     available_languages = available_languages or []
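The form's regexp accepts a comma-separated list of two- or three-part version numbers, each optionally prefixed with comparison operators, or a bare '*' wildcard. A quick check of what it matches (pattern copied verbatim from the hunk above; the sample inputs are invented):

    import re

    regexp = r'^(?:\s*[<>=~^!]*\s*\d{1,2}\.\d{1,2}(?:\.\d{1,2})?\s*|\*)\s*(?:,\s*[<>=~^!]*\s*\d{1,2}\.\d{1,2}(?:\.\d{1,2})?\s*|\s*\*\s*)*$'
    for sample in ('*', '>=6.7', '6.7.2, !6.8', 'not-a-version'):
        print(sample, '->', bool(re.match(regexp, sample)))
    # *, >=6.7 and 6.7.2, !6.8 match; not-a-version does not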
@@ -454,13 +468,6 @@ def ApplicationUiSettingsForm(localizer)
     _ = localizer
 
     class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
-        web_push_ssl = v.StringBoolean(if_missing=False)
-        largefiles_usercache = All(
-            v.ValidPath(localizer),
-            v.UnicodeString(strip=True, min=2, not_empty=True))
-        vcs_git_lfs_store_location = All(
-            v.ValidPath(localizer),
-            v.UnicodeString(strip=True, min=2, not_empty=True))
         extensions_hggit = v.StringBoolean(if_missing=False)
         new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
         new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
@@ -117,14 +117,16 @@ class NotificationModel(BaseModel):
         # add mentioned users into recipients
         final_recipients = set(recipients_objs).union(mention_recipients)
 
-        (subject, email_body, email_body_plaintext) = \
-            EmailNotificationModel().render_email(notification_type, **email_kwargs)
+        # No need to render email if we are sending just notification
+        if with_email:
+            (subject, email_body, email_body_plaintext) = \
+                EmailNotificationModel().render_email(notification_type, **email_kwargs)
 
         if not notification_subject:
             notification_subject = subject
 
         if not notification_body:
             notification_body = email_body_plaintext
 
         notification = Notification.create(
             created_by=created_by_obj, subject=notification_subject,
@@ -31,7 +31,7 @@ from zope.cachedescriptors.property impo
 from rhodecode import events
 from rhodecode.lib.auth import HasUserGroupPermissionAny
 from rhodecode.lib.caching_query import FromCache
-from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
+from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError, AttachedArtifactsError
 from rhodecode.lib import hooks_base
 from rhodecode.lib.user_log_filter import user_log_filter
 from rhodecode.lib.utils import make_db_config
@@ -736,7 +736,7 @@ class RepoModel(BaseModel):
             log.error(traceback.format_exc())
             raise
 
-    def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
+    def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None):
         """
         Delete given repository, forks parameter defines what do do with
         attached forks. Throws AttachedForksError if deleted repo has attached
745 | :param repo: |
|
745 | :param repo: | |
746 | :param forks: str 'delete' or 'detach' |
|
746 | :param forks: str 'delete' or 'detach' | |
747 | :param pull_requests: str 'delete' or None |
|
747 | :param pull_requests: str 'delete' or None | |
|
748 | :param artifacts: str 'delete' or None | |||
748 | :param fs_remove: remove(archive) repo from filesystem |
|
749 | :param fs_remove: remove(archive) repo from filesystem | |
749 | """ |
|
750 | """ | |
750 | if not cur_user: |
|
751 | if not cur_user: | |
@@ -767,6 +768,13 @@ class RepoModel(BaseModel):
         if pull_requests != 'delete' and (pr_sources or pr_targets):
             raise AttachedPullRequestsError()
 
+        artifacts_objs = repo.artifacts
+        if artifacts == 'delete':
+            for a in artifacts_objs:
+                self.sa.delete(a)
+        elif [a for a in artifacts_objs]:
+            raise AttachedArtifactsError()
+
         old_repo_dict = repo.get_dict()
         events.trigger(events.RepoPreDeleteEvent(repo))
         try:
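Artifacts now get the same treatment as forks and pull requests: their deletion must be opted into, otherwise the repository delete aborts. A minimal mimic of the new guard (the objects are reduced to strings):

    class AttachedArtifactsError(Exception):
        pass

    def guard_artifacts(artifacts_objs, artifacts=None):
        if artifacts == 'delete':
            return list(artifacts_objs)  # rows the caller should delete
        if [a for a in artifacts_objs]:
            raise AttachedArtifactsError()
        return []

    print(guard_artifacts(['artifact-1'], artifacts='delete'))  # ['artifact-1']
    # guard_artifacts(['artifact-1'])  # would raise AttachedArtifactsError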
@@ -486,7 +486,6 @@ class VcsSettingsModel(object):
     )
     GLOBAL_HG_SETTINGS = (
         ('extensions', 'largefiles'),
-        ('largefiles', 'usercache'),
         ('phases', 'publish'),
         ('extensions', 'evolve'),
         ('extensions', 'topic'),
@@ -496,12 +495,10 @@ class VcsSettingsModel(object):
 
     GLOBAL_GIT_SETTINGS = (
         ('vcs_git_lfs', 'enabled'),
-        ('vcs_git_lfs', 'store_location')
     )
 
     SVN_BRANCH_SECTION = 'vcs_svn_branch'
     SVN_TAG_SECTION = 'vcs_svn_tag'
-    SSL_SETTING = ('web', 'push_ssl')
     PATH_SETTING = ('paths', '/')
 
     def __init__(self, sa=None, repo=None):
@@ -666,18 +663,16 @@ class VcsSettingsModel(object):
             self.repo_settings, *phases, value=safe_str(data[phases_key]))
 
     def create_or_update_global_hg_settings(self, data):
-        opts_len = 4
-        largefiles, largefiles_store, phases, evolve \
+        opts_len = 3
+        largefiles, phases, evolve \
             = self.GLOBAL_HG_SETTINGS[:opts_len]
-        largefiles_key, largefiles_store_key, phases_key, evolve_key \
+        largefiles_key, phases_key, evolve_key \
             = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:opts_len], data)
 
         self._create_or_update_ui(
             self.global_settings, *largefiles, value='',
             active=data[largefiles_key])
         self._create_or_update_ui(
-            self.global_settings, *largefiles_store, value=data[largefiles_store_key])
-        self._create_or_update_ui(
             self.global_settings, *phases, value=safe_str(data[phases_key]))
         self._create_or_update_ui(
             self.global_settings, *evolve, value='',
697 | active=data[lfs_enabled_key]) |
|
692 | active=data[lfs_enabled_key]) | |
698 |
|
693 | |||
699 | def create_or_update_global_git_settings(self, data): |
|
694 | def create_or_update_global_git_settings(self, data): | |
700 | lfs_enabled, lfs_store_location \ |
|
695 | lfs_enabled = self.GLOBAL_GIT_SETTINGS[0] | |
701 | = self.GLOBAL_GIT_SETTINGS |
|
696 | lfs_enabled_key = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)[0] | |
702 | lfs_enabled_key, lfs_store_location_key \ |
|
|||
703 | = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data) |
|
|||
704 |
|
697 | |||
705 | self._create_or_update_ui( |
|
698 | self._create_or_update_ui( | |
706 | self.global_settings, *lfs_enabled, value=data[lfs_enabled_key], |
|
699 | self.global_settings, *lfs_enabled, value=data[lfs_enabled_key], | |
707 | active=data[lfs_enabled_key]) |
|
700 | active=data[lfs_enabled_key]) | |
708 | self._create_or_update_ui( |
|
|||
709 | self.global_settings, *lfs_store_location, |
|
|||
710 | value=data[lfs_store_location_key]) |
|
|||
711 |
|
701 | |||
712 | def create_or_update_global_svn_settings(self, data): |
|
702 | def create_or_update_global_svn_settings(self, data): | |
713 | # branch/tags patterns |
|
703 | # branch/tags patterns | |
714 | self._create_svn_settings(self.global_settings, data) |
|
704 | self._create_svn_settings(self.global_settings, data) | |
715 |
|
705 | |||
716 | def update_global_ssl_setting(self, value): |
|
|||
717 | self._create_or_update_ui( |
|
|||
718 | self.global_settings, *self.SSL_SETTING, value=value) |
|
|||
719 |
|
||||
720 | @assert_repo_settings |
|
706 | @assert_repo_settings | |
721 | def delete_repo_svn_pattern(self, id_): |
|
707 | def delete_repo_svn_pattern(self, id_): | |
722 | ui = self.repo_settings.UiDbModel.get(id_) |
|
708 | ui = self.repo_settings.UiDbModel.get(id_) |
@@ -557,10 +557,10 @@ class UserModel(BaseModel):
         elif handle_mode == 'delete':
             from rhodecode.apps.file_store import utils as store_utils
             request = get_current_request()
-            storage = store_utils.get_file_storage(request.registry.settings)
+            f_store = store_utils.get_filestore_backend(request.registry.settings)
             for a in artifacts:
                 file_uid = a.file_uid
-                storage.delete(file_uid)
+                f_store.delete(file_uid)
                 self.sa.delete(a)
 
             left_overs = False
@@ -86,6 +86,7 @@ function registerRCRoutes() {
     pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []);
     pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
    pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
+    pyroutes.register('admin_security_modify_allowed_vcs_client_versions', '/_admin/security/modify/allowed_vcs_client_versions', []);
     pyroutes.register('apiv2', '/_admin/api', []);
     pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']);
     pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']);
@@ -111,9 +111,11 @@ def scan_repositories_if_enabled(event):
     This is subscribed to the `pyramid.events.ApplicationCreated` event. It
     does a repository scan if enabled in the settings.
     """
+
     settings = event.app.registry.settings
     vcs_server_enabled = settings['vcs.server.enable']
     import_on_startup = settings['startup.import_repos']
+
     if vcs_server_enabled and import_on_startup:
         from rhodecode.model.scm import ScmModel
         from rhodecode.lib.utils import repo2db_mapper
@@ -205,7 +207,7 @@ def write_usage_data(event):
         return
 
     def get_update_age(dest_file):
-        now = datetime.datetime.utcnow()
+        now = datetime.datetime.now(datetime.UTC)
 
         with open(dest_file, 'rb') as f:
             data = ext_json.json.loads(f.read())
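This hunk and the next one below swap datetime.datetime.utcnow(), deprecated since Python 3.12, for the timezone-aware datetime.datetime.now(datetime.UTC). The observable difference:

    import datetime

    aware = datetime.datetime.now(datetime.UTC)
    print(aware.tzinfo)  # UTC
    # the old utcnow() returned a naive value: its .tzinfo was None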
@@ -216,10 +218,9 @@ def write_usage_data(event):
 
         return 0
 
-    utc_date = datetime.datetime.utcnow()
+    utc_date = datetime.datetime.now(datetime.UTC)
     hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
-    fname = '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format(
-        date=utc_date, hour=hour_quarter)
+    fname = f'.rc_usage_{utc_date.year}{utc_date.month:02d}{utc_date.day:02d}_{hour_quarter}.json'
     ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
 
     usage_dir = os.path.join(ini_loc, '.rcusage')
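The f-string produces the same file name the old .format() call did; for 13:30 UTC the hour quarter is ceil(13.5 / 6) = 3. A worked example with fixed inputs:

    import datetime
    import math

    utc_date = datetime.datetime(2024, 5, 7, 13, 30, tzinfo=datetime.UTC)
    hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute / 60.0) / 6.))
    fname = f'.rc_usage_{utc_date.year}{utc_date.month:02d}{utc_date.day:02d}_{hour_quarter}.json'
    print(fname)  # .rc_usage_20240507_3.json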
@@ -314,6 +315,28 @@ def write_js_routes_if_enabled(event):
         log.exception('Failed to write routes.js into %s', jsroutes_file_path)
 
 
+def import_license_if_present(event):
+    """
+    This is subscribed to the `pyramid.events.ApplicationCreated` event. It
+    does a import license key based on a presence of the file.
+    """
+    settings = event.app.registry.settings
+
+    rhodecode_edition_id = settings.get('rhodecode.edition_id')
+    license_file_path = settings.get('license.import_path')
+    force = settings.get('license.import_path_mode') == 'force'
+
+    if license_file_path and rhodecode_edition_id == 'EE':
+        log.debug('license.import_path= is set importing license from %s', license_file_path)
+        from rhodecode.model.meta import Session
+        from rhodecode.model.license import apply_license_from_file
+        try:
+            apply_license_from_file(license_file_path, force=force)
+            Session().commit()
+        except OSError:
+            log.exception('Failed to import license from %s, make sure this file exists', license_file_path)
+
+
 class Subscriber(object):
     """
     Base class for subscribers to the pyramid event system.
@@ -26,8 +26,13 @@
 <div class="sidebar">
     <ul class="nav nav-pills nav-stacked">
         % for item in resource.get_root().get_nav_list():
+
         <li ${('class=active' if item == resource else '')}>
-            <a href="${request.resource_path(item, route_name='auth_home')}">${item.display_name}</a>
+            % if getattr(item, 'is_root', False):
+                <a href="${request.resource_path(item, route_name='auth_home')}">${item.display_name}</a>
+            % else:
+                <a style="padding-left: 10px" href="${request.resource_path(item, route_name='auth_home')}">${item.display_name}</a>
+            % endif
         </li>
         % endfor
     </ul>
@@ -50,6 +50,13 @@
 ${h.secure_form(request.resource_path(resource, route_name='auth_home'), request=request)}
 <div class="form">
-    <h4>${_('Plugin Configuration')}</h4>
+
+    ## Allow derived templates to add something above the form
+    ## input fields
+    %if hasattr(next, 'above_form_fields'):
+        ${next.above_form_fields()}
+    %endif
+
+    <h4>${_('Plugin Configuration')}</h4>
     %for node in plugin.get_settings_schema():
         <%
             label_to_type = {'label-checkbox': 'bool', 'label-textarea': 'textarea'}
@@ -215,18 +215,35 @@
             %endif
             </td>
         </tr>
+
         <% attached_prs = len(c.rhodecode_db_repo.pull_requests_source + c.rhodecode_db_repo.pull_requests_target) %>
         % if c.rhodecode_db_repo.pull_requests_source or c.rhodecode_db_repo.pull_requests_target:
         <tr>
             <td>
                 ${_ungettext('This repository has %s attached pull request.', 'This repository has %s attached pull requests.', attached_prs) % attached_prs}
                 <br/>
-                ${_('Consider to archive this repository instead.')}
+                <br/>
+                <strong>${_('Consider to archive this repository instead.')}</strong>
             </td>
             <td></td>
             <td></td>
         </tr>
         % endif
+
+        <% attached_artifacts = len(c.rhodecode_db_repo.artifacts) %>
+        % if attached_artifacts:
+        <tr>
+            <td>
+                ${_ungettext('This repository has %s attached artifact.', 'This repository has %s attached artifacts.', attached_artifacts) % attached_artifacts}
+                <br/>
+                <br/>
+                <strong>${_('Consider to archive this repository instead.')}</strong>
+            </td>
+            <td></td>
+            <td></td>
+        </tr>
+        % endif
+
     </table>
     <div style="margin: 0 0 20px 0" class="fake-space"></div>
 
@@ -114,6 +114,7 @@
 <li class="${h.is_active('repository_groups', active)}"><a href="${h.route_path('repo_groups')}">${_('Repository groups')}</a></li>
 <li class="${h.is_active('users', active)}"><a href="${h.route_path('users')}">${_('Users')}</a></li>
 <li class="${h.is_active('user_groups', active)}"><a href="${h.route_path('user_groups')}">${_('User groups')}</a></li>
+<li class="${h.is_active('security', active)}"><a href="${h.route_path('admin_security')}">${_('Security')}</a></li>
 <li class="${h.is_active('artifacts', active)}"><a href="${h.route_path('admin_artifacts')}">${_('Artifacts')}</a></li>
 <li class="${h.is_active('automation', active)}"><a href="${h.route_path('admin_automation')}">${_('Automation')}</a></li>
 <li class="${h.is_active('scheduler', active)}"><a href="${h.route_path('admin_scheduler')}">${_('Scheduler')}</a></li>
@@ -5,22 +5,7 @@
 
 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, **kwargs)">
 % if display_globals:
-<div class="panel panel-default">
-<div class="panel-heading" id="general">
-<h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ¶</a></h3>
-</div>
-<div class="panel-body">
-<div class="field">
-<div class="checkbox">
-${h.checkbox('web_push_ssl' + suffix, 'True')}
-<label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
-</div>
-<div class="label">
-<span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If SSL certificate is missing it will return a HTTP Error 406: Not Acceptable.')}</span>
-</div>
-</div>
-</div>
-</div>
+
 % endif
 
 % if display_globals or repo_type in ['git', 'hg']:
@@ -75,17 +60,6 @@
 % endif
 </div>
 
-% if display_globals:
-<div class="field">
-<div class="input">
-${h.text('largefiles_usercache' + suffix, size=59)}
-</div>
-</div>
-<div class="label">
-<span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
-</div>
-% endif
-
 <div class="checkbox">
 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
@@ -127,17 +101,6 @@
 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
 % endif
 </div>
-
-% if display_globals:
-<div class="field">
-<div class="input">
-${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
-</div>
-</div>
-<div class="label">
-<span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
-</div>
-% endif
 </div>
 </div>
 % endif
@@ -117,7 +117,7 @@ class TestSanitizeVcsSettings(object):
 
     _string_funcs = [
        ('vcs.svn.compatible_version', ''),
-       ('vcs.hooks.protocol', 'http'),
+       ('vcs.hooks.protocol.v2', 'celery'),
        ('vcs.hooks.host', '*'),
        ('vcs.scm_app_implementation', 'http'),
        ('vcs.server', ''),
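
This and several later hunks swap the deprecated `vcs.hooks.protocol` key for `vcs.hooks.protocol.v2`, with the test default moving from `http` to `celery`. A minimal sketch of that kind of key migration over a plain settings dict; the mapping table below is illustrative only, not RhodeCode's actual sanitizer::

    # Illustrative mapping of deprecated setting keys to their v2 replacements.
    DEPRECATED_KEYS = {
        'vcs.hooks.protocol': 'vcs.hooks.protocol.v2',
    }

    def migrate_settings(settings):
        """Return a copy of settings with deprecated keys renamed, values kept."""
        migrated = {}
        for key, value in settings.items():
            migrated[DEPRECATED_KEYS.get(key, key)] = value
        return migrated

    old = {'vcs.hooks.protocol': 'http', 'vcs.hooks.host': '*'}
    assert migrate_settings(old) == {'vcs.hooks.protocol.v2': 'http', 'vcs.hooks.host': '*'}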
@@ -305,7 +305,7 @@ class Fixture(object):
         return r
 
     def destroy_repo(self, repo_name, **kwargs):
-        RepoModel().delete(repo_name, pull_requests='delete', **kwargs)
+        RepoModel().delete(repo_name, pull_requests='delete', artifacts='delete', **kwargs)
         Session().commit()
 
     def destroy_repo_on_filesystem(self, repo_name):
@@ -110,7 +110,7 @@ def ini_config(request, tmpdir_factory, 
        'vcs.server.protocol': 'http',
        'vcs.scm_app_implementation': 'http',
        'vcs.svn.proxy.enabled': 'true',
-       'vcs.hooks.protocol': 'http',
+       'vcs.hooks.protocol.v2': 'celery',
        'vcs.hooks.host': '*',
        'repo_store.path': TESTS_TMP_PATH,
        'app.service_api.token': 'service_secret_token',
@@ -120,7 +120,6 @@ def test_get_config(user_util, baseapp, 
 
    expected_config = [
        ('vcs_svn_tag', 'ff89f8c714d135d865f44b90e5413b88de19a55f', '/tags/*'),
-       ('web', 'push_ssl', 'False'),
        ('web', 'allow_push', '*'),
        ('web', 'allow_archive', 'gz zip bz2'),
        ('web', 'baseurl', '/'),
@@ -239,7 +239,6 @@ class TestShadowRepoExposure(object):
         """
         controller = StubVCSController(
             baseapp.config.get_settings(), request_stub.registry)
-        controller._check_ssl = mock.Mock()
         controller.is_shadow_repo = True
         controller._action = 'pull'
         controller._is_shadow_repo_dir = True
@@ -267,7 +266,6 @@ class TestShadowRepoExposure(object):
         """
         controller = StubVCSController(
             baseapp.config.get_settings(), request_stub.registry)
-        controller._check_ssl = mock.Mock()
         controller.is_shadow_repo = True
         controller._action = 'pull'
         controller._is_shadow_repo_dir = False
@@ -291,7 +289,6 @@ class TestShadowRepoExposure(object):
         """
         controller = StubVCSController(
             baseapp.config.get_settings(), request_stub.registry)
-        controller._check_ssl = mock.Mock()
         controller.is_shadow_repo = True
         controller._action = 'push'
         controller.stub_response_body = (b'dummy body value',)
@@ -399,7 +396,7 @@ class TestGenerateVcsResponse(object):
     def call_controller_with_response_body(self, response_body):
         settings = {
             'base_path': 'fake_base_path',
-            'vcs.hooks.protocol': 'http',
+            'vcs.hooks.protocol.v2': 'celery',
             'vcs.hooks.direct_calls': False,
         }
         registry = AttributeDict()
@@ -371,7 +371,7 @@ class TestMakeDbConfig(object):
             ('section2', 'option2', 'value2'),
             ('section3', 'option3', 'value3'),
         ]
-        with mock.patch.object(utils, 'config_data_from_db') as config_mock:
+        with mock.patch.object(utils, 'prepare_config_data') as config_mock:
             config_mock.return_value = test_data
             kwargs = {'clear_session': False, 'repo': 'test_repo'}
             result = utils.make_db_config(**kwargs)
@@ -381,8 +381,8 @@ class TestMakeDbConfig(object):
         assert value == expected_value
 
 
-class TestConfigDataFromDb(object):
-    def test_config_data_from_db_returns_active_settings(self):
+class TestPrepareConfigData(object):
+    def test_prepare_config_data_returns_active_settings(self):
         test_data = [
             UiSetting('section1', 'option1', 'value1', True),
             UiSetting('section2', 'option2', 'value2', True),
@@ -398,7 +398,7 @@ class TestConfigDataFromDb(object):
         instance_mock = mock.Mock()
         model_mock.return_value = instance_mock
         instance_mock.get_ui_settings.return_value = test_data
-        result = utils.config_data_from_db(
+        result = utils.prepare_config_data(
             clear_session=False, repo=repo_name)
 
         self._assert_repo_name_passed(model_mock, repo_name)
@@ -407,7 +407,8 @@ class TestConfigDataFromDb(object):
             ('section1', 'option1', 'value1'),
             ('section2', 'option2', 'value2'),
         ]
-        assert result == expected_result
+        # We have extra config items returned, so we're ignoring two last items
+        assert result[:2] == expected_result
 
     def _assert_repo_name_passed(self, model_mock, repo_name):
         assert model_mock.call_count == 1
@@ -578,21 +578,9 @@ class TestCreateOrUpdateRepoHgSettings(object):
         assert str(exc_info.value) == 'Repository is not specified'
 
 
-class TestUpdateGlobalSslSetting(object):
-    def test_updates_global_hg_settings(self):
-        model = VcsSettingsModel()
-        with mock.patch.object(model, '_create_or_update_ui') as create_mock:
-            model.update_global_ssl_setting('False')
-            Session().commit()
-
-        create_mock.assert_called_once_with(
-            model.global_settings, 'web', 'push_ssl', value='False')
-
-
 class TestCreateOrUpdateGlobalHgSettings(object):
     FORM_DATA = {
         'extensions_largefiles': False,
-        'largefiles_usercache': '/example/largefiles-store',
         'phases_publish': False,
         'extensions_evolve': False
     }
@@ -605,7 +593,6 @@ class TestCreateOrUpdateGlobalHgSettings(object):
 
         expected_calls = [
             mock.call(model.global_settings, 'extensions', 'largefiles', active=False, value=''),
-            mock.call(model.global_settings, 'largefiles', 'usercache', value='/example/largefiles-store'),
             mock.call(model.global_settings, 'phases', 'publish', value='False'),
             mock.call(model.global_settings, 'extensions', 'evolve', active=False, value=''),
             mock.call(model.global_settings, 'experimental', 'evolution', active=False, value=''),
@@ -632,7 +619,6 @@ class TestCreateOrUpdateGlobalHgSettings(object):
 class TestCreateOrUpdateGlobalGitSettings(object):
     FORM_DATA = {
         'vcs_git_lfs_enabled': False,
-        'vcs_git_lfs_store_location': '/example/lfs-store',
     }
 
     def test_creates_repo_hg_settings_when_data_is_correct(self):
@@ -643,7 +629,6 @@ class TestCreateOrUpdateGlobalGitSettings(object):
 
         expected_calls = [
             mock.call(model.global_settings, 'vcs_git_lfs', 'enabled', active=False, value=False),
-            mock.call(model.global_settings, 'vcs_git_lfs', 'store_location', value='/example/lfs-store'),
         ]
         assert expected_calls == create_mock.call_args_list
 
@@ -1001,9 +986,7 @@ class TestCreateOrUpdateRepoSettings(object):
         'hooks_outgoing_pull_logger': False,
         'extensions_largefiles': False,
         'extensions_evolve': False,
-        'largefiles_usercache': '/example/largefiles-store',
         'vcs_git_lfs_enabled': False,
-        'vcs_git_lfs_store_location': '/',
         'phases_publish': 'False',
         'rhodecode_pr_merge_enabled': False,
         'rhodecode_use_outdated_comments': False,
@@ -449,7 +449,7 @@ class TestPullRequestModel(object):
 @pytest.mark.usefixtures('config_stub')
 class TestIntegrationMerge(object):
     @pytest.mark.parametrize('extra_config', (
-        {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
+        {'vcs.hooks.protocol.v2': 'celery', 'vcs.hooks.direct_calls': False},
     ))
     def test_merge_triggers_push_hooks(
             self, pr_util, user_admin, capture_rcextensions, merge_extras,
@@ -36,7 +36,7 @@ port = 10020
 ; ###########################
 ; GUNICORN APPLICATION SERVER
 ; ###########################
 
-; run with gunicorn
+; run with gunicorn --config gunicorn_conf.py --paste rhodecode.ini
 
 ; Module to use, this setting shouldn't be changed
 use = egg:gunicorn#main
@@ -249,15 +249,56 @@ labs_settings_active = true
 ; optional prefix to Add to email Subject
 #exception_tracker.email_prefix = [RHODECODE ERROR]
 
-; File store configuration. This is used to store and serve uploaded files
-file_store.enabled = true
+; NOTE: this setting IS DEPRECATED:
+; file_store backend is always enabled
+#file_store.enabled = true
 
+; NOTE: this setting IS DEPRECATED:
+; file_store.backend = X -> use `file_store.backend.type = filesystem_v2` instead
 ; Storage backend, available options are: local
-file_store.backend = local
+#file_store.backend = local
 
+; NOTE: this setting IS DEPRECATED:
+; file_store.storage_path = X -> use `file_store.filesystem_v2.storage_path = X` instead
 ; path to store the uploaded binaries and artifacts
-file_store.storage_path = /var/opt/rhodecode_data/file_store
+#file_store.storage_path = /var/opt/rhodecode_data/file_store
+
+; Artifacts file-store, is used to store comment attachments and artifacts uploads.
+; file_store backend type: filesystem_v1, filesystem_v2 or objectstore (s3-based) are available as options
+; filesystem_v1 is backwards compat with pre 5.1 storage changes
+; new installations should choose filesystem_v2 or objectstore (s3-based), pick filesystem when migrating from
+; previous installations to keep the artifacts without a need of migration
+file_store.backend.type = filesystem_v1
+
+; filesystem options...
+file_store.filesystem_v1.storage_path = /var/opt/rhodecode_data/test_artifacts_file_store
+
+; filesystem_v2 options...
+file_store.filesystem_v2.storage_path = /var/opt/rhodecode_data/test_artifacts_file_store_2
+file_store.filesystem_v2.shards = 8
 
+; objectstore options...
+; url for s3 compatible storage that allows to upload artifacts
+; e.g http://minio:9000
+#file_store.backend.type = objectstore
+file_store.objectstore.url = http://s3-minio:9000
+
+; a top-level bucket to put all other shards in
+; objects will be stored in rhodecode-file-store/shard-N based on the bucket_shards number
+file_store.objectstore.bucket = rhodecode-file-store-tests
+
+; number of sharded buckets to create to distribute archives across
+; default is 8 shards
+file_store.objectstore.bucket_shards = 8
+
+; key for s3 auth
+file_store.objectstore.key = s3admin
+
+; secret for s3 auth
+file_store.objectstore.secret = s3secret4
+
+;region for s3 storage
+file_store.objectstore.region = eu-central-1
 
 ; Redis url to acquire/check generation of archives locks
 archive_cache.locking.url = redis://redis:6379/1
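
The new objectstore options spread artifacts across `bucket_shards` sharded buckets named `shard-N` under the top-level bucket. A sketch of how a file uid could be mapped onto a stable shard under these settings; the sha256-modulo scheme is an assumption made for illustration, not necessarily the exact function RhodeCode uses::

    import hashlib

    BUCKET = 'rhodecode-file-store'
    BUCKET_SHARDS = 8  # matches file_store.objectstore.bucket_shards above

    def shard_for(file_uid: str) -> str:
        """Derive a stable shard name for a file uid (illustrative scheme)."""
        digest = hashlib.sha256(file_uid.encode('utf8')).hexdigest()
        shard_number = int(digest, 16) % BUCKET_SHARDS
        return f'{BUCKET}/shard-{shard_number}'

    print(shard_for('0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg'))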
@@ -593,6 +634,7 @@ vcs.scm_app_implementation = http
 ; Push/Pull operations hooks protocol, available options are:
 ; `http` - use http-rpc backend (default)
 ; `celery` - use celery based hooks
+#DEPRECATED:vcs.hooks.protocol = http
 vcs.hooks.protocol = http
 
 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
@@ -626,6 +668,10 @@ vcs.methods.cache = false
 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
 #vcs.svn.compatible_version = 1.8
 
+; Redis connection settings for svn integrations logic
+; This connection string needs to be the same on ce and vcsserver
+vcs.svn.redis_conn = redis://redis:6379/0
+
 ; Enable SVN proxy of requests over HTTP
 vcs.svn.proxy.enabled = true
 
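
Since `vcs.svn.redis_conn` must point both the CE instance and the vcsserver at the same Redis database, a quick connectivity check can confirm the shared URL is reachable from each host. A sketch using the third-party `redis` client (the client choice is an assumption; any Redis client works)::

    import redis

    # Both rhodecode-ce and vcsserver must use the identical connection string,
    # e.g. vcs.svn.redis_conn = redis://redis:6379/0
    conn = redis.Redis.from_url('redis://redis:6379/0')
    conn.ping()  # raises a connection error if the shared Redis is unreachable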
@@ -681,7 +727,8 @@ ssh.authorized_keys_file_path = %(here)s
 ; RhodeCode installation directory.
 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
-ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
+#DEPRECATED: ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
+ssh.wrapper_cmd.v2 = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
 
 ; Allow shell when executing the ssh-wrapper command
 ssh.wrapper_cmd_allow_shell = false
@@ -189,6 +189,7 @@ setup(
         'rc-upgrade-db=rhodecode.lib.rc_commands.upgrade_db:main',
         'rc-ishell=rhodecode.lib.rc_commands.ishell:main',
         'rc-add-artifact=rhodecode.lib.rc_commands.add_artifact:main',
+        'rc-migrate-artifact=rhodecode.lib.rc_commands.migrate_artifact:main',
         'rc-ssh-wrapper=rhodecode.apps.ssh_support.lib.ssh_wrapper_v1:main',
         'rc-ssh-wrapper-v2=rhodecode.apps.ssh_support.lib.ssh_wrapper_v2:main',
     ],
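
The added `rc-migrate-artifact` entry follows the standard setuptools `console_scripts` convention, `name=package.module:function`: installing the package generates an `rc-migrate-artifact` executable that imports the named module and calls its `main()`. A sketch of the minimal shape such a module needs; the body shown is hypothetical, only the wiring pattern is standard::

    # Hypothetical shape of rhodecode/lib/rc_commands/migrate_artifact.py;
    # the real command's behavior is not shown in this diff.
    import sys

    def main():
        # setuptools points the rc-migrate-artifact executable at this function.
        print('migrating artifacts...')
        return 0

    if __name__ == '__main__':
        sys.exit(main())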
@@ -1,1 +0,0 @@
-Example init scripts.
\ No newline at end of file
@@ -1,61 +0,0 @@
-; Sample supervisor RhodeCode config file.
-;
-; For more information on the config file, please see:
-; http://supervisord.org/configuration.html
-;
-; Note: shell expansion ("~" or "$HOME") is not supported. Environment
-; variables can be expanded using this syntax: "%(ENV_HOME)s".
-
-[unix_http_server]
-file=/tmp/supervisor.sock ; (the path to the socket file)
-;chmod=0700 ; socket file mode (default 0700)
-;chown=nobody:nogroup ; socket file uid:gid owner
-;username=user ; (default is no username (open server))
-;password=123 ; (default is no password (open server))
-
-[supervisord]
-logfile=/home/ubuntu/rhodecode/supervisord.log ; (main log file;default $CWD/supervisord.log)
-logfile_maxbytes=50MB ; (max main logfile bytes b4 rotation;default 50MB)
-logfile_backups=10 ; (num of main logfile rotation backups;default 10)
-loglevel=info ; (log level;default info; others: debug,warn,trace)
-pidfile=/home/ubuntu/rhodecode/supervisord.pid ; (supervisord pidfile;default supervisord.pid)
-nodaemon=true ; (start in foreground if true;default false)
-minfds=1024 ; (min. avail startup file descriptors;default 1024)
-minprocs=200 ; (min. avail process descriptors;default 200)
-;umask=022 ; (process file creation umask;default 022)
-user=ubuntu ; (default is current user, required if root)
-;identifier=supervisor ; (supervisord identifier, default is 'supervisor')
-;directory=/tmp ; (default is not to cd during start)
-;nocleanup=true ; (don't clean up tempfiles at start;default false)
-;childlogdir=/tmp ; ('AUTO' child log dir, default $TEMP)
-environment=HOME=/home/ubuntu,LANG=en_US.UTF-8 ; (key value pairs to add to environment)
-;strip_ansi=false ; (strip ansi escape codes in logs; def. false)
-
-; the below section must remain in the config file for RPC
-; (supervisorctl/web interface) to work, additional interfaces may be
-; added by defining them in separate rpcinterface: sections
-[rpcinterface:supervisor]
-supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
-
-[supervisorctl]
-serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a unix socket
-;username=chris ; should be same as http_username if set
-;password=123 ; should be same as http_password if set
-
-
-; restart with supervisorctl restart rhodecode:*
-[program:rhodecode]
-numprocs = 1
-numprocs_start = 5000
-directory=/home/ubuntu/rhodecode/source
-command = /home/ubuntu/rhodecode/venv/bin/paster serve /home/ubuntu/rhodecode/source/prod.ini
-process_name = %(program_name)s_%(process_num)04d
-redirect_stderr=true
-stdout_logfile=/home/ubuntu/rhodecode/rhodecode.log
-
-[program:rhodecode_workers]
-numproces = 1
-directory = /home/ubuntu/rhodecode/source
-command = /home/ubuntu/rhodecode/venv/bin/paster celeryd /home/ubuntu/rhodecode/source/prod.ini --autoscale=10,2
-redirect_stderr=true
-stdout_logfile=/%(here)s/rhodecode_workers.log
@@ -1,268 +0,0 @@
-# Copyright (C) 2016-2023 RhodeCode GmbH
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License, version 3
-# (only), as published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#
-# This program is dual-licensed. If you wish to learn more about the
-# RhodeCode Enterprise Edition, including its added features, Support services,
-# and proprietary license terms, please see https://rhodecode.com/licenses/
-
-import os
-import time
-import errno
-import hashlib
-
-from rhodecode.lib.ext_json import json
-from rhodecode.apps.file_store import utils
-from rhodecode.apps.file_store.extensions import resolve_extensions
-from rhodecode.apps.file_store.exceptions import (
-    FileNotAllowedException, FileOverSizeException)
-
-METADATA_VER = 'v1'
-
-
-def safe_make_dirs(dir_path):
-    if not os.path.exists(dir_path):
-        try:
-            os.makedirs(dir_path)
-        except OSError as e:
-            if e.errno != errno.EEXIST:
-                raise
-        return
-
-
-class LocalFileStorage(object):
-
-    @classmethod
-    def apply_counter(cls, counter, filename):
-        name_counted = '%d-%s' % (counter, filename)
-        return name_counted
-
-    @classmethod
-    def resolve_name(cls, name, directory):
-        """
-        Resolves a unique name and the correct path. If a filename
-        for that path already exists then a numeric prefix with values > 0 will be
-        added, for example test.jpg -> 1-test.jpg etc. initially file would have 0 prefix.
-
-        :param name: base name of file
-        :param directory: absolute directory path
-        """
-
-        counter = 0
-        while True:
-            name_counted = cls.apply_counter(counter, name)
-
-            # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file
-            sub_store = cls._sub_store_from_filename(name_counted)
-            sub_store_path = os.path.join(directory, sub_store)
-            safe_make_dirs(sub_store_path)
-
-            path = os.path.join(sub_store_path, name_counted)
-            if not os.path.exists(path):
-                return name_counted, path
-            counter += 1
-
-    @classmethod
-    def _sub_store_from_filename(cls, filename):
-        return filename[:2]
-
-    @classmethod
-    def calculate_path_hash(cls, file_path):
-        """
-        Efficient calculation of file_path sha256 sum
-
-        :param file_path:
-        :return: sha256sum
-        """
-        digest = hashlib.sha256()
-        with open(file_path, 'rb') as f:
-            for chunk in iter(lambda: f.read(1024 * 100), b""):
-                digest.update(chunk)
-
-        return digest.hexdigest()
-
-    def __init__(self, base_path, extension_groups=None):
-
-        """
-        Local file storage
-
-        :param base_path: the absolute base path where uploads are stored
-        :param extension_groups: extensions string
-        """
-
-        extension_groups = extension_groups or ['any']
-        self.base_path = base_path
-        self.extensions = resolve_extensions([], groups=extension_groups)
-
-    def __repr__(self):
-        return f'{self.__class__}@{self.base_path}'
-
-    def store_path(self, filename):
-        """
-        Returns absolute file path of the filename, joined to the
-        base_path.
-
-        :param filename: base name of file
-        """
-        prefix_dir = ''
-        if '/' in filename:
-            prefix_dir, filename = filename.split('/')
-            sub_store = self._sub_store_from_filename(filename)
-        else:
-            sub_store = self._sub_store_from_filename(filename)
-        return os.path.join(self.base_path, prefix_dir, sub_store, filename)
-
-    def delete(self, filename):
-        """
-        Deletes the filename. Filename is resolved with the
-        absolute path based on base_path. If file does not exist,
-        returns **False**, otherwise **True**
-
-        :param filename: base name of file
-        """
-        if self.exists(filename):
-            os.remove(self.store_path(filename))
-            return True
-        return False
-
-    def exists(self, filename):
-        """
-        Checks if file exists. Resolves filename's absolute
-        path based on base_path.
-
-        :param filename: file_uid name of file, e.g 0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg
-        """
-        return os.path.exists(self.store_path(filename))
-
-    def filename_allowed(self, filename, extensions=None):
-        """Checks if a filename has an allowed extension
-
-        :param filename: base name of file
-        :param extensions: iterable of extensions (or self.extensions)
-        """
-        _, ext = os.path.splitext(filename)
-        return self.extension_allowed(ext, extensions)
-
-    def extension_allowed(self, ext, extensions=None):
-        """
-        Checks if an extension is permitted. Both e.g. ".jpg" and
-        "jpg" can be passed in. Extension lookup is case-insensitive.
-
-        :param ext: extension to check
-        :param extensions: iterable of extensions to validate against (or self.extensions)
-        """
-        def normalize_ext(_ext):
-            if _ext.startswith('.'):
-                _ext = _ext[1:]
-            return _ext.lower()
-
-        extensions = extensions or self.extensions
-        if not extensions:
-            return True
-
-        ext = normalize_ext(ext)
-
-        return ext in [normalize_ext(x) for x in extensions]
-
-    def save_file(self, file_obj, filename, directory=None, extensions=None,
-                  extra_metadata=None, max_filesize=None, randomized_name=True, **kwargs):
-        """
-        Saves a file object to the uploads location.
-        Returns the resolved filename, i.e. the directory +
-        the (randomized/incremented) base name.
-
-        :param file_obj: **cgi.FieldStorage** object (or similar)
-        :param filename: original filename
-        :param directory: relative path of sub-directory
-        :param extensions: iterable of allowed extensions, if not default
-        :param max_filesize: maximum size of file that should be allowed
-        :param randomized_name: generate random generated UID or fixed based on the filename
-        :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix
-
-        """
-
-        extensions = extensions or self.extensions
-
-        if not self.filename_allowed(filename, extensions):
-            raise FileNotAllowedException()
-
-        if directory:
-            dest_directory = os.path.join(self.base_path, directory)
-        else:
-            dest_directory = self.base_path
-
-        safe_make_dirs(dest_directory)
-
-        uid_filename = utils.uid_filename(filename, randomized=randomized_name)
-
-        # resolve also produces special sub-dir for file optimized store
-        filename, path = self.resolve_name(uid_filename, dest_directory)
-        stored_file_dir = os.path.dirname(path)
-
-        no_body_seek = kwargs.pop('no_body_seek', False)
-        if no_body_seek:
-            pass
-        else:
-            file_obj.seek(0)
-
-        with open(path, "wb") as dest:
-            length = 256 * 1024
-            while 1:
-                buf = file_obj.read(length)
-                if not buf:
-                    break
-                dest.write(buf)
-
-        metadata = {}
-        if extra_metadata:
-            metadata = extra_metadata
-
-        size = os.stat(path).st_size
-
-        if max_filesize and size > max_filesize:
-            # free up the copied file, and raise exc
-            os.remove(path)
-            raise FileOverSizeException()
-
-        file_hash = self.calculate_path_hash(path)
-
-        metadata.update({
-            "filename": filename,
-            "size": size,
-            "time": time.time(),
-            "sha256": file_hash,
-            "meta_ver": METADATA_VER
-        })
-
-        filename_meta = filename + '.meta'
-        with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta:
-            dest_meta.write(json.dumps(metadata))
-
-        if directory:
-            filename = os.path.join(directory, filename)
-
-        return filename, metadata
-
-    def get_metadata(self, filename, ignore_missing=False):
-        """
-        Reads JSON stored metadata for a file
-
-        :param filename:
-        :return:
-        """
-        filename = self.store_path(filename)
-        filename_meta = filename + '.meta'
-        if ignore_missing and not os.path.isfile(filename_meta):
-            return {}
-        with open(filename_meta, "rb") as source_meta:
-            return json.loads(source_meta.read())
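
For reference, the `LocalFileStorage` class removed above (superseded by the filesystem_v1/filesystem_v2/objectstore backends configured earlier) could be exercised roughly as below on pre-5.1 code. The import path and the sample payload are assumptions, since the deleted file's path is not shown in this diff::

    import io
    # Assumed pre-5.1 import path for the module deleted above.
    from rhodecode.apps.file_store.local_store import LocalFileStorage

    store = LocalFileStorage(base_path='/var/opt/rhodecode_data/file_store')
    # save_file() resolves a uid filename, writes a .meta JSON sidecar,
    # and returns the stored name plus the collected metadata.
    uid_name, metadata = store.save_file(
        io.BytesIO(b'example artifact payload'), 'report.txt')
    print(uid_name, metadata['sha256'])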