@@ -1,857 +1,858 @@

; #########################################
; RHODECODE COMMUNITY EDITION CONFIGURATION
; #########################################

[DEFAULT]
; Debug flag sets all loggers to debug, and enables request tracking
debug = true

; ########################################################################
; EMAIL CONFIGURATION
; These settings will be used by the RhodeCode mailing system
; ########################################################################

; prefix all email subjects with the given prefix, helps filtering out emails
#email_prefix = [RhodeCode]

; email FROM address from which all mails will be sent
#app_email_from = rhodecode-noreply@localhost

#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true
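; A minimal outgoing-mail sketch using the keys above (placeholder host,
; port and credentials, not shipped defaults):
#smtp_server = mail.example.com
#smtp_port = 587
#smtp_use_tls = true
#smtp_username = rhodecode
#smtp_password = secret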

[server:main]
; COMMON HOST/IP CONFIG. This applies mostly to the develop setup;
; host and port for gunicorn are controlled by gunicorn_conf.py
host = 127.0.0.1
port = 10020


; ###########################
; GUNICORN APPLICATION SERVER
; ###########################

; run with gunicorn --config gunicorn_conf.py --paste rhodecode.ini

; Module to use, this setting shouldn't be changed
use = egg:gunicorn#main

; Prefix middleware for RhodeCode.
; recommended when using a proxy setup.
; allows serving RhodeCode under a prefix on the server,
; eg https://server.com/custom_prefix. Enable the `filter-with =` option below as well
; and set your prefix like: `prefix = /custom_prefix`.
; Be sure to also set beaker.session.cookie_path = /custom_prefix if you need
; to make your cookies only work on the prefix url.
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /
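; For example, to serve the application under https://server.com/custom_prefix
; (a sketch of the steps described in the comments above):
#prefix = /custom_prefix
; then also uncomment `filter-with = proxy-prefix` in [app:main] and set
; `beaker.session.cookie_path = /custom_prefix` in the BEAKER SESSION section.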

[app:main]
; The %(here)s variable will be replaced with the absolute path of the parent
; directory of this file
; Each option in app:main can be overridden by an environment variable
;
;To override an option:
;
;RC_<KeyName>
;Everything should be uppercase, . and - should be replaced by _.
;For example, if you have these configuration settings:
;rc_cache.repo_object.backend = foo
;can be overridden by
;export RC_CACHE_REPO_OBJECT_BACKEND=foo
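;Following the same rule, a hypothetical override of the `use_celery = true`
;option defined further below would presumably be:
;export RC_USE_CELERY=false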

use = egg:rhodecode-enterprise-ce

; enable proxy prefix middleware, defined above
#filter-with = proxy-prefix

; #############
; DEBUG OPTIONS
; #############

pyramid.reload_templates = true

# During development we want to have the debug toolbar enabled
pyramid.includes =
    pyramid_debugtoolbar

debugtoolbar.hosts = 0.0.0.0/0
debugtoolbar.exclude_prefixes =
    /css
    /fonts
    /images
    /js

## RHODECODE PLUGINS ##
rhodecode.includes =
    rhodecode.api


# api prefix url
rhodecode.api.url = /_admin/api

; enable debug style page
debug_style = true

; #################
; END DEBUG OPTIONS
; #################

; encryption key used to encrypt social plugin tokens,
; remote_urls with credentials etc, if not set it defaults to
; `beaker.session.secret`
#rhodecode.encrypted_values.secret =

; decryption strict mode (enabled by default). It controls if decryption raises
; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
#rhodecode.encrypted_values.strict = false

; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
; fernet is safer, and we strongly recommend switching to it.
; Due to backward compatibility aes is used as default.
#rhodecode.encrypted_values.algorithm = fernet

; Return gzipped responses from RhodeCode (static files/application)
gzip_responses = false

; Auto-generate javascript routes file on startup
generate_js_files = false

; System global default language.
; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

; Perform a full repository scan and import on each server start.
; Setting this to true could lead to a very long startup time.
startup.import_repos = false

; URL at which the application is running. This is used for Bootstrapping
; requests in context when no web request is available. Used in ishell, or
; SSH calls. Set this for events to receive proper url for SSH calls.
app.base_url = http://rhodecode.local

; Host at which the Service API is running.
app.service_api.host = http://rhodecode.local:10020

; Secret for Service API authentication.
app.service_api.token =

; Unique application ID. Should be a random unique string for security.
app_instance_uuid = rc-production

; Cut off limit for large diffs (size in bytes). If the overall diff size of a
; commit or pull request exceeds this limit, the diff will be displayed
; partially. E.g 512000 == 512Kb
cut_off_limit_diff = 512000

; Cut off limit for large files inside diffs (size in bytes). Each individual
; file inside a diff which exceeds this limit will be displayed partially.
; E.g 128000 == 128Kb
cut_off_limit_file = 128000
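; For instance, raising the whole-diff limit to roughly 1 MB while keeping the
; per-file limit at 128Kb would be (an illustrative value, not a default):
#cut_off_limit_diff = 1048576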

; Use cached version of vcs repositories everywhere. Recommended to be `true`
vcs_full_cache = true

; Force https in RhodeCode, fixes https redirects, assumes it's always https.
; Normally this is controlled by proper flags sent from an http server such as Nginx or Apache
force_https = false

; use Strict-Transport-Security headers
use_htsts = false

; Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

; RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

; gist URL alias, used to create nicer urls for gists. This should be an
; url that does rewrites to _admin/gists/{gistid}.
; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
gist_alias_url =

; List of views (using glob pattern syntax) that AUTH TOKENS could be
; used for access.
; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
; came from the logged-in user who owns this authentication token.
; Additionally the @TOKEN syntax can be used to bind the view to a specific
; authentication token. Such a view would only be accessible when used together
; with this authentication token.
; A list of all views can be found under `/_admin/permissions/auth_token_access`
; The list should be "," separated and on a single line.
; Most common views to enable:

# RepoCommitsView:repo_commit_download
# RepoCommitsView:repo_commit_patch
# RepoCommitsView:repo_commit_raw
# RepoCommitsView:repo_commit_raw@TOKEN
# RepoFilesView:repo_files_diff
# RepoFilesView:repo_archivefile
# RepoFilesView:repo_file_raw
# GistView:*
api_access_controllers_whitelist =
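; e.g. to allow token-authenticated raw file, archive and gist access only
; (an illustrative value assembled from the view names listed above):
#api_access_controllers_whitelist = RepoFilesView:repo_file_raw, RepoFilesView:repo_archivefile, GistView:*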

; Default encoding used to convert from and to unicode
; can also be a comma separated list of encodings in case of mixed encodings
default_encoding = UTF-8

; instance-id prefix
; a prefix key for this instance used for cache invalidation when running
; multiple instances of RhodeCode, make sure it's globally unique for
; all running RhodeCode instances. Leave empty if you don't use it
instance_id =

; Fallback authentication plugin. Set this to a plugin ID to force the usage
; of an authentication plugin even if it is disabled by its settings.
; This could be useful if you are unable to log in to the system due to broken
; authentication settings. Then you can enable e.g. the internal RhodeCode auth
; module to log in again and fix the settings.
; Available builtin plugin IDs (hash is part of the ID):
; egg:rhodecode-enterprise-ce#rhodecode
; egg:rhodecode-enterprise-ce#pam
; egg:rhodecode-enterprise-ce#ldap
; egg:rhodecode-enterprise-ce#jasig_cas
; egg:rhodecode-enterprise-ce#headers
; egg:rhodecode-enterprise-ce#crowd

#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode

; Flag to control loading of legacy plugins in py:/path format
auth_plugin.import_legacy_plugins = true

; alternative return HTTP code for failed authentication. The default HTTP
; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
; handling that, causing a series of failed authentication calls.
; Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
; This will be served instead of the default 401 on bad authentication
auth_ret_code =

; use special detection method when serving auth_ret_code, instead of serving
; ret_code directly, use 401 initially (which triggers a credentials prompt)
; and then serve auth_ret_code to clients
auth_ret_code_detection = false

; locking return code. When a repository is locked return this HTTP code. 2XX
; codes don't break the transactions while 4XX codes do
lock_ret_code = 423

; Filesystem location where repositories should be stored
repo_store.path = /var/opt/rhodecode_repo_store

; allows setting up custom hooks in the settings page
allow_custom_hooks_settings = true

; Generated license token required for EE edition license.
; New generated token value can be found in Admin > settings > license page.
license_token =

; This flag hides sensitive information on the license page such as token, and license data
license.hide_license_info = false

; supervisor connection uri, for managing supervisor and logs.
supervisor.uri =

; supervisord group name/id that we only want this RC instance to handle
supervisor.group_id = dev

; Display extended labs settings
labs_settings_active = true

; Custom exception store path, defaults to TMPDIR
; This is used to store exceptions from RhodeCode in a shared directory
#exception_tracker.store_path =

; Send email with exception details when it happens
#exception_tracker.send_email = false

; Comma separated list of recipients for exception emails,
; e.g. admin@rhodecode.com,devops@rhodecode.com
; Can be left empty, then emails will be sent to ALL super-admins
#exception_tracker.send_email_recipients =

; optional prefix to add to the email subject
#exception_tracker.email_prefix = [RHODECODE ERROR]

; File store configuration. This is used to store and serve uploaded files
file_store.enabled = true

; Storage backend, available options are: local
file_store.backend = local

; path to store the uploaded binaries and artifacts
file_store.storage_path = /var/opt/rhodecode_data/file_store


; Redis url to acquire/check generation of archives locks
archive_cache.locking.url = redis://redis:6379/1

; Storage backend, only 'filesystem' and 'objectstore' are available now
archive_cache.backend.type = filesystem

; url for s3-compatible storage that allows uploading artifacts
; e.g. http://minio:9000
archive_cache.objectstore.url = http://s3-minio:9000

; key for s3 auth
archive_cache.objectstore.key = key

; secret for s3 auth
archive_cache.objectstore.secret = secret

; region for s3 storage
archive_cache.objectstore.region = eu-central-1

; number of sharded buckets to create to distribute archives across
; default is 8 shards
archive_cache.objectstore.bucket_shards = 8

; a top-level bucket to put all other shards in
; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
archive_cache.objectstore.bucket = rhodecode-archive-cache

; if true, this cache will retry up to retry_attempts=N times, waiting retry_backoff time between tries
archive_cache.objectstore.retry = false

; number of seconds to wait before the next try when using retry
archive_cache.objectstore.retry_backoff = 1

; how many times to retry a fetch from this backend
archive_cache.objectstore.retry_attempts = 10

; Default is $cache_dir/archive_cache if not set
; Generated repo archives will be cached at this location
; and served from the cache during subsequent requests for the same archive of
; the repository. It is important that this path is shared across filesystems and between
; RhodeCode and vcsserver
archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache

; The limit in GB sets how much data we cache before recycling the least recently used entries, defaults to 10 GB
archive_cache.filesystem.cache_size_gb = 1

; Eviction policy used to clear out entries after the cache_size_gb limit is reached
archive_cache.filesystem.eviction_policy = least-recently-stored

; By default the cache uses a sharding technique, this specifies how many shards there are
; default is 8 shards
archive_cache.filesystem.cache_shards = 8

; if true, this cache will retry up to retry_attempts=N times, waiting retry_backoff time between tries
archive_cache.filesystem.retry = false

; number of seconds to wait before the next try when using retry
archive_cache.filesystem.retry_backoff = 1

; how many times to retry a fetch from this backend
archive_cache.filesystem.retry_attempts = 10

; #############
; CELERY CONFIG
; #############

; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini

use_celery = true

; path to store schedule database
#celerybeat-schedule.path =

; connection url to the message broker (default redis)
celery.broker_url = redis://redis:6379/8

; results backend to get results for (default redis)
celery.result_backend = redis://redis:6379/8

; rabbitmq example
#celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost

; maximum tasks to execute before worker restart
celery.max_tasks_per_child = 20

; tasks will never be sent to the queue, but executed locally instead.
celery.task_always_eager = false

; #############
; DOGPILE CACHE
; #############

; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; eg. /tmpfs/data_ramdisk, however this directory might require a large amount of space
cache_dir = /var/opt/rhodecode_data

; *********************************************
; `sql_cache_short` cache for heavy SQL queries
; Only supported backend is `memory_lru`
; *********************************************
rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
rc_cache.sql_cache_short.expiration_time = 30


; *****************************************************
; `cache_repo_longterm` cache for repo object instances
; Only supported backend is `memory_lru`
; *****************************************************
rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
; by default we use 30 Days, cache is still invalidated on push
rc_cache.cache_repo_longterm.expiration_time = 2592000
; max items in the LRU cache, set to a smaller number to save memory and expire the least recently used caches
rc_cache.cache_repo_longterm.max_size = 10000


; *********************************************
; `cache_general` cache for general purpose use
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *********************************************
rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_general.expiration_time = 43200
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_general.arguments.filename = /tmp/cache_general_db

; alternative `cache_general` redis backend with distributed lock
#rc_cache.cache_general.backend = dogpile.cache.rc.redis
#rc_cache.cache_general.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_general.arguments.redis_expiration_time = 7200

#rc_cache.cache_general.arguments.host = localhost
#rc_cache.cache_general.arguments.port = 6379
#rc_cache.cache_general.arguments.db = 0
#rc_cache.cache_general.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_general.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_general.arguments.lock_auto_renewal = true

; *************************************************
; `cache_perms` cache for permission tree, auth TTL
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *************************************************
rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_perms.expiration_time = 3600
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db

; alternative `cache_perms` redis backend with distributed lock
#rc_cache.cache_perms.backend = dogpile.cache.rc.redis
#rc_cache.cache_perms.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_perms.arguments.redis_expiration_time = 7200

#rc_cache.cache_perms.arguments.host = localhost
#rc_cache.cache_perms.arguments.port = 6379
#rc_cache.cache_perms.arguments.db = 0
#rc_cache.cache_perms.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_perms.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_perms.arguments.lock_auto_renewal = true

; ***************************************************
; `cache_repo` cache for file tree, Readme, RSS FEEDS
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; ***************************************************
rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_repo.expiration_time = 2592000
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db

; alternative `cache_repo` redis backend with distributed lock
#rc_cache.cache_repo.backend = dogpile.cache.rc.redis
#rc_cache.cache_repo.expiration_time = 2592000

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_repo.arguments.redis_expiration_time = 2678400

#rc_cache.cache_repo.arguments.host = localhost
#rc_cache.cache_repo.arguments.port = 6379
#rc_cache.cache_repo.arguments.db = 1
#rc_cache.cache_repo.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_repo.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_repo.arguments.lock_auto_renewal = true

; ##############
; BEAKER SESSION
; ##############

; beaker.session.type is the type of storage used for the logged-in users' sessions. Currently allowed
; types are file, ext:redis, ext:database, ext:memcached
; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
#beaker.session.type = file
#beaker.session.data_dir = %(here)s/data/sessions

; Redis based sessions
beaker.session.type = ext:redis
beaker.session.url = redis://redis:6379/2

; DB based session, fast, and allows easy management over logged-in users
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = develop-rc-uytcxaz
beaker.session.lock_dir = /data_ramdisk/lock

; Secure encrypted cookie. Requires AES and AES python libraries
; you must disable beaker.session.secret to use this
#beaker.session.encrypt_key = key_for_encryption
#beaker.session.validate_key = validation_key

; Sets the session as invalid (also logging out the user) if it has not been
; accessed for the given amount of time in seconds
beaker.session.timeout = 2592000
beaker.session.httponly = true

; Path to use for the cookie. Set to prefix if you use prefix middleware
#beaker.session.cookie_path = /custom_prefix

; Set https secure cookie
beaker.session.secure = false

; default cookie expiration time in seconds, set to `true` to expire
; at browser close
#beaker.session.cookie_expires = 3600

; #############################
; SEARCH INDEXING CONFIGURATION
; #############################

; Full text search indexer is available in rhodecode-tools under
; `rhodecode-tools index` command

; WHOOSH Backend, doesn't require additional services to run
; it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index

; ####################
; CHANNELSTREAM CONFIG
; ####################

; channelstream enables persistent connections and live notifications
; in the system. It's also used by the chat system

channelstream.enabled = true

; server address for channelstream server on the backend
channelstream.server = channelstream:9800

; location of the channelstream server from the outside world
; use ws:// for http or wss:// for https. This address needs to be handled
; by an external HTTP server such as Nginx or Apache
; see Nginx/Apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
channelstream.secret = ENV_GENERATED
channelstream.history.location = /var/opt/rhodecode_data/channelstream_history

; Internal application path that Javascript uses to connect into.
; If you use proxy-prefix the prefix should be added before /_channelstream
channelstream.proxy_path = /_channelstream


; ##############################
; MAIN RHODECODE DATABASE CONFIG
; ##############################

#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
; pymysql is an alternative driver for MySQL, use in case of problems with default one
#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode

sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30

; see sqlalchemy docs for other advanced settings
; print the sql statements to output
sqlalchemy.db1.echo = false

; recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600

; the number of connections to keep open inside the connection pool.
; 0 indicates no limit
; the general calculus with gevent is:
; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
; then increase pool size + max overflow so that they add up to 500.
#sqlalchemy.db1.pool_size = 5

; The number of connections to allow in connection pool "overflow", that is
; connections that can be opened above and beyond the pool_size setting,
; which defaults to five.
#sqlalchemy.db1.max_overflow = 10
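; e.g. a hypothetical sizing for 500 concurrent greenlets that all do database
; access, following the pool_size + max_overflow = 500 rule above:
#sqlalchemy.db1.pool_size = 100
#sqlalchemy.db1.max_overflow = 400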

; Connection check ping, used to detect broken database connections
; could be enabled to better handle 'MySQL has gone away' errors
#sqlalchemy.db1.ping_connection = true

; ##########
; VCS CONFIG
; ##########
vcs.server.enable = true
vcs.server = vcsserver:10010

; Web server connectivity protocol, responsible for web based VCS operations
; Available protocols are:
; `http` - use http-rpc backend (default)
vcs.server.protocol = http

; Push/Pull operations protocol, available options are:
; `http` - use http-rpc backend (default)
vcs.scm_app_implementation = http

; Push/Pull operations hooks protocol, available options are:
; `http` - use http-rpc backend (default)
; `celery` - use celery based hooks
-vcs.hooks.protocol = http
+#DEPRECATED:vcs.hooks.protocol = http
+vcs.hooks.protocol.v2 = celery
|
629 | |||
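A hedged sketch of the same migration for an instance that keeps HTTP-based hooks rather than celery, assuming the `http` option listed above remains a valid value for the v2 key (not confirmed by this hunk):

#DEPRECATED:vcs.hooks.protocol = http
vcs.hooks.protocol.v2 = http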
629 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be |
|
630 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be | |
630 | ; accessible via network. |
|
631 | ; accessible via network. | |
631 | ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker) |
|
632 | ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker) | |
632 | vcs.hooks.host = * |
|
633 | vcs.hooks.host = * | |
633 |
|
634 | |||
634 | ; Start VCSServer with this instance as a subprocess, useful for development |
|
635 | ; Start VCSServer with this instance as a subprocess, useful for development | |
635 | vcs.start_server = false |
|
636 | vcs.start_server = false | |
636 |
|
637 | |||
637 | ; List of enabled VCS backends, available options are: |
|
638 | ; List of enabled VCS backends, available options are: | |
638 | ; `hg` - mercurial |
|
639 | ; `hg` - mercurial | |
639 | ; `git` - git |
|
640 | ; `git` - git | |
640 | ; `svn` - subversion |
|
641 | ; `svn` - subversion | |
641 | vcs.backends = hg, git, svn |
|
642 | vcs.backends = hg, git, svn | |
642 |
|
643 | |||
643 | ; Wait this number of seconds before killing connection to the vcsserver |
|
644 | ; Wait this number of seconds before killing connection to the vcsserver | |
644 | vcs.connection_timeout = 3600 |
|
645 | vcs.connection_timeout = 3600 | |
645 |
|
646 | |||
646 | ; Cache flag to cache vcsserver remote calls locally |
|
647 | ; Cache flag to cache vcsserver remote calls locally | |
647 | ; It uses cache_region `cache_repo` |
|
648 | ; It uses cache_region `cache_repo` | |
648 | vcs.methods.cache = true |
|
649 | vcs.methods.cache = true | |
649 |
|
650 | |||
650 | ; #################################################### |
|
651 | ; #################################################### | |
651 | ; Subversion proxy support (mod_dav_svn) |
|
652 | ; Subversion proxy support (mod_dav_svn) | |
652 | ; Maps RhodeCode repo groups into SVN paths for Apache |
|
653 | ; Maps RhodeCode repo groups into SVN paths for Apache | |
653 | ; #################################################### |
|
654 | ; #################################################### | |
654 |
|
655 | |||
655 | ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
656 | ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out. | |
656 | ; Set a numeric version for your current SVN e.g 1.8, or 1.12 |
|
657 | ; Set a numeric version for your current SVN e.g 1.8, or 1.12 | |
657 | ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible |
|
658 | ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible | |
658 | #vcs.svn.compatible_version = 1.8 |
|
659 | #vcs.svn.compatible_version = 1.8 | |
659 |
|
660 | |||
660 | ; Redis connection settings for svn integrations logic |
|
661 | ; Redis connection settings for svn integrations logic | |
661 | ; This connection string needs to be the same on ce and vcsserver |
|
662 | ; This connection string needs to be the same on ce and vcsserver | |
662 | vcs.svn.redis_conn = redis://redis:6379/0 |
|
663 | vcs.svn.redis_conn = redis://redis:6379/0 | |
663 |
|
664 | |||
664 | ; Enable SVN proxy of requests over HTTP |
|
665 | ; Enable SVN proxy of requests over HTTP | |
665 | vcs.svn.proxy.enabled = true |
|
666 | vcs.svn.proxy.enabled = true | |
666 |
|
667 | |||
667 | ; host to connect to running SVN subsystem |
|
668 | ; host to connect to running SVN subsystem | |
668 | vcs.svn.proxy.host = http://svn:8090 |
|
669 | vcs.svn.proxy.host = http://svn:8090 | |
669 |
|
670 | |||
670 | ; Enable or disable the config file generation. |
|
671 | ; Enable or disable the config file generation. | |
671 | svn.proxy.generate_config = true |
|
672 | svn.proxy.generate_config = true | |
672 |
|
673 | |||
673 | ; Generate config file with `SVNListParentPath` set to `On`. |
|
674 | ; Generate config file with `SVNListParentPath` set to `On`. | |
674 | svn.proxy.list_parent_path = true |
|
675 | svn.proxy.list_parent_path = true | |
675 |
|
676 | |||
676 | ; Set location and file name of generated config file. |
|
677 | ; Set location and file name of generated config file. | |
677 | svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf |
|
678 | svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf | |
678 |
|
679 | |||
679 | ; alternative mod_dav config template. This needs to be a valid mako template |
|
680 | ; alternative mod_dav config template. This needs to be a valid mako template | |
680 | ; Example template can be found in the source code: |
|
681 | ; Example template can be found in the source code: | |
681 | ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako |
|
682 | ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako | |
682 | #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako |
|
683 | #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako | |
683 |
|
684 | |||
684 | ; Used as a prefix to the `Location` block in the generated config file. |
|
685 | ; Used as a prefix to the `Location` block in the generated config file. | |
685 | ; In most cases it should be set to `/`. |
|
686 | ; In most cases it should be set to `/`. | |
686 | svn.proxy.location_root = / |
|
687 | svn.proxy.location_root = / | |
687 |
|
688 | |||
688 | ; Command to reload the mod dav svn configuration on change. |
|
689 | ; Command to reload the mod dav svn configuration on change. | |
689 | ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh |
|
690 | ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh | |
690 | ; Make sure user who runs RhodeCode process is allowed to reload Apache |
|
691 | ; Make sure user who runs RhodeCode process is allowed to reload Apache | |
691 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload |
|
692 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload | |
692 |
|
693 | |||
693 | ; If the timeout expires before the reload command finishes, the command will |
|
694 | ; If the timeout expires before the reload command finishes, the command will | |
694 | ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds. |
|
695 | ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds. | |
695 | #svn.proxy.reload_timeout = 10 |
|
696 | #svn.proxy.reload_timeout = 10 | |
696 |
|
697 | |||
697 | ; #################### |
|
698 | ; #################### | |
698 | ; SSH Support Settings |
|
699 | ; SSH Support Settings | |
699 | ; #################### |
|
700 | ; #################### | |
700 |
|
701 | |||
701 | ; Defines if a custom authorized_keys file should be created and written on |
|
702 | ; Defines if a custom authorized_keys file should be created and written on | |
702 | ; any change of user ssh keys. Setting this to false also disables the possibility |
|
703 | ; any change of user ssh keys. Setting this to false also disables the possibility | |
703 | ; of adding SSH keys by users from the web interface. Super admins can still |
|
704 | ; of adding SSH keys by users from the web interface. Super admins can still | |
704 | ; manage SSH Keys. |
|
705 | ; manage SSH Keys. | |
705 | ssh.generate_authorized_keyfile = true |
|
706 | ssh.generate_authorized_keyfile = true | |
706 |
|
707 | |||
707 | ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` |
|
708 | ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` | |
708 | # ssh.authorized_keys_ssh_opts = |
|
709 | # ssh.authorized_keys_ssh_opts = | |
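To pin the documented default explicitly instead of relying on it implicitly, a sketch using the options copied from the comment above:

ssh.authorized_keys_ssh_opts = no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding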
709 |
|
710 | |||
710 | ; Path to the authorized_keys file where the generated entries are placed. |
|
711 | ; Path to the authorized_keys file where the generated entries are placed. | |
711 | ; It is possible to have multiple key files specified in `sshd_config` e.g. |
|
712 | ; It is possible to have multiple key files specified in `sshd_config` e.g. | |
712 | ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode |
|
713 | ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode | |
713 | ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode |
|
714 | ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode | |
714 |
|
715 | |||
715 | ; Command to execute the SSH wrapper. The binary is available in the |
|
716 | ; Command to execute the SSH wrapper. The binary is available in the | |
716 | ; RhodeCode installation directory. |
|
717 | ; RhodeCode installation directory. | |
717 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
718 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
718 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 |
|
719 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 | |
719 | #DEPRECATED: ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
720 | #DEPRECATED: ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
720 | ssh.wrapper_cmd.v2 = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 |
|
721 | ssh.wrapper_cmd.v2 = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 | |
721 |
|
722 | |||
722 | ; Allow shell when executing the ssh-wrapper command |
|
723 | ; Allow shell when executing the ssh-wrapper command | |
723 | ssh.wrapper_cmd_allow_shell = false |
|
724 | ssh.wrapper_cmd_allow_shell = false | |
724 |
|
725 | |||
725 | ; Enables logging, and detailed output sent back to the client during SSH |
|
726 | ; Enables logging, and detailed output sent back to the client during SSH | |
726 | ; operations. Useful for debugging, shouldn't be used in production. |
|
727 | ; operations. Useful for debugging, shouldn't be used in production. | |
727 | ssh.enable_debug_logging = true |
|
728 | ssh.enable_debug_logging = true | |
728 |
|
729 | |||
729 | ; Paths to binary executables; by default they are just the names, but we can |
|
730 | ; Paths to binary executables; by default they are just the names, but we can | |
730 | ; override them if we want to use a custom one |
|
731 | ; override them if we want to use a custom one | |
731 | ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg |
|
732 | ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg | |
732 | ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git |
|
733 | ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git | |
733 | ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve |
|
734 | ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve | |
734 |
|
735 | |||
735 | ; Enables SSH key generator web interface. Disabling this still allows users |
|
736 | ; Enables SSH key generator web interface. Disabling this still allows users | |
736 | ; to add their own keys. |
|
737 | ; to add their own keys. | |
737 | ssh.enable_ui_key_generator = true |
|
738 | ssh.enable_ui_key_generator = true | |
738 |
|
739 | |||
739 | ; Statsd client config, this is used to send metrics to statsd |
|
740 | ; Statsd client config, this is used to send metrics to statsd | |
740 | ; We recommend setting statsd_exported and scraping the metrics using Prometheus |
|
741 | ; We recommend setting statsd_exported and scraping the metrics using Prometheus | |
741 | #statsd.enabled = false |
|
742 | #statsd.enabled = false | |
742 | #statsd.statsd_host = 0.0.0.0 |
|
743 | #statsd.statsd_host = 0.0.0.0 | |
743 | #statsd.statsd_port = 8125 |
|
744 | #statsd.statsd_port = 8125 | |
744 | #statsd.statsd_prefix = |
|
745 | #statsd.statsd_prefix = | |
745 | #statsd.statsd_ipv6 = false |
|
746 | #statsd.statsd_ipv6 = false | |
746 |
|
747 | |||
747 | ; configure logging automatically at server startup; set to false |
|
748 | ; configure logging automatically at server startup; set to false | |
748 | ; to use the custom logging config below. |
|
749 | ; to use the custom logging config below. | |
749 | ; RC_LOGGING_FORMATTER |
|
750 | ; RC_LOGGING_FORMATTER | |
750 | ; RC_LOGGING_LEVEL |
|
751 | ; RC_LOGGING_LEVEL | |
751 | ; these env variables can control the logging settings when autoconfigure is enabled |
|
752 | ; these env variables can control the logging settings when autoconfigure is enabled | |
752 |
|
753 | |||
753 | #logging.autoconfigure = true |
|
754 | #logging.autoconfigure = true | |
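A hedged example of driving autoconfigured logging through the environment variables named above; the accepted values are an assumption based on the logger levels and formatter names defined later in this file:

export RC_LOGGING_LEVEL=INFO
export RC_LOGGING_FORMATTER=json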
754 |
|
755 | |||
755 | ; specify your own custom logging config file to configure logging |
|
756 | ; specify your own custom logging config file to configure logging | |
756 | #logging.logging_conf_file = /path/to/custom_logging.ini |
|
757 | #logging.logging_conf_file = /path/to/custom_logging.ini | |
757 |
|
758 | |||
758 | ; Dummy marker to add new entries after. |
|
759 | ; Dummy marker to add new entries after. | |
759 | ; Add any custom entries below. Please don't remove this marker. |
|
760 | ; Add any custom entries below. Please don't remove this marker. | |
760 | custom.conf = 1 |
|
761 | custom.conf = 1 | |
761 |
|
762 | |||
762 |
|
763 | |||
763 | ; ##################### |
|
764 | ; ##################### | |
764 | ; LOGGING CONFIGURATION |
|
765 | ; LOGGING CONFIGURATION | |
765 | ; ##################### |
|
766 | ; ##################### | |
766 |
|
767 | |||
767 | [loggers] |
|
768 | [loggers] | |
768 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper |
|
769 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper | |
769 |
|
770 | |||
770 | [handlers] |
|
771 | [handlers] | |
771 | keys = console, console_sql |
|
772 | keys = console, console_sql | |
772 |
|
773 | |||
773 | [formatters] |
|
774 | [formatters] | |
774 | keys = generic, json, color_formatter, color_formatter_sql |
|
775 | keys = generic, json, color_formatter, color_formatter_sql | |
775 |
|
776 | |||
776 | ; ####### |
|
777 | ; ####### | |
777 | ; LOGGERS |
|
778 | ; LOGGERS | |
778 | ; ####### |
|
779 | ; ####### | |
779 | [logger_root] |
|
780 | [logger_root] | |
780 | level = NOTSET |
|
781 | level = NOTSET | |
781 | handlers = console |
|
782 | handlers = console | |
782 |
|
783 | |||
783 | [logger_sqlalchemy] |
|
784 | [logger_sqlalchemy] | |
784 | level = INFO |
|
785 | level = INFO | |
785 | handlers = console_sql |
|
786 | handlers = console_sql | |
786 | qualname = sqlalchemy.engine |
|
787 | qualname = sqlalchemy.engine | |
787 | propagate = 0 |
|
788 | propagate = 0 | |
788 |
|
789 | |||
789 | [logger_beaker] |
|
790 | [logger_beaker] | |
790 | level = DEBUG |
|
791 | level = DEBUG | |
791 | handlers = |
|
792 | handlers = | |
792 | qualname = beaker.container |
|
793 | qualname = beaker.container | |
793 | propagate = 1 |
|
794 | propagate = 1 | |
794 |
|
795 | |||
795 | [logger_rhodecode] |
|
796 | [logger_rhodecode] | |
796 | level = DEBUG |
|
797 | level = DEBUG | |
797 | handlers = |
|
798 | handlers = | |
798 | qualname = rhodecode |
|
799 | qualname = rhodecode | |
799 | propagate = 1 |
|
800 | propagate = 1 | |
800 |
|
801 | |||
801 | [logger_ssh_wrapper] |
|
802 | [logger_ssh_wrapper] | |
802 | level = DEBUG |
|
803 | level = DEBUG | |
803 | handlers = |
|
804 | handlers = | |
804 | qualname = ssh_wrapper |
|
805 | qualname = ssh_wrapper | |
805 | propagate = 1 |
|
806 | propagate = 1 | |
806 |
|
807 | |||
807 | [logger_celery] |
|
808 | [logger_celery] | |
808 | level = DEBUG |
|
809 | level = DEBUG | |
809 | handlers = |
|
810 | handlers = | |
810 | qualname = celery |
|
811 | qualname = celery | |
811 |
|
812 | |||
812 |
|
813 | |||
813 | ; ######## |
|
814 | ; ######## | |
814 | ; HANDLERS |
|
815 | ; HANDLERS | |
815 | ; ######## |
|
816 | ; ######## | |
816 |
|
817 | |||
817 | [handler_console] |
|
818 | [handler_console] | |
818 | class = StreamHandler |
|
819 | class = StreamHandler | |
819 | args = (sys.stderr, ) |
|
820 | args = (sys.stderr, ) | |
820 | level = DEBUG |
|
821 | level = DEBUG | |
821 | ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json' |
|
822 | ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json' | |
822 | ; This allows sending properly formatted logs to grafana loki or elasticsearch |
|
823 | ; This allows sending properly formatted logs to grafana loki or elasticsearch | |
823 | formatter = color_formatter |
|
824 | formatter = color_formatter | |
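Following the note above, switching this handler to JSON output for log shippers such as Grafana Loki or Elasticsearch only changes the formatter line, e.g.

formatter = json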
824 |
|
825 | |||
825 | [handler_console_sql] |
|
826 | [handler_console_sql] | |
826 | ; "level = DEBUG" logs SQL queries and results. |
|
827 | ; "level = DEBUG" logs SQL queries and results. | |
827 | ; "level = INFO" logs SQL queries. |
|
828 | ; "level = INFO" logs SQL queries. | |
828 | ; "level = WARN" logs neither. (Recommended for production systems.) |
|
829 | ; "level = WARN" logs neither. (Recommended for production systems.) | |
829 | class = StreamHandler |
|
830 | class = StreamHandler | |
830 | args = (sys.stderr, ) |
|
831 | args = (sys.stderr, ) | |
831 | level = WARN |
|
832 | level = WARN | |
832 | ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json' |
|
833 | ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json' | |
833 | ; This allows sending properly formatted logs to grafana loki or elasticsearch |
|
834 | ; This allows sending properly formatted logs to grafana loki or elasticsearch | |
834 | formatter = color_formatter_sql |
|
835 | formatter = color_formatter_sql | |
835 |
|
836 | |||
836 | ; ########## |
|
837 | ; ########## | |
837 | ; FORMATTERS |
|
838 | ; FORMATTERS | |
838 | ; ########## |
|
839 | ; ########## | |
839 |
|
840 | |||
840 | [formatter_generic] |
|
841 | [formatter_generic] | |
841 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
842 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter | |
842 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
843 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
843 | datefmt = %Y-%m-%d %H:%M:%S |
|
844 | datefmt = %Y-%m-%d %H:%M:%S | |
844 |
|
845 | |||
845 | [formatter_color_formatter] |
|
846 | [formatter_color_formatter] | |
846 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
847 | class = rhodecode.lib.logging_formatter.ColorFormatter | |
847 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
848 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
848 | datefmt = %Y-%m-%d %H:%M:%S |
|
849 | datefmt = %Y-%m-%d %H:%M:%S | |
849 |
|
850 | |||
850 | [formatter_color_formatter_sql] |
|
851 | [formatter_color_formatter_sql] | |
851 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
852 | class = rhodecode.lib.logging_formatter.ColorFormatterSql | |
852 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
853 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
853 | datefmt = %Y-%m-%d %H:%M:%S |
|
854 | datefmt = %Y-%m-%d %H:%M:%S | |
854 |
|
855 | |||
855 | [formatter_json] |
|
856 | [formatter_json] | |
856 | format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s |
|
857 | format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s | |
857 | class = rhodecode.lib._vendor.jsonlogger.JsonFormatter |
|
858 | class = rhodecode.lib._vendor.jsonlogger.JsonFormatter |
@@ -1,825 +1,826 b'' | |||||
1 |
|
1 | |||
2 | ; ######################################### |
|
2 | ; ######################################### | |
3 | ; RHODECODE COMMUNITY EDITION CONFIGURATION |
|
3 | ; RHODECODE COMMUNITY EDITION CONFIGURATION | |
4 | ; ######################################### |
|
4 | ; ######################################### | |
5 |
|
5 | |||
6 | [DEFAULT] |
|
6 | [DEFAULT] | |
7 | ; Debug flag sets all loggers to debug, and enables request tracking |
|
7 | ; Debug flag sets all loggers to debug, and enables request tracking | |
8 | debug = false |
|
8 | debug = false | |
9 |
|
9 | |||
10 | ; ######################################################################## |
|
10 | ; ######################################################################## | |
11 | ; EMAIL CONFIGURATION |
|
11 | ; EMAIL CONFIGURATION | |
12 | ; These settings will be used by the RhodeCode mailing system |
|
12 | ; These settings will be used by the RhodeCode mailing system | |
13 | ; ######################################################################## |
|
13 | ; ######################################################################## | |
14 |
|
14 | |||
15 | ; prefix all emails subjects with given prefix, helps filtering out emails |
|
15 | ; prefix all emails subjects with given prefix, helps filtering out emails | |
16 | #email_prefix = [RhodeCode] |
|
16 | #email_prefix = [RhodeCode] | |
17 |
|
17 | |||
18 | ; email FROM address all mails will be sent |
|
18 | ; email FROM address all mails will be sent | |
19 | #app_email_from = rhodecode-noreply@localhost |
|
19 | #app_email_from = rhodecode-noreply@localhost | |
20 |
|
20 | |||
21 | #smtp_server = mail.server.com |
|
21 | #smtp_server = mail.server.com | |
22 | #smtp_username = |
|
22 | #smtp_username = | |
23 | #smtp_password = |
|
23 | #smtp_password = | |
24 | #smtp_port = |
|
24 | #smtp_port = | |
25 | #smtp_use_tls = false |
|
25 | #smtp_use_tls = false | |
26 | #smtp_use_ssl = true |
|
26 | #smtp_use_ssl = true | |
27 |
|
27 | |||
28 | [server:main] |
|
28 | [server:main] | |
29 | ; COMMON HOST/IP CONFIG, This applies mostly to develop setup, |
|
29 | ; COMMON HOST/IP CONFIG, This applies mostly to develop setup, | |
30 | ; Host port for gunicorn are controlled by gunicorn_conf.py |
|
30 | ; Host port for gunicorn are controlled by gunicorn_conf.py | |
31 | host = 127.0.0.1 |
|
31 | host = 127.0.0.1 | |
32 | port = 10020 |
|
32 | port = 10020 | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | ; ########################### |
|
35 | ; ########################### | |
36 | ; GUNICORN APPLICATION SERVER |
|
36 | ; GUNICORN APPLICATION SERVER | |
37 | ; ########################### |
|
37 | ; ########################### | |
38 |
|
38 | |||
39 | ; run with gunicorn --config gunicorn_conf.py --paste rhodecode.ini |
|
39 | ; run with gunicorn --config gunicorn_conf.py --paste rhodecode.ini | |
40 |
|
40 | |||
41 | ; Module to use, this setting shouldn't be changed |
|
41 | ; Module to use, this setting shouldn't be changed | |
42 | use = egg:gunicorn#main |
|
42 | use = egg:gunicorn#main | |
43 |
|
43 | |||
44 | ; Prefix middleware for RhodeCode. |
|
44 | ; Prefix middleware for RhodeCode. | |
45 | ; recommended when using proxy setup. |
|
45 | ; recommended when using proxy setup. | |
46 | ; allows to set RhodeCode under a prefix in server. |
|
46 | ; allows to set RhodeCode under a prefix in server. | |
47 | ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. |
|
47 | ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well. | |
48 | ; And set your prefix like: `prefix = /custom_prefix` |
|
48 | ; And set your prefix like: `prefix = /custom_prefix` | |
49 | ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need |
|
49 | ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need | |
50 | ; to make your cookies only work on prefix url |
|
50 | ; to make your cookies only work on prefix url | |
51 | [filter:proxy-prefix] |
|
51 | [filter:proxy-prefix] | |
52 | use = egg:PasteDeploy#prefix |
|
52 | use = egg:PasteDeploy#prefix | |
53 | prefix = / |
|
53 | prefix = / | |
54 |
|
54 | |||
55 | [app:main] |
|
55 | [app:main] | |
56 | ; The %(here)s variable will be replaced with the absolute path of parent directory |
|
56 | ; The %(here)s variable will be replaced with the absolute path of parent directory | |
57 | ; of this file |
|
57 | ; of this file | |
58 | ; Each option in the app:main can be overridden by an environment variable |
|
58 | ; Each option in the app:main can be overridden by an environment variable | |
59 | ; |
|
59 | ; | |
60 | ;To override an option: |
|
60 | ;To override an option: | |
61 | ; |
|
61 | ; | |
62 | ;RC_<KeyName> |
|
62 | ;RC_<KeyName> | |
63 | ;Everything should be uppercase, . and - should be replaced by _. |
|
63 | ;Everything should be uppercase, . and - should be replaced by _. | |
64 | ;For example, if you have these configuration settings: |
|
64 | ;For example, if you have these configuration settings: | |
65 | ;rc_cache.repo_object.backend = foo |
|
65 | ;rc_cache.repo_object.backend = foo | |
66 | ;can be overridden by |
|
66 | ;can be overridden by | |
67 | ;export RC_CACHE_REPO_OBJECT_BACKEND=foo |
|
67 | ;export RC_CACHE_REPO_OBJECT_BACKEND=foo | |
68 |
|
68 | |||
69 | use = egg:rhodecode-enterprise-ce |
|
69 | use = egg:rhodecode-enterprise-ce | |
70 |
|
70 | |||
71 | ; enable proxy prefix middleware, defined above |
|
71 | ; enable proxy prefix middleware, defined above | |
72 | #filter-with = proxy-prefix |
|
72 | #filter-with = proxy-prefix | |
73 |
|
73 | |||
74 | ; encryption key used to encrypt social plugin tokens, |
|
74 | ; encryption key used to encrypt social plugin tokens, | |
75 | ; remote_urls with credentials etc, if not set it defaults to |
|
75 | ; remote_urls with credentials etc, if not set it defaults to | |
76 | ; `beaker.session.secret` |
|
76 | ; `beaker.session.secret` | |
77 | #rhodecode.encrypted_values.secret = |
|
77 | #rhodecode.encrypted_values.secret = | |
78 |
|
78 | |||
79 | ; decryption strict mode (enabled by default). It controls if decryption raises |
|
79 | ; decryption strict mode (enabled by default). It controls if decryption raises | |
80 | ; `SignatureVerificationError` in case of wrong key, or damaged encryption data. |
|
80 | ; `SignatureVerificationError` in case of wrong key, or damaged encryption data. | |
81 | #rhodecode.encrypted_values.strict = false |
|
81 | #rhodecode.encrypted_values.strict = false | |
82 |
|
82 | |||
83 | ; Pick algorithm for encryption. Either fernet (more secure) or aes (default) |
|
83 | ; Pick algorithm for encryption. Either fernet (more secure) or aes (default) | |
84 | ; fernet is safer, and we strongly recommend switching to it. |
|
84 | ; fernet is safer, and we strongly recommend switching to it. | |
85 | ; Due to backward compatibility aes is used as default. |
|
85 | ; Due to backward compatibility aes is used as default. | |
86 | #rhodecode.encrypted_values.algorithm = fernet |
|
86 | #rhodecode.encrypted_values.algorithm = fernet | |
87 |
|
87 | |||
88 | ; Return gzipped responses from RhodeCode (static files/application) |
|
88 | ; Return gzipped responses from RhodeCode (static files/application) | |
89 | gzip_responses = false |
|
89 | gzip_responses = false | |
90 |
|
90 | |||
91 | ; Auto-generate javascript routes file on startup |
|
91 | ; Auto-generate javascript routes file on startup | |
92 | generate_js_files = false |
|
92 | generate_js_files = false | |
93 |
|
93 | |||
94 | ; System global default language. |
|
94 | ; System global default language. | |
95 | ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
95 | ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh | |
96 | lang = en |
|
96 | lang = en | |
97 |
|
97 | |||
98 | ; Perform a full repository scan and import on each server start. |
|
98 | ; Perform a full repository scan and import on each server start. | |
99 | ; Setting this to true could lead to a very long startup time. |
|
99 | ; Setting this to true could lead to a very long startup time. | |
100 | startup.import_repos = false |
|
100 | startup.import_repos = false | |
101 |
|
101 | |||
102 | ; URL at which the application is running. This is used for Bootstrapping |
|
102 | ; URL at which the application is running. This is used for Bootstrapping | |
103 | ; requests in context when no web request is available. Used in ishell, or |
|
103 | ; requests in context when no web request is available. Used in ishell, or | |
104 | ; SSH calls. Set this for events to receive proper url for SSH calls. |
|
104 | ; SSH calls. Set this for events to receive proper url for SSH calls. | |
105 | app.base_url = http://rhodecode.local |
|
105 | app.base_url = http://rhodecode.local | |
106 |
|
106 | |||
107 | ; Host at which the Service API is running. |
|
107 | ; Host at which the Service API is running. | |
108 | app.service_api.host = http://rhodecode.local:10020 |
|
108 | app.service_api.host = http://rhodecode.local:10020 | |
109 |
|
109 | |||
110 | ; Secret for Service API authentication. |
|
110 | ; Secret for Service API authentication. | |
111 | app.service_api.token = |
|
111 | app.service_api.token = | |
112 |
|
112 | |||
113 | ; Unique application ID. Should be a random unique string for security. |
|
113 | ; Unique application ID. Should be a random unique string for security. | |
114 | app_instance_uuid = rc-production |
|
114 | app_instance_uuid = rc-production | |
115 |
|
115 | |||
116 | ; Cut off limit for large diffs (size in bytes). If overall diff size on |
|
116 | ; Cut off limit for large diffs (size in bytes). If overall diff size on | |
117 | ; commit, or pull request exceeds this limit, the diff will be displayed |
|
117 | ; commit, or pull request exceeds this limit, the diff will be displayed | |
118 | ; partially. E.g 512000 == 512Kb |
|
118 | ; partially. E.g 512000 == 512Kb | |
119 | cut_off_limit_diff = 512000 |
|
119 | cut_off_limit_diff = 512000 | |
120 |
|
120 | |||
121 | ; Cut off limit for large files inside diffs (size in bytes). Each individual |
|
121 | ; Cut off limit for large files inside diffs (size in bytes). Each individual | |
122 | ; file inside diff which exceeds this limit will be displayed partially. |
|
122 | ; file inside diff which exceeds this limit will be displayed partially. | |
123 | ; E.g 128000 == 128Kb |
|
123 | ; E.g 128000 == 128Kb | |
124 | cut_off_limit_file = 128000 |
|
124 | cut_off_limit_file = 128000 | |
125 |
|
125 | |||
126 | ; Use cached version of vcs repositories everywhere. Recommended to be `true` |
|
126 | ; Use cached version of vcs repositories everywhere. Recommended to be `true` | |
127 | vcs_full_cache = true |
|
127 | vcs_full_cache = true | |
128 |
|
128 | |||
129 | ; Force https in RhodeCode, fixes https redirects, assumes it's always https. |
|
129 | ; Force https in RhodeCode, fixes https redirects, assumes it's always https. | |
130 | ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache |
|
130 | ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache | |
131 | force_https = false |
|
131 | force_https = false | |
132 |
|
132 | |||
133 | ; use Strict-Transport-Security headers |
|
133 | ; use Strict-Transport-Security headers | |
134 | use_htsts = false |
|
134 | use_htsts = false | |
135 |
|
135 | |||
136 | ; Set to true if your repos are exposed using the dumb protocol |
|
136 | ; Set to true if your repos are exposed using the dumb protocol | |
137 | git_update_server_info = false |
|
137 | git_update_server_info = false | |
138 |
|
138 | |||
139 | ; RSS/ATOM feed options |
|
139 | ; RSS/ATOM feed options | |
140 | rss_cut_off_limit = 256000 |
|
140 | rss_cut_off_limit = 256000 | |
141 | rss_items_per_page = 10 |
|
141 | rss_items_per_page = 10 | |
142 | rss_include_diff = false |
|
142 | rss_include_diff = false | |
143 |
|
143 | |||
144 | ; gist URL alias, used to create nicer urls for gist. This should be an |
|
144 | ; gist URL alias, used to create nicer urls for gist. This should be an | |
145 | ; url that does rewrites to _admin/gists/{gistid}. |
|
145 | ; url that does rewrites to _admin/gists/{gistid}. | |
146 | ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
146 | ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal | |
147 | ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} |
|
147 | ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid} | |
148 | gist_alias_url = |
|
148 | gist_alias_url = | |
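An illustrative value only (the hostname is hypothetical); whatever is put here must rewrite to the _admin/gists/{gistid} path as described above:

#gist_alias_url = https://gist.example.com/{gistid}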
149 |
|
149 | |||
150 | ; List of views (using glob pattern syntax) that AUTH TOKENS could be |
|
150 | ; List of views (using glob pattern syntax) that AUTH TOKENS could be | |
151 | ; used for access. |
|
151 | ; used for access. | |
152 | ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it |
|
152 | ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it | |
153 | ; came from the logged-in user who owns this authentication token. |
|
153 | ; came from the logged-in user who owns this authentication token. | |
154 | ; Additionally the @TOKEN syntax can be used to bind the view to a specific |
|
154 | ; Additionally the @TOKEN syntax can be used to bind the view to a specific | |
155 | ; authentication token. Such a view would only be accessible when used together |
|
155 | ; authentication token. Such a view would only be accessible when used together | |
156 | ; with this authentication token |
|
156 | ; with this authentication token | |
157 | ; list of all views can be found under `/_admin/permissions/auth_token_access` |
|
157 | ; list of all views can be found under `/_admin/permissions/auth_token_access` | |
158 | ; The list should be "," separated and on a single line. |
|
158 | ; The list should be "," separated and on a single line. | |
159 | ; Most common views to enable: |
|
159 | ; Most common views to enable: | |
160 |
|
160 | |||
161 | # RepoCommitsView:repo_commit_download |
|
161 | # RepoCommitsView:repo_commit_download | |
162 | # RepoCommitsView:repo_commit_patch |
|
162 | # RepoCommitsView:repo_commit_patch | |
163 | # RepoCommitsView:repo_commit_raw |
|
163 | # RepoCommitsView:repo_commit_raw | |
164 | # RepoCommitsView:repo_commit_raw@TOKEN |
|
164 | # RepoCommitsView:repo_commit_raw@TOKEN | |
165 | # RepoFilesView:repo_files_diff |
|
165 | # RepoFilesView:repo_files_diff | |
166 | # RepoFilesView:repo_archivefile |
|
166 | # RepoFilesView:repo_archivefile | |
167 | # RepoFilesView:repo_file_raw |
|
167 | # RepoFilesView:repo_file_raw | |
168 | # GistView:* |
|
168 | # GistView:* | |
169 | api_access_controllers_whitelist = |
|
169 | api_access_controllers_whitelist = | |
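A sketch of a populated whitelist, using only view names from the list above, on a single comma-separated line as required:

#api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw, RepoFilesView:repo_file_raw, GistView:*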
170 |
|
170 | |||
171 | ; Default encoding used to convert from and to unicode |
|
171 | ; Default encoding used to convert from and to unicode | |
172 | ; can also be a comma separated list of encodings in case of mixed encodings |
|
172 | ; can also be a comma separated list of encodings in case of mixed encodings | |
173 | default_encoding = UTF-8 |
|
173 | default_encoding = UTF-8 | |
174 |
|
174 | |||
175 | ; instance-id prefix |
|
175 | ; instance-id prefix | |
176 | ; a prefix key for this instance used for cache invalidation when running |
|
176 | ; a prefix key for this instance used for cache invalidation when running | |
177 | ; multiple instances of RhodeCode, make sure it's globally unique for |
|
177 | ; multiple instances of RhodeCode, make sure it's globally unique for | |
178 | ; all running RhodeCode instances. Leave empty if you don't use it |
|
178 | ; all running RhodeCode instances. Leave empty if you don't use it | |
179 | instance_id = |
|
179 | instance_id = | |
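When running multiple instances, each one needs its own globally unique value here; the name below is purely illustrative:

#instance_id = rc-prod-01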
180 |
|
180 | |||
181 | ; Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
181 | ; Fallback authentication plugin. Set this to a plugin ID to force the usage | |
182 | ; of an authentication plugin even if it is disabled by its settings. |
|
182 | ; of an authentication plugin even if it is disabled by its settings. | |
183 | ; This could be useful if you are unable to log in to the system due to broken |
|
183 | ; This could be useful if you are unable to log in to the system due to broken | |
184 | ; authentication settings. Then you can enable e.g. the internal RhodeCode auth |
|
184 | ; authentication settings. Then you can enable e.g. the internal RhodeCode auth | |
185 | ; module to log in again and fix the settings. |
|
185 | ; module to log in again and fix the settings. | |
186 | ; Available builtin plugin IDs (hash is part of the ID): |
|
186 | ; Available builtin plugin IDs (hash is part of the ID): | |
187 | ; egg:rhodecode-enterprise-ce#rhodecode |
|
187 | ; egg:rhodecode-enterprise-ce#rhodecode | |
188 | ; egg:rhodecode-enterprise-ce#pam |
|
188 | ; egg:rhodecode-enterprise-ce#pam | |
189 | ; egg:rhodecode-enterprise-ce#ldap |
|
189 | ; egg:rhodecode-enterprise-ce#ldap | |
190 | ; egg:rhodecode-enterprise-ce#jasig_cas |
|
190 | ; egg:rhodecode-enterprise-ce#jasig_cas | |
191 | ; egg:rhodecode-enterprise-ce#headers |
|
191 | ; egg:rhodecode-enterprise-ce#headers | |
192 | ; egg:rhodecode-enterprise-ce#crowd |
|
192 | ; egg:rhodecode-enterprise-ce#crowd | |
193 |
|
193 | |||
194 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
194 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode | |
195 |
|
195 | |||
196 | ; Flag to control loading of legacy plugins in py:/path format |
|
196 | ; Flag to control loading of legacy plugins in py:/path format | |
197 | auth_plugin.import_legacy_plugins = true |
|
197 | auth_plugin.import_legacy_plugins = true | |
198 |
|
198 | |||
199 | ; alternative HTTP return code for failed authentication. Default HTTP |
|
199 | ; alternative HTTP return code for failed authentication. Default HTTP | |
200 | ; response is 401 HTTPUnauthorized. Currently HG clients have trouble |
|
200 | ; response is 401 HTTPUnauthorized. Currently HG clients have trouble | |
201 | ; handling that, causing a series of failed authentication calls. |
|
201 | ; handling that, causing a series of failed authentication calls. | |
202 | ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
202 | ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code | |
203 | ; This will be served instead of default 401 on bad authentication |
|
203 | ; This will be served instead of default 401 on bad authentication | |
204 | auth_ret_code = |
|
204 | auth_ret_code = | |
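Per the comment above, returning HTTPForbidden instead of the default 401 would look like:

#auth_ret_code = 403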
205 |
|
205 | |||
206 | ; use special detection method when serving auth_ret_code, instead of serving |
|
206 | ; use special detection method when serving auth_ret_code, instead of serving | |
207 | ; ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
207 | ; ret_code directly, use 401 initially (Which triggers credentials prompt) | |
208 | ; and then serve auth_ret_code to clients |
|
208 | ; and then serve auth_ret_code to clients | |
209 | auth_ret_code_detection = false |
|
209 | auth_ret_code_detection = false | |
210 |
|
210 | |||
211 | ; locking return code. When repository is locked return this HTTP code. 2XX |
|
211 | ; locking return code. When repository is locked return this HTTP code. 2XX | |
212 | ; codes don't break the transactions while 4XX codes do |
|
212 | ; codes don't break the transactions while 4XX codes do | |
213 | lock_ret_code = 423 |
|
213 | lock_ret_code = 423 | |
214 |
|
214 | |||
215 | ; Filesystem location where repositories should be stored |
|
215 | ; Filesystem location where repositories should be stored | |
216 | repo_store.path = /var/opt/rhodecode_repo_store |
|
216 | repo_store.path = /var/opt/rhodecode_repo_store | |
217 |
|
217 | |||
218 | ; allows setting up custom hooks in the settings page |
|
218 | ; allows setting up custom hooks in the settings page | |
219 | allow_custom_hooks_settings = true |
|
219 | allow_custom_hooks_settings = true | |
220 |
|
220 | |||
221 | ; Generated license token required for EE edition license. |
|
221 | ; Generated license token required for EE edition license. | |
222 | ; New generated token value can be found in Admin > settings > license page. |
|
222 | ; New generated token value can be found in Admin > settings > license page. | |
223 | license_token = |
|
223 | license_token = | |
224 |
|
224 | |||
225 | ; This flag hides sensitive information on the license page such as token, and license data |
|
225 | ; This flag hides sensitive information on the license page such as token, and license data | |
226 | license.hide_license_info = false |
|
226 | license.hide_license_info = false | |
227 |
|
227 | |||
228 | ; supervisor connection uri, for managing supervisor and logs. |
|
228 | ; supervisor connection uri, for managing supervisor and logs. | |
229 | supervisor.uri = |
|
229 | supervisor.uri = | |
230 |
|
230 | |||
231 | ; supervisord group name/id we only want this RC instance to handle |
|
231 | ; supervisord group name/id we only want this RC instance to handle | |
232 | supervisor.group_id = prod |
|
232 | supervisor.group_id = prod | |
233 |
|
233 | |||
234 | ; Display extended labs settings |
|
234 | ; Display extended labs settings | |
235 | labs_settings_active = true |
|
235 | labs_settings_active = true | |
236 |
|
236 | |||
237 | ; Custom exception store path, defaults to TMPDIR |
|
237 | ; Custom exception store path, defaults to TMPDIR | |
238 | ; This is used to store exceptions from RhodeCode in a shared directory |
|
238 | ; This is used to store exceptions from RhodeCode in a shared directory | |
239 | #exception_tracker.store_path = |
|
239 | #exception_tracker.store_path = | |
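An illustrative shared location, following the /var/opt/rhodecode_data layout used elsewhere in this file (the exact directory is an assumption, not a default):

#exception_tracker.store_path = /var/opt/rhodecode_data/exceptions_store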
240 |
|
240 | |||
241 | ; Send email with exception details when it happens |
|
241 | ; Send email with exception details when it happens | |
242 | #exception_tracker.send_email = false |
|
242 | #exception_tracker.send_email = false | |
243 |
|
243 | |||
244 | ; Comma separated list of recipients for exception emails, |
|
244 | ; Comma separated list of recipients for exception emails, | |
245 | ; e.g admin@rhodecode.com,devops@rhodecode.com |
|
245 | ; e.g admin@rhodecode.com,devops@rhodecode.com | |
246 | ; Can be left empty, then emails will be sent to ALL super-admins |
|
246 | ; Can be left empty, then emails will be sent to ALL super-admins | |
247 | #exception_tracker.send_email_recipients = |
|
247 | #exception_tracker.send_email_recipients = | |
248 |
|
248 | |||
249 | ; optional prefix to add to the email subject |
|
249 | ; optional prefix to add to the email subject | |
250 | #exception_tracker.email_prefix = [RHODECODE ERROR] |
|
250 | #exception_tracker.email_prefix = [RHODECODE ERROR] | |
251 |
|
251 | |||
252 | ; File store configuration. This is used to store and serve uploaded files |
|
252 | ; File store configuration. This is used to store and serve uploaded files | |
253 | file_store.enabled = true |
|
253 | file_store.enabled = true | |
254 |
|
254 | |||
255 | ; Storage backend, available options are: local |
|
255 | ; Storage backend, available options are: local | |
256 | file_store.backend = local |
|
256 | file_store.backend = local | |
257 |
|
257 | |||
258 | ; path to store the uploaded binaries and artifacts |
|
258 | ; path to store the uploaded binaries and artifacts | |
259 | file_store.storage_path = /var/opt/rhodecode_data/file_store |
|
259 | file_store.storage_path = /var/opt/rhodecode_data/file_store | |
260 |
|
260 | |||
261 |
|
261 | |||
262 | ; Redis url to acquire/check generation of archives locks |
|
262 | ; Redis url to acquire/check generation of archives locks | |
263 | archive_cache.locking.url = redis://redis:6379/1 |
|
263 | archive_cache.locking.url = redis://redis:6379/1 | |
264 |
|
264 | |||
265 | ; Storage backend, only 'filesystem' and 'objectstore' are available now |
|
265 | ; Storage backend, only 'filesystem' and 'objectstore' are available now | |
266 | archive_cache.backend.type = filesystem |
|
266 | archive_cache.backend.type = filesystem | |
267 |
|
267 | |||
268 | ; url for s3 compatible storage that allows to upload artifacts |
|
268 | ; url for s3 compatible storage that allows to upload artifacts | |
269 | ; e.g http://minio:9000 |
|
269 | ; e.g http://minio:9000 | |
270 | archive_cache.objectstore.url = http://s3-minio:9000 |
|
270 | archive_cache.objectstore.url = http://s3-minio:9000 | |
271 |
|
271 | |||
272 | ; key for s3 auth |
|
272 | ; key for s3 auth | |
273 | archive_cache.objectstore.key = key |
|
273 | archive_cache.objectstore.key = key | |
274 |
|
274 | |||
275 | ; secret for s3 auth |
|
275 | ; secret for s3 auth | |
276 | archive_cache.objectstore.secret = secret |
|
276 | archive_cache.objectstore.secret = secret | |
277 |
|
277 | |||
278 | ;region for s3 storage |
|
278 | ;region for s3 storage | |
279 | archive_cache.objectstore.region = eu-central-1 |
|
279 | archive_cache.objectstore.region = eu-central-1 | |
280 |
|
280 | |||
281 | ; number of sharded buckets to create to distribute archives across |
|
281 | ; number of sharded buckets to create to distribute archives across | |
282 | ; default is 8 shards |
|
282 | ; default is 8 shards | |
283 | archive_cache.objectstore.bucket_shards = 8 |
|
283 | archive_cache.objectstore.bucket_shards = 8 | |
284 |
|
284 | |||
285 | ; a top-level bucket to put all other shards in |
|
285 | ; a top-level bucket to put all other shards in | |
286 | ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number |
|
286 | ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number | |
287 | archive_cache.objectstore.bucket = rhodecode-archive-cache |
|
287 | archive_cache.objectstore.bucket = rhodecode-archive-cache | |
288 |
|
288 | |||
289 | ; if true, this cache will retry up to retry_attempts=N times, waiting retry_backoff seconds between tries |
|
289 | ; if true, this cache will retry up to retry_attempts=N times, waiting retry_backoff seconds between tries | |
290 | archive_cache.objectstore.retry = false |
|
290 | archive_cache.objectstore.retry = false | |
291 |
|
291 | |||
292 | ; number of seconds to wait for next try using retry |
|
292 | ; number of seconds to wait for next try using retry | |
293 | archive_cache.objectstore.retry_backoff = 1 |
|
293 | archive_cache.objectstore.retry_backoff = 1 | |
294 |
|
294 | |||
295 | ; how many tries to do a retry fetch from this backend |
|
295 | ; how many tries to do a retry fetch from this backend | |
296 | archive_cache.objectstore.retry_attempts = 10 |
|
296 | archive_cache.objectstore.retry_attempts = 10 | |
297 |
|
297 | |||
298 | ; Default is $cache_dir/archive_cache if not set |
|
298 | ; Default is $cache_dir/archive_cache if not set | |
299 | ; Generated repo archives will be cached at this location |
|
299 | ; Generated repo archives will be cached at this location | |
300 | ; and served from the cache during subsequent requests for the same archive of |
|
300 | ; and served from the cache during subsequent requests for the same archive of | |
301 | ; the repository. It is important that this path is shared across filesystems and between |
|
301 | ; the repository. It is important that this path is shared across filesystems and between | |
302 | ; RhodeCode and vcsserver |
|
302 | ; RhodeCode and vcsserver | |
303 | archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache |
|
303 | archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache | |
304 |
|
304 | |||
305 | ; The limit in GB sets how much data we cache before recycling the last used entries, defaults to 10 GB |
|
305 | ; The limit in GB sets how much data we cache before recycling the last used entries, defaults to 10 GB | |
306 | archive_cache.filesystem.cache_size_gb = 40 |
|
306 | archive_cache.filesystem.cache_size_gb = 40 | |
307 |
|
307 | |||
308 | ; Eviction policy used to clear out after cache_size_gb limit is reached |
|
308 | ; Eviction policy used to clear out after cache_size_gb limit is reached | |
309 | archive_cache.filesystem.eviction_policy = least-recently-stored |
|
309 | archive_cache.filesystem.eviction_policy = least-recently-stored | |
310 |
|
310 | |||
311 | ; By default cache uses sharding technique, this specifies how many shards are there |
|
311 | ; By default cache uses sharding technique, this specifies how many shards are there | |
312 | ; default is 8 shards |
|
312 | ; default is 8 shards | |
313 | archive_cache.filesystem.cache_shards = 8 |
|
313 | archive_cache.filesystem.cache_shards = 8 | |
314 |
|
314 | |||
315 | ; if true, this cache will retry up to retry_attempts=N times, waiting retry_backoff seconds between tries |
|
315 | ; if true, this cache will retry up to retry_attempts=N times, waiting retry_backoff seconds between tries | |
316 | archive_cache.filesystem.retry = false |
|
316 | archive_cache.filesystem.retry = false | |
317 |
|
317 | |||
318 | ; number of seconds to wait for next try using retry |
|
318 | ; number of seconds to wait for next try using retry | |
319 | archive_cache.filesystem.retry_backoff = 1 |
|
319 | archive_cache.filesystem.retry_backoff = 1 | |
320 |
|
320 | |||
321 | ; how many tries to do a retry fetch from this backend |
|
321 | ; how many tries to do a retry fetch from this backend | |
322 | archive_cache.filesystem.retry_attempts = 10 |
|
322 | archive_cache.filesystem.retry_attempts = 10 | |
323 |
|
323 | |||
324 |
|
324 | |||
325 | ; ############# |
|
325 | ; ############# | |
326 | ; CELERY CONFIG |
|
326 | ; CELERY CONFIG | |
327 | ; ############# |
|
327 | ; ############# | |
328 |
|
328 | |||
329 | ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini |
|
329 | ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini | |
330 |
|
330 | |||
331 | use_celery = true |
|
331 | use_celery = true | |
332 |
|
332 | |||
333 | ; path to store schedule database |
|
333 | ; path to store schedule database | |
334 | #celerybeat-schedule.path = |
|
334 | #celerybeat-schedule.path = | |
335 |
|
335 | |||
336 | ; connection url to the message broker (default redis) |
|
336 | ; connection url to the message broker (default redis) | |
337 | celery.broker_url = redis://redis:6379/8 |
|
337 | celery.broker_url = redis://redis:6379/8 | |
338 |
|
338 | |||
339 | ; results backend to get results for (default redis) |
|
339 | ; results backend to get results for (default redis) | |
340 | celery.result_backend = redis://redis:6379/8 |
|
340 | celery.result_backend = redis://redis:6379/8 | |
341 |
|
341 | |||
342 | ; rabbitmq example |
|
342 | ; rabbitmq example | |
343 | #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost |
|
343 | #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost | |
344 |
|
344 | |||
345 | ; maximum tasks to execute before worker restart |
|
345 | ; maximum tasks to execute before worker restart | |
346 | celery.max_tasks_per_child = 20 |
|
346 | celery.max_tasks_per_child = 20 | |
347 |
|
347 | |||
348 | ; tasks will never be sent to the queue, but executed locally instead. |
|
348 | ; tasks will never be sent to the queue, but executed locally instead. | |
349 | celery.task_always_eager = false |
|
349 | celery.task_always_eager = false | |
350 |
|
350 | |||
351 | ; ############# |
|
351 | ; ############# | |
352 | ; DOGPILE CACHE |
|
352 | ; DOGPILE CACHE | |
353 | ; ############# |
|
353 | ; ############# | |
354 |
|
354 | |||
355 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. |
|
355 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. | |
356 | ; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space |
|
356 | ; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space | |
357 | cache_dir = /var/opt/rhodecode_data |
|
357 | cache_dir = /var/opt/rhodecode_data | |
358 |
|
358 | |||
359 | ; ********************************************* |
|
359 | ; ********************************************* | |
360 | ; `sql_cache_short` cache for heavy SQL queries |
|
360 | ; `sql_cache_short` cache for heavy SQL queries | |
361 | ; Only supported backend is `memory_lru` |
|
361 | ; Only supported backend is `memory_lru` | |
362 | ; ********************************************* |
|
362 | ; ********************************************* | |
363 | rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru |
|
363 | rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru | |
364 | rc_cache.sql_cache_short.expiration_time = 30 |
|
364 | rc_cache.sql_cache_short.expiration_time = 30 | |
365 |
|
365 | |||
366 |
|
366 | |||
367 | ; ***************************************************** |
|
367 | ; ***************************************************** | |
368 | ; `cache_repo_longterm` cache for repo object instances |
|
368 | ; `cache_repo_longterm` cache for repo object instances | |
369 | ; Only supported backend is `memory_lru` |
|
369 | ; Only supported backend is `memory_lru` | |
370 | ; ***************************************************** |
|
370 | ; ***************************************************** | |
371 | rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru |
|
371 | rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru | |
372 | ; by default we use 30 Days, cache is still invalidated on push |
|
372 | ; by default we use 30 Days, cache is still invalidated on push | |
373 | rc_cache.cache_repo_longterm.expiration_time = 2592000 |
|
373 | rc_cache.cache_repo_longterm.expiration_time = 2592000 | |
374 | ; max items in LRU cache, set to smaller number to save memory, and expire last used caches |
|
374 | ; max items in LRU cache, set to smaller number to save memory, and expire last used caches | |
375 | rc_cache.cache_repo_longterm.max_size = 10000 |
|
375 | rc_cache.cache_repo_longterm.max_size = 10000 | |
376 |
|
376 | |||
377 |
|
377 | |||
378 | ; ********************************************* |
|
378 | ; ********************************************* | |
379 | ; `cache_general` cache for general purpose use |
|
379 | ; `cache_general` cache for general purpose use | |
380 | ; for simplicity use rc.file_namespace backend, |
|
380 | ; for simplicity use rc.file_namespace backend, | |
381 | ; for performance and scale use rc.redis |
|
381 | ; for performance and scale use rc.redis | |
382 | ; ********************************************* |
|
382 | ; ********************************************* | |
383 | rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace |
|
383 | rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace | |
384 | rc_cache.cache_general.expiration_time = 43200 |
|
384 | rc_cache.cache_general.expiration_time = 43200 | |
385 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set |
|
385 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
386 | #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db |
|
386 | #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db | |
387 |
|
387 | |||
388 | ; alternative `cache_general` redis backend with distributed lock |
|
388 | ; alternative `cache_general` redis backend with distributed lock | |
389 | #rc_cache.cache_general.backend = dogpile.cache.rc.redis |
|
389 | #rc_cache.cache_general.backend = dogpile.cache.rc.redis | |
390 | #rc_cache.cache_general.expiration_time = 300 |
|
390 | #rc_cache.cache_general.expiration_time = 300 | |
391 |
|
391 | |||
392 | ; redis_expiration_time needs to be greater than expiration_time |
|
392 | ; redis_expiration_time needs to be greater than expiration_time | |
393 | #rc_cache.cache_general.arguments.redis_expiration_time = 7200 |
|
393 | #rc_cache.cache_general.arguments.redis_expiration_time = 7200 | |
394 |
|
394 | |||
395 | #rc_cache.cache_general.arguments.host = localhost |
|
395 | #rc_cache.cache_general.arguments.host = localhost | |
396 | #rc_cache.cache_general.arguments.port = 6379 |
|
396 | #rc_cache.cache_general.arguments.port = 6379 | |
397 | #rc_cache.cache_general.arguments.db = 0 |
|
397 | #rc_cache.cache_general.arguments.db = 0 | |
398 | #rc_cache.cache_general.arguments.socket_timeout = 30 |
|
398 | #rc_cache.cache_general.arguments.socket_timeout = 30 | |
399 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends |
|
399 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
400 | #rc_cache.cache_general.arguments.distributed_lock = true |
|
400 | #rc_cache.cache_general.arguments.distributed_lock = true | |
401 |
|
401 | |||
402 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
402 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
403 | #rc_cache.cache_general.arguments.lock_auto_renewal = true |
|
403 | #rc_cache.cache_general.arguments.lock_auto_renewal = true | |
404 |
|
404 | |||
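For readers less familiar with dogpile.cache: the `rc_cache.cache_general.*` keys above (and the analogous `cache_perms` and `cache_repo` blocks that follow) describe a cache region. A minimal sketch using the plain `dogpile.cache.redis` backend; the `rc.*` backends are RhodeCode-specific wrappers, so this is an approximation rather than the exact implementation:

    from dogpile.cache import make_region

    # Values mirror the commented-out Redis settings for `cache_general` above.
    cache_general = make_region().configure(
        'dogpile.cache.redis',
        expiration_time=300,                # rc_cache.cache_general.expiration_time
        arguments={
            'host': 'localhost',
            'port': 6379,
            'db': 0,
            'socket_timeout': 30,
            'redis_expiration_time': 7200,  # must stay greater than expiration_time
            'distributed_lock': True,       # one Redis-backed lock shared by all workers
        },
    )

    @cache_general.cache_on_arguments()
    def expensive_lookup(key):
        # placeholder for any costly computation; results are cached for 300 seconds
        return do_expensive_work(key)       # hypothetical helper, illustration only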
405 | ; ************************************************* |
|
405 | ; ************************************************* | |
406 | ; `cache_perms` cache for permission tree, auth TTL |
|
406 | ; `cache_perms` cache for permission tree, auth TTL | |
407 | ; for simplicity use rc.file_namespace backend, |
|
407 | ; for simplicity use rc.file_namespace backend, | |
408 | ; for performance and scale use rc.redis |
|
408 | ; for performance and scale use rc.redis | |
409 | ; ************************************************* |
|
409 | ; ************************************************* | |
410 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace |
|
410 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace | |
411 | rc_cache.cache_perms.expiration_time = 3600 |
|
411 | rc_cache.cache_perms.expiration_time = 3600 | |
412 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set |
|
412 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
413 | #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db |
|
413 | #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db | |
414 |
|
414 | |||
415 | ; alternative `cache_perms` redis backend with distributed lock |
|
415 | ; alternative `cache_perms` redis backend with distributed lock | |
416 | #rc_cache.cache_perms.backend = dogpile.cache.rc.redis |
|
416 | #rc_cache.cache_perms.backend = dogpile.cache.rc.redis | |
417 | #rc_cache.cache_perms.expiration_time = 300 |
|
417 | #rc_cache.cache_perms.expiration_time = 300 | |
418 |
|
418 | |||
419 | ; redis_expiration_time needs to be greater than expiration_time
|
419 | ; redis_expiration_time needs to be greater than expiration_time | 
420 | #rc_cache.cache_perms.arguments.redis_expiration_time = 7200 |
|
420 | #rc_cache.cache_perms.arguments.redis_expiration_time = 7200 | |
421 |
|
421 | |||
422 | #rc_cache.cache_perms.arguments.host = localhost |
|
422 | #rc_cache.cache_perms.arguments.host = localhost | |
423 | #rc_cache.cache_perms.arguments.port = 6379 |
|
423 | #rc_cache.cache_perms.arguments.port = 6379 | |
424 | #rc_cache.cache_perms.arguments.db = 0 |
|
424 | #rc_cache.cache_perms.arguments.db = 0 | |
425 | #rc_cache.cache_perms.arguments.socket_timeout = 30 |
|
425 | #rc_cache.cache_perms.arguments.socket_timeout = 30 | |
426 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends |
|
426 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
427 | #rc_cache.cache_perms.arguments.distributed_lock = true |
|
427 | #rc_cache.cache_perms.arguments.distributed_lock = true | |
428 |
|
428 | |||
429 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
429 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
430 | #rc_cache.cache_perms.arguments.lock_auto_renewal = true |
|
430 | #rc_cache.cache_perms.arguments.lock_auto_renewal = true | |
431 |
|
431 | |||
432 | ; *************************************************** |
|
432 | ; *************************************************** | |
433 | ; `cache_repo` cache for file tree, Readme, RSS FEEDS |
|
433 | ; `cache_repo` cache for file tree, Readme, RSS FEEDS | |
434 | ; for simplicity use rc.file_namespace backend, |
|
434 | ; for simplicity use rc.file_namespace backend, | |
435 | ; for performance and scale use rc.redis |
|
435 | ; for performance and scale use rc.redis | |
436 | ; *************************************************** |
|
436 | ; *************************************************** | |
437 | rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace |
|
437 | rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace | |
438 | rc_cache.cache_repo.expiration_time = 2592000 |
|
438 | rc_cache.cache_repo.expiration_time = 2592000 | |
439 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set |
|
439 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
440 | #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db |
|
440 | #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db | |
441 |
|
441 | |||
442 | ; alternative `cache_repo` redis backend with distributed lock |
|
442 | ; alternative `cache_repo` redis backend with distributed lock | |
443 | #rc_cache.cache_repo.backend = dogpile.cache.rc.redis |
|
443 | #rc_cache.cache_repo.backend = dogpile.cache.rc.redis | |
444 | #rc_cache.cache_repo.expiration_time = 2592000 |
|
444 | #rc_cache.cache_repo.expiration_time = 2592000 | |
445 |
|
445 | |||
446 | ; redis_expiration_time needs to be greater than expiration_time
|
446 | ; redis_expiration_time needs to be greater than expiration_time | 
447 | #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400 |
|
447 | #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400 | |
448 |
|
448 | |||
449 | #rc_cache.cache_repo.arguments.host = localhost |
|
449 | #rc_cache.cache_repo.arguments.host = localhost | |
450 | #rc_cache.cache_repo.arguments.port = 6379 |
|
450 | #rc_cache.cache_repo.arguments.port = 6379 | |
451 | #rc_cache.cache_repo.arguments.db = 1 |
|
451 | #rc_cache.cache_repo.arguments.db = 1 | |
452 | #rc_cache.cache_repo.arguments.socket_timeout = 30 |
|
452 | #rc_cache.cache_repo.arguments.socket_timeout = 30 | |
453 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends |
|
453 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
454 | #rc_cache.cache_repo.arguments.distributed_lock = true |
|
454 | #rc_cache.cache_repo.arguments.distributed_lock = true | |
455 |
|
455 | |||
456 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
456 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
457 | #rc_cache.cache_repo.arguments.lock_auto_renewal = true |
|
457 | #rc_cache.cache_repo.arguments.lock_auto_renewal = true | |
458 |
|
458 | |||
459 | ; ############## |
|
459 | ; ############## | |
460 | ; BEAKER SESSION |
|
460 | ; BEAKER SESSION | |
461 | ; ############## |
|
461 | ; ############## | |
462 |
|
462 | |||
463 | ; beaker.session.type is the type of storage used for logged-in users' sessions. Currently allowed
|
463 | ; beaker.session.type is the type of storage used for logged-in users' sessions. Currently allowed | 
464 | ; types are file, ext:redis, ext:database, ext:memcached |
|
464 | ; types are file, ext:redis, ext:database, ext:memcached | |
465 | ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session |
|
465 | ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session | |
466 | #beaker.session.type = file |
|
466 | #beaker.session.type = file | |
467 | #beaker.session.data_dir = %(here)s/data/sessions |
|
467 | #beaker.session.data_dir = %(here)s/data/sessions | |
468 |
|
468 | |||
469 | ; Redis based sessions |
|
469 | ; Redis based sessions | |
470 | beaker.session.type = ext:redis |
|
470 | beaker.session.type = ext:redis | |
471 | beaker.session.url = redis://redis:6379/2 |
|
471 | beaker.session.url = redis://redis:6379/2 | |
472 |
|
472 | |||
473 | ; DB based session, fast, and allows easy management over logged in users |
|
473 | ; DB based session, fast, and allows easy management over logged in users | |
474 | #beaker.session.type = ext:database |
|
474 | #beaker.session.type = ext:database | |
475 | #beaker.session.table_name = db_session |
|
475 | #beaker.session.table_name = db_session | |
476 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
476 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode | |
477 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
477 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode | |
478 | #beaker.session.sa.pool_recycle = 3600 |
|
478 | #beaker.session.sa.pool_recycle = 3600 | |
479 | #beaker.session.sa.echo = false |
|
479 | #beaker.session.sa.echo = false | |
480 |
|
480 | |||
481 | beaker.session.key = rhodecode |
|
481 | beaker.session.key = rhodecode | |
482 | beaker.session.secret = production-rc-uytcxaz |
|
482 | beaker.session.secret = production-rc-uytcxaz | |
483 | beaker.session.lock_dir = /data_ramdisk/lock |
|
483 | beaker.session.lock_dir = /data_ramdisk/lock | |
484 |
|
484 | |||
485 | ; Secure encrypted cookie. Requires AES and AES python libraries |
|
485 | ; Secure encrypted cookie. Requires AES and AES python libraries | |
486 | ; you must disable beaker.session.secret to use this |
|
486 | ; you must disable beaker.session.secret to use this | |
487 | #beaker.session.encrypt_key = key_for_encryption |
|
487 | #beaker.session.encrypt_key = key_for_encryption | |
488 | #beaker.session.validate_key = validation_key |
|
488 | #beaker.session.validate_key = validation_key | |
489 |
|
489 | |||
490 | ; Sets session as invalid (also logging out the user) if it has not been
|
490 | ; Sets session as invalid (also logging out the user) if it has not been | 
491 | ; accessed for the given amount of time in seconds
|
491 | ; accessed for the given amount of time in seconds | 
492 | beaker.session.timeout = 2592000 |
|
492 | beaker.session.timeout = 2592000 | |
493 | beaker.session.httponly = true |
|
493 | beaker.session.httponly = true | |
494 |
|
494 | |||
495 | ; Path to use for the cookie. Set to prefix if you use prefix middleware |
|
495 | ; Path to use for the cookie. Set to prefix if you use prefix middleware | |
496 | #beaker.session.cookie_path = /custom_prefix |
|
496 | #beaker.session.cookie_path = /custom_prefix | |
497 |
|
497 | |||
498 | ; Set https secure cookie |
|
498 | ; Set https secure cookie | |
499 | beaker.session.secure = false |
|
499 | beaker.session.secure = false | |
500 |
|
500 | |||
501 | ; default cookie expiration time in seconds, set to `true` to expire
|
501 | ; default cookie expiration time in seconds, set to `true` to expire | 
502 | ; at browser close |
|
502 | ; at browser close | |
503 | #beaker.session.cookie_expires = 3600 |
|
503 | #beaker.session.cookie_expires = 3600 | |
504 |
|
504 | |||
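The `beaker.session.*` keys are standard Beaker options; they roughly correspond to wrapping the WSGI application in Beaker's SessionMiddleware. A sketch using the active values from this file; `wsgi_app` is a placeholder callable, RhodeCode wires this up internally:

    from beaker.middleware import SessionMiddleware

    session_opts = {
        'session.type': 'ext:redis',
        'session.url': 'redis://redis:6379/2',
        'session.key': 'rhodecode',
        'session.secret': 'production-rc-uytcxaz',
        'session.lock_dir': '/data_ramdisk/lock',
        'session.timeout': 2592000,          # 30 days, as configured above
        'session.httponly': True,
        'session.secure': False,
    }

    app = SessionMiddleware(wsgi_app, session_opts)   # wsgi_app: placeholder WSGI callable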
505 | ; ############################# |
|
505 | ; ############################# | |
506 | ; SEARCH INDEXING CONFIGURATION |
|
506 | ; SEARCH INDEXING CONFIGURATION | |
507 | ; ############################# |
|
507 | ; ############################# | |
508 |
|
508 | |||
509 | ; Full text search indexer is available in rhodecode-tools under |
|
509 | ; Full text search indexer is available in rhodecode-tools under | |
510 | ; `rhodecode-tools index` command |
|
510 | ; `rhodecode-tools index` command | |
511 |
|
511 | |||
512 | ; WHOOSH Backend, doesn't require additional services to run |
|
512 | ; WHOOSH Backend, doesn't require additional services to run | |
513 | ; it works well with a few dozen repos
|
513 | ; it works well with a few dozen repos | 
514 | search.module = rhodecode.lib.index.whoosh |
|
514 | search.module = rhodecode.lib.index.whoosh | |
515 | search.location = %(here)s/data/index |
|
515 | search.location = %(here)s/data/index | |
516 |
|
516 | |||
517 | ; #################### |
|
517 | ; #################### | |
518 | ; CHANNELSTREAM CONFIG |
|
518 | ; CHANNELSTREAM CONFIG | |
519 | ; #################### |
|
519 | ; #################### | |
520 |
|
520 | |||
521 | ; channelstream enables persistent connections and live notification |
|
521 | ; channelstream enables persistent connections and live notification | |
522 | ; in the system. It's also used by the chat system |
|
522 | ; in the system. It's also used by the chat system | |
523 |
|
523 | |||
524 | channelstream.enabled = true |
|
524 | channelstream.enabled = true | |
525 |
|
525 | |||
526 | ; server address for channelstream server on the backend |
|
526 | ; server address for channelstream server on the backend | |
527 | channelstream.server = channelstream:9800 |
|
527 | channelstream.server = channelstream:9800 | |
528 |
|
528 | |||
529 | ; location of the channelstream server from outside world |
|
529 | ; location of the channelstream server from outside world | |
530 | ; use ws:// for http or wss:// for https. This address needs to be handled |
|
530 | ; use ws:// for http or wss:// for https. This address needs to be handled | |
531 | ; by external HTTP server such as Nginx or Apache |
|
531 | ; by external HTTP server such as Nginx or Apache | |
532 | ; see Nginx/Apache configuration examples in our docs |
|
532 | ; see Nginx/Apache configuration examples in our docs | |
533 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
533 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream | |
534 | channelstream.secret = ENV_GENERATED |
|
534 | channelstream.secret = ENV_GENERATED | |
535 | channelstream.history.location = /var/opt/rhodecode_data/channelstream_history |
|
535 | channelstream.history.location = /var/opt/rhodecode_data/channelstream_history | |
536 |
|
536 | |||
537 | ; Internal application path that JavaScript uses to connect to.
|
537 | ; Internal application path that JavaScript uses to connect to. | 
538 | ; If you use proxy-prefix the prefix should be added before /_channelstream |
|
538 | ; If you use proxy-prefix the prefix should be added before /_channelstream | |
539 | channelstream.proxy_path = /_channelstream |
|
539 | channelstream.proxy_path = /_channelstream | |
540 |
|
540 | |||
541 |
|
541 | |||
542 | ; ############################## |
|
542 | ; ############################## | |
543 | ; MAIN RHODECODE DATABASE CONFIG |
|
543 | ; MAIN RHODECODE DATABASE CONFIG | |
544 | ; ############################## |
|
544 | ; ############################## | |
545 |
|
545 | |||
546 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
546 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 | |
547 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
547 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode | |
548 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 |
|
548 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 | |
549 | ; pymysql is an alternative driver for MySQL, use in case of problems with default one |
|
549 | ; pymysql is an alternative driver for MySQL, use in case of problems with default one | |
550 | #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode |
|
550 | #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode | |
551 |
|
551 | |||
552 | sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
552 | sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode | |
553 |
|
553 | |||
554 | ; see sqlalchemy docs for other advanced settings |
|
554 | ; see sqlalchemy docs for other advanced settings | |
555 | ; print the sql statements to output |
|
555 | ; print the sql statements to output | |
556 | sqlalchemy.db1.echo = false |
|
556 | sqlalchemy.db1.echo = false | |
557 |
|
557 | |||
558 | ; recycle the connections after this amount of seconds |
|
558 | ; recycle the connections after this amount of seconds | |
559 | sqlalchemy.db1.pool_recycle = 3600 |
|
559 | sqlalchemy.db1.pool_recycle = 3600 | |
560 |
|
560 | |||
561 | ; the number of connections to keep open inside the connection pool. |
|
561 | ; the number of connections to keep open inside the connection pool. | |
562 | ; 0 indicates no limit |
|
562 | ; 0 indicates no limit | |
563 | ; the general calculation with gevent is:
|
563 | ; the general calculation with gevent is: | 
564 | ; if your system allows 500 concurrent greenlets (max_connections) that all do database access, |
|
564 | ; if your system allows 500 concurrent greenlets (max_connections) that all do database access, | |
565 | ; then increase pool size + max overflow so that they add up to 500. |
|
565 | ; then increase pool size + max overflow so that they add up to 500. | |
566 | #sqlalchemy.db1.pool_size = 5 |
|
566 | #sqlalchemy.db1.pool_size = 5 | |
567 |
|
567 | |||
568 | ; The number of connections to allow in connection pool "overflow", that is |
|
568 | ; The number of connections to allow in connection pool "overflow", that is | |
569 | ; connections that can be opened above and beyond the pool_size setting, |
|
569 | ; connections that can be opened above and beyond the pool_size setting, | |
570 | ; which defaults to five. |
|
570 | ; which defaults to five. | |
571 | #sqlalchemy.db1.max_overflow = 10 |
|
571 | #sqlalchemy.db1.max_overflow = 10 | |
572 |
|
572 | |||
573 | ; Connection check ping, used to detect broken database connections |
|
573 | ; Connection check ping, used to detect broken database connections | |
574 | ; can be enabled to better handle "MySQL has gone away" errors
|
574 | ; can be enabled to better handle "MySQL has gone away" errors | 
575 | #sqlalchemy.db1.ping_connection = true |
|
575 | #sqlalchemy.db1.ping_connection = true | |
576 |
|
576 | |||
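The pool-sizing comment above maps directly onto SQLAlchemy engine arguments. An illustrative sketch, not the shipped defaults: if up to 500 greenlets may hit the database concurrently, pool_size=400 plus max_overflow=100 covers them:

    from sqlalchemy import create_engine

    engine = create_engine(
        'postgresql://postgres:qweqwe@localhost/rhodecode',
        pool_size=400,        # sqlalchemy.db1.pool_size
        max_overflow=100,     # sqlalchemy.db1.max_overflow; 400 + 100 = 500 connections max
        pool_recycle=3600,    # sqlalchemy.db1.pool_recycle
        pool_pre_ping=True,   # sqlalchemy.db1.ping_connection
    )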
577 | ; ########## |
|
577 | ; ########## | |
578 | ; VCS CONFIG |
|
578 | ; VCS CONFIG | |
579 | ; ########## |
|
579 | ; ########## | |
580 | vcs.server.enable = true |
|
580 | vcs.server.enable = true | |
581 | vcs.server = vcsserver:10010 |
|
581 | vcs.server = vcsserver:10010 | |
582 |
|
582 | |||
583 | ; Web server connectivity protocol, responsible for web based VCS operations |
|
583 | ; Web server connectivity protocol, responsible for web based VCS operations | |
584 | ; Available protocols are: |
|
584 | ; Available protocols are: | |
585 | ; `http` - use http-rpc backend (default) |
|
585 | ; `http` - use http-rpc backend (default) | |
586 | vcs.server.protocol = http |
|
586 | vcs.server.protocol = http | |
587 |
|
587 | |||
588 | ; Push/Pull operations protocol, available options are: |
|
588 | ; Push/Pull operations protocol, available options are: | |
589 | ; `http` - use http-rpc backend (default) |
|
589 | ; `http` - use http-rpc backend (default) | |
590 | vcs.scm_app_implementation = http |
|
590 | vcs.scm_app_implementation = http | |
591 |
|
591 | |||
592 | ; Push/Pull operations hooks protocol, available options are: |
|
592 | ; Push/Pull operations hooks protocol, available options are: | |
593 | ; `http` - use http-rpc backend (default) |
|
593 | ; `http` - use http-rpc backend (default) | |
594 | ; `celery` - use celery based hooks |
|
594 | ; `celery` - use celery based hooks | |
595 | vcs.hooks.protocol = http |
|
595 | #DEPRECATED:vcs.hooks.protocol = http | |
|
596 | vcs.hooks.protocol.v2 = celery | |||
596 |
|
597 | |||
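The hunk above deprecates `vcs.hooks.protocol` in favour of `vcs.hooks.protocol.v2`. As a hedged sketch only, not RhodeCode's actual code path, an application reading the new key while tolerating configs that still carry the old one might look like this:

    def get_hooks_protocol(settings: dict) -> str:
        # prefer the new v2 key; fall back to the deprecated key, then to 'http'
        protocol = (settings.get('vcs.hooks.protocol.v2')
                    or settings.get('vcs.hooks.protocol')
                    or 'http')
        if protocol not in ('http', 'celery'):
            raise ValueError(f'unsupported vcs hooks protocol: {protocol}')
        return protocol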
597 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be |
|
598 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be | |
598 | ; accessible via network. |
|
599 | ; accessible via network. | |
599 | ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker) |
|
600 | ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker) | |
600 | vcs.hooks.host = * |
|
601 | vcs.hooks.host = * | |
601 |
|
602 | |||
602 | ; Start VCSServer with this instance as a subprocess, useful for development |
|
603 | ; Start VCSServer with this instance as a subprocess, useful for development | |
603 | vcs.start_server = false |
|
604 | vcs.start_server = false | |
604 |
|
605 | |||
605 | ; List of enabled VCS backends, available options are: |
|
606 | ; List of enabled VCS backends, available options are: | |
606 | ; `hg` - mercurial |
|
607 | ; `hg` - mercurial | |
607 | ; `git` - git |
|
608 | ; `git` - git | |
608 | ; `svn` - subversion |
|
609 | ; `svn` - subversion | |
609 | vcs.backends = hg, git, svn |
|
610 | vcs.backends = hg, git, svn | |
610 |
|
611 | |||
611 | ; Wait this number of seconds before killing connection to the vcsserver |
|
612 | ; Wait this number of seconds before killing connection to the vcsserver | |
612 | vcs.connection_timeout = 3600 |
|
613 | vcs.connection_timeout = 3600 | |
613 |
|
614 | |||
614 | ; Cache flag to cache vcsserver remote calls locally |
|
615 | ; Cache flag to cache vcsserver remote calls locally | |
615 | ; It uses cache_region `cache_repo` |
|
616 | ; It uses cache_region `cache_repo` | |
616 | vcs.methods.cache = true |
|
617 | vcs.methods.cache = true | |
617 |
|
618 | |||
618 | ; #################################################### |
|
619 | ; #################################################### | |
619 | ; Subversion proxy support (mod_dav_svn) |
|
620 | ; Subversion proxy support (mod_dav_svn) | |
620 | ; Maps RhodeCode repo groups into SVN paths for Apache |
|
621 | ; Maps RhodeCode repo groups into SVN paths for Apache | |
621 | ; #################################################### |
|
622 | ; #################################################### | |
622 |
|
623 | |||
623 | ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
624 | ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out. | |
624 | ; Set a numeric version for your current SVN e.g 1.8, or 1.12 |
|
625 | ; Set a numeric version for your current SVN e.g 1.8, or 1.12 | |
625 | ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible |
|
626 | ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible | |
626 | #vcs.svn.compatible_version = 1.8 |
|
627 | #vcs.svn.compatible_version = 1.8 | |
627 |
|
628 | |||
628 | ; Redis connection settings for svn integrations logic |
|
629 | ; Redis connection settings for svn integrations logic | |
629 | ; This connection string needs to be the same on ce and vcsserver |
|
630 | ; This connection string needs to be the same on ce and vcsserver | |
630 | vcs.svn.redis_conn = redis://redis:6379/0 |
|
631 | vcs.svn.redis_conn = redis://redis:6379/0 | |
631 |
|
632 | |||
632 | ; Enable SVN proxy of requests over HTTP |
|
633 | ; Enable SVN proxy of requests over HTTP | |
633 | vcs.svn.proxy.enabled = true |
|
634 | vcs.svn.proxy.enabled = true | |
634 |
|
635 | |||
635 | ; host to connect to running SVN subsystem |
|
636 | ; host to connect to running SVN subsystem | |
636 | vcs.svn.proxy.host = http://svn:8090 |
|
637 | vcs.svn.proxy.host = http://svn:8090 | |
637 |
|
638 | |||
638 | ; Enable or disable the config file generation. |
|
639 | ; Enable or disable the config file generation. | |
639 | svn.proxy.generate_config = true |
|
640 | svn.proxy.generate_config = true | |
640 |
|
641 | |||
641 | ; Generate config file with `SVNListParentPath` set to `On`. |
|
642 | ; Generate config file with `SVNListParentPath` set to `On`. | |
642 | svn.proxy.list_parent_path = true |
|
643 | svn.proxy.list_parent_path = true | |
643 |
|
644 | |||
644 | ; Set location and file name of generated config file. |
|
645 | ; Set location and file name of generated config file. | |
645 | svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf |
|
646 | svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf | |
646 |
|
647 | |||
647 | ; alternative mod_dav config template. This needs to be a valid mako template |
|
648 | ; alternative mod_dav config template. This needs to be a valid mako template | |
648 | ; Example template can be found in the source code: |
|
649 | ; Example template can be found in the source code: | |
649 | ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako |
|
650 | ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako | |
650 | #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako |
|
651 | #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako | |
651 |
|
652 | |||
652 | ; Used as a prefix to the `Location` block in the generated config file. |
|
653 | ; Used as a prefix to the `Location` block in the generated config file. | |
653 | ; In most cases it should be set to `/`. |
|
654 | ; In most cases it should be set to `/`. | |
654 | svn.proxy.location_root = / |
|
655 | svn.proxy.location_root = / | |
655 |
|
656 | |||
656 | ; Command to reload the mod dav svn configuration on change. |
|
657 | ; Command to reload the mod dav svn configuration on change. | |
657 | ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh |
|
658 | ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh | |
658 | ; Make sure user who runs RhodeCode process is allowed to reload Apache |
|
659 | ; Make sure user who runs RhodeCode process is allowed to reload Apache | |
659 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload |
|
660 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload | |
660 |
|
661 | |||
661 | ; If the timeout expires before the reload command finishes, the command will |
|
662 | ; If the timeout expires before the reload command finishes, the command will | |
662 | ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds. |
|
663 | ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds. | |
663 | #svn.proxy.reload_timeout = 10 |
|
664 | #svn.proxy.reload_timeout = 10 | |
664 |
|
665 | |||
665 | ; #################### |
|
666 | ; #################### | |
666 | ; SSH Support Settings |
|
667 | ; SSH Support Settings | |
667 | ; #################### |
|
668 | ; #################### | |
668 |
|
669 | |||
669 | ; Defines if a custom authorized_keys file should be created and written on |
|
670 | ; Defines if a custom authorized_keys file should be created and written on | |
670 | ; any change of user ssh keys. Setting this to false also disables the possibility
|
671 | ; any change of user ssh keys. Setting this to false also disables the possibility | 
671 | ; of adding SSH keys by users from the web interface. Super admins can still
|
672 | ; of adding SSH keys by users from the web interface. Super admins can still | 
672 | ; manage SSH Keys. |
|
673 | ; manage SSH Keys. | |
673 | ssh.generate_authorized_keyfile = true |
|
674 | ssh.generate_authorized_keyfile = true | |
674 |
|
675 | |||
675 | ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` |
|
676 | ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` | |
676 | # ssh.authorized_keys_ssh_opts = |
|
677 | # ssh.authorized_keys_ssh_opts = | |
677 |
|
678 | |||
678 | ; Path to the authorized_keys file where the generated entries are placed.
|
679 | ; Path to the authorized_keys file where the generated entries are placed. | 
679 | ; It is possible to have multiple key files specified in `sshd_config` e.g. |
|
680 | ; It is possible to have multiple key files specified in `sshd_config` e.g. | |
680 | ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode |
|
681 | ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode | |
681 | ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode |
|
682 | ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode | |
682 |
|
683 | |||
683 | ; Command to execute the SSH wrapper. The binary is available in the |
|
684 | ; Command to execute the SSH wrapper. The binary is available in the | |
684 | ; RhodeCode installation directory. |
|
685 | ; RhodeCode installation directory. | |
685 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
686 | ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
686 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 |
|
687 | ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 | |
687 | #DEPRECATED: ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper |
|
688 | #DEPRECATED: ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper | |
688 | ssh.wrapper_cmd.v2 = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 |
|
689 | ssh.wrapper_cmd.v2 = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2 | |
689 |
|
690 | |||
690 | ; Allow shell when executing the ssh-wrapper command |
|
691 | ; Allow shell when executing the ssh-wrapper command | |
691 | ssh.wrapper_cmd_allow_shell = false |
|
692 | ssh.wrapper_cmd_allow_shell = false | |
692 |
|
693 | |||
693 | ; Enables logging, and detailed output sent back to the client during SSH
|
694 | ; Enables logging, and detailed output sent back to the client during SSH | 
694 | ; operations. Useful for debugging, shouldn't be used in production. |
|
695 | ; operations. Useful for debugging, shouldn't be used in production. | |
695 | ssh.enable_debug_logging = false |
|
696 | ssh.enable_debug_logging = false | |
696 |
|
697 | |||
697 | ; Paths to binary executables, by default they are the names, but we can
|
698 | ; Paths to binary executables, by default they are the names, but we can | 
698 | ; override them if we want to use a custom one |
|
699 | ; override them if we want to use a custom one | |
699 | ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg |
|
700 | ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg | |
700 | ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git |
|
701 | ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git | |
701 | ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve |
|
702 | ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve | |
702 |
|
703 | |||
703 | ; Enables SSH key generator web interface. Disabling this still allows users |
|
704 | ; Enables SSH key generator web interface. Disabling this still allows users | |
704 | ; to add their own keys. |
|
705 | ; to add their own keys. | |
705 | ssh.enable_ui_key_generator = true |
|
706 | ssh.enable_ui_key_generator = true | |
706 |
|
707 | |||
707 | ; Statsd client config, this is used to send metrics to statsd |
|
708 | ; Statsd client config, this is used to send metrics to statsd | |
708 | ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
|
709 | ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus | 
709 | #statsd.enabled = false |
|
710 | #statsd.enabled = false | |
710 | #statsd.statsd_host = 0.0.0.0 |
|
711 | #statsd.statsd_host = 0.0.0.0 | |
711 | #statsd.statsd_port = 8125 |
|
712 | #statsd.statsd_port = 8125 | |
712 | #statsd.statsd_prefix = |
|
713 | #statsd.statsd_prefix = | |
713 | #statsd.statsd_ipv6 = false |
|
714 | #statsd.statsd_ipv6 = false | |
714 |
|
715 | |||
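For context, the `statsd.*` keys describe a plain StatsD endpoint. With the generic `statsd` Python client, used here purely for illustration and not necessarily the client RhodeCode ships, the same values look like this:

    import statsd

    client = statsd.StatsClient(
        host='0.0.0.0',    # statsd.statsd_host
        port=8125,         # statsd.statsd_port
        prefix=None,       # statsd.statsd_prefix
        ipv6=False,        # statsd.statsd_ipv6
    )
    client.incr('rhodecode.requests')   # example counter; the metric name is arbitrary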
715 | ; configure logging automatically at server startup; set to false
|
716 | ; configure logging automatically at server startup; set to false | 
716 | ; to use the custom logging config below.
|
717 | ; to use the custom logging config below. | 
717 | ; RC_LOGGING_FORMATTER |
|
718 | ; RC_LOGGING_FORMATTER | |
718 | ; RC_LOGGING_LEVEL |
|
719 | ; RC_LOGGING_LEVEL | |
719 | ; these env variables can control the logging settings when autoconfigure is enabled
|
720 | ; these env variables can control the logging settings when autoconfigure is enabled | 
720 |
|
721 | |||
721 | #logging.autoconfigure = true |
|
722 | #logging.autoconfigure = true | |
722 |
|
723 | |||
723 | ; specify your own custom logging config file to configure logging |
|
724 | ; specify your own custom logging config file to configure logging | |
724 | #logging.logging_conf_file = /path/to/custom_logging.ini |
|
725 | #logging.logging_conf_file = /path/to/custom_logging.ini | |
725 |
|
726 | |||
726 | ; Dummy marker to add new entries after. |
|
727 | ; Dummy marker to add new entries after. | |
727 | ; Add any custom entries below. Please don't remove this marker. |
|
728 | ; Add any custom entries below. Please don't remove this marker. | |
728 | custom.conf = 1 |
|
729 | custom.conf = 1 | |
729 |
|
730 | |||
730 |
|
731 | |||
731 | ; ##################### |
|
732 | ; ##################### | |
732 | ; LOGGING CONFIGURATION |
|
733 | ; LOGGING CONFIGURATION | |
733 | ; ##################### |
|
734 | ; ##################### | |
734 |
|
735 | |||
735 | [loggers] |
|
736 | [loggers] | |
736 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper |
|
737 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper | |
737 |
|
738 | |||
738 | [handlers] |
|
739 | [handlers] | |
739 | keys = console, console_sql |
|
740 | keys = console, console_sql | |
740 |
|
741 | |||
741 | [formatters] |
|
742 | [formatters] | |
742 | keys = generic, json, color_formatter, color_formatter_sql |
|
743 | keys = generic, json, color_formatter, color_formatter_sql | |
743 |
|
744 | |||
744 | ; ####### |
|
745 | ; ####### | |
745 | ; LOGGERS |
|
746 | ; LOGGERS | |
746 | ; ####### |
|
747 | ; ####### | |
747 | [logger_root] |
|
748 | [logger_root] | |
748 | level = NOTSET |
|
749 | level = NOTSET | |
749 | handlers = console |
|
750 | handlers = console | |
750 |
|
751 | |||
751 | [logger_sqlalchemy] |
|
752 | [logger_sqlalchemy] | |
752 | level = INFO |
|
753 | level = INFO | |
753 | handlers = console_sql |
|
754 | handlers = console_sql | |
754 | qualname = sqlalchemy.engine |
|
755 | qualname = sqlalchemy.engine | |
755 | propagate = 0 |
|
756 | propagate = 0 | |
756 |
|
757 | |||
757 | [logger_beaker] |
|
758 | [logger_beaker] | |
758 | level = DEBUG |
|
759 | level = DEBUG | |
759 | handlers = |
|
760 | handlers = | |
760 | qualname = beaker.container |
|
761 | qualname = beaker.container | |
761 | propagate = 1 |
|
762 | propagate = 1 | |
762 |
|
763 | |||
763 | [logger_rhodecode] |
|
764 | [logger_rhodecode] | |
764 | level = DEBUG |
|
765 | level = DEBUG | |
765 | handlers = |
|
766 | handlers = | |
766 | qualname = rhodecode |
|
767 | qualname = rhodecode | |
767 | propagate = 1 |
|
768 | propagate = 1 | |
768 |
|
769 | |||
769 | [logger_ssh_wrapper] |
|
770 | [logger_ssh_wrapper] | |
770 | level = DEBUG |
|
771 | level = DEBUG | |
771 | handlers = |
|
772 | handlers = | |
772 | qualname = ssh_wrapper |
|
773 | qualname = ssh_wrapper | |
773 | propagate = 1 |
|
774 | propagate = 1 | |
774 |
|
775 | |||
775 | [logger_celery] |
|
776 | [logger_celery] | |
776 | level = DEBUG |
|
777 | level = DEBUG | |
777 | handlers = |
|
778 | handlers = | |
778 | qualname = celery |
|
779 | qualname = celery | |
779 |
|
780 | |||
780 |
|
781 | |||
781 | ; ######## |
|
782 | ; ######## | |
782 | ; HANDLERS |
|
783 | ; HANDLERS | |
783 | ; ######## |
|
784 | ; ######## | |
784 |
|
785 | |||
785 | [handler_console] |
|
786 | [handler_console] | |
786 | class = StreamHandler |
|
787 | class = StreamHandler | |
787 | args = (sys.stderr, ) |
|
788 | args = (sys.stderr, ) | |
788 | level = INFO |
|
789 | level = INFO | |
789 | ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json' |
|
790 | ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json' | |
790 | ; This allows sending properly formatted logs to grafana loki or elasticsearch |
|
791 | ; This allows sending properly formatted logs to grafana loki or elasticsearch | |
791 | formatter = generic |
|
792 | formatter = generic | |
792 |
|
793 | |||
793 | [handler_console_sql] |
|
794 | [handler_console_sql] | |
794 | ; "level = DEBUG" logs SQL queries and results. |
|
795 | ; "level = DEBUG" logs SQL queries and results. | |
795 | ; "level = INFO" logs SQL queries. |
|
796 | ; "level = INFO" logs SQL queries. | |
796 | ; "level = WARN" logs neither. (Recommended for production systems.) |
|
797 | ; "level = WARN" logs neither. (Recommended for production systems.) | |
797 | class = StreamHandler |
|
798 | class = StreamHandler | |
798 | args = (sys.stderr, ) |
|
799 | args = (sys.stderr, ) | |
799 | level = WARN |
|
800 | level = WARN | |
800 | ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json' |
|
801 | ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json' | |
801 | ; This allows sending properly formatted logs to grafana loki or elasticsearch |
|
802 | ; This allows sending properly formatted logs to grafana loki or elasticsearch | |
802 | formatter = generic |
|
803 | formatter = generic | |
803 |
|
804 | |||
804 | ; ########## |
|
805 | ; ########## | |
805 | ; FORMATTERS |
|
806 | ; FORMATTERS | |
806 | ; ########## |
|
807 | ; ########## | |
807 |
|
808 | |||
808 | [formatter_generic] |
|
809 | [formatter_generic] | |
809 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
810 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter | |
810 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
811 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
811 | datefmt = %Y-%m-%d %H:%M:%S |
|
812 | datefmt = %Y-%m-%d %H:%M:%S | |
812 |
|
813 | |||
813 | [formatter_color_formatter] |
|
814 | [formatter_color_formatter] | |
814 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
815 | class = rhodecode.lib.logging_formatter.ColorFormatter | |
815 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
816 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
816 | datefmt = %Y-%m-%d %H:%M:%S |
|
817 | datefmt = %Y-%m-%d %H:%M:%S | |
817 |
|
818 | |||
818 | [formatter_color_formatter_sql] |
|
819 | [formatter_color_formatter_sql] | |
819 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
820 | class = rhodecode.lib.logging_formatter.ColorFormatterSql | |
820 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
821 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s | |
821 | datefmt = %Y-%m-%d %H:%M:%S |
|
822 | datefmt = %Y-%m-%d %H:%M:%S | |
822 |
|
823 | |||
823 | [formatter_json] |
|
824 | [formatter_json] | |
824 | format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s |
|
825 | format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s | |
825 | class = rhodecode.lib._vendor.jsonlogger.JsonFormatter |
|
826 | class = rhodecode.lib._vendor.jsonlogger.JsonFormatter |
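The [loggers], [handlers] and [formatters] sections follow the stdlib `logging.config.fileConfig` layout, so they can also be loaded outside the web process, for example from a maintenance script. A sketch, assuming the ini lives at the path shown:

    from pyramid.paster import setup_logging

    # parses the logging sections of the ini and configures the stdlib logging module
    setup_logging('/etc/rhodecode/conf/rhodecode.ini')   # assumed path to this file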
@@ -1,175 +1,175 b'' | |||||
1 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
1 | # Copyright (C) 2016-2023 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | import os |
|
19 | import os | |
20 | import sys |
|
20 | import sys | |
21 | import logging |
|
21 | import logging | |
22 |
|
22 | |||
23 | from rhodecode.lib.hook_daemon.base import prepare_callback_daemon |
|
23 | from rhodecode.lib.hook_daemon.base import prepare_callback_daemon | |
24 | from rhodecode.lib.ext_json import sjson as json |
|
24 | from rhodecode.lib.ext_json import sjson as json | |
25 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
25 | from rhodecode.lib.vcs.conf import settings as vcs_settings | |
26 | from rhodecode.lib.api_utils import call_service_api |
|
26 | from rhodecode.lib.api_utils import call_service_api | |
27 |
|
27 | |||
28 | log = logging.getLogger(__name__) |
|
28 | log = logging.getLogger(__name__) | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | class SshVcsServer(object): |
|
31 | class SshVcsServer(object): | |
32 | repo_user_agent = None # set in child classes |
|
32 | repo_user_agent = None # set in child classes | |
33 | _path = None # set executable path for hg/git/svn binary |
|
33 | _path = None # set executable path for hg/git/svn binary | |
34 | backend = None # set in child classes |
|
34 | backend = None # set in child classes | |
35 | tunnel = None # subprocess handling tunnel |
|
35 | tunnel = None # subprocess handling tunnel | |
36 | settings = None # parsed settings module |
|
36 | settings = None # parsed settings module | |
37 | write_perms = ['repository.admin', 'repository.write'] |
|
37 | write_perms = ['repository.admin', 'repository.write'] | |
38 | read_perms = ['repository.read', 'repository.admin', 'repository.write'] |
|
38 | read_perms = ['repository.read', 'repository.admin', 'repository.write'] | |
39 |
|
39 | |||
40 | def __init__(self, user, user_permissions, settings, env): |
|
40 | def __init__(self, user, user_permissions, settings, env): | |
41 | self.user = user |
|
41 | self.user = user | |
42 | self.user_permissions = user_permissions |
|
42 | self.user_permissions = user_permissions | |
43 | self.settings = settings |
|
43 | self.settings = settings | |
44 | self.env = env |
|
44 | self.env = env | |
45 | self.stdin = sys.stdin |
|
45 | self.stdin = sys.stdin | |
46 |
|
46 | |||
47 | self.repo_name = None |
|
47 | self.repo_name = None | |
48 | self.repo_mode = None |
|
48 | self.repo_mode = None | |
49 | self.store = '' |
|
49 | self.store = '' | |
50 | self.ini_path = '' |
|
50 | self.ini_path = '' | |
51 | self.hooks_protocol = None |
|
51 | self.hooks_protocol = None | |
52 |
|
52 | |||
53 | def _invalidate_cache(self, repo_name): |
|
53 | def _invalidate_cache(self, repo_name): | |
54 | """ |
|
54 | """ | |
55 | Sets cache for this repository for invalidation on next access
|
55 | Sets cache for this repository for invalidation on next access | 
56 |
|
56 | |||
57 | :param repo_name: full repo name, also a cache key |
|
57 | :param repo_name: full repo name, also a cache key | |
58 | """ |
|
58 | """ | |
59 | # Todo: Leave only "celery" case after transition. |
|
59 | # Todo: Leave only "celery" case after transition. | |
60 | match self.hooks_protocol: |
|
60 | match self.hooks_protocol: | |
61 | case 'http': |
|
61 | case 'http': | |
62 | from rhodecode.model.scm import ScmModel |
|
62 | from rhodecode.model.scm import ScmModel | |
63 | ScmModel().mark_for_invalidation(repo_name) |
|
63 | ScmModel().mark_for_invalidation(repo_name) | |
64 | case 'celery': |
|
64 | case 'celery': | |
65 | call_service_api(self.settings, { |
|
65 | call_service_api(self.settings, { | |
66 | "method": "service_mark_for_invalidation", |
|
66 | "method": "service_mark_for_invalidation", | |
67 | "args": {"repo_name": repo_name} |
|
67 | "args": {"repo_name": repo_name} | |
68 | }) |
|
68 | }) | |
69 |
|
69 | |||
70 | def has_write_perm(self): |
|
70 | def has_write_perm(self): | |
71 | permission = self.user_permissions.get(self.repo_name) |
|
71 | permission = self.user_permissions.get(self.repo_name) | |
72 | if permission in ['repository.write', 'repository.admin']: |
|
72 | if permission in ['repository.write', 'repository.admin']: | |
73 | return True |
|
73 | return True | |
74 |
|
74 | |||
75 | return False |
|
75 | return False | |
76 |
|
76 | |||
77 | def _check_permissions(self, action): |
|
77 | def _check_permissions(self, action): | |
78 | permission = self.user_permissions.get(self.repo_name) |
|
78 | permission = self.user_permissions.get(self.repo_name) | |
79 | user_info = f'{self.user["user_id"]}:{self.user["username"]}' |
|
79 | user_info = f'{self.user["user_id"]}:{self.user["username"]}' | |
80 | log.debug('permission for %s on %s are: %s', |
|
80 | log.debug('permission for %s on %s are: %s', | |
81 | user_info, self.repo_name, permission) |
|
81 | user_info, self.repo_name, permission) | |
82 |
|
82 | |||
83 | if not permission: |
|
83 | if not permission: | |
84 | log.error('user `%s` permissions to repo:%s are empty. Forbidding access.', |
|
84 | log.error('user `%s` permissions to repo:%s are empty. Forbidding access.', | |
85 | user_info, self.repo_name) |
|
85 | user_info, self.repo_name) | |
86 | return -2 |
|
86 | return -2 | |
87 |
|
87 | |||
88 | if action == 'pull': |
|
88 | if action == 'pull': | |
89 | if permission in self.read_perms: |
|
89 | if permission in self.read_perms: | |
90 | log.info( |
|
90 | log.info( | |
91 | 'READ Permissions for User "%s" detected to repo "%s"!', |
|
91 | 'READ Permissions for User "%s" detected to repo "%s"!', | |
92 | user_info, self.repo_name) |
|
92 | user_info, self.repo_name) | |
93 | return 0 |
|
93 | return 0 | |
94 | else: |
|
94 | else: | |
95 | if permission in self.write_perms: |
|
95 | if permission in self.write_perms: | |
96 | log.info( |
|
96 | log.info( | |
97 | 'WRITE, or Higher Permissions for User "%s" detected to repo "%s"!', |
|
97 | 'WRITE, or Higher Permissions for User "%s" detected to repo "%s"!', | |
98 | user_info, self.repo_name) |
|
98 | user_info, self.repo_name) | |
99 | return 0 |
|
99 | return 0 | |
100 |
|
100 | |||
101 | log.error('Cannot properly fetch or verify user `%s` permissions. ' |
|
101 | log.error('Cannot properly fetch or verify user `%s` permissions. ' | |
102 | 'Permissions: %s, vcs action: %s', |
|
102 | 'Permissions: %s, vcs action: %s', | |
103 | user_info, permission, action) |
|
103 | user_info, permission, action) | |
104 | return -2 |
|
104 | return -2 | |
105 |
|
105 | |||
106 | def update_environment(self, action, extras=None): |
|
106 | def update_environment(self, action, extras=None): | |
107 |
|
107 | |||
108 | scm_data = { |
|
108 | scm_data = { | |
109 | 'ip': os.environ['SSH_CLIENT'].split()[0], |
|
109 | 'ip': os.environ['SSH_CLIENT'].split()[0], | |
110 | 'username': self.user.username, |
|
110 | 'username': self.user.username, | |
111 | 'user_id': self.user.user_id, |
|
111 | 'user_id': self.user.user_id, | |
112 | 'action': action, |
|
112 | 'action': action, | |
113 | 'repository': self.repo_name, |
|
113 | 'repository': self.repo_name, | |
114 | 'scm': self.backend, |
|
114 | 'scm': self.backend, | |
115 | 'config': self.ini_path, |
|
115 | 'config': self.ini_path, | |
116 | 'repo_store': self.store, |
|
116 | 'repo_store': self.store, | |
117 | 'make_lock': None, |
|
117 | 'make_lock': None, | |
118 | 'locked_by': [None, None], |
|
118 | 'locked_by': [None, None], | |
119 | 'server_url': None, |
|
119 | 'server_url': None, | |
120 | 'user_agent': f'{self.repo_user_agent}/ssh-user-agent', |
|
120 | 'user_agent': f'{self.repo_user_agent}/ssh-user-agent', | |
121 | 'hooks': ['push', 'pull'], |
|
121 | 'hooks': ['push', 'pull'], | |
122 | 'hooks_module': 'rhodecode.lib.hook_daemon.hook_module', |
|
122 | 'hooks_module': 'rhodecode.lib.hook_daemon.hook_module', | |
123 | 'is_shadow_repo': False, |
|
123 | 'is_shadow_repo': False, | |
124 | 'detect_force_push': False, |
|
124 | 'detect_force_push': False, | |
125 | 'check_branch_perms': False, |
|
125 | 'check_branch_perms': False, | |
126 |
|
126 | |||
127 | 'SSH': True, |
|
127 | 'SSH': True, | |
128 | 'SSH_PERMISSIONS': self.user_permissions.get(self.repo_name), |
|
128 | 'SSH_PERMISSIONS': self.user_permissions.get(self.repo_name), | |
129 | } |
|
129 | } | |
130 | if extras: |
|
130 | if extras: | |
131 | scm_data.update(extras) |
|
131 | scm_data.update(extras) | |
132 | os.putenv("RC_SCM_DATA", json.dumps(scm_data)) |
|
132 | os.putenv("RC_SCM_DATA", json.dumps(scm_data)) | |
133 | return scm_data |
|
133 | return scm_data | |
134 |
|
134 | |||
135 | def get_root_store(self): |
|
135 | def get_root_store(self): | |
136 | root_store = self.store |
|
136 | root_store = self.store | |
137 | if not root_store.endswith('/'): |
|
137 | if not root_store.endswith('/'): | |
138 | # always append trailing slash |
|
138 | # always append trailing slash | |
139 | root_store = root_store + '/' |
|
139 | root_store = root_store + '/' | |
140 | return root_store |
|
140 | return root_store | |
141 |
|
141 | |||
142 | def _handle_tunnel(self, extras): |
|
142 | def _handle_tunnel(self, extras): | |
143 | # pre-auth |
|
143 | # pre-auth | |
144 | action = 'pull' |
|
144 | action = 'pull' | |
145 | exit_code = self._check_permissions(action) |
|
145 | exit_code = self._check_permissions(action) | |
146 | if exit_code: |
|
146 | if exit_code: | |
147 | return exit_code, False |
|
147 | return exit_code, False | |
148 |
|
148 | |||
149 | req = self.env.get('request') |
|
149 | req = self.env.get('request') | |
150 | if req: |
|
150 | if req: | |
151 | server_url = req.host_url + req.script_name |
|
151 | server_url = req.host_url + req.script_name | |
152 | extras['server_url'] = server_url |
|
152 | extras['server_url'] = server_url | |
153 |
|
153 | |||
154 | log.debug('Using %s binaries from path %s', self.backend, self._path) |
|
154 | log.debug('Using %s binaries from path %s', self.backend, self._path) | |
155 | exit_code = self.tunnel.run(extras) |
|
155 | exit_code = self.tunnel.run(extras) | |
156 |
|
156 | |||
157 | return exit_code, action == "push" |
|
157 | return exit_code, action == "push" | |
158 |
|
158 | |||
159 | def run(self, tunnel_extras=None): |
|
159 | def run(self, tunnel_extras=None): | |
160 | self.hooks_protocol = self.settings['vcs.hooks.protocol'] |
|
160 | self.hooks_protocol = self.settings['vcs.hooks.protocol.v2'] | |
161 | tunnel_extras = tunnel_extras or {} |
|
161 | tunnel_extras = tunnel_extras or {} | |
162 | extras = {} |
|
162 | extras = {} | |
163 | extras.update(tunnel_extras) |
|
163 | extras.update(tunnel_extras) | |
164 |
|
164 | |||
165 | callback_daemon, extras = prepare_callback_daemon( |
|
165 | callback_daemon, extras = prepare_callback_daemon( | |
166 | extras, protocol=self.hooks_protocol, |
|
166 | extras, protocol=self.hooks_protocol, | |
167 | host=vcs_settings.HOOKS_HOST) |
|
167 | host=vcs_settings.HOOKS_HOST) | |
168 |
|
168 | |||
169 | with callback_daemon: |
|
169 | with callback_daemon: | |
170 | try: |
|
170 | try: | |
171 | return self._handle_tunnel(extras) |
|
171 | return self._handle_tunnel(extras) | |
172 | finally: |
|
172 | finally: | |
173 | log.debug('Running cleanup with cache invalidation') |
|
173 | log.debug('Running cleanup with cache invalidation') | |
174 | if self.repo_name: |
|
174 | if self.repo_name: | |
175 | self._invalidate_cache(self.repo_name) |
|
175 | self._invalidate_cache(self.repo_name) |
@@ -1,151 +1,151 b'' | |||||
1 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
1 | # Copyright (C) 2016-2023 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | import os |
|
19 | import os | |
20 |
|
20 | |||
21 | import mock |
|
21 | import mock | |
22 | import pytest |
|
22 | import pytest | |
23 |
|
23 | |||
24 | from rhodecode.apps.ssh_support.lib.backends.git import GitServer |
|
24 | from rhodecode.apps.ssh_support.lib.backends.git import GitServer | |
25 | from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user |
|
25 | from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user | |
26 | from rhodecode.lib.ext_json import json |
|
26 | from rhodecode.lib.ext_json import json | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | class GitServerCreator(object): |
|
29 | class GitServerCreator(object): | |
30 | root = '/tmp/repo/path/' |
|
30 | root = '/tmp/repo/path/' | |
31 | git_path = '/usr/local/bin/git' |
|
31 | git_path = '/usr/local/bin/git' | |
32 | config_data = { |
|
32 | config_data = { | |
33 | 'app:main': { |
|
33 | 'app:main': { | |
34 | 'ssh.executable.git': git_path, |
|
34 | 'ssh.executable.git': git_path, | |
35 |
'vcs.hooks.protocol': ' |
|
35 | 'vcs.hooks.protocol.v2': 'celery', | |
36 | } |
|
36 | } | |
37 | } |
|
37 | } | |
38 | repo_name = 'test_git' |
|
38 | repo_name = 'test_git' | |
39 | repo_mode = 'receive-pack' |
|
39 | repo_mode = 'receive-pack' | |
40 | user = plain_dummy_user() |
|
40 | user = plain_dummy_user() | |
41 |
|
41 | |||
42 | def __init__(self): |
|
42 | def __init__(self): | |
43 | pass |
|
43 | pass | |
44 |
|
44 | |||
45 | def create(self, **kwargs): |
|
45 | def create(self, **kwargs): | |
46 | parameters = { |
|
46 | parameters = { | |
47 | 'store': self.root, |
|
47 | 'store': self.root, | |
48 | 'ini_path': '', |
|
48 | 'ini_path': '', | |
49 | 'user': self.user, |
|
49 | 'user': self.user, | |
50 | 'repo_name': self.repo_name, |
|
50 | 'repo_name': self.repo_name, | |
51 | 'repo_mode': self.repo_mode, |
|
51 | 'repo_mode': self.repo_mode, | |
52 | 'user_permissions': { |
|
52 | 'user_permissions': { | |
53 | self.repo_name: 'repository.admin' |
|
53 | self.repo_name: 'repository.admin' | |
54 | }, |
|
54 | }, | |
55 | 'settings': self.config_data['app:main'], |
|
55 | 'settings': self.config_data['app:main'], | |
56 | 'env': plain_dummy_env() |
|
56 | 'env': plain_dummy_env() | |
57 | } |
|
57 | } | |
58 | parameters.update(kwargs) |
|
58 | parameters.update(kwargs) | |
59 | server = GitServer(**parameters) |
|
59 | server = GitServer(**parameters) | |
60 | return server |
|
60 | return server | |
61 |
|
61 | |||
62 |
|
62 | |||
63 | @pytest.fixture() |
|
63 | @pytest.fixture() | |
64 | def git_server(app): |
|
64 | def git_server(app): | |
65 | return GitServerCreator() |
|
65 | return GitServerCreator() | |
66 |
|
66 | |||
67 |
|
67 | |||
68 | class TestGitServer(object): |
|
68 | class TestGitServer(object): | |
69 |
|
69 | |||
70 | def test_command(self, git_server): |
|
70 | def test_command(self, git_server): | |
71 | server = git_server.create() |
|
71 | server = git_server.create() | |
72 | expected_command = ( |
|
72 | expected_command = ( | |
73 | 'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format( |
|
73 | 'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format( | |
74 | root=git_server.root, git_path=git_server.git_path, |
|
74 | root=git_server.root, git_path=git_server.git_path, | |
75 | repo_mode=git_server.repo_mode, repo_name=git_server.repo_name) |
|
75 | repo_mode=git_server.repo_mode, repo_name=git_server.repo_name) | |
76 | ) |
|
76 | ) | |
77 | assert expected_command == server.tunnel.command() |
|
77 | assert expected_command == server.tunnel.command() | |
78 |
|
78 | |||
79 | @pytest.mark.parametrize('permissions, action, code', [ |
|
79 | @pytest.mark.parametrize('permissions, action, code', [ | |
80 | ({}, 'pull', -2), |
|
80 | ({}, 'pull', -2), | |
81 | ({'test_git': 'repository.read'}, 'pull', 0), |
|
81 | ({'test_git': 'repository.read'}, 'pull', 0), | |
82 | ({'test_git': 'repository.read'}, 'push', -2), |
|
82 | ({'test_git': 'repository.read'}, 'push', -2), | |
83 | ({'test_git': 'repository.write'}, 'push', 0), |
|
83 | ({'test_git': 'repository.write'}, 'push', 0), | |
84 | ({'test_git': 'repository.admin'}, 'push', 0), |
|
84 | ({'test_git': 'repository.admin'}, 'push', 0), | |
85 |
|
85 | |||
86 | ]) |
|
86 | ]) | |
87 | def test_permission_checks(self, git_server, permissions, action, code): |
|
87 | def test_permission_checks(self, git_server, permissions, action, code): | |
88 | server = git_server.create(user_permissions=permissions) |
|
88 | server = git_server.create(user_permissions=permissions) | |
89 | result = server._check_permissions(action) |
|
89 | result = server._check_permissions(action) | |
90 | assert result is code |
|
90 | assert result is code | |
91 |
|
91 | |||
92 | @pytest.mark.parametrize('permissions, value', [ |
|
92 | @pytest.mark.parametrize('permissions, value', [ | |
93 | ({}, False), |
|
93 | ({}, False), | |
94 | ({'test_git': 'repository.read'}, False), |
|
94 | ({'test_git': 'repository.read'}, False), | |
95 | ({'test_git': 'repository.write'}, True), |
|
95 | ({'test_git': 'repository.write'}, True), | |
96 | ({'test_git': 'repository.admin'}, True), |
|
96 | ({'test_git': 'repository.admin'}, True), | |
97 |
|
97 | |||
98 | ]) |
|
98 | ]) | |
99 | def test_has_write_permissions(self, git_server, permissions, value): |
|
99 | def test_has_write_permissions(self, git_server, permissions, value): | |
100 | server = git_server.create(user_permissions=permissions) |
|
100 | server = git_server.create(user_permissions=permissions) | |
101 | result = server.has_write_perm() |
|
101 | result = server.has_write_perm() | |
102 | assert result is value |
|
102 | assert result is value | |
103 |
|
103 | |||
104 | def test_run_returns_executes_command(self, git_server): |
|
104 | def test_run_returns_executes_command(self, git_server): | |
105 | server = git_server.create() |
|
105 | server = git_server.create() | |
106 | from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper |
|
106 | from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper | |
107 |
|
107 | |||
108 | os.environ['SSH_CLIENT'] = '127.0.0.1' |
|
108 | os.environ['SSH_CLIENT'] = '127.0.0.1' | |
109 | with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch: |
|
109 | with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch: | |
110 | _patch.return_value = 0 |
|
110 | _patch.return_value = 0 | |
111 | with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'): |
|
111 | with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'): | |
112 | exit_code = server.run() |
|
112 | exit_code = server.run() | |
113 |
|
113 | |||
114 | assert exit_code == (0, False) |
|
114 | assert exit_code == (0, False) | |
115 |
|
115 | |||
116 | @pytest.mark.parametrize( |
|
116 | @pytest.mark.parametrize( | |
117 | 'repo_mode, action', [ |
|
117 | 'repo_mode, action', [ | |
118 | ['receive-pack', 'push'], |
|
118 | ['receive-pack', 'push'], | |
119 | ['upload-pack', 'pull'] |
|
119 | ['upload-pack', 'pull'] | |
120 | ]) |
|
120 | ]) | |
121 | def test_update_environment(self, git_server, repo_mode, action): |
|
121 | def test_update_environment(self, git_server, repo_mode, action): | |
122 | server = git_server.create(repo_mode=repo_mode) |
|
122 | server = git_server.create(repo_mode=repo_mode) | |
123 | store = server.store |
|
123 | store = server.store | |
124 |
|
124 | |||
125 | with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}): |
|
125 | with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}): | |
126 | with mock.patch('os.putenv') as putenv_mock: |
|
126 | with mock.patch('os.putenv') as putenv_mock: | |
127 | server.update_environment(action) |
|
127 | server.update_environment(action) | |
128 |
|
128 | |||
129 | expected_data = { |
|
129 | expected_data = { | |
130 | 'username': git_server.user.username, |
|
130 | 'username': git_server.user.username, | |
131 | 'user_id': git_server.user.user_id, |
|
131 | 'user_id': git_server.user.user_id, | |
132 | 'scm': 'git', |
|
132 | 'scm': 'git', | |
133 | 'repository': git_server.repo_name, |
|
133 | 'repository': git_server.repo_name, | |
134 | 'make_lock': None, |
|
134 | 'make_lock': None, | |
135 | 'action': action, |
|
135 | 'action': action, | |
136 | 'ip': '10.10.10.10', |
|
136 | 'ip': '10.10.10.10', | |
137 | 'locked_by': [None, None], |
|
137 | 'locked_by': [None, None], | |
138 | 'config': '', |
|
138 | 'config': '', | |
139 | 'repo_store': store, |
|
139 | 'repo_store': store, | |
140 | 'server_url': None, |
|
140 | 'server_url': None, | |
141 | 'hooks': ['push', 'pull'], |
|
141 | 'hooks': ['push', 'pull'], | |
142 | 'is_shadow_repo': False, |
|
142 | 'is_shadow_repo': False, | |
143 | 'hooks_module': 'rhodecode.lib.hook_daemon.hook_module', |
|
143 | 'hooks_module': 'rhodecode.lib.hook_daemon.hook_module', | |
144 | 'check_branch_perms': False, |
|
144 | 'check_branch_perms': False, | |
145 | 'detect_force_push': False, |
|
145 | 'detect_force_push': False, | |
146 | 'user_agent': u'git/ssh-user-agent', |
|
146 | 'user_agent': u'git/ssh-user-agent', | |
147 | 'SSH': True, |
|
147 | 'SSH': True, | |
148 | 'SSH_PERMISSIONS': 'repository.admin', |
|
148 | 'SSH_PERMISSIONS': 'repository.admin', | |
149 | } |
|
149 | } | |
150 | args, kwargs = putenv_mock.call_args |
|
150 | args, kwargs = putenv_mock.call_args | |
151 | assert json.loads(args[1]) == expected_data |
|
151 | assert json.loads(args[1]) == expected_data |
@@ -1,115 +1,115 b'' | |||||
1 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
1 | # Copyright (C) 2016-2023 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | import os |
|
19 | import os | |
20 | import mock |
|
20 | import mock | |
21 | import pytest |
|
21 | import pytest | |
22 |
|
22 | |||
23 | from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer |
|
23 | from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer | |
24 | from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user |
|
24 | from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | class MercurialServerCreator(object): |
|
27 | class MercurialServerCreator(object): | |
28 | root = '/tmp/repo/path/' |
|
28 | root = '/tmp/repo/path/' | |
29 | hg_path = '/usr/local/bin/hg' |
|
29 | hg_path = '/usr/local/bin/hg' | |
30 |
|
30 | |||
31 | config_data = { |
|
31 | config_data = { | |
32 | 'app:main': { |
|
32 | 'app:main': { | |
33 | 'ssh.executable.hg': hg_path, |
|
33 | 'ssh.executable.hg': hg_path, | |
34 | 'vcs.hooks.protocol': ' |
|
34 | 'vcs.hooks.protocol.v2': 'celery', | |
35 | } |
|
35 | } | |
36 | } |
|
36 | } | |
37 | repo_name = 'test_hg' |
|
37 | repo_name = 'test_hg' | |
38 | user = plain_dummy_user() |
|
38 | user = plain_dummy_user() | |
39 |
|
39 | |||
40 | def __init__(self): |
|
40 | def __init__(self): | |
41 | pass |
|
41 | pass | |
42 |
|
42 | |||
43 | def create(self, **kwargs): |
|
43 | def create(self, **kwargs): | |
44 | parameters = { |
|
44 | parameters = { | |
45 | 'store': self.root, |
|
45 | 'store': self.root, | |
46 | 'ini_path': '', |
|
46 | 'ini_path': '', | |
47 | 'user': self.user, |
|
47 | 'user': self.user, | |
48 | 'repo_name': self.repo_name, |
|
48 | 'repo_name': self.repo_name, | |
49 | 'user_permissions': { |
|
49 | 'user_permissions': { | |
50 | 'test_hg': 'repository.admin' |
|
50 | 'test_hg': 'repository.admin' | |
51 | }, |
|
51 | }, | |
52 | 'settings': self.config_data['app:main'], |
|
52 | 'settings': self.config_data['app:main'], | |
53 | 'env': plain_dummy_env() |
|
53 | 'env': plain_dummy_env() | |
54 | } |
|
54 | } | |
55 | parameters.update(kwargs) |
|
55 | parameters.update(kwargs) | |
56 | server = MercurialServer(**parameters) |
|
56 | server = MercurialServer(**parameters) | |
57 | return server |
|
57 | return server | |
58 |
|
58 | |||
59 |
|
59 | |||
60 | @pytest.fixture() |
|
60 | @pytest.fixture() | |
61 | def hg_server(app): |
|
61 | def hg_server(app): | |
62 | return MercurialServerCreator() |
|
62 | return MercurialServerCreator() | |
63 |
|
63 | |||
64 |
|
64 | |||
65 | class TestMercurialServer(object): |
|
65 | class TestMercurialServer(object): | |
66 |
|
66 | |||
67 | def test_command(self, hg_server, tmpdir): |
|
67 | def test_command(self, hg_server, tmpdir): | |
68 | server = hg_server.create() |
|
68 | server = hg_server.create() | |
69 | custom_hgrc = os.path.join(str(tmpdir), 'hgrc') |
|
69 | custom_hgrc = os.path.join(str(tmpdir), 'hgrc') | |
70 | expected_command = ( |
|
70 | expected_command = ( | |
71 | 'cd {root}; HGRCPATH={custom_hgrc} {hg_path} -R {root}{repo_name} serve --stdio'.format( |
|
71 | 'cd {root}; HGRCPATH={custom_hgrc} {hg_path} -R {root}{repo_name} serve --stdio'.format( | |
72 | root=hg_server.root, custom_hgrc=custom_hgrc, hg_path=hg_server.hg_path, |
|
72 | root=hg_server.root, custom_hgrc=custom_hgrc, hg_path=hg_server.hg_path, | |
73 | repo_name=hg_server.repo_name) |
|
73 | repo_name=hg_server.repo_name) | |
74 | ) |
|
74 | ) | |
75 | server_command = server.tunnel.command(custom_hgrc) |
|
75 | server_command = server.tunnel.command(custom_hgrc) | |
76 | assert expected_command == server_command |
|
76 | assert expected_command == server_command | |
77 |
|
77 | |||
78 | @pytest.mark.parametrize('permissions, action, code', [ |
|
78 | @pytest.mark.parametrize('permissions, action, code', [ | |
79 | ({}, 'pull', -2), |
|
79 | ({}, 'pull', -2), | |
80 | ({'test_hg': 'repository.read'}, 'pull', 0), |
|
80 | ({'test_hg': 'repository.read'}, 'pull', 0), | |
81 | ({'test_hg': 'repository.read'}, 'push', -2), |
|
81 | ({'test_hg': 'repository.read'}, 'push', -2), | |
82 | ({'test_hg': 'repository.write'}, 'push', 0), |
|
82 | ({'test_hg': 'repository.write'}, 'push', 0), | |
83 | ({'test_hg': 'repository.admin'}, 'push', 0), |
|
83 | ({'test_hg': 'repository.admin'}, 'push', 0), | |
84 |
|
84 | |||
85 | ]) |
|
85 | ]) | |
86 | def test_permission_checks(self, hg_server, permissions, action, code): |
|
86 | def test_permission_checks(self, hg_server, permissions, action, code): | |
87 | server = hg_server.create(user_permissions=permissions) |
|
87 | server = hg_server.create(user_permissions=permissions) | |
88 | result = server._check_permissions(action) |
|
88 | result = server._check_permissions(action) | |
89 | assert result is code |
|
89 | assert result is code | |
90 |
|
90 | |||
91 | @pytest.mark.parametrize('permissions, value', [ |
|
91 | @pytest.mark.parametrize('permissions, value', [ | |
92 | ({}, False), |
|
92 | ({}, False), | |
93 | ({'test_hg': 'repository.read'}, False), |
|
93 | ({'test_hg': 'repository.read'}, False), | |
94 | ({'test_hg': 'repository.write'}, True), |
|
94 | ({'test_hg': 'repository.write'}, True), | |
95 | ({'test_hg': 'repository.admin'}, True), |
|
95 | ({'test_hg': 'repository.admin'}, True), | |
96 |
|
96 | |||
97 | ]) |
|
97 | ]) | |
98 | def test_has_write_permissions(self, hg_server, permissions, value): |
|
98 | def test_has_write_permissions(self, hg_server, permissions, value): | |
99 | server = hg_server.create(user_permissions=permissions) |
|
99 | server = hg_server.create(user_permissions=permissions) | |
100 | result = server.has_write_perm() |
|
100 | result = server.has_write_perm() | |
101 | assert result is value |
|
101 | assert result is value | |
102 |
|
102 | |||
103 | def test_run_returns_executes_command(self, hg_server): |
|
103 | def test_run_returns_executes_command(self, hg_server): | |
104 | server = hg_server.create() |
|
104 | server = hg_server.create() | |
105 | from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper |
|
105 | from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper | |
106 | os.environ['SSH_CLIENT'] = '127.0.0.1' |
|
106 | os.environ['SSH_CLIENT'] = '127.0.0.1' | |
107 | with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch: |
|
107 | with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch: | |
108 | _patch.return_value = 0 |
|
108 | _patch.return_value = 0 | |
109 | with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'): |
|
109 | with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'): | |
110 | exit_code = server.run() |
|
110 | exit_code = server.run() | |
111 |
|
111 | |||
112 | assert exit_code == (0, False) |
|
112 | assert exit_code == (0, False) | |
113 |
|
113 | |||
114 |
|
114 | |||
115 |
|
115 |
@@ -1,203 +1,203 b'' | |||||
1 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
1 | # Copyright (C) 2016-2023 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 | import os |
|
18 | import os | |
19 | import mock |
|
19 | import mock | |
20 | import pytest |
|
20 | import pytest | |
21 |
|
21 | |||
22 | from rhodecode.apps.ssh_support.lib.backends.svn import SubversionServer |
|
22 | from rhodecode.apps.ssh_support.lib.backends.svn import SubversionServer | |
23 | from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user |
|
23 | from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user | |
24 |
|
24 | |||
25 |
|
25 | |||
26 | class SubversionServerCreator(object): |
|
26 | class SubversionServerCreator(object): | |
27 | root = '/tmp/repo/path/' |
|
27 | root = '/tmp/repo/path/' | |
28 | svn_path = '/usr/local/bin/svnserve' |
|
28 | svn_path = '/usr/local/bin/svnserve' | |
29 | config_data = { |
|
29 | config_data = { | |
30 | 'app:main': { |
|
30 | 'app:main': { | |
31 | 'ssh.executable.svn': svn_path, |
|
31 | 'ssh.executable.svn': svn_path, | |
32 | 'vcs.hooks.protocol': ' |
|
32 | 'vcs.hooks.protocol.v2': 'celery', | |
33 | } |
|
33 | } | |
34 | } |
|
34 | } | |
35 | repo_name = 'test-svn' |
|
35 | repo_name = 'test-svn' | |
36 | user = plain_dummy_user() |
|
36 | user = plain_dummy_user() | |
37 |
|
37 | |||
38 | def __init__(self): |
|
38 | def __init__(self): | |
39 | pass |
|
39 | pass | |
40 |
|
40 | |||
41 | def create(self, **kwargs): |
|
41 | def create(self, **kwargs): | |
42 | parameters = { |
|
42 | parameters = { | |
43 | 'store': self.root, |
|
43 | 'store': self.root, | |
44 | 'repo_name': self.repo_name, |
|
44 | 'repo_name': self.repo_name, | |
45 | 'ini_path': '', |
|
45 | 'ini_path': '', | |
46 | 'user': self.user, |
|
46 | 'user': self.user, | |
47 | 'user_permissions': { |
|
47 | 'user_permissions': { | |
48 | self.repo_name: 'repository.admin' |
|
48 | self.repo_name: 'repository.admin' | |
49 | }, |
|
49 | }, | |
50 | 'settings': self.config_data['app:main'], |
|
50 | 'settings': self.config_data['app:main'], | |
51 | 'env': plain_dummy_env() |
|
51 | 'env': plain_dummy_env() | |
52 | } |
|
52 | } | |
53 |
|
53 | |||
54 | parameters.update(kwargs) |
|
54 | parameters.update(kwargs) | |
55 | server = SubversionServer(**parameters) |
|
55 | server = SubversionServer(**parameters) | |
56 | return server |
|
56 | return server | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | @pytest.fixture() |
|
59 | @pytest.fixture() | |
60 | def svn_server(app): |
|
60 | def svn_server(app): | |
61 | return SubversionServerCreator() |
|
61 | return SubversionServerCreator() | |
62 |
|
62 | |||
63 |
|
63 | |||
64 | class TestSubversionServer(object): |
|
64 | class TestSubversionServer(object): | |
65 |
|
65 | |||
66 | def test_command(self, svn_server): |
|
66 | def test_command(self, svn_server): | |
67 | server = svn_server.create() |
|
67 | server = svn_server.create() | |
68 | expected_command = [ |
|
68 | expected_command = [ | |
69 | svn_server.svn_path, '-t', |
|
69 | svn_server.svn_path, '-t', | |
70 | '--config-file', server.tunnel.svn_conf_path, |
|
70 | '--config-file', server.tunnel.svn_conf_path, | |
71 | '--tunnel-user', svn_server.user.username, |
|
71 | '--tunnel-user', svn_server.user.username, | |
72 | '-r', svn_server.root |
|
72 | '-r', svn_server.root | |
73 | ] |
|
73 | ] | |
74 |
|
74 | |||
75 | assert expected_command == server.tunnel.command() |
|
75 | assert expected_command == server.tunnel.command() | |
76 |
|
76 | |||
77 | @pytest.mark.parametrize('permissions, action, code', [ |
|
77 | @pytest.mark.parametrize('permissions, action, code', [ | |
78 | ({}, 'pull', -2), |
|
78 | ({}, 'pull', -2), | |
79 | ({'test-svn': 'repository.read'}, 'pull', 0), |
|
79 | ({'test-svn': 'repository.read'}, 'pull', 0), | |
80 | ({'test-svn': 'repository.read'}, 'push', -2), |
|
80 | ({'test-svn': 'repository.read'}, 'push', -2), | |
81 | ({'test-svn': 'repository.write'}, 'push', 0), |
|
81 | ({'test-svn': 'repository.write'}, 'push', 0), | |
82 | ({'test-svn': 'repository.admin'}, 'push', 0), |
|
82 | ({'test-svn': 'repository.admin'}, 'push', 0), | |
83 |
|
83 | |||
84 | ]) |
|
84 | ]) | |
85 | def test_permission_checks(self, svn_server, permissions, action, code): |
|
85 | def test_permission_checks(self, svn_server, permissions, action, code): | |
86 | server = svn_server.create(user_permissions=permissions) |
|
86 | server = svn_server.create(user_permissions=permissions) | |
87 | result = server._check_permissions(action) |
|
87 | result = server._check_permissions(action) | |
88 | assert result is code |
|
88 | assert result is code | |
89 |
|
89 | |||
90 | @pytest.mark.parametrize('permissions, access_paths, expected_match', [ |
|
90 | @pytest.mark.parametrize('permissions, access_paths, expected_match', [ | |
91 | # not matched repository name |
|
91 | # not matched repository name | |
92 | ({ |
|
92 | ({ | |
93 | 'test-svn': '' |
|
93 | 'test-svn': '' | |
94 | }, ['test-svn-1', 'test-svn-1/subpath'], |
|
94 | }, ['test-svn-1', 'test-svn-1/subpath'], | |
95 | None), |
|
95 | None), | |
96 |
|
96 | |||
97 | # exact match |
|
97 | # exact match | |
98 | ({ |
|
98 | ({ | |
99 | 'test-svn': '' |
|
99 | 'test-svn': '' | |
100 | }, |
|
100 | }, | |
101 | ['test-svn'], |
|
101 | ['test-svn'], | |
102 | 'test-svn'), |
|
102 | 'test-svn'), | |
103 |
|
103 | |||
104 | # subdir commits |
|
104 | # subdir commits | |
105 | ({ |
|
105 | ({ | |
106 | 'test-svn': '' |
|
106 | 'test-svn': '' | |
107 | }, |
|
107 | }, | |
108 | ['test-svn/foo', |
|
108 | ['test-svn/foo', | |
109 | 'test-svn/foo/test-svn', |
|
109 | 'test-svn/foo/test-svn', | |
110 | 'test-svn/trunk/development.txt', |
|
110 | 'test-svn/trunk/development.txt', | |
111 | ], |
|
111 | ], | |
112 | 'test-svn'), |
|
112 | 'test-svn'), | |
113 |
|
113 | |||
114 | # subgroups + similar patterns |
|
114 | # subgroups + similar patterns | |
115 | ({ |
|
115 | ({ | |
116 | 'test-svn': '', |
|
116 | 'test-svn': '', | |
117 | 'test-svn-1': '', |
|
117 | 'test-svn-1': '', | |
118 | 'test-svn-subgroup/test-svn': '', |
|
118 | 'test-svn-subgroup/test-svn': '', | |
119 |
|
119 | |||
120 | }, |
|
120 | }, | |
121 | ['test-svn-1', |
|
121 | ['test-svn-1', | |
122 | 'test-svn-1/foo/test-svn', |
|
122 | 'test-svn-1/foo/test-svn', | |
123 | 'test-svn-1/test-svn', |
|
123 | 'test-svn-1/test-svn', | |
124 | ], |
|
124 | ], | |
125 | 'test-svn-1'), |
|
125 | 'test-svn-1'), | |
126 |
|
126 | |||
127 | # subgroups + similar patterns |
|
127 | # subgroups + similar patterns | |
128 | ({ |
|
128 | ({ | |
129 | 'test-svn-1': '', |
|
129 | 'test-svn-1': '', | |
130 | 'test-svn-10': '', |
|
130 | 'test-svn-10': '', | |
131 | 'test-svn-100': '', |
|
131 | 'test-svn-100': '', | |
132 | }, |
|
132 | }, | |
133 | ['test-svn-10', |
|
133 | ['test-svn-10', | |
134 | 'test-svn-10/foo/test-svn', |
|
134 | 'test-svn-10/foo/test-svn', | |
135 | 'test-svn-10/test-svn', |
|
135 | 'test-svn-10/test-svn', | |
136 | ], |
|
136 | ], | |
137 | 'test-svn-10'), |
|
137 | 'test-svn-10'), | |
138 |
|
138 | |||
139 | # subgroups + similar patterns |
|
139 | # subgroups + similar patterns | |
140 | ({ |
|
140 | ({ | |
141 | 'name': '', |
|
141 | 'name': '', | |
142 | 'nameContains': '', |
|
142 | 'nameContains': '', | |
143 | 'nameContainsThis': '', |
|
143 | 'nameContainsThis': '', | |
144 | }, |
|
144 | }, | |
145 | ['nameContains', |
|
145 | ['nameContains', | |
146 | 'nameContains/This', |
|
146 | 'nameContains/This', | |
147 | 'nameContains/This/test-svn', |
|
147 | 'nameContains/This/test-svn', | |
148 | ], |
|
148 | ], | |
149 | 'nameContains'), |
|
149 | 'nameContains'), | |
150 |
|
150 | |||
151 | # subgroups + similar patterns |
|
151 | # subgroups + similar patterns | |
152 | ({ |
|
152 | ({ | |
153 | 'test-svn': '', |
|
153 | 'test-svn': '', | |
154 | 'test-svn-1': '', |
|
154 | 'test-svn-1': '', | |
155 | 'test-svn-subgroup/test-svn': '', |
|
155 | 'test-svn-subgroup/test-svn': '', | |
156 |
|
156 | |||
157 | }, |
|
157 | }, | |
158 | ['test-svn-subgroup/test-svn', |
|
158 | ['test-svn-subgroup/test-svn', | |
159 | 'test-svn-subgroup/test-svn/foo/test-svn', |
|
159 | 'test-svn-subgroup/test-svn/foo/test-svn', | |
160 | 'test-svn-subgroup/test-svn/trunk/example.txt', |
|
160 | 'test-svn-subgroup/test-svn/trunk/example.txt', | |
161 | ], |
|
161 | ], | |
162 | 'test-svn-subgroup/test-svn'), |
|
162 | 'test-svn-subgroup/test-svn'), | |
163 | ]) |
|
163 | ]) | |
164 | def test_repo_extraction_on_subdir(self, svn_server, permissions, access_paths, expected_match): |
|
164 | def test_repo_extraction_on_subdir(self, svn_server, permissions, access_paths, expected_match): | |
165 | server = svn_server.create(user_permissions=permissions) |
|
165 | server = svn_server.create(user_permissions=permissions) | |
166 | for path in access_paths: |
|
166 | for path in access_paths: | |
167 | repo_name = server.tunnel._match_repo_name(path) |
|
167 | repo_name = server.tunnel._match_repo_name(path) | |
168 | assert repo_name == expected_match |
|
168 | assert repo_name == expected_match | |
169 |
|
169 | |||
170 | def test_run_returns_executes_command(self, svn_server): |
|
170 | def test_run_returns_executes_command(self, svn_server): | |
171 | server = svn_server.create() |
|
171 | server = svn_server.create() | |
172 | from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper |
|
172 | from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper | |
173 | os.environ['SSH_CLIENT'] = '127.0.0.1' |
|
173 | os.environ['SSH_CLIENT'] = '127.0.0.1' | |
174 | with mock.patch.object( |
|
174 | with mock.patch.object( | |
175 | SubversionTunnelWrapper, 'get_first_client_response', |
|
175 | SubversionTunnelWrapper, 'get_first_client_response', | |
176 | return_value={'url': 'http://server/test-svn'}): |
|
176 | return_value={'url': 'http://server/test-svn'}): | |
177 | with mock.patch.object( |
|
177 | with mock.patch.object( | |
178 | SubversionTunnelWrapper, 'patch_first_client_response', |
|
178 | SubversionTunnelWrapper, 'patch_first_client_response', | |
179 | return_value=0): |
|
179 | return_value=0): | |
180 | with mock.patch.object( |
|
180 | with mock.patch.object( | |
181 | SubversionTunnelWrapper, 'sync', |
|
181 | SubversionTunnelWrapper, 'sync', | |
182 | return_value=0): |
|
182 | return_value=0): | |
183 | with mock.patch.object( |
|
183 | with mock.patch.object( | |
184 | SubversionTunnelWrapper, 'command', |
|
184 | SubversionTunnelWrapper, 'command', | |
185 | return_value=['date']): |
|
185 | return_value=['date']): | |
186 |
|
186 | |||
187 | exit_code = server.run() |
|
187 | exit_code = server.run() | |
188 | # SVN has this differently configured, and we get in our mock env |
|
188 | # SVN has this differently configured, and we get in our mock env | |
189 | # None as return code |
|
189 | # None as return code | |
190 | assert exit_code == (None, False) |
|
190 | assert exit_code == (None, False) | |
191 |
|
191 | |||
192 | def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server): |
|
192 | def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server): | |
193 | server = svn_server.create() |
|
193 | server = svn_server.create() | |
194 | from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper |
|
194 | from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper | |
195 | with mock.patch.object( |
|
195 | with mock.patch.object( | |
196 | SubversionTunnelWrapper, 'command', |
|
196 | SubversionTunnelWrapper, 'command', | |
197 | return_value=['date']): |
|
197 | return_value=['date']): | |
198 | with mock.patch.object( |
|
198 | with mock.patch.object( | |
199 | SubversionTunnelWrapper, 'get_first_client_response', |
|
199 | SubversionTunnelWrapper, 'get_first_client_response', | |
200 | return_value=None): |
|
200 | return_value=None): | |
201 | exit_code = server.run() |
|
201 | exit_code = server.run() | |
202 |
|
202 | |||
203 | assert exit_code == (1, False) |
|
203 | assert exit_code == (1, False) |
@@ -1,228 +1,228 b'' | |||||
1 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | import os |
|
19 | import os | |
20 | import tempfile |
|
20 | import tempfile | |
21 | import logging |
|
21 | import logging | |
22 |
|
22 | |||
23 | from pyramid.settings import asbool |
|
23 | from pyramid.settings import asbool | |
24 |
|
24 | |||
25 | from rhodecode.config.settings_maker import SettingsMaker |
|
25 | from rhodecode.config.settings_maker import SettingsMaker | |
26 | from rhodecode.config import utils as config_utils |
|
26 | from rhodecode.config import utils as config_utils | |
27 |
|
27 | |||
28 | log = logging.getLogger(__name__) |
|
28 | log = logging.getLogger(__name__) | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | def sanitize_settings_and_apply_defaults(global_config, settings): |
|
31 | def sanitize_settings_and_apply_defaults(global_config, settings): | |
32 | """ |
|
32 | """ | |
33 | Applies settings defaults and does all type conversion. |
|
33 | Applies settings defaults and does all type conversion. | |
34 |
|
34 | |||
35 | We would move all settings parsing and preparation into this place, so that |
|
35 | We would move all settings parsing and preparation into this place, so that | |
36 | we have only one place left which deals with this part. The remaining parts |
|
36 | we have only one place left which deals with this part. The remaining parts | |
37 | of the application would start to rely fully on well-prepared settings. |
|
37 | of the application would start to rely fully on well-prepared settings. | |
38 |
|
38 | |||
39 | This piece would later be split up per topic to avoid a big fat monster |
|
39 | This piece would later be split up per topic to avoid a big fat monster | |
40 | function. |
|
40 | function. | |
41 | """ |
|
41 | """ | |
42 | jn = os.path.join |
|
42 | jn = os.path.join | |
43 |
|
43 | |||
44 | global_settings_maker = SettingsMaker(global_config) |
|
44 | global_settings_maker = SettingsMaker(global_config) | |
45 | global_settings_maker.make_setting('debug', default=False, parser='bool') |
|
45 | global_settings_maker.make_setting('debug', default=False, parser='bool') | |
46 | debug_enabled = asbool(global_config.get('debug')) |
|
46 | debug_enabled = asbool(global_config.get('debug')) | |
47 |
|
47 | |||
48 | settings_maker = SettingsMaker(settings) |
|
48 | settings_maker = SettingsMaker(settings) | |
49 |
|
49 | |||
50 | settings_maker.make_setting( |
|
50 | settings_maker.make_setting( | |
51 | 'logging.autoconfigure', |
|
51 | 'logging.autoconfigure', | |
52 | default=False, |
|
52 | default=False, | |
53 | parser='bool') |
|
53 | parser='bool') | |
54 |
|
54 | |||
55 | logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini') |
|
55 | logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini') | |
56 | settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG') |
|
56 | settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG') | |
57 |
|
57 | |||
58 | # Default includes, possible to change as a user |
|
58 | # Default includes, possible to change as a user | |
59 | pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline') |
|
59 | pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline') | |
60 | log.debug( |
|
60 | log.debug( | |
61 | "Using the following pyramid.includes: %s", |
|
61 | "Using the following pyramid.includes: %s", | |
62 | pyramid_includes) |
|
62 | pyramid_includes) | |
63 |
|
63 | |||
64 | settings_maker.make_setting('rhodecode.edition', 'Community Edition') |
|
64 | settings_maker.make_setting('rhodecode.edition', 'Community Edition') | |
65 | settings_maker.make_setting('rhodecode.edition_id', 'CE') |
|
65 | settings_maker.make_setting('rhodecode.edition_id', 'CE') | |
66 |
|
66 | |||
67 | if 'mako.default_filters' not in settings: |
|
67 | if 'mako.default_filters' not in settings: | |
68 | # set custom default filters if we don't have it defined |
|
68 | # set custom default filters if we don't have it defined | |
69 | settings['mako.imports'] = 'from rhodecode.lib.base import h_filter' |
|
69 | settings['mako.imports'] = 'from rhodecode.lib.base import h_filter' | |
70 | settings['mako.default_filters'] = 'h_filter' |
|
70 | settings['mako.default_filters'] = 'h_filter' | |
71 |
|
71 | |||
72 | if 'mako.directories' not in settings: |
|
72 | if 'mako.directories' not in settings: | |
73 | mako_directories = settings.setdefault('mako.directories', [ |
|
73 | mako_directories = settings.setdefault('mako.directories', [ | |
74 | # Base templates of the original application |
|
74 | # Base templates of the original application | |
75 | 'rhodecode:templates', |
|
75 | 'rhodecode:templates', | |
76 | ]) |
|
76 | ]) | |
77 | log.debug( |
|
77 | log.debug( | |
78 | "Using the following Mako template directories: %s", |
|
78 | "Using the following Mako template directories: %s", | |
79 | mako_directories) |
|
79 | mako_directories) | |
80 |
|
80 | |||
81 | # NOTE(marcink): fix redis requirement for schema of connection since 3.X |
|
81 | # NOTE(marcink): fix redis requirement for schema of connection since 3.X | |
82 | if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis': |
|
82 | if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis': | |
83 | raw_url = settings['beaker.session.url'] |
|
83 | raw_url = settings['beaker.session.url'] | |
84 | if not raw_url.startswith(('redis://', 'rediss://', 'unix://')): |
|
84 | if not raw_url.startswith(('redis://', 'rediss://', 'unix://')): | |
85 | settings['beaker.session.url'] = 'redis://' + raw_url |
|
85 | settings['beaker.session.url'] = 'redis://' + raw_url | |
86 |
|
86 | |||
87 | settings_maker.make_setting('__file__', global_config.get('__file__')) |
|
87 | settings_maker.make_setting('__file__', global_config.get('__file__')) | |
88 |
|
88 | |||
89 | # TODO: johbo: Re-think this, usually the call to config.include |
|
89 | # TODO: johbo: Re-think this, usually the call to config.include | |
90 | # should allow to pass in a prefix. |
|
90 | # should allow to pass in a prefix. | |
91 | settings_maker.make_setting('rhodecode.api.url', '/_admin/api') |
|
91 | settings_maker.make_setting('rhodecode.api.url', '/_admin/api') | |
92 |
|
92 | |||
93 | # Sanitize generic settings. |
|
93 | # Sanitize generic settings. | |
94 | settings_maker.make_setting('default_encoding', 'UTF-8', parser='list') |
|
94 | settings_maker.make_setting('default_encoding', 'UTF-8', parser='list') | |
95 | settings_maker.make_setting('gzip_responses', False, parser='bool') |
|
95 | settings_maker.make_setting('gzip_responses', False, parser='bool') | |
96 | settings_maker.make_setting('startup.import_repos', 'false', parser='bool') |
|
96 | settings_maker.make_setting('startup.import_repos', 'false', parser='bool') | |
97 |
|
97 | |||
98 | # statsd |
|
98 | # statsd | |
99 | settings_maker.make_setting('statsd.enabled', False, parser='bool') |
|
99 | settings_maker.make_setting('statsd.enabled', False, parser='bool') | |
100 | settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string') |
|
100 | settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string') | |
101 | settings_maker.make_setting('statsd.statsd_port', 9125, parser='int') |
|
101 | settings_maker.make_setting('statsd.statsd_port', 9125, parser='int') | |
102 | settings_maker.make_setting('statsd.statsd_prefix', '') |
|
102 | settings_maker.make_setting('statsd.statsd_prefix', '') | |
103 | settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool') |
|
103 | settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool') | |
104 |
|
104 | |||
105 | settings_maker.make_setting('vcs.svn.compatible_version', '') |
|
105 | settings_maker.make_setting('vcs.svn.compatible_version', '') | |
106 | settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0') |
|
106 | settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0') | |
107 | settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool') |
|
107 | settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool') | |
108 | settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string') |
|
108 | settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string') | |
109 | settings_maker.make_setting('vcs.hooks.protocol', ' |
|
109 | settings_maker.make_setting('vcs.hooks.protocol.v2', 'celery') | |
110 | settings_maker.make_setting('vcs.hooks.host', '*') |
|
110 | settings_maker.make_setting('vcs.hooks.host', '*') | |
111 | settings_maker.make_setting('vcs.scm_app_implementation', 'http') |
|
111 | settings_maker.make_setting('vcs.scm_app_implementation', 'http') | |
112 | settings_maker.make_setting('vcs.server', '') |
|
112 | settings_maker.make_setting('vcs.server', '') | |
113 | settings_maker.make_setting('vcs.server.protocol', 'http') |
|
113 | settings_maker.make_setting('vcs.server.protocol', 'http') | |
114 | settings_maker.make_setting('vcs.server.enable', 'true', parser='bool') |
|
114 | settings_maker.make_setting('vcs.server.enable', 'true', parser='bool') | |
115 | settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool') |
|
115 | settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool') | |
116 | settings_maker.make_setting('vcs.start_server', 'false', parser='bool') |
|
116 | settings_maker.make_setting('vcs.start_server', 'false', parser='bool') | |
117 | settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list') |
|
117 | settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list') | |
118 | settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int') |
|
118 | settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int') | |
119 |
|
119 | |||
120 | settings_maker.make_setting('vcs.methods.cache', True, parser='bool') |
|
120 | settings_maker.make_setting('vcs.methods.cache', True, parser='bool') | |
121 |
|
121 | |||
122 | # repo_store path |
|
122 | # repo_store path | |
123 | settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store') |
|
123 | settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store') | |
124 | # Support legacy values of vcs.scm_app_implementation. Legacy |
|
124 | # Support legacy values of vcs.scm_app_implementation. Legacy | |
125 | # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or |
|
125 | # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or | |
126 | # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'. |
|
126 | # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'. | |
127 | scm_app_impl = settings['vcs.scm_app_implementation'] |
|
127 | scm_app_impl = settings['vcs.scm_app_implementation'] | |
128 | if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']: |
|
128 | if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']: | |
129 | settings['vcs.scm_app_implementation'] = 'http' |
|
129 | settings['vcs.scm_app_implementation'] = 'http' | |
130 |
|
130 | |||
131 | settings_maker.make_setting('appenlight', False, parser='bool') |
|
131 | settings_maker.make_setting('appenlight', False, parser='bool') | |
132 |
|
132 | |||
133 | temp_store = tempfile.gettempdir() |
|
133 | temp_store = tempfile.gettempdir() | |
134 | tmp_cache_dir = jn(temp_store, 'rc_cache') |
|
134 | tmp_cache_dir = jn(temp_store, 'rc_cache') | |
135 |
|
135 | |||
136 | # save default, cache dir, and use it for all backends later. |
|
136 | # save default, cache dir, and use it for all backends later. | |
137 | default_cache_dir = settings_maker.make_setting( |
|
137 | default_cache_dir = settings_maker.make_setting( | |
138 | 'cache_dir', |
|
138 | 'cache_dir', | |
139 | default=tmp_cache_dir, default_when_empty=True, |
|
139 | default=tmp_cache_dir, default_when_empty=True, | |
140 | parser='dir:ensured') |
|
140 | parser='dir:ensured') | |
141 |
|
141 | |||
142 | # exception store cache |
|
142 | # exception store cache | |
143 | settings_maker.make_setting( |
|
143 | settings_maker.make_setting( | |
144 | 'exception_tracker.store_path', |
|
144 | 'exception_tracker.store_path', | |
145 | default=jn(default_cache_dir, 'exc_store'), default_when_empty=True, |
|
145 | default=jn(default_cache_dir, 'exc_store'), default_when_empty=True, | |
146 | parser='dir:ensured' |
|
146 | parser='dir:ensured' | |
147 | ) |
|
147 | ) | |
148 |
|
148 | |||
149 | settings_maker.make_setting( |
|
149 | settings_maker.make_setting( | |
150 | 'celerybeat-schedule.path', |
|
150 | 'celerybeat-schedule.path', | |
151 | default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True, |
|
151 | default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True, | |
152 | parser='file:ensured' |
|
152 | parser='file:ensured' | |
153 | ) |
|
153 | ) | |
154 |
|
154 | |||
155 | # celery |
|
155 | # celery | |
156 | broker_url = settings_maker.make_setting('celery.broker_url', 'redis://redis:6379/8') |
|
156 | broker_url = settings_maker.make_setting('celery.broker_url', 'redis://redis:6379/8') | |
157 | settings_maker.make_setting('celery.result_backend', broker_url) |
|
157 | settings_maker.make_setting('celery.result_backend', broker_url) | |
158 |
|
158 | |||
159 | settings_maker.make_setting('exception_tracker.send_email', False, parser='bool') |
|
159 | settings_maker.make_setting('exception_tracker.send_email', False, parser='bool') | |
160 | settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True) |
|
160 | settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True) | |
161 |
|
161 | |||
162 | # sessions, ensure file since no-value is memory |
|
162 | # sessions, ensure file since no-value is memory | |
163 | settings_maker.make_setting('beaker.session.type', 'file') |
|
163 | settings_maker.make_setting('beaker.session.type', 'file') | |
164 | settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data')) |
|
164 | settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data')) | |
165 |
|
165 | |||
166 | # cache_general |
|
166 | # cache_general | |
167 | settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace') |
|
167 | settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace') | |
168 | settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int') |
|
168 | settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int') | |
169 | settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db')) |
|
169 | settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db')) | |
170 |
|
170 | |||
171 | # cache_perms |
|
171 | # cache_perms | |
172 | settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace') |
|
172 | settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace') | |
173 | settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int') |
|
173 | settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int') | |
174 | settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db')) |
|
174 | settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db')) | |
175 |
|
175 | |||
176 | # cache_repo |
|
176 | # cache_repo | |
177 | settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace') |
|
177 | settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace') | |
178 | settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int') |
|
178 | settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int') | |
179 | settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db')) |
|
179 | settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db')) | |
180 |
|
180 | |||
181 | # cache_license |
|
181 | # cache_license | |
182 | settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace') |
|
182 | settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace') | |
183 | settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int') |
|
183 | settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int') | |
184 | settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db')) |
|
184 | settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db')) | |
185 |
|
185 | |||
186 | # cache_repo_longterm memory, 96H |
|
186 | # cache_repo_longterm memory, 96H | |
187 | settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru') |
|
187 | settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru') | |
188 | settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int') |
|
188 | settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int') | |
189 | settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int') |
|
189 | settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int') | |
190 |
|
190 | |||
191 | # sql_cache_short |
|
191 | # sql_cache_short | |
192 | settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru') |
|
192 | settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru') | |
193 | settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int') |
|
193 | settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int') | |
194 | settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int') |
|
194 | settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int') | |
195 |
|
195 | |||
196 | # archive_cache |
|
196 | # archive_cache | |
197 | settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1') |
|
197 | settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1') | |
198 | settings_maker.make_setting('archive_cache.backend.type', 'filesystem') |
|
198 | settings_maker.make_setting('archive_cache.backend.type', 'filesystem') | |
199 |
|
199 | |||
200 | settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,) |
|
200 | settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,) | |
201 | settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int') |
|
201 | settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int') | |
202 | settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float') |
|
202 | settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float') | |
203 | settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored') |
|
203 | settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored') | |
204 |
|
204 | |||
205 | settings_maker.make_setting('archive_cache.filesystem.retry', False, parser='bool') |
|
205 | settings_maker.make_setting('archive_cache.filesystem.retry', False, parser='bool') | |
206 | settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int') |
|
206 | settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int') | |
207 | settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int') |
|
207 | settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int') | |
208 |
|
208 | |||
209 | settings_maker.make_setting('archive_cache.objectstore.url', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,) |
|
209 | settings_maker.make_setting('archive_cache.objectstore.url', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,) | |
210 | settings_maker.make_setting('archive_cache.objectstore.key', '') |
|
210 | settings_maker.make_setting('archive_cache.objectstore.key', '') | |
211 | settings_maker.make_setting('archive_cache.objectstore.secret', '') |
|
211 | settings_maker.make_setting('archive_cache.objectstore.secret', '') | |
212 | settings_maker.make_setting('archive_cache.objectstore.region', 'eu-central-1') |
|
212 | settings_maker.make_setting('archive_cache.objectstore.region', 'eu-central-1') | |
213 | settings_maker.make_setting('archive_cache.objectstore.bucket', 'rhodecode-archive-cache', default_when_empty=True,) |
|
213 | settings_maker.make_setting('archive_cache.objectstore.bucket', 'rhodecode-archive-cache', default_when_empty=True,) | |
214 | settings_maker.make_setting('archive_cache.objectstore.bucket_shards', 8, parser='int') |
|
214 | settings_maker.make_setting('archive_cache.objectstore.bucket_shards', 8, parser='int') | |
215 |
|
215 | |||
216 | settings_maker.make_setting('archive_cache.objectstore.cache_size_gb', 10, parser='float') |
|
216 | settings_maker.make_setting('archive_cache.objectstore.cache_size_gb', 10, parser='float') | |
217 | settings_maker.make_setting('archive_cache.objectstore.eviction_policy', 'least-recently-stored') |
|
217 | settings_maker.make_setting('archive_cache.objectstore.eviction_policy', 'least-recently-stored') | |
218 |
|
218 | |||
219 | settings_maker.make_setting('archive_cache.objectstore.retry', False, parser='bool') |
|
219 | settings_maker.make_setting('archive_cache.objectstore.retry', False, parser='bool') | |
220 | settings_maker.make_setting('archive_cache.objectstore.retry_backoff', 1, parser='int') |
|
220 | settings_maker.make_setting('archive_cache.objectstore.retry_backoff', 1, parser='int') | |
221 | settings_maker.make_setting('archive_cache.objectstore.retry_attempts', 10, parser='int') |
|
221 | settings_maker.make_setting('archive_cache.objectstore.retry_attempts', 10, parser='int') | |
222 |
|
222 | |||
223 | settings_maker.env_expand() |
|
223 | settings_maker.env_expand() | |
224 |
|
224 | |||
225 | # configure instance id |
|
225 | # configure instance id | |
226 | config_utils.set_instance_id(settings) |
|
226 | config_utils.set_instance_id(settings) | |
227 |
|
227 | |||
228 | return settings |
|
228 | return settings |
@@ -1,110 +1,110 b'' | |||||
1 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | import os |
|
19 | import os | |
20 | import platform |
|
20 | import platform | |
21 |
|
21 | |||
22 | DEFAULT_USER = 'default' |
|
22 | DEFAULT_USER = 'default' | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | def configure_vcs(config): |
|
25 | def configure_vcs(config): | |
26 | """ |
|
26 | """ | |
27 | Patch VCS config with some RhodeCode-specific stuff |
|
27 | Patch VCS config with some RhodeCode-specific stuff | |
28 | """ |
|
28 | """ | |
29 | from rhodecode.lib.vcs import conf |
|
29 | from rhodecode.lib.vcs import conf | |
30 | import rhodecode.lib.vcs.conf.settings |
|
30 | import rhodecode.lib.vcs.conf.settings | |
31 |
|
31 | |||
32 | conf.settings.BACKENDS = { |
|
32 | conf.settings.BACKENDS = { | |
33 | 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository', |
|
33 | 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository', | |
34 | 'git': 'rhodecode.lib.vcs.backends.git.GitRepository', |
|
34 | 'git': 'rhodecode.lib.vcs.backends.git.GitRepository', | |
35 | 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository', |
|
35 | 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository', | |
36 | } |
|
36 | } | |
37 |
|
37 | |||
38 | conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol'] |
|
38 | conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol.v2'] | |
39 | conf.settings.HOOKS_HOST = config['vcs.hooks.host'] |
|
39 | conf.settings.HOOKS_HOST = config['vcs.hooks.host'] | |
40 | conf.settings.DEFAULT_ENCODINGS = config['default_encoding'] |
|
40 | conf.settings.DEFAULT_ENCODINGS = config['default_encoding'] | |
41 | conf.settings.ALIASES[:] = config['vcs.backends'] |
|
41 | conf.settings.ALIASES[:] = config['vcs.backends'] | |
42 | conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version'] |
|
42 | conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version'] | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | def initialize_database(config): |
|
45 | def initialize_database(config): | |
46 | from rhodecode.lib.utils2 import engine_from_config, get_encryption_key |
|
46 | from rhodecode.lib.utils2 import engine_from_config, get_encryption_key | |
47 | from rhodecode.model import init_model |
|
47 | from rhodecode.model import init_model | |
48 | engine = engine_from_config(config, 'sqlalchemy.db1.') |
|
48 | engine = engine_from_config(config, 'sqlalchemy.db1.') | |
49 | init_model(engine, encryption_key=get_encryption_key(config)) |
|
49 | init_model(engine, encryption_key=get_encryption_key(config)) | |
50 |
|
50 | |||
51 |
|
51 | |||
52 | def initialize_test_environment(settings, test_env=None): |
|
52 | def initialize_test_environment(settings, test_env=None): | |
53 | if test_env is None: |
|
53 | if test_env is None: | |
54 | test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0)) |
|
54 | test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0)) | |
55 |
|
55 | |||
56 | from rhodecode.lib.utils import ( |
|
56 | from rhodecode.lib.utils import ( | |
57 | create_test_directory, create_test_database, create_test_repositories, |
|
57 | create_test_directory, create_test_database, create_test_repositories, | |
58 | create_test_index) |
|
58 | create_test_index) | |
59 | from rhodecode.tests import TESTS_TMP_PATH |
|
59 | from rhodecode.tests import TESTS_TMP_PATH | |
60 | from rhodecode.lib.vcs.backends.hg import largefiles_store |
|
60 | from rhodecode.lib.vcs.backends.hg import largefiles_store | |
61 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
61 | from rhodecode.lib.vcs.backends.git import lfs_store | |
62 |
|
62 | |||
63 | # test repos |
|
63 | # test repos | |
64 | if test_env: |
|
64 | if test_env: | |
65 | create_test_directory(TESTS_TMP_PATH) |
|
65 | create_test_directory(TESTS_TMP_PATH) | |
66 | # large object stores |
|
66 | # large object stores | |
67 | create_test_directory(largefiles_store(TESTS_TMP_PATH)) |
|
67 | create_test_directory(largefiles_store(TESTS_TMP_PATH)) | |
68 | create_test_directory(lfs_store(TESTS_TMP_PATH)) |
|
68 | create_test_directory(lfs_store(TESTS_TMP_PATH)) | |
69 |
|
69 | |||
70 | create_test_database(TESTS_TMP_PATH, settings) |
|
70 | create_test_database(TESTS_TMP_PATH, settings) | |
71 | create_test_repositories(TESTS_TMP_PATH, settings) |
|
71 | create_test_repositories(TESTS_TMP_PATH, settings) | |
72 | create_test_index(TESTS_TMP_PATH, settings) |
|
72 | create_test_index(TESTS_TMP_PATH, settings) | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | def get_vcs_server_protocol(config): |
|
75 | def get_vcs_server_protocol(config): | |
76 | return config['vcs.server.protocol'] |
|
76 | return config['vcs.server.protocol'] | |
77 |
|
77 | |||
78 |
|
78 | |||
79 | def set_instance_id(config): |
|
79 | def set_instance_id(config): | |
80 | """ |
|
80 | """ | |
81 | Sets a dynamically generated config['instance_id'] if missing or '*' |
|
81 | Sets a dynamically generated config['instance_id'] if missing or '*' | |
82 | E.g. instance_id = *cluster-1 or instance_id = * |
|
82 | E.g. instance_id = *cluster-1 or instance_id = * | |
83 | """ |
|
83 | """ | |
84 |
|
84 | |||
85 | config['instance_id'] = config.get('instance_id') or '' |
|
85 | config['instance_id'] = config.get('instance_id') or '' | |
86 | instance_id = config['instance_id'] |
|
86 | instance_id = config['instance_id'] | |
87 | if instance_id.startswith('*') or not instance_id: |
|
87 | if instance_id.startswith('*') or not instance_id: | |
88 | prefix = instance_id.lstrip('*') |
|
88 | prefix = instance_id.lstrip('*') | |
89 | _platform_id = platform.uname()[1] or 'instance' |
|
89 | _platform_id = platform.uname()[1] or 'instance' | |
90 | config['instance_id'] = '{prefix}uname:{platform}-pid:{pid}'.format( |
|
90 | config['instance_id'] = '{prefix}uname:{platform}-pid:{pid}'.format( | |
91 | prefix=prefix, |
|
91 | prefix=prefix, | |
92 | platform=_platform_id, |
|
92 | platform=_platform_id, | |
93 | pid=os.getpid()) |
|
93 | pid=os.getpid()) | |
94 |
|
94 | |||
95 |
|
95 | |||
96 | def get_default_user_id(): |
|
96 | def get_default_user_id(): | |
97 | from sqlalchemy import text |
|
97 | from sqlalchemy import text | |
98 | from rhodecode.model import meta |
|
98 | from rhodecode.model import meta | |
99 |
|
99 | |||
100 | engine = meta.get_engine() |
|
100 | engine = meta.get_engine() | |
101 | with meta.SA_Session(engine) as session: |
|
101 | with meta.SA_Session(engine) as session: | |
102 | result = session.execute(text( |
|
102 | result = session.execute(text( | |
103 | "SELECT user_id from users where username = :uname" |
|
103 | "SELECT user_id from users where username = :uname" | |
104 | ), {'uname': DEFAULT_USER}) |
|
104 | ), {'uname': DEFAULT_USER}) | |
105 | user = result.first() |
|
105 | user = result.first() | |
106 | if not user: |
|
106 | if not user: | |
107 | raise ValueError('Unable to retrieve default user data from DB') |
|
107 | raise ValueError('Unable to retrieve default user data from DB') | |
108 | user_id = user[0] |
|
108 | user_id = user[0] | |
109 |
|
109 | |||
110 | return user_id |
|
110 | return user_id |
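A quick illustration of the instance_id expansion implemented by set_instance_id() above; the import path is assumed from the config_utils usage earlier in this changeset, and the hostname/pid in the comment are placeholders for whatever platform.uname() and os.getpid() return:

    from rhodecode.lib import config_utils  # assumed module path for the helpers above

    config = {'instance_id': '*cluster-1'}
    config_utils.set_instance_id(config)
    # the leading '*' marks the value for expansion; the rest is kept as a prefix,
    # so the result looks like 'cluster-1uname:myhost-pid:12345'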
@@ -1,205 +1,205 b'' | |||||
1 |
|
1 | |||
2 |
|
2 | |||
3 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2023 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import pytest |
|
22 | import pytest | |
23 |
|
23 | |||
24 | from rhodecode.tests import no_newline_id_generator |
|
24 | from rhodecode.tests import no_newline_id_generator | |
25 | from rhodecode.config.middleware import sanitize_settings_and_apply_defaults |
|
25 | from rhodecode.config.middleware import sanitize_settings_and_apply_defaults | |
26 | from rhodecode.config.settings_maker import SettingsMaker |
|
26 | from rhodecode.config.settings_maker import SettingsMaker | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | class TestHelperFunctions(object): |
|
29 | class TestHelperFunctions(object): | |
30 | @pytest.mark.parametrize('raw, expected', [ |
|
30 | @pytest.mark.parametrize('raw, expected', [ | |
31 | ('true', True), (u'true', True), |
|
31 | ('true', True), (u'true', True), | |
32 | ('yes', True), (u'yes', True), |
|
32 | ('yes', True), (u'yes', True), | |
33 | ('on', True), (u'on', True), |
|
33 | ('on', True), (u'on', True), | |
34 | ('false', False), (u'false', False), |
|
34 | ('false', False), (u'false', False), | |
35 | ('no', False), (u'no', False), |
|
35 | ('no', False), (u'no', False), | |
36 | ('off', False), (u'off', False), |
|
36 | ('off', False), (u'off', False), | |
37 | ('invalid-bool-value', False), |
|
37 | ('invalid-bool-value', False), | |
38 | ('invalid-∫øø@-√å@¨€', False), |
|
38 | ('invalid-∫øø@-√å@¨€', False), | |
39 | (u'invalid-∫øø@-√å@¨€', False), |
|
39 | (u'invalid-∫øø@-√å@¨€', False), | |
40 | ]) |
|
40 | ]) | |
41 | def test_bool_func_helper(self, raw, expected): |
|
41 | def test_bool_func_helper(self, raw, expected): | |
42 | val = SettingsMaker._bool_func(raw) |
|
42 | val = SettingsMaker._bool_func(raw) | |
43 | assert val == expected |
|
43 | assert val == expected | |
44 |
|
44 | |||
45 | @pytest.mark.parametrize('raw, expected', [ |
|
45 | @pytest.mark.parametrize('raw, expected', [ | |
46 | ('', ''), |
|
46 | ('', ''), | |
47 | ('test-string', 'test-string'), |
|
47 | ('test-string', 'test-string'), | |
48 | ('CaSe-TeSt', 'case-test'), |
|
48 | ('CaSe-TeSt', 'case-test'), | |
49 | ('test-string-烩€', 'test-string-烩€'), |
|
49 | ('test-string-烩€', 'test-string-烩€'), | |
50 | (u'test-string-烩€', u'test-string-烩€'), |
|
50 | (u'test-string-烩€', u'test-string-烩€'), | |
51 | ]) |
|
51 | ]) | |
52 | def test_string_func_helper(self, raw, expected): |
|
52 | def test_string_func_helper(self, raw, expected): | |
53 | val = SettingsMaker._string_func(raw) |
|
53 | val = SettingsMaker._string_func(raw) | |
54 | assert val == expected |
|
54 | assert val == expected | |
55 |
|
55 | |||
56 | @pytest.mark.parametrize('raw, expected', [ |
|
56 | @pytest.mark.parametrize('raw, expected', [ | |
57 | ('', []), |
|
57 | ('', []), | |
58 | ('test', ['test']), |
|
58 | ('test', ['test']), | |
59 | ('CaSe-TeSt', ['CaSe-TeSt']), |
|
59 | ('CaSe-TeSt', ['CaSe-TeSt']), | |
60 | ('test-string-烩€', ['test-string-烩€']), |
|
60 | ('test-string-烩€', ['test-string-烩€']), | |
61 | (u'test-string-烩€', [u'test-string-烩€']), |
|
61 | (u'test-string-烩€', [u'test-string-烩€']), | |
62 | ('hg,git,svn', ['hg', 'git', 'svn']), |
|
62 | ('hg,git,svn', ['hg', 'git', 'svn']), | |
63 | ('hg, git, svn', ['hg', 'git', 'svn']), |
|
63 | ('hg, git, svn', ['hg', 'git', 'svn']), | |
64 |
|
64 | |||
65 | (', hg , git , svn , ', ['', 'hg', 'git', 'svn', '']), |
|
65 | (', hg , git , svn , ', ['', 'hg', 'git', 'svn', '']), | |
66 | ('cheese,free node,other', ['cheese', 'free node', 'other']), |
|
66 | ('cheese,free node,other', ['cheese', 'free node', 'other']), | |
67 | ], ids=no_newline_id_generator) |
|
67 | ], ids=no_newline_id_generator) | |
68 | def test_list_setting_helper(self, raw, expected): |
|
68 | def test_list_setting_helper(self, raw, expected): | |
69 | val = SettingsMaker._list_func(raw) |
|
69 | val = SettingsMaker._list_func(raw) | |
70 | assert val == expected |
|
70 | assert val == expected | |
71 |
|
71 | |||
72 | @pytest.mark.parametrize('raw, expected', [ |
|
72 | @pytest.mark.parametrize('raw, expected', [ | |
73 | ('hg git svn', ['hg', 'git', 'svn']), |
|
73 | ('hg git svn', ['hg', 'git', 'svn']), | |
74 | ], ids=no_newline_id_generator) |
|
74 | ], ids=no_newline_id_generator) | |
75 | def test_list_setting_spaces_helper(self, raw, expected): |
|
75 | def test_list_setting_spaces_helper(self, raw, expected): | |
76 | val = SettingsMaker._list_func(raw, sep=' ') |
|
76 | val = SettingsMaker._list_func(raw, sep=' ') | |
77 | assert val == expected |
|
77 | assert val == expected | |
78 |
|
78 | |||
79 | @pytest.mark.parametrize('raw, expected', [ |
|
79 | @pytest.mark.parametrize('raw, expected', [ | |
80 | ('hg\ngit\nsvn', ['hg', 'git', 'svn']), |
|
80 | ('hg\ngit\nsvn', ['hg', 'git', 'svn']), | |
81 | (' hg\n git\n svn ', ['hg', 'git', 'svn']), |
|
81 | (' hg\n git\n svn ', ['hg', 'git', 'svn']), | |
82 | ], ids=no_newline_id_generator) |
|
82 | ], ids=no_newline_id_generator) | |
83 | def test_list_setting_newlines_helper(self, raw, expected): |
|
83 | def test_list_setting_newlines_helper(self, raw, expected): | |
84 | val = SettingsMaker._list_func(raw, sep='\n') |
|
84 | val = SettingsMaker._list_func(raw, sep='\n') | |
85 | assert val == expected |
|
85 | assert val == expected | |
86 |
|
86 | |||
87 | @pytest.mark.parametrize('raw, expected', [ |
|
87 | @pytest.mark.parametrize('raw, expected', [ | |
88 | ('0', 0), |
|
88 | ('0', 0), | |
89 | ('-0', 0), |
|
89 | ('-0', 0), | |
90 | ('12345', 12345), |
|
90 | ('12345', 12345), | |
91 | ('-12345', -12345), |
|
91 | ('-12345', -12345), | |
92 | (u'-12345', -12345), |
|
92 | (u'-12345', -12345), | |
93 | ]) |
|
93 | ]) | |
94 | def test_int_setting_helper(self, raw, expected): |
|
94 | def test_int_setting_helper(self, raw, expected): | |
95 | val = SettingsMaker._int_func(raw) |
|
95 | val = SettingsMaker._int_func(raw) | |
96 | assert val == expected |
|
96 | assert val == expected | |
97 |
|
97 | |||
98 | @pytest.mark.parametrize('raw', [ |
|
98 | @pytest.mark.parametrize('raw', [ | |
99 | ('0xff'), |
|
99 | ('0xff'), | |
100 | (''), |
|
100 | (''), | |
101 | ('invalid-int'), |
|
101 | ('invalid-int'), | |
102 | ('invalid-⁄~†'), |
|
102 | ('invalid-⁄~†'), | |
103 | (u'invalid-⁄~†'), |
|
103 | (u'invalid-⁄~†'), | |
104 | ]) |
|
104 | ]) | |
105 | def test_int_setting_helper_invalid_input(self, raw): |
|
105 | def test_int_setting_helper_invalid_input(self, raw): | |
106 | with pytest.raises(Exception): |
|
106 | with pytest.raises(Exception): | |
107 | SettingsMaker._int_func(raw) |
|
107 | SettingsMaker._int_func(raw) | |
108 |
|
108 | |||
109 |
|
109 | |||
110 | class TestSanitizeVcsSettings(object): |
|
110 | class TestSanitizeVcsSettings(object): | |
111 | _bool_funcs = [ |
|
111 | _bool_funcs = [ | |
112 | ('vcs.hooks.direct_calls', False), |
|
112 | ('vcs.hooks.direct_calls', False), | |
113 | ('vcs.server.enable', True), |
|
113 | ('vcs.server.enable', True), | |
114 | ('vcs.start_server', False), |
|
114 | ('vcs.start_server', False), | |
115 | ('startup.import_repos', False), |
|
115 | ('startup.import_repos', False), | |
116 | ] |
|
116 | ] | |
117 |
|
117 | |||
118 | _string_funcs = [ |
|
118 | _string_funcs = [ | |
119 | ('vcs.svn.compatible_version', ''), |
|
119 | ('vcs.svn.compatible_version', ''), | |
120 | ('vcs.hooks.protocol', ' |
|
120 | ('vcs.hooks.protocol.v2', 'celery'), | |
121 | ('vcs.hooks.host', '*'), |
|
121 | ('vcs.hooks.host', '*'), | |
122 | ('vcs.scm_app_implementation', 'http'), |
|
122 | ('vcs.scm_app_implementation', 'http'), | |
123 | ('vcs.server', ''), |
|
123 | ('vcs.server', ''), | |
124 | ('vcs.server.protocol', 'http'), |
|
124 | ('vcs.server.protocol', 'http'), | |
125 | ] |
|
125 | ] | |
126 |
|
126 | |||
127 | _list_settings = [ |
|
127 | _list_settings = [ | |
128 | ('vcs.backends', 'hg git'), |
|
128 | ('vcs.backends', 'hg git'), | |
129 | ] |
|
129 | ] | |
130 |
|
130 | |||
131 | # @pytest.mark.parametrize('key, default', _list_settings) |
|
131 | # @pytest.mark.parametrize('key, default', _list_settings) | |
132 | # def test_list_setting_spacesep_list(self, key, default): |
|
132 | # def test_list_setting_spacesep_list(self, key, default): | |
133 | # test_list = ['test', 'list', 'values', 'for', key] |
|
133 | # test_list = ['test', 'list', 'values', 'for', key] | |
134 | # input_value = ' '.join(test_list) |
|
134 | # input_value = ' '.join(test_list) | |
135 | # settings = {key: input_value} |
|
135 | # settings = {key: input_value} | |
136 | # sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
136 | # sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
137 | # assert settings[key] == test_list |
|
137 | # assert settings[key] == test_list | |
138 | # |
|
138 | # | |
139 | # @pytest.mark.parametrize('key, default', _list_settings) |
|
139 | # @pytest.mark.parametrize('key, default', _list_settings) | |
140 | # def test_list_setting_newlinesep_list(self, key, default): |
|
140 | # def test_list_setting_newlinesep_list(self, key, default): | |
141 | # test_list = ['test', 'list', 'values', 'for', key] |
|
141 | # test_list = ['test', 'list', 'values', 'for', key] | |
142 | # input_value = '\n'.join(test_list) |
|
142 | # input_value = '\n'.join(test_list) | |
143 | # settings = {key: input_value} |
|
143 | # settings = {key: input_value} | |
144 | # sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
144 | # sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
145 | # assert settings[key] == test_list |
|
145 | # assert settings[key] == test_list | |
146 |
|
146 | |||
147 | @pytest.mark.parametrize('key, default', _list_settings) |
|
147 | @pytest.mark.parametrize('key, default', _list_settings) | |
148 | def test_list_setting_commasep_list(self, key, default): |
|
148 | def test_list_setting_commasep_list(self, key, default): | |
149 | test_list = ['test', 'list', 'values', 'for', key] |
|
149 | test_list = ['test', 'list', 'values', 'for', key] | |
150 | input_value = ','.join(test_list) |
|
150 | input_value = ','.join(test_list) | |
151 | settings = {key: input_value} |
|
151 | settings = {key: input_value} | |
152 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
152 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
153 | assert settings[key] == test_list |
|
153 | assert settings[key] == test_list | |
154 |
|
154 | |||
155 | @pytest.mark.parametrize('key, default', _list_settings) |
|
155 | @pytest.mark.parametrize('key, default', _list_settings) | |
156 | def test_list_setting_comma_and_space_sep_list(self, key, default): |
|
156 | def test_list_setting_comma_and_space_sep_list(self, key, default): | |
157 | test_list = ['test', 'list', 'values', 'for', key] |
|
157 | test_list = ['test', 'list', 'values', 'for', key] | |
158 | input_value = ', '.join(test_list) |
|
158 | input_value = ', '.join(test_list) | |
159 | settings = {key: input_value} |
|
159 | settings = {key: input_value} | |
160 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
160 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
161 | assert settings[key] == test_list |
|
161 | assert settings[key] == test_list | |
162 |
|
162 | |||
163 | @pytest.mark.parametrize('key, default', _string_funcs) |
|
163 | @pytest.mark.parametrize('key, default', _string_funcs) | |
164 | def test_string_func_string(self, key, default): |
|
164 | def test_string_func_string(self, key, default): | |
165 | test_value = 'test-string-for-{}'.format(key) |
|
165 | test_value = 'test-string-for-{}'.format(key) | |
166 | settings = {key: test_value} |
|
166 | settings = {key: test_value} | |
167 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
167 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
168 | assert settings[key] == test_value |
|
168 | assert settings[key] == test_value | |
169 |
|
169 | |||
170 | @pytest.mark.parametrize('key, default', _string_funcs) |
|
170 | @pytest.mark.parametrize('key, default', _string_funcs) | |
171 | def test_string_func_default(self, key, default): |
|
171 | def test_string_func_default(self, key, default): | |
172 | settings = {} |
|
172 | settings = {} | |
173 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
173 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
174 | assert settings[key] == default |
|
174 | assert settings[key] == default | |
175 |
|
175 | |||
176 | # @pytest.mark.parametrize('key, default', _string_funcs) |
|
176 | # @pytest.mark.parametrize('key, default', _string_funcs) | |
177 | # def test_string_func_lowercase(self, key, default): |
|
177 | # def test_string_func_lowercase(self, key, default): | |
178 | # test_value = 'Test-String-For-{}'.format(key) |
|
178 | # test_value = 'Test-String-For-{}'.format(key) | |
179 | # settings = {key: test_value} |
|
179 | # settings = {key: test_value} | |
180 | # sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
180 | # sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
181 | # assert settings[key] == test_value.lower() |
|
181 | # assert settings[key] == test_value.lower() | |
182 |
|
182 | |||
183 | @pytest.mark.parametrize('key, default', _bool_funcs) |
|
183 | @pytest.mark.parametrize('key, default', _bool_funcs) | |
184 | def test_bool_func_true(self, key, default): |
|
184 | def test_bool_func_true(self, key, default): | |
185 | settings = {key: 'true'} |
|
185 | settings = {key: 'true'} | |
186 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
186 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
187 | assert settings[key] is True |
|
187 | assert settings[key] is True | |
188 |
|
188 | |||
189 | @pytest.mark.parametrize('key, default', _bool_funcs) |
|
189 | @pytest.mark.parametrize('key, default', _bool_funcs) | |
190 | def test_bool_func_false(self, key, default): |
|
190 | def test_bool_func_false(self, key, default): | |
191 | settings = {key: 'false'} |
|
191 | settings = {key: 'false'} | |
192 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
192 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
193 | assert settings[key] is False |
|
193 | assert settings[key] is False | |
194 |
|
194 | |||
195 | @pytest.mark.parametrize('key, default', _bool_funcs) |
|
195 | @pytest.mark.parametrize('key, default', _bool_funcs) | |
196 | def test_bool_func_invalid_string(self, key, default): |
|
196 | def test_bool_func_invalid_string(self, key, default): | |
197 | settings = {key: 'no-bool-val-string'} |
|
197 | settings = {key: 'no-bool-val-string'} | |
198 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
198 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
199 | assert settings[key] is False |
|
199 | assert settings[key] is False | |
200 |
|
200 | |||
201 | @pytest.mark.parametrize('key, default', _bool_funcs) |
|
201 | @pytest.mark.parametrize('key, default', _bool_funcs) | |
202 | def test_bool_func_default(self, key, default): |
|
202 | def test_bool_func_default(self, key, default): | |
203 | settings = {} |
|
203 | settings = {} | |
204 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) |
|
204 | sanitize_settings_and_apply_defaults({'__file__': ''}, settings) | |
205 | assert settings[key] is default |
|
205 | assert settings[key] is default |
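For context, the end-to-end effect these parametrised tests assert, collapsed into one sketch over a single raw settings dict (other defaults are applied by the same call but omitted here):

    from rhodecode.config.middleware import sanitize_settings_and_apply_defaults

    settings = {
        'vcs.backends': 'hg, git',
        'vcs.server.enable': 'true',
        'startup.import_repos': 'no-bool-val-string',
    }
    sanitize_settings_and_apply_defaults({'__file__': ''}, settings)

    assert settings['vcs.backends'] == ['hg', 'git']    # comma/space separated list
    assert settings['vcs.server.enable'] is True        # boolean coercion
    assert settings['startup.import_repos'] is False    # unknown strings become False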
@@ -1,226 +1,226 b'' | |||||
1 |
|
1 | |||
2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
2 | # Copyright (C) 2010-2023 RhodeCode GmbH | |
3 | # |
|
3 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
4 | # This program is free software: you can redistribute it and/or modify | |
5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | # it under the terms of the GNU Affero General Public License, version 3 | |
6 | # (only), as published by the Free Software Foundation. |
|
6 | # (only), as published by the Free Software Foundation. | |
7 | # |
|
7 | # | |
8 | # This program is distributed in the hope that it will be useful, |
|
8 | # This program is distributed in the hope that it will be useful, | |
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
11 | # GNU General Public License for more details. |
|
11 | # GNU General Public License for more details. | |
12 | # |
|
12 | # | |
13 | # You should have received a copy of the GNU Affero General Public License |
|
13 | # You should have received a copy of the GNU Affero General Public License | |
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
15 | # |
|
15 | # | |
16 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | # This program is dual-licensed. If you wish to learn more about the | |
17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
19 |
|
19 | |||
20 | import pytest |
|
20 | import pytest | |
21 |
|
21 | |||
22 | from rhodecode.lib.config_utils import get_app_config |
|
22 | from rhodecode.lib.config_utils import get_app_config | |
23 | from rhodecode.tests.fixture import TestINI |
|
23 | from rhodecode.tests.fixture import TestINI | |
24 | from rhodecode.tests import TESTS_TMP_PATH |
|
24 | from rhodecode.tests import TESTS_TMP_PATH | |
25 | from rhodecode.tests.server_utils import RcVCSServer |
|
25 | from rhodecode.tests.server_utils import RcVCSServer | |
26 |
|
26 | |||
27 |
|
27 | |||
28 | @pytest.fixture(scope='session') |
|
28 | @pytest.fixture(scope='session') | |
29 | def vcsserver(request, vcsserver_port, vcsserver_factory): |
|
29 | def vcsserver(request, vcsserver_port, vcsserver_factory): | |
30 | """ |
|
30 | """ | |
31 | Session scope VCSServer. |
|
31 | Session scope VCSServer. | |
32 |
|
32 | |||
33 | Tests which need the VCSServer have to rely on this fixture in order |
|
33 | Tests which need the VCSServer have to rely on this fixture in order | |
34 | to ensure it will be running. |
|
34 | to ensure it will be running. | |
35 |
|
35 | |||
36 | For specific needs, the fixture vcsserver_factory can be used. It allows you to |
|
36 | For specific needs, the fixture vcsserver_factory can be used. It allows you to | |
37 | adjust the configuration file for the test run. |
|
37 | adjust the configuration file for the test run. | |
38 |
|
38 | |||
39 | Command line args: |
|
39 | Command line args: | |
40 |
|
40 | |||
41 | --without-vcsserver: Allows you to switch this fixture off. You have to |
|
41 | --without-vcsserver: Allows you to switch this fixture off. You have to | |
42 | manually start the server. |
|
42 | manually start the server. | |
43 |
|
43 | |||
44 | --vcsserver-port: Will expect the VCSServer to listen on this port. |
|
44 | --vcsserver-port: Will expect the VCSServer to listen on this port. | |
45 | """ |
|
45 | """ | |
46 |
|
46 | |||
47 | if not request.config.getoption('with_vcsserver'): |
|
47 | if not request.config.getoption('with_vcsserver'): | |
48 | return None |
|
48 | return None | |
49 |
|
49 | |||
50 | return vcsserver_factory( |
|
50 | return vcsserver_factory( | |
51 | request, vcsserver_port=vcsserver_port) |
|
51 | request, vcsserver_port=vcsserver_port) | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | @pytest.fixture(scope='session') |
|
54 | @pytest.fixture(scope='session') | |
55 | def vcsserver_factory(tmpdir_factory): |
|
55 | def vcsserver_factory(tmpdir_factory): | |
56 | """ |
|
56 | """ | |
57 | Use this if you need a running vcsserver with a special configuration. |
|
57 | Use this if you need a running vcsserver with a special configuration. | |
58 | """ |
|
58 | """ | |
59 |
|
59 | |||
60 | def factory(request, overrides=(), vcsserver_port=None, |
|
60 | def factory(request, overrides=(), vcsserver_port=None, | |
61 | log_file=None, workers='3'): |
|
61 | log_file=None, workers='3'): | |
62 |
|
62 | |||
63 | if vcsserver_port is None: |
|
63 | if vcsserver_port is None: | |
64 | vcsserver_port = get_available_port() |
|
64 | vcsserver_port = get_available_port() | |
65 |
|
65 | |||
66 | overrides = list(overrides) |
|
66 | overrides = list(overrides) | |
67 | overrides.append({'server:main': {'port': vcsserver_port}}) |
|
67 | overrides.append({'server:main': {'port': vcsserver_port}}) | |
68 |
|
68 | |||
69 | option_name = 'vcsserver_config_http' |
|
69 | option_name = 'vcsserver_config_http' | |
70 | override_option_name = 'vcsserver_config_override' |
|
70 | override_option_name = 'vcsserver_config_override' | |
71 | config_file = get_config( |
|
71 | config_file = get_config( | |
72 | request.config, option_name=option_name, |
|
72 | request.config, option_name=option_name, | |
73 | override_option_name=override_option_name, overrides=overrides, |
|
73 | override_option_name=override_option_name, overrides=overrides, | |
74 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
74 | basetemp=tmpdir_factory.getbasetemp().strpath, | |
75 | prefix='test_vcs_') |
|
75 | prefix='test_vcs_') | |
76 |
|
76 | |||
77 | server = RcVCSServer(config_file, log_file, workers) |
|
77 | server = RcVCSServer(config_file, log_file, workers) | |
78 | server.start() |
|
78 | server.start() | |
79 |
|
79 | |||
80 | @request.addfinalizer |
|
80 | @request.addfinalizer | |
81 | def cleanup(): |
|
81 | def cleanup(): | |
82 | server.shutdown() |
|
82 | server.shutdown() | |
83 |
|
83 | |||
84 | server.wait_until_ready() |
|
84 | server.wait_until_ready() | |
85 | return server |
|
85 | return server | |
86 |
|
86 | |||
87 | return factory |
|
87 | return factory | |
88 |
|
88 | |||
89 |
|
89 | |||
90 | def _use_log_level(config): |
|
90 | def _use_log_level(config): | |
91 | level = config.getoption('test_loglevel') or 'critical' |
|
91 | level = config.getoption('test_loglevel') or 'critical' | |
92 | return level.upper() |
|
92 | return level.upper() | |
93 |
|
93 | |||
94 |
|
94 | |||
95 | @pytest.fixture(scope='session') |
|
95 | @pytest.fixture(scope='session') | |
96 | def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port): |
|
96 | def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port): | |
97 | option_name = 'pyramid_config' |
|
97 | option_name = 'pyramid_config' | |
98 | log_level = _use_log_level(request.config) |
|
98 | log_level = _use_log_level(request.config) | |
99 |
|
99 | |||
100 | overrides = [ |
|
100 | overrides = [ | |
101 | {'server:main': {'port': rcserver_port}}, |
|
101 | {'server:main': {'port': rcserver_port}}, | |
102 | {'app:main': { |
|
102 | {'app:main': { | |
103 | 'cache_dir': '%(here)s/rc-tests/rc_data', |
|
103 | 'cache_dir': '%(here)s/rc-tests/rc_data', | |
104 | 'vcs.server': f'localhost:{vcsserver_port}', |
|
104 | 'vcs.server': f'localhost:{vcsserver_port}', | |
105 | # johbo: We will always start the VCSServer on our own based on the |
|
105 | # johbo: We will always start the VCSServer on our own based on the | |
106 | # fixtures of the test cases. For the test run it must always be |
|
106 | # fixtures of the test cases. For the test run it must always be | |
107 | # off in the INI file. |
|
107 | # off in the INI file. | |
108 | 'vcs.start_server': 'false', |
|
108 | 'vcs.start_server': 'false', | |
109 |
|
109 | |||
110 | 'vcs.server.protocol': 'http', |
|
110 | 'vcs.server.protocol': 'http', | |
111 | 'vcs.scm_app_implementation': 'http', |
|
111 | 'vcs.scm_app_implementation': 'http', | |
112 | 'vcs.svn.proxy.enabled': 'true', |
|
112 | 'vcs.svn.proxy.enabled': 'true', | |
113 | 'vcs.hooks.protocol': ' |
|
113 | 'vcs.hooks.protocol.v2': 'celery', | |
114 | 'vcs.hooks.host': '*', |
|
114 | 'vcs.hooks.host': '*', | |
115 | 'repo_store.path': TESTS_TMP_PATH, |
|
115 | 'repo_store.path': TESTS_TMP_PATH, | |
116 | 'app.service_api.token': 'service_secret_token', |
|
116 | 'app.service_api.token': 'service_secret_token', | |
117 | }}, |
|
117 | }}, | |
118 |
|
118 | |||
119 | {'handler_console': { |
|
119 | {'handler_console': { | |
120 | 'class': 'StreamHandler', |
|
120 | 'class': 'StreamHandler', | |
121 | 'args': '(sys.stderr,)', |
|
121 | 'args': '(sys.stderr,)', | |
122 | 'level': log_level, |
|
122 | 'level': log_level, | |
123 | }}, |
|
123 | }}, | |
124 |
|
124 | |||
125 | ] |
|
125 | ] | |
126 |
|
126 | |||
127 | filename = get_config( |
|
127 | filename = get_config( | |
128 | request.config, option_name=option_name, |
|
128 | request.config, option_name=option_name, | |
129 | override_option_name='{}_override'.format(option_name), |
|
129 | override_option_name='{}_override'.format(option_name), | |
130 | overrides=overrides, |
|
130 | overrides=overrides, | |
131 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
131 | basetemp=tmpdir_factory.getbasetemp().strpath, | |
132 | prefix='test_rce_') |
|
132 | prefix='test_rce_') | |
133 | return filename |
|
133 | return filename | |
134 |
|
134 | |||
135 |
|
135 | |||
136 | @pytest.fixture(scope='session') |
|
136 | @pytest.fixture(scope='session') | |
137 | def ini_settings(ini_config): |
|
137 | def ini_settings(ini_config): | |
138 | ini_path = ini_config |
|
138 | ini_path = ini_config | |
139 | return get_app_config(ini_path) |
|
139 | return get_app_config(ini_path) | |
140 |
|
140 | |||
141 |
|
141 | |||
142 | def get_available_port(min_port=40000, max_port=55555): |
|
142 | def get_available_port(min_port=40000, max_port=55555): | |
143 | from rhodecode.lib.utils2 import get_available_port as _get_port |
|
143 | from rhodecode.lib.utils2 import get_available_port as _get_port | |
144 | return _get_port(min_port, max_port) |
|
144 | return _get_port(min_port, max_port) | |
145 |
|
145 | |||
146 |
|
146 | |||
147 | @pytest.fixture(scope='session') |
|
147 | @pytest.fixture(scope='session') | |
148 | def rcserver_port(request): |
|
148 | def rcserver_port(request): | |
149 | port = get_available_port() |
|
149 | port = get_available_port() | |
150 | print(f'Using rhodecode port {port}') |
|
150 | print(f'Using rhodecode port {port}') | |
151 | return port |
|
151 | return port | |
152 |
|
152 | |||
153 |
|
153 | |||
154 | @pytest.fixture(scope='session') |
|
154 | @pytest.fixture(scope='session') | |
155 | def vcsserver_port(request): |
|
155 | def vcsserver_port(request): | |
156 | port = request.config.getoption('--vcsserver-port') |
|
156 | port = request.config.getoption('--vcsserver-port') | |
157 | if port is None: |
|
157 | if port is None: | |
158 | port = get_available_port() |
|
158 | port = get_available_port() | |
159 | print(f'Using vcsserver port {port}') |
|
159 | print(f'Using vcsserver port {port}') | |
160 | return port |
|
160 | return port | |
161 |
|
161 | |||
162 |
|
162 | |||
163 | @pytest.fixture(scope='session') |
|
163 | @pytest.fixture(scope='session') | |
164 | def available_port_factory() -> get_available_port: |
|
164 | def available_port_factory() -> get_available_port: | |
165 | """ |
|
165 | """ | |
166 | Returns a callable which returns free port numbers. |
|
166 | Returns a callable which returns free port numbers. | |
167 | """ |
|
167 | """ | |
168 | return get_available_port |
|
168 | return get_available_port | |
169 |
|
169 | |||
170 |
|
170 | |||
171 | @pytest.fixture() |
|
171 | @pytest.fixture() | |
172 | def available_port(available_port_factory): |
|
172 | def available_port(available_port_factory): | |
173 | """ |
|
173 | """ | |
174 | Gives you one free port for the current test. |
|
174 | Gives you one free port for the current test. | |
175 |
|
175 | |||
176 | Uses "available_port_factory" to retrieve the port. |
|
176 | Uses "available_port_factory" to retrieve the port. | |
177 | """ |
|
177 | """ | |
178 | return available_port_factory() |
|
178 | return available_port_factory() | |
179 |
|
179 | |||
180 |
|
180 | |||
181 | @pytest.fixture(scope='session') |
|
181 | @pytest.fixture(scope='session') | |
182 | def testini_factory(tmpdir_factory, ini_config): |
|
182 | def testini_factory(tmpdir_factory, ini_config): | |
183 | """ |
|
183 | """ | |
184 | Factory to create an INI file based on TestINI. |
|
184 | Factory to create an INI file based on TestINI. | |
185 |
|
185 | |||
186 | It will make sure to place the INI file in the correct directory. |
|
186 | It will make sure to place the INI file in the correct directory. | |
187 | """ |
|
187 | """ | |
188 | basetemp = tmpdir_factory.getbasetemp().strpath |
|
188 | basetemp = tmpdir_factory.getbasetemp().strpath | |
189 | return TestIniFactory(basetemp, ini_config) |
|
189 | return TestIniFactory(basetemp, ini_config) | |
190 |
|
190 | |||
191 |
|
191 | |||
192 | class TestIniFactory(object): |
|
192 | class TestIniFactory(object): | |
193 |
|
193 | |||
194 | def __init__(self, basetemp, template_ini): |
|
194 | def __init__(self, basetemp, template_ini): | |
195 | self._basetemp = basetemp |
|
195 | self._basetemp = basetemp | |
196 | self._template_ini = template_ini |
|
196 | self._template_ini = template_ini | |
197 |
|
197 | |||
198 | def __call__(self, ini_params, new_file_prefix='test'): |
|
198 | def __call__(self, ini_params, new_file_prefix='test'): | |
199 | ini_file = TestINI( |
|
199 | ini_file = TestINI( | |
200 | self._template_ini, ini_params=ini_params, |
|
200 | self._template_ini, ini_params=ini_params, | |
201 | new_file_prefix=new_file_prefix, dir=self._basetemp) |
|
201 | new_file_prefix=new_file_prefix, dir=self._basetemp) | |
202 | result = ini_file.create() |
|
202 | result = ini_file.create() | |
203 | return result |
|
203 | return result | |
204 |
|
204 | |||
205 |
|
205 | |||
206 | def get_config( |
|
206 | def get_config( | |
207 | config, option_name, override_option_name, overrides=None, |
|
207 | config, option_name, override_option_name, overrides=None, | |
208 | basetemp=None, prefix='test'): |
|
208 | basetemp=None, prefix='test'): | |
209 | """ |
|
209 | """ | |
210 | Find a configuration file and apply overrides for the given `prefix`. |
|
210 | Find a configuration file and apply overrides for the given `prefix`. | |
211 | """ |
|
211 | """ | |
212 | config_file = ( |
|
212 | config_file = ( | |
213 | config.getoption(option_name) or config.getini(option_name)) |
|
213 | config.getoption(option_name) or config.getini(option_name)) | |
214 | if not config_file: |
|
214 | if not config_file: | |
215 | pytest.exit( |
|
215 | pytest.exit( | |
216 | "Configuration error, could not extract {}.".format(option_name)) |
|
216 | "Configuration error, could not extract {}.".format(option_name)) | |
217 |
|
217 | |||
218 | overrides = overrides or [] |
|
218 | overrides = overrides or [] | |
219 | config_override = config.getoption(override_option_name) |
|
219 | config_override = config.getoption(override_option_name) | |
220 | if config_override: |
|
220 | if config_override: | |
221 | overrides.append(config_override) |
|
221 | overrides.append(config_override) | |
222 | temp_ini_file = TestINI( |
|
222 | temp_ini_file = TestINI( | |
223 | config_file, ini_params=overrides, new_file_prefix=prefix, |
|
223 | config_file, ini_params=overrides, new_file_prefix=prefix, | |
224 | dir=basetemp) |
|
224 | dir=basetemp) | |
225 |
|
225 | |||
226 | return temp_ini_file.create() |
|
226 | return temp_ini_file.create() |
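The overrides consumed by get_config() above (and assembled in the ini_config fixture) are plain {section: {option: value}} dicts. A minimal sketch of a custom call, with the port, basetemp and prefix purely illustrative, run somewhere a pytest `request` object is available:

    overrides = [
        {'server:main': {'port': 40100}},
        {'app:main': {'vcs.start_server': 'false'}},
    ]

    ini_path = get_config(
        request.config, option_name='pyramid_config',
        override_option_name='pyramid_config_override',
        overrides=overrides,
        basetemp='/tmp/pytest-basetemp', prefix='test_rce_')
    # ini_path now points at a generated INI file with the overrides applied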
@@ -1,451 +1,451 b'' | |||||
1 |
|
1 | |||
2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
2 | # Copyright (C) 2010-2023 RhodeCode GmbH | |
3 | # |
|
3 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
4 | # This program is free software: you can redistribute it and/or modify | |
5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | # it under the terms of the GNU Affero General Public License, version 3 | |
6 | # (only), as published by the Free Software Foundation. |
|
6 | # (only), as published by the Free Software Foundation. | |
7 | # |
|
7 | # | |
8 | # This program is distributed in the hope that it will be useful, |
|
8 | # This program is distributed in the hope that it will be useful, | |
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
11 | # GNU General Public License for more details. |
|
11 | # GNU General Public License for more details. | |
12 | # |
|
12 | # | |
13 | # You should have received a copy of the GNU Affero General Public License |
|
13 | # You should have received a copy of the GNU Affero General Public License | |
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
15 | # |
|
15 | # | |
16 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | # This program is dual-licensed. If you wish to learn more about the | |
17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
19 |
|
19 | |||
20 | import mock |
|
20 | import mock | |
21 | import pytest |
|
21 | import pytest | |
22 |
|
22 | |||
23 | from rhodecode.lib.str_utils import base64_to_str |
|
23 | from rhodecode.lib.str_utils import base64_to_str | |
24 | from rhodecode.lib.utils2 import AttributeDict |
|
24 | from rhodecode.lib.utils2 import AttributeDict | |
25 | from rhodecode.tests.utils import CustomTestApp |
|
25 | from rhodecode.tests.utils import CustomTestApp | |
26 |
|
26 | |||
27 | from rhodecode.lib.caching_query import FromCache |
|
27 | from rhodecode.lib.caching_query import FromCache | |
28 | from rhodecode.lib.middleware import simplevcs |
|
28 | from rhodecode.lib.middleware import simplevcs | |
29 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
29 | from rhodecode.lib.middleware.https_fixup import HttpsFixup | |
30 | from rhodecode.lib.middleware.utils import scm_app_http |
|
30 | from rhodecode.lib.middleware.utils import scm_app_http | |
31 | from rhodecode.model.db import User, _hash_key |
|
31 | from rhodecode.model.db import User, _hash_key | |
32 | from rhodecode.model.meta import Session, cache as db_cache |
|
32 | from rhodecode.model.meta import Session, cache as db_cache | |
33 | from rhodecode.tests import ( |
|
33 | from rhodecode.tests import ( | |
34 | HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) |
|
34 | HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) | |
35 | from rhodecode.tests.lib.middleware import mock_scm_app |
|
35 | from rhodecode.tests.lib.middleware import mock_scm_app | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | class StubVCSController(simplevcs.SimpleVCS): |
|
38 | class StubVCSController(simplevcs.SimpleVCS): | |
39 |
|
39 | |||
40 | SCM = 'hg' |
|
40 | SCM = 'hg' | |
41 | stub_response_body = tuple() |
|
41 | stub_response_body = tuple() | |
42 |
|
42 | |||
43 | def __init__(self, *args, **kwargs): |
|
43 | def __init__(self, *args, **kwargs): | |
44 | super(StubVCSController, self).__init__(*args, **kwargs) |
|
44 | super(StubVCSController, self).__init__(*args, **kwargs) | |
45 | self._action = 'pull' |
|
45 | self._action = 'pull' | |
46 | self._is_shadow_repo_dir = True |
|
46 | self._is_shadow_repo_dir = True | |
47 | self._name = HG_REPO |
|
47 | self._name = HG_REPO | |
48 | self.set_repo_names(None) |
|
48 | self.set_repo_names(None) | |
49 |
|
49 | |||
50 | @property |
|
50 | @property | |
51 | def is_shadow_repo_dir(self): |
|
51 | def is_shadow_repo_dir(self): | |
52 | return self._is_shadow_repo_dir |
|
52 | return self._is_shadow_repo_dir | |
53 |
|
53 | |||
54 | def _get_repository_name(self, environ): |
|
54 | def _get_repository_name(self, environ): | |
55 | return self._name |
|
55 | return self._name | |
56 |
|
56 | |||
57 | def _get_action(self, environ): |
|
57 | def _get_action(self, environ): | |
58 | return self._action |
|
58 | return self._action | |
59 |
|
59 | |||
60 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
60 | def _create_wsgi_app(self, repo_path, repo_name, config): | |
61 | def fake_app(environ, start_response): |
|
61 | def fake_app(environ, start_response): | |
62 | headers = [ |
|
62 | headers = [ | |
63 | ('Http-Accept', 'application/mercurial') |
|
63 | ('Http-Accept', 'application/mercurial') | |
64 | ] |
|
64 | ] | |
65 | start_response('200 OK', headers) |
|
65 | start_response('200 OK', headers) | |
66 | return self.stub_response_body |
|
66 | return self.stub_response_body | |
67 | return fake_app |
|
67 | return fake_app | |
68 |
|
68 | |||
69 | def _create_config(self, extras, repo_name, scheme='http'): |
|
69 | def _create_config(self, extras, repo_name, scheme='http'): | |
70 | return None |
|
70 | return None | |
71 |
|
71 | |||
72 |
|
72 | |||
73 | @pytest.fixture() |
|
73 | @pytest.fixture() | |
74 | def vcscontroller(baseapp, config_stub, request_stub): |
|
74 | def vcscontroller(baseapp, config_stub, request_stub): | |
75 | from rhodecode.config.middleware import ce_auth_resources |
|
75 | from rhodecode.config.middleware import ce_auth_resources | |
76 |
|
76 | |||
77 | config_stub.testing_securitypolicy() |
|
77 | config_stub.testing_securitypolicy() | |
78 | config_stub.include('rhodecode.authentication') |
|
78 | config_stub.include('rhodecode.authentication') | |
79 |
|
79 | |||
80 | for resource in ce_auth_resources: |
|
80 | for resource in ce_auth_resources: | |
81 | config_stub.include(resource) |
|
81 | config_stub.include(resource) | |
82 |
|
82 | |||
83 | controller = StubVCSController( |
|
83 | controller = StubVCSController( | |
84 | baseapp.config.get_settings(), request_stub.registry) |
|
84 | baseapp.config.get_settings(), request_stub.registry) | |
85 | app = HttpsFixup(controller, baseapp.config.get_settings()) |
|
85 | app = HttpsFixup(controller, baseapp.config.get_settings()) | |
86 | app = CustomTestApp(app) |
|
86 | app = CustomTestApp(app) | |
87 |
|
87 | |||
88 | _remove_default_user_from_query_cache() |
|
88 | _remove_default_user_from_query_cache() | |
89 |
|
89 | |||
90 | # Sanity checks that things are set up correctly |
|
90 | # Sanity checks that things are set up correctly | |
91 | app.get('/' + HG_REPO, status=200) |
|
91 | app.get('/' + HG_REPO, status=200) | |
92 |
|
92 | |||
93 | app.controller = controller |
|
93 | app.controller = controller | |
94 | return app |
|
94 | return app | |
95 |
|
95 | |||
96 |
|
96 | |||
97 | def _remove_default_user_from_query_cache(): |
|
97 | def _remove_default_user_from_query_cache(): | |
98 | user = User.get_default_user(cache=True) |
|
98 | user = User.get_default_user(cache=True) | |
99 | query = Session().query(User).filter(User.username == user.username) |
|
99 | query = Session().query(User).filter(User.username == user.username) | |
100 | query = query.options( |
|
100 | query = query.options( | |
101 | FromCache("sql_cache_short", f"get_user_{_hash_key(user.username)}")) |
|
101 | FromCache("sql_cache_short", f"get_user_{_hash_key(user.username)}")) | |
102 |
|
102 | |||
103 | db_cache.invalidate( |
|
103 | db_cache.invalidate( | |
104 | query, {}, |
|
104 | query, {}, | |
105 | FromCache("sql_cache_short", f"get_user_{_hash_key(user.username)}")) |
|
105 | FromCache("sql_cache_short", f"get_user_{_hash_key(user.username)}")) | |
106 |
|
106 | |||
107 | Session().expire(user) |
|
107 | Session().expire(user) | |
108 |
|
108 | |||
109 |
|
109 | |||
110 | def test_handles_exceptions_during_permissions_checks( |
|
110 | def test_handles_exceptions_during_permissions_checks( | |
111 | vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory): |
|
111 | vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory): | |
112 |
|
112 | |||
113 | test_password = 'qweqwe' |
|
113 | test_password = 'qweqwe' | |
114 | test_user = test_user_factory(password=test_password, extern_type='headers', extern_name='headers') |
|
114 | test_user = test_user_factory(password=test_password, extern_type='headers', extern_name='headers') | |
115 | test_username = test_user.username |
|
115 | test_username = test_user.username | |
116 |
|
116 | |||
117 | enable_auth_plugins.enable([ |
|
117 | enable_auth_plugins.enable([ | |
118 | 'egg:rhodecode-enterprise-ce#headers', |
|
118 | 'egg:rhodecode-enterprise-ce#headers', | |
119 | 'egg:rhodecode-enterprise-ce#token', |
|
119 | 'egg:rhodecode-enterprise-ce#token', | |
120 | 'egg:rhodecode-enterprise-ce#rhodecode'], |
|
120 | 'egg:rhodecode-enterprise-ce#rhodecode'], | |
121 | override={ |
|
121 | override={ | |
122 | 'egg:rhodecode-enterprise-ce#headers': {'auth_headers_header': 'REMOTE_USER'} |
|
122 | 'egg:rhodecode-enterprise-ce#headers': {'auth_headers_header': 'REMOTE_USER'} | |
123 | }) |
|
123 | }) | |
124 |
|
124 | |||
125 | user_and_pass = f'{test_username}:{test_password}' |
|
125 | user_and_pass = f'{test_username}:{test_password}' | |
126 | auth_password = base64_to_str(user_and_pass) |
|
126 | auth_password = base64_to_str(user_and_pass) | |
127 |
|
127 | |||
128 | extra_environ = { |
|
128 | extra_environ = { | |
129 | 'AUTH_TYPE': 'Basic', |
|
129 | 'AUTH_TYPE': 'Basic', | |
130 | 'HTTP_AUTHORIZATION': f'Basic {auth_password}', |
|
130 | 'HTTP_AUTHORIZATION': f'Basic {auth_password}', | |
131 | 'REMOTE_USER': test_username, |
|
131 | 'REMOTE_USER': test_username, | |
132 | } |
|
132 | } | |
133 |
|
133 | |||
134 | # Verify that things are hooked up correctly: we pass a user with headers-bound auth, and the headers are filled in |
|
134 | # Verify that things are hooked up correctly: we pass a user with headers-bound auth, and the headers are filled in | |
135 | vcscontroller.get('/', status=200, extra_environ=extra_environ) |
|
135 | vcscontroller.get('/', status=200, extra_environ=extra_environ) | |
136 |
|
136 | |||
137 | # Simulate trouble during permission checks |
|
137 | # Simulate trouble during permission checks | |
138 |         with mock.patch('rhodecode.model.db.User.get_by_username',
139 |                         side_effect=Exception('permission_error_test')) as get_user:
140 |             # Verify that a correct 500 is returned and check that the expected
141 |             # code path was hit.
142 |             vcscontroller.get('/', status=500, extra_environ=extra_environ)
143 |         assert get_user.called
144 |
145 |
146 | class StubFailVCSController(simplevcs.SimpleVCS):
147 |     def _handle_request(self, environ, start_response):
148 |         raise Exception("BOOM")
149 |
150 |
151 | @pytest.fixture(scope='module')
152 | def fail_controller(baseapp):
153 |     controller = StubFailVCSController(
154 |         baseapp.config.get_settings(), baseapp.config)
155 |     controller = HttpsFixup(controller, baseapp.config.get_settings())
156 |     controller = CustomTestApp(controller)
157 |     return controller
158 |
159 |
160 | def test_handles_exceptions_as_internal_server_error(fail_controller):
161 |     fail_controller.get('/', status=500)
162 |
163 |
164 | def test_provides_traceback_for_appenlight(fail_controller):
165 |     response = fail_controller.get(
166 |         '/', status=500, extra_environ={'appenlight.client': 'fake'})
167 |     assert 'appenlight.__traceback' in response.request.environ
168 |
169 |
170 | def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
171 |     controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
172 |     assert controller.scm_app is scm_app_http
173 |
174 |
175 | def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
176 |     config = baseapp.config.get_settings().copy()
177 |     config['vcs.scm_app_implementation'] = (
178 |         'rhodecode.tests.lib.middleware.mock_scm_app')
179 |     controller = StubVCSController(config, request_stub.registry)
180 |     assert controller.scm_app is mock_scm_app
181 |
182 |
183 | @pytest.mark.parametrize('query_string, expected', [
184 |     ('cmd=stub_command', True),
185 |     ('cmd=listkeys', False),
186 | ])
187 | def test_should_check_locking(query_string, expected):
188 |     result = simplevcs._should_check_locking(query_string)
189 |     assert result == expected
190 |
191 |
192 | class TestShadowRepoRegularExpression(object):
193 |     pr_segment = 'pull-request'
194 |     shadow_segment = 'repository'
195 |
196 |     @pytest.mark.parametrize('url, expected', [
197 |         # repo with/without groups
198 |         ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
199 |         ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
200 |         ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
201 |         ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
202 |
203 |         # pull request ID
204 |         ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
205 |         ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
206 |         ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
207 |         ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
208 |
209 |         # unicode
210 |         (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
211 |         (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
212 |
213 |         # trailing/leading slash
214 |         ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
215 |         ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
216 |         ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
217 |
218 |         # misc
219 |         ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
220 |         ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
221 |     ])
222 |     def test_shadow_repo_regular_expression(self, url, expected):
223 |         from rhodecode.lib.middleware.simplevcs import SimpleVCS
224 |         url = url.format(
225 |             pr_segment=self.pr_segment,
226 |             shadow_segment=self.shadow_segment)
227 |         match_obj = SimpleVCS.shadow_repo_re.match(url)
228 |         assert (match_obj is not None) == expected
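The cases above pin down the shape of a shadow-repository URL: optional group segments, a repository name, the pull-request segment, a purely numeric ID, and the shadow segment with no leading or trailing slash and nothing after it. As a rough illustration only (an assumed pattern, not the actual SimpleVCS.shadow_repo_re), a regular expression consistent with every case in that table could look like this:

    import re

    # Hypothetical sketch, not taken from RhodeCode sources.
    shadow_repo_re = re.compile(
        r'^(?:[^/]+/)*'      # zero or more repository group segments
        r'[^/]+/'            # repository name
        r'pull-request/'     # pr_segment
        r'\d+/'              # non-negative, purely numeric pull request ID
        r'repository$')      # shadow_segment, nothing may follow

    assert shadow_repo_re.match('Group/My-Repo/pull-request/2/repository')
    assert not shadow_repo_re.match('My-Repo/pull-request/-1/repository')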
229 |
230 |
231 | @pytest.mark.backends('git', 'hg')
232 | class TestShadowRepoExposure(object):
233 |
234 |     def test_pull_on_shadow_repo_propagates_to_wsgi_app(
235 |             self, baseapp, request_stub):
236 |         """
237 |         Check that a pull action to a shadow repo is propagated to the
238 |         underlying wsgi app.
239 |         """
240 |         controller = StubVCSController(
241 |             baseapp.config.get_settings(), request_stub.registry)
242 |         controller._check_ssl = mock.Mock()
243 |         controller.is_shadow_repo = True
244 |         controller._action = 'pull'
245 |         controller._is_shadow_repo_dir = True
246 |         controller.stub_response_body = (b'dummy body value',)
247 |         controller._get_default_cache_ttl = mock.Mock(
248 |             return_value=(False, 0))
249 |
250 |         environ_stub = {
251 |             'HTTP_HOST': 'test.example.com',
252 |             'HTTP_ACCEPT': 'application/mercurial',
253 |             'REQUEST_METHOD': 'GET',
254 |             'wsgi.url_scheme': 'http',
255 |         }
256 |
257 |         response = controller(environ_stub, mock.Mock())
258 |         response_body = b''.join(response)
259 |
260 |         # Assert that we got the response from the wsgi app.
261 |         assert response_body == b''.join(controller.stub_response_body)
262 |
263 |     def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
264 |         """
265 |         Check that a pull action to a missing shadow repo results in a
266 |         404 Not Found from the underlying wsgi app.
267 |         """
268 |         controller = StubVCSController(
269 |             baseapp.config.get_settings(), request_stub.registry)
270 |         controller._check_ssl = mock.Mock()
271 |         controller.is_shadow_repo = True
272 |         controller._action = 'pull'
273 |         controller._is_shadow_repo_dir = False
274 |         controller.stub_response_body = (b'dummy body value',)
275 |         environ_stub = {
276 |             'HTTP_HOST': 'test.example.com',
277 |             'HTTP_ACCEPT': 'application/mercurial',
278 |             'REQUEST_METHOD': 'GET',
279 |             'wsgi.url_scheme': 'http',
280 |         }
281 |
282 |         response = controller(environ_stub, mock.Mock())
283 |         response_body = b''.join(response)
284 |
285 |         # Assert that a 404 is returned instead of the stub response body.
286 |         assert b'404 Not Found' in response_body
287 |
288 |     def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
289 |         """
290 |         Check that a push action to a shadow repo is aborted.
291 |         """
292 |         controller = StubVCSController(
293 |             baseapp.config.get_settings(), request_stub.registry)
294 |         controller._check_ssl = mock.Mock()
295 |         controller.is_shadow_repo = True
296 |         controller._action = 'push'
297 |         controller.stub_response_body = (b'dummy body value',)
298 |         environ_stub = {
299 |             'HTTP_HOST': 'test.example.com',
300 |             'HTTP_ACCEPT': 'application/mercurial',
301 |             'REQUEST_METHOD': 'GET',
302 |             'wsgi.url_scheme': 'http',
303 |         }
304 |
305 |         response = controller(environ_stub, mock.Mock())
306 |         response_body = b''.join(response)
307 |
308 |         assert response_body != controller.stub_response_body
309 |         # Assert that a 406 error is returned.
310 |         assert b'406 Not Acceptable' in response_body
311 |
312 |     def test_set_repo_names_no_shadow(self, baseapp, request_stub):
313 |         """
314 |         Check that the set_repo_names method sets all names to the one returned
315 |         by the _get_repository_name method on a request to a non shadow repo.
316 |         """
317 |         environ_stub = {}
318 |         controller = StubVCSController(
319 |             baseapp.config.get_settings(), request_stub.registry)
320 |         controller._name = 'RepoGroup/MyRepo'
321 |         controller.set_repo_names(environ_stub)
322 |         assert not controller.is_shadow_repo
323 |         assert (controller.url_repo_name ==
324 |                 controller.acl_repo_name ==
325 |                 controller.vcs_repo_name ==
326 |                 controller._get_repository_name(environ_stub))
327 |
328 |     def test_set_repo_names_with_shadow(
329 |             self, baseapp, pr_util, config_stub, request_stub):
330 |         """
331 |         Check that the set_repo_names method sets correct names on a request
332 |         to a shadow repo.
333 |         """
334 |         from rhodecode.model.pull_request import PullRequestModel
335 |
336 |         pull_request = pr_util.create_pull_request()
337 |         shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
338 |             target=pull_request.target_repo.repo_name,
339 |             pr_id=pull_request.pull_request_id,
340 |             pr_segment=TestShadowRepoRegularExpression.pr_segment,
341 |             shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
342 |         controller = StubVCSController(
343 |             baseapp.config.get_settings(), request_stub.registry)
344 |         controller._name = shadow_url
345 |         controller.set_repo_names({})
346 |
347 |         # Get file system path to shadow repo for assertions.
348 |         workspace_id = PullRequestModel()._workspace_id(pull_request)
349 |         vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
350 |
351 |         assert controller.vcs_repo_name == vcs_repo_name
352 |         assert controller.url_repo_name == shadow_url
353 |         assert controller.acl_repo_name == pull_request.target_repo.repo_name
354 |         assert controller.is_shadow_repo
355 |
356 |     def test_set_repo_names_with_shadow_but_missing_pr(
357 |             self, baseapp, pr_util, config_stub, request_stub):
358 |         """
359 |         Checks that the set_repo_names method enforces matching target repos
360 |         and pull request IDs.
361 |         """
362 |         pull_request = pr_util.create_pull_request()
363 |         shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
364 |             target=pull_request.target_repo.repo_name,
365 |             pr_id=999999999,
366 |             pr_segment=TestShadowRepoRegularExpression.pr_segment,
367 |             shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
368 |         controller = StubVCSController(
369 |             baseapp.config.get_settings(), request_stub.registry)
370 |         controller._name = shadow_url
371 |         controller.set_repo_names({})
372 |
373 |         assert not controller.is_shadow_repo
374 |         assert (controller.url_repo_name ==
375 |                 controller.acl_repo_name ==
376 |                 controller.vcs_repo_name)
377 |
378 |
379 | @pytest.mark.usefixtures('baseapp')
380 | class TestGenerateVcsResponse(object):
381 |
382 |     def test_ensures_that_start_response_is_called_early_enough(self):
383 |         self.call_controller_with_response_body(iter(['a', 'b']))
384 |         assert self.start_response.called
385 |
386 |     def test_invalidates_cache_after_body_is_consumed(self):
387 |         result = self.call_controller_with_response_body(iter(['a', 'b']))
388 |         assert not self.was_cache_invalidated()
389 |         # Consume the result
390 |         list(result)
391 |         assert self.was_cache_invalidated()
392 |
393 |     def test_raises_unknown_exceptions(self):
394 |         result = self.call_controller_with_response_body(
395 |             self.raise_result_iter(vcs_kind='unknown'))
396 |         with pytest.raises(Exception):
397 |             list(result)
398 |
399 |     def call_controller_with_response_body(self, response_body):
400 |         settings = {
401 |             'base_path': 'fake_base_path',
402 |             'vcs.hooks.protocol.v2': 'celery',
403 |             'vcs.hooks.direct_calls': False,
404 |         }
405 |         registry = AttributeDict()
406 |         controller = StubVCSController(settings, registry)
407 |         controller._invalidate_cache = mock.Mock()
408 |         controller.stub_response_body = response_body
409 |         self.start_response = mock.Mock()
410 |         result = controller._generate_vcs_response(
411 |             environ={}, start_response=self.start_response,
412 |             repo_path='fake_repo_path',
413 |             extras={}, action='push')
414 |         self.controller = controller
415 |         return result
416 |
417 |     def raise_result_iter(self, vcs_kind='repo_locked'):
418 |         """
419 |         Simulates an exception raised by the vcs layer, of the given vcs_kind.
420 |         """
421 |         raise self.vcs_exception(vcs_kind=vcs_kind)
422 |         yield "never_reached"
423 |
424 |     def vcs_exception(self, vcs_kind='repo_locked'):
425 |         locked_exception = Exception('TEST_MESSAGE')
426 |         locked_exception._vcs_kind = vcs_kind
427 |         return locked_exception
428 |
429 |     def was_cache_invalidated(self):
430 |         return self.controller._invalidate_cache.called
431 |
432 |
433 | class TestInitializeGenerator(object):
434 |
435 |     def test_drains_first_element(self):
436 |         gen = self.factory(['__init__', 1, 2])
437 |         result = list(gen)
438 |         assert result == [1, 2]
439 |
440 |     @pytest.mark.parametrize('values', [
441 |         [],
442 |         [1, 2],
443 |     ])
444 |     def test_raises_value_error(self, values):
445 |         with pytest.raises(ValueError):
446 |             self.factory(values)
447 |
448 |     @simplevcs.initialize_generator
449 |     def factory(self, iterable):
450 |         for elem in iterable:
451 |             yield elem
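The TestInitializeGenerator cases above describe the observable contract of the simplevcs.initialize_generator decorator: the wrapped generator yields an '__init__' marker first, the wrapper consumes that marker before handing the generator to the caller, and a missing or wrong marker raises ValueError. A minimal sketch consistent with those cases (an assumption for illustration, not the RhodeCode implementation) could look like this:

    import functools

    def initialize_generator(func):
        # Hypothetical sketch: eagerly pull the '__init__' marker so that any
        # setup code inside the generator runs before iteration starts.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            gen = func(*args, **kwargs)
            try:
                marker = next(gen)
            except StopIteration:
                raise ValueError('generator yielded no __init__ marker')
            if marker != '__init__':
                raise ValueError("first yielded value must be '__init__'")
            return gen
        return wrapper

    @initialize_generator
    def example():
        yield '__init__'  # setup is finished once this is yielded
        yield 1
        yield 2

    assert list(example()) == [1, 2]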
@@ -1,978 +1,978 b''
1 |
2 | # Copyright (C) 2010-2023 RhodeCode GmbH
3 | #
4 | # This program is free software: you can redistribute it and/or modify
5 | # it under the terms of the GNU Affero General Public License, version 3
6 | # (only), as published by the Free Software Foundation.
7 | #
8 | # This program is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU Affero General Public License
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 | #
16 | # This program is dual-licensed. If you wish to learn more about the
17 | # RhodeCode Enterprise Edition, including its added features, Support services,
18 | # and proprietary license terms, please see https://rhodecode.com/licenses/
19 |
20 | import mock
21 | import pytest
22 | import textwrap
23 |
24 | import rhodecode
25 | from rhodecode.lib.vcs.backends import get_backend
26 | from rhodecode.lib.vcs.backends.base import (
27 |     MergeResponse, MergeFailureReason, Reference)
28 | from rhodecode.lib.vcs.exceptions import RepositoryError
29 | from rhodecode.lib.vcs.nodes import FileNode
30 | from rhodecode.model.comment import CommentsModel
31 | from rhodecode.model.db import PullRequest, Session
32 | from rhodecode.model.pull_request import PullRequestModel
33 | from rhodecode.model.user import UserModel
34 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN
35 | from rhodecode.lib.str_utils import safe_str
36 |
37 | pytestmark = [
38 |     pytest.mark.backends("git", "hg"),
39 | ]
40 |
41 |
42 | @pytest.mark.usefixtures('config_stub')
43 | class TestPullRequestModel(object):
44 |
45 |     @pytest.fixture()
46 |     def pull_request(self, request, backend, pr_util):
47 | """ |
|
47 | """ | |
48 | A pull request combined with multiples patches. |
|
48 | A pull request combined with multiples patches. | |
49 | """ |
|
49 | """ | |
50 |         BackendClass = get_backend(backend.alias)
51 |         merge_resp = MergeResponse(
52 |             False, False, None, MergeFailureReason.UNKNOWN,
53 |             metadata={'exception': 'MockError'})
54 |         self.merge_patcher = mock.patch.object(
55 |             BackendClass, 'merge', return_value=merge_resp)
56 |         self.workspace_remove_patcher = mock.patch.object(
57 |             BackendClass, 'cleanup_merge_workspace')
58 |
59 |         self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 |         self.merge_mock = self.merge_patcher.start()
61 |         self.comment_patcher = mock.patch(
62 |             'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 |         self.comment_patcher.start()
64 |         self.notification_patcher = mock.patch(
65 |             'rhodecode.model.notification.NotificationModel.create')
66 |         self.notification_patcher.start()
67 |         self.helper_patcher = mock.patch(
68 |             'rhodecode.lib.helpers.route_path')
69 |         self.helper_patcher.start()
70 |
71 |         self.hook_patcher = mock.patch.object(PullRequestModel,
72 |                                               'trigger_pull_request_hook')
73 |         self.hook_mock = self.hook_patcher.start()
74 |
75 |         self.invalidation_patcher = mock.patch(
76 |             'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 |         self.invalidation_mock = self.invalidation_patcher.start()
78 |
79 |         self.pull_request = pr_util.create_pull_request(
80 |             mergeable=True, name_suffix=u'ąć')
81 |         self.source_commit = self.pull_request.source_ref_parts.commit_id
82 |         self.target_commit = self.pull_request.target_ref_parts.commit_id
83 |         self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 |         self.repo_id = self.pull_request.target_repo.repo_id
85 |
86 |         @request.addfinalizer
87 |         def cleanup_pull_request():
88 |             calls = [mock.call(
89 |                 self.pull_request, self.pull_request.author, 'create')]
90 |             self.hook_mock.assert_has_calls(calls)
91 |
92 |             self.workspace_remove_patcher.stop()
93 |             self.merge_patcher.stop()
94 |             self.comment_patcher.stop()
95 |             self.notification_patcher.stop()
96 |             self.helper_patcher.stop()
97 |             self.hook_patcher.stop()
98 |             self.invalidation_patcher.stop()
99 |
100 |         return self.pull_request
101 |
102 |     def test_get_all(self, pull_request):
103 |         prs = PullRequestModel().get_all(pull_request.target_repo)
104 |         assert isinstance(prs, list)
105 |         assert len(prs) == 1
106 |
107 |     def test_count_all(self, pull_request):
108 |         pr_count = PullRequestModel().count_all(pull_request.target_repo)
109 |         assert pr_count == 1
110 |
111 |     def test_get_awaiting_review(self, pull_request):
112 |         prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
113 |         assert isinstance(prs, list)
114 |         assert len(prs) == 1
115 |
116 |     def test_count_awaiting_review(self, pull_request):
117 |         pr_count = PullRequestModel().count_awaiting_review(
118 |             pull_request.target_repo)
119 |         assert pr_count == 1
120 |
121 |     def test_get_awaiting_my_review(self, pull_request):
122 |         PullRequestModel().update_reviewers(
123 |             pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
124 |             pull_request.author)
125 |         Session().commit()
126 |
127 |         prs = PullRequestModel().get_awaiting_my_review(
128 |             pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
129 |         assert isinstance(prs, list)
130 |         assert len(prs) == 1
131 |
132 |     def test_count_awaiting_my_review(self, pull_request):
133 |         PullRequestModel().update_reviewers(
134 |             pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
135 |             pull_request.author)
136 |         Session().commit()
137 |
138 |         pr_count = PullRequestModel().count_awaiting_my_review(
139 |             pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
140 |         assert pr_count == 1
141 |
142 |     def test_delete_calls_cleanup_merge(self, pull_request):
143 |         repo_id = pull_request.target_repo.repo_id
144 |         PullRequestModel().delete(pull_request, pull_request.author)
145 |         Session().commit()
146 |
147 |         self.workspace_remove_mock.assert_called_once_with(
148 |             repo_id, self.workspace_id)
149 |
150 |     def test_close_calls_cleanup_and_hook(self, pull_request):
151 |         PullRequestModel().close_pull_request(
152 |             pull_request, pull_request.author)
153 |         Session().commit()
154 |
155 |         repo_id = pull_request.target_repo.repo_id
156 |
157 |         self.workspace_remove_mock.assert_called_once_with(
158 |             repo_id, self.workspace_id)
159 |         self.hook_mock.assert_called_with(
160 |             self.pull_request, self.pull_request.author, 'close')
161 |
162 |     def test_merge_status(self, pull_request):
163 |         self.merge_mock.return_value = MergeResponse(
164 |             True, False, None, MergeFailureReason.NONE)
165 |
166 |         assert pull_request._last_merge_source_rev is None
167 |         assert pull_request._last_merge_target_rev is None
168 |         assert pull_request.last_merge_status is None
169 |
170 |         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
171 |         assert status is True
172 |         assert msg == 'This pull request can be automatically merged.'
173 |         self.merge_mock.assert_called_with(
174 |             self.repo_id, self.workspace_id,
175 |             pull_request.target_ref_parts,
176 |             pull_request.source_repo.scm_instance(),
177 |             pull_request.source_ref_parts, dry_run=True,
178 |             use_rebase=False, close_branch=False)
179 |
180 |         assert pull_request._last_merge_source_rev == self.source_commit
181 |         assert pull_request._last_merge_target_rev == self.target_commit
182 |         assert pull_request.last_merge_status is MergeFailureReason.NONE
183 |
184 |         self.merge_mock.reset_mock()
185 |         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
186 |         assert status is True
187 |         assert msg == 'This pull request can be automatically merged.'
188 |         assert self.merge_mock.called is False
189 |
190 |     def test_merge_status_known_failure(self, pull_request):
191 |         self.merge_mock.return_value = MergeResponse(
192 |             False, False, None, MergeFailureReason.MERGE_FAILED,
193 |             metadata={'unresolved_files': 'file1'})
194 |
195 |         assert pull_request._last_merge_source_rev is None
196 |         assert pull_request._last_merge_target_rev is None
197 |         assert pull_request.last_merge_status is None
198 |
199 |         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
200 |         assert status is False
201 |         assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
202 |         self.merge_mock.assert_called_with(
203 |             self.repo_id, self.workspace_id,
204 |             pull_request.target_ref_parts,
205 |             pull_request.source_repo.scm_instance(),
206 |             pull_request.source_ref_parts, dry_run=True,
207 |             use_rebase=False, close_branch=False)
208 |
209 |         assert pull_request._last_merge_source_rev == self.source_commit
210 |         assert pull_request._last_merge_target_rev == self.target_commit
211 |         assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
212 |
213 |         self.merge_mock.reset_mock()
214 |         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
215 |         assert status is False
216 |         assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
217 |         assert self.merge_mock.called is False
218 |
219 |     def test_merge_status_unknown_failure(self, pull_request):
220 |         self.merge_mock.return_value = MergeResponse(
221 |             False, False, None, MergeFailureReason.UNKNOWN,
222 |             metadata={'exception': 'MockError'})
223 |
224 |         assert pull_request._last_merge_source_rev is None
225 |         assert pull_request._last_merge_target_rev is None
226 |         assert pull_request.last_merge_status is None
227 |
228 |         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
229 |         assert status is False
230 |         assert msg == (
231 |             'This pull request cannot be merged because of an unhandled exception. '
232 |             'MockError')
233 |         self.merge_mock.assert_called_with(
234 |             self.repo_id, self.workspace_id,
235 |             pull_request.target_ref_parts,
236 |             pull_request.source_repo.scm_instance(),
237 |             pull_request.source_ref_parts, dry_run=True,
238 |             use_rebase=False, close_branch=False)
239 |
240 |         assert pull_request._last_merge_source_rev is None
241 |         assert pull_request._last_merge_target_rev is None
242 |         assert pull_request.last_merge_status is None
243 |
244 |         self.merge_mock.reset_mock()
245 |         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
246 |         assert status is False
247 |         assert msg == (
248 |             'This pull request cannot be merged because of an unhandled exception. '
249 |             'MockError')
250 |         assert self.merge_mock.called is True
251 |
252 |     def test_merge_status_when_target_is_locked(self, pull_request):
253 |         pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
254 |         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
255 |         assert status is False
256 |         assert msg == (
257 |             'This pull request cannot be merged because the target repository '
258 |             'is locked by user:1.')
259 |
260 |     def test_merge_status_requirements_check_target(self, pull_request):
261 |
262 |         def has_largefiles(self, repo):
263 |             return repo == pull_request.source_repo
264 |
265 |         patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
266 |         with patcher:
267 |             merge_response, status, msg = PullRequestModel().merge_status(pull_request)
268 |
269 |         assert status is False
270 |         assert msg == 'Target repository large files support is disabled.'
271 |
272 |     def test_merge_status_requirements_check_source(self, pull_request):
273 |
274 |         def has_largefiles(self, repo):
275 |             return repo == pull_request.target_repo
276 |
277 |         patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
278 |         with patcher:
279 |             merge_response, status, msg = PullRequestModel().merge_status(pull_request)
280 |
281 |         assert status is False
282 |         assert msg == 'Source repository large files support is disabled.'
283 |
284 |     def test_merge(self, pull_request, merge_extras):
285 |         user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
286 |         merge_ref = Reference(
287 |             'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
288 |         self.merge_mock.return_value = MergeResponse(
289 |             True, True, merge_ref, MergeFailureReason.NONE)
290 |
291 |         merge_extras['repository'] = pull_request.target_repo.repo_name
292 |         PullRequestModel().merge_repo(
293 |             pull_request, pull_request.author, extras=merge_extras)
294 |         Session().commit()
295 |
296 |         message = (
297 |             u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
298 |             u'\n\n {pr_title}'.format(
299 |                 pr_id=pull_request.pull_request_id,
300 |                 source_repo=safe_str(
301 |                     pull_request.source_repo.scm_instance().name),
302 |                 source_ref_name=pull_request.source_ref_parts.name,
303 |                 pr_title=safe_str(pull_request.title)
304 |             )
305 |         )
306 |         self.merge_mock.assert_called_with(
307 |             self.repo_id, self.workspace_id,
308 |             pull_request.target_ref_parts,
309 |             pull_request.source_repo.scm_instance(),
310 |             pull_request.source_ref_parts,
311 |             user_name=user.short_contact, user_email=user.email, message=message,
312 |             use_rebase=False, close_branch=False
313 |         )
314 |         self.invalidation_mock.assert_called_once_with(
315 |             pull_request.target_repo.repo_name)
316 |
317 |         self.hook_mock.assert_called_with(
318 |             self.pull_request, self.pull_request.author, 'merge')
319 |
320 |         pull_request = PullRequest.get(pull_request.pull_request_id)
321 |         assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
322 |
323 |     def test_merge_with_status_lock(self, pull_request, merge_extras):
324 |         user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
325 |         merge_ref = Reference(
326 |             'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
327 |         self.merge_mock.return_value = MergeResponse(
328 |             True, True, merge_ref, MergeFailureReason.NONE)
329 |
330 |         merge_extras['repository'] = pull_request.target_repo.repo_name
331 |
332 |         with pull_request.set_state(PullRequest.STATE_UPDATING):
333 |             assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
334 |             PullRequestModel().merge_repo(
335 |                 pull_request, pull_request.author, extras=merge_extras)
336 |             Session().commit()
337 |
338 |         assert pull_request.pull_request_state == PullRequest.STATE_CREATED
339 |
340 |         message = (
341 |             u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
342 |             u'\n\n {pr_title}'.format(
343 |                 pr_id=pull_request.pull_request_id,
344 |                 source_repo=safe_str(
345 |                     pull_request.source_repo.scm_instance().name),
346 |                 source_ref_name=pull_request.source_ref_parts.name,
347 |                 pr_title=safe_str(pull_request.title)
348 |             )
349 |         )
350 |         self.merge_mock.assert_called_with(
351 |             self.repo_id, self.workspace_id,
352 |             pull_request.target_ref_parts,
353 |             pull_request.source_repo.scm_instance(),
354 |             pull_request.source_ref_parts,
355 |             user_name=user.short_contact, user_email=user.email, message=message,
356 |             use_rebase=False, close_branch=False
357 |         )
358 |         self.invalidation_mock.assert_called_once_with(
359 |             pull_request.target_repo.repo_name)
360 |
361 |         self.hook_mock.assert_called_with(
362 |             self.pull_request, self.pull_request.author, 'merge')
363 |
364 |         pull_request = PullRequest.get(pull_request.pull_request_id)
365 |         assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
366 |
367 |     def test_merge_failed(self, pull_request, merge_extras):
368 |         user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
369 |         merge_ref = Reference(
370 |             'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
371 |         self.merge_mock.return_value = MergeResponse(
372 |             False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
373 |
374 |         merge_extras['repository'] = pull_request.target_repo.repo_name
375 |         PullRequestModel().merge_repo(
376 |             pull_request, pull_request.author, extras=merge_extras)
377 |         Session().commit()
378 |
379 |         message = (
380 |             u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
381 |             u'\n\n {pr_title}'.format(
382 |                 pr_id=pull_request.pull_request_id,
383 |                 source_repo=safe_str(
384 |                     pull_request.source_repo.scm_instance().name),
385 |                 source_ref_name=pull_request.source_ref_parts.name,
386 |                 pr_title=safe_str(pull_request.title)
387 |             )
388 |         )
389 |         self.merge_mock.assert_called_with(
390 |             self.repo_id, self.workspace_id,
391 |             pull_request.target_ref_parts,
392 |             pull_request.source_repo.scm_instance(),
393 |             pull_request.source_ref_parts,
394 |             user_name=user.short_contact, user_email=user.email, message=message,
395 |             use_rebase=False, close_branch=False
396 |         )
397 |
398 |         pull_request = PullRequest.get(pull_request.pull_request_id)
399 |         assert self.invalidation_mock.called is False
400 |         assert pull_request.merge_rev is None
401 |
    def test_get_commit_ids(self, pull_request):
        # The PR has not been merged yet, so expect an exception
        with pytest.raises(ValueError):
            PullRequestModel()._get_commit_ids(pull_request)

        # Merge revision is in the revisions list
        pull_request.merge_rev = pull_request.revisions[0]
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions

        # Merge revision is not in the revisions list
        pull_request.merge_rev = 'f000' * 10
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions + [pull_request.merge_rev]

    def test_get_diff_from_pr_version(self, pull_request):
        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id
        diff = PullRequestModel()._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=False, diff_context=6)
        assert b'file_1' in diff.raw.tobytes()

    def test_generate_title_returns_unicode(self):
        title = PullRequestModel().generate_pullrequest_title(
            source='source-dummy',
            source_ref='source-ref-dummy',
            target='target-dummy',
        )
        assert type(title) == str

    @pytest.mark.parametrize('title, has_wip', [
        ('hello', False),
        ('hello wip', False),
        ('hello wip: xxx', False),
        ('[wip] hello', True),
        ('[wip] hello', True),
        ('wip: hello', True),
        ('wip hello', True),

    ])
    def test_wip_title_marker(self, pull_request, title, has_wip):
        pull_request.title = title
        assert pull_request.work_in_progress == has_wip


@pytest.mark.usefixtures('config_stub')
class TestIntegrationMerge(object):
    @pytest.mark.parametrize('extra_config', (
        {'vcs.hooks.protocol.v2': 'celery', 'vcs.hooks.direct_calls': False},
    ))
    def test_merge_triggers_push_hooks(
            self, pr_util, user_admin, capture_rcextensions, merge_extras,
            extra_config):

        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
            merge_state = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert merge_state.executed
        assert '_pre_push_hook' in capture_rcextensions
        assert '_push_hook' in capture_rcextensions

    def test_merge_can_be_rejected_by_pre_push_hook(
            self, pr_util, user_admin, capture_rcextensions, merge_extras):
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
            pre_pull.side_effect = RepositoryError("Disallow push!")
            merge_status = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert not merge_status.executed
        assert 'pre_push' not in capture_rcextensions
        assert 'post_push' not in capture_rcextensions

    def test_merge_fails_if_target_is_locked(
            self, pr_util, user_regular, merge_extras):
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
        pull_request.target_repo.locked = locked_by
        # TODO: johbo: Check if this can work based on the database, currently
        # all data is pre-computed, that's why just updating the DB is not
        # enough.
        merge_extras['locked_by'] = locked_by
        merge_extras['repository'] = pull_request.target_repo.repo_name
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        Session().commit()
        merge_status = PullRequestModel().merge_repo(
            pull_request, user_regular, extras=merge_extras)
        Session().commit()

        assert not merge_status.executed


@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
    (False, 1, 0),
    (True, 0, 1),
])
def test_outdated_comments(
        pr_util, use_outdated, inlines_count, outdated_count, config_stub):
    pull_request = pr_util.create_pull_request()
    pr_util.create_inline_comment(file_path='not_in_updated_diff')

    with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
        pr_util.add_one_commit()
        assert_inline_comments(
            pull_request, visible=inlines_count, outdated=outdated_count)
        outdated_comment_mock.assert_called_with(pull_request)


@pytest.mark.parametrize('mr_type, expected_msg', [
    (MergeFailureReason.NONE,
     'This pull request can be automatically merged.'),
    (MergeFailureReason.UNKNOWN,
     'This pull request cannot be merged because of an unhandled exception. CRASH'),
    (MergeFailureReason.MERGE_FAILED,
     'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
    (MergeFailureReason.PUSH_FAILED,
     'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
    (MergeFailureReason.TARGET_IS_NOT_HEAD,
     'This pull request cannot be merged because the target `ref_name` is not a head.'),
    (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
     'This pull request cannot be merged because the source contains more branches than the target.'),
    (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
     'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
    (MergeFailureReason.TARGET_IS_LOCKED,
     'This pull request cannot be merged because the target repository is locked by user:123.'),
    (MergeFailureReason.MISSING_TARGET_REF,
     'This pull request cannot be merged because the target reference `ref_name` is missing.'),
    (MergeFailureReason.MISSING_SOURCE_REF,
     'This pull request cannot be merged because the source reference `ref_name` is missing.'),
    (MergeFailureReason.SUBREPO_MERGE_FAILED,
     'This pull request cannot be merged because of conflicts related to sub repositories.'),

])
def test_merge_response_message(mr_type, expected_msg):
    merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
    metadata = {
        'unresolved_files': 'CONFLICT_FILE',
        'exception': "CRASH",
        'target': 'some-repo',
        'merge_commit': 'merge_commit',
        'target_ref': merge_ref,
        'source_ref': merge_ref,
        'heads': ','.join(['a', 'b', 'c']),
        'locked_by': 'user:123'
    }

    merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
    assert merge_response.merge_status_message == expected_msg


@pytest.fixture()
def merge_extras(user_regular):
    """
    Context for the vcs operation when running a merge.
    """
    extras = {
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'user_id': user_regular.user_id,
        'action': 'push',
        'repository': 'fake_target_repo_name',
        'scm': 'git',
        'config': 'fake_config_ini_path',
        'repo_store': '',
        'make_lock': None,
        'locked_by': [None, None, None],
        'server_url': 'http://test.example.com:5000',
        'hooks': ['push', 'pull'],
        'is_shadow_repo': False,
    }
    return extras


@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()

        @request.addfinalizer
        def cleanup():
            config_patch.stop()

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
            {'message': 'c', 'added': [FileNode(b'file_c', b'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        original_content = b''.join((b'line %d\n' % x for x in range(1, 11)))
        updated_content = b'new_line_at_top\n' + original_content
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        original_content = b''.join([b'line %d\n' % x for x in range(10)])
        updated_content = original_content + b'new_line_at_end\n'
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        base_lines = [b'line %d\n' % x for x in range(1, 13)]
        change_lines = list(base_lines)
        change_lines.insert(6, b'line 6a added\n')

        # Changes on the last line of sight
        update_lines = list(change_lines)
        update_lines[0] = b'line 1 changed\n'
        update_lines[-1] = b'line 12 changed\n'

        def file_b(lines):
            return FileNode(b'file_b', b''.join(lines))

        commits = [
            {'message': 'a', 'added': [file_b(base_lines)]},
            {'message': 'b', 'changed': [file_b(change_lines)]},
            {'message': 'c', 'changed': [file_b(update_lines)]},
        ]

        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)

    @pytest.mark.parametrize("change, content", [
        ('changed', b'changed\n'),
        ('removed', b''),
    ], ids=['changed', b'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
            {'message': 'c', change: [FileNode(b'file_b', content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)


@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):

    def test_no_changes_on_unchanged_diff(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode(b'file_b', b'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode(b'file_c', b'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR adding new file file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=['file_c'],
            modified=[],
            removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode(b'file_b', b'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode(b'file_b', b'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode(b'file_b', b'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR modifying file file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_b'],
            removed=[])

        # move the head again to d, which rolls back the change,
        # meaning we should indicate no changes
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode(b'file_a', b'test_content a\n'),
                FileNode(b'file_b', b'test_content b\n'),
                FileNode(b'file_c', b'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode(b'file_a', b'test_content a changed\n'),
                FileNode(b'file_b', b'test_content b changed\n'),
                FileNode(b'file_c', b'test_content c changed\n')]},
        ]
        # open a PR from a to b, changing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode(b'file_a', b'test_content a\n'),
                FileNode(b'file_b', b'test_content b\n'),
                FileNode(b'file_c', b'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode(b'file_a', b'test_content a changed\n'),
                FileNode(b'file_b', b'test_content b changed\n'),
                FileNode(b'file_c', b'test_content c changed\n')]},
        ]
        # open a PR from a to b, removing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])


def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()

    model.update_commits(pull_request, pull_request.author)

    # Expect that it has a version entry now
    assert len(model.get_versions(pull_request)) == 1


def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    pull_request = pr_util.create_pull_request()
    model = PullRequestModel()
    model.update_commits(pull_request, pull_request.author)

    # Expect that it still has no versions
    assert len(model.get_versions(pull_request)) == 0


def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model.update_commits(pull_request, pull_request.author)

    # Expect that the comment is linked to the pr version now
    assert comment.pull_request_version == model.get_versions(pull_request)[0]


def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    update_response = model.update_commits(pull_request, pull_request.author)

    commit_id = update_response.common_ancestor_id
    # Expect to find a new comment about the change
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c-{}-92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    ).format(commit_id[:12])
    pull_request_comments = sorted(
        pull_request.comments, key=lambda c: c.modified_at)
    update_comment = pull_request_comments[-1]
    assert update_comment.text == expected_message


def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    pull_request = pr_util.create_pull_request()

    # Avoiding default values
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Remember automatic values
    created_on = pull_request.created_on
    updated_on = pull_request.updated_on

    # Create a new version of the pull request
    version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # Check attributes
    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # versions get updated created_on
    assert version.created_on != created_on

    assert version.updated_on == updated_on
    assert version.user_id == pull_request.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert version.last_merge_status == pull_request.last_merge_status
    assert version.merge_rev == pull_request.merge_rev
    assert version.pull_request == pull_request


def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    version1 = pr_util.create_version_of_pull_request()
    comment_linked = pr_util.create_comment(linked_to=version1)
    comment_unlinked = pr_util.create_comment()
    version2 = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(version2)
    Session().commit()

    # Expect that only the new comment is linked to version2
    assert (
        comment_unlinked.pull_request_version_id ==
        version2.pull_request_version_id)
    assert (
        comment_linked.pull_request_version_id ==
        version1.pull_request_version_id)
    assert (
        comment_unlinked.pull_request_version_id !=
        comment_linked.pull_request_version_id)


def test_calculate_commits():
    old_ids = [1, 2, 3]
    new_ids = [1, 3, 4, 5]
    change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]


def assert_inline_comments(pull_request, visible=None, outdated=None):
    if visible is not None:
        inline_comments = CommentsModel().get_inline_comments(
            pull_request.target_repo.repo_id, pull_request=pull_request)
        inline_cnt = len(CommentsModel().get_inline_comments_as_list(
            inline_comments))
        assert inline_cnt == visible
    if outdated is not None:
        outdated_comments = CommentsModel().get_outdated_comments(
            pull_request.target_repo.repo_id, pull_request)
        assert len(outdated_comments) == outdated


def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    pr_versions = PullRequestModel().get_versions(pull_request)
    # always use first version, ie original PR to calculate changes
    pull_request_version = pr_versions[0]
    old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
        pull_request, pull_request_version)
    file_changes = PullRequestModel()._calculate_file_changes(
        old_diff_data, new_diff_data)

    assert added == file_changes.added, \
        'expected added:%s vs value:%s' % (added, file_changes.added)
    assert modified == file_changes.modified, \
        'expected modified:%s vs value:%s' % (modified, file_changes.modified)
    assert removed == file_changes.removed, \
        'expected removed:%s vs value:%s' % (removed, file_changes.removed)


def outdated_comments_patcher(use_outdated=True):
    return mock.patch.object(
        CommentsModel, 'use_outdated_comments',
        return_value=use_outdated)