@@ -1,871 +1,877 @@

; #########################################
; RHODECODE COMMUNITY EDITION CONFIGURATION
; #########################################

[DEFAULT]
; Debug flag sets all loggers to debug, and enables request tracking
debug = true

; ########################################################################
; EMAIL CONFIGURATION
; These settings will be used by the RhodeCode mailing system
; ########################################################################

; prefix all email subjects with given prefix, helps filtering out emails
#email_prefix = [RhodeCode]

; email FROM address all mails will be sent from
#app_email_from = rhodecode-noreply@localhost

#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true

[server:main]
; COMMON HOST/IP CONFIG, this applies mostly to the development setup.
; Host and port for gunicorn are controlled by gunicorn_conf.py
host = 127.0.0.1
port = 10020

; ##################################################
; WAITRESS WSGI SERVER - Recommended for Development
; ##################################################

; use server type
use = egg:waitress#main

; number of worker threads
threads = 5

; MAX BODY SIZE 100GB
max_request_body_size = 107374182400

; Use poll instead of select; fixes file descriptor limit problems.
; May not work on old Windows systems.
asyncore_use_poll = true


; ###########################
; GUNICORN APPLICATION SERVER
; ###########################

; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py

; Module to use, this setting shouldn't be changed
#use = egg:gunicorn#main

; Prefix middleware for RhodeCode.
; recommended when using a proxy setup.
; allows serving RhodeCode under a prefix on the server,
; e.g. https://server.com/custom_prefix. Enable the `filter-with =` option below as well,
; and set your prefix like: `prefix = /custom_prefix`.
; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
; your cookies to only work on the prefix url
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /
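; a hypothetical /custom_prefix setup (illustration only) would combine:
;   prefix = /custom_prefix         (here)
;   filter-with = proxy-prefix      (uncommented in [app:main] below)
;   beaker.session.cookie_path = /custom_prefix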

[app:main]
; The %(here)s variable will be replaced with the absolute path of the parent
; directory of this file.
; Each option in app:main can be overridden by an environment variable.
;
; To override an option:
;
; RC_<KeyName>
; Everything should be uppercase; . and - should be replaced by _.
; For example, if you have this configuration setting:
; rc_cache.repo_object.backend = foo
; it can be overridden by
; export RC_CACHE_REPO_OBJECT_BACKEND=foo
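; The same mapping applies to any dotted or dashed key in this file, for
; instance (illustration only):
; export RC_USE_CELERY=true
; export RC_BEAKER_SESSION_TIMEOUT=2592000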

use = egg:rhodecode-enterprise-ce

; enable proxy prefix middleware, defined above
#filter-with = proxy-prefix

; #############
; DEBUG OPTIONS
; #############

pyramid.reload_templates = true

# During development we want to have the debug toolbar enabled
pyramid.includes =
    pyramid_debugtoolbar

debugtoolbar.hosts = 0.0.0.0/0
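; 0.0.0.0/0 exposes the toolbar to any client address; a narrower CIDR such
; as 127.0.0.1/32 (illustration) would restrict it to local requests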
debugtoolbar.exclude_prefixes =
    /css
    /fonts
    /images
    /js

## RHODECODE PLUGINS ##
rhodecode.includes =
    rhodecode.api


# api prefix url
rhodecode.api.url = /_admin/api

; enable debug style page
debug_style = true

; #################
; END DEBUG OPTIONS
; #################

; encryption key used to encrypt social plugin tokens,
; remote_urls with credentials etc, if not set it defaults to
; `beaker.session.secret`
#rhodecode.encrypted_values.secret =

; decryption strict mode (enabled by default). It controls if decryption raises
; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
#rhodecode.encrypted_values.strict = false

; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
; fernet is safer, and we strongly recommend switching to it.
; Due to backward compatibility aes is used as default.
#rhodecode.encrypted_values.algorithm = fernet

; Return gzipped responses from RhodeCode (static files/application)
gzip_responses = false

; Auto-generate javascript routes file on startup
generate_js_files = false

; System global default language.
; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

; Perform a full repository scan and import on each server start.
; Setting this to true could lead to very long startup time.
startup.import_repos = false

; URL at which the application is running. This is used for bootstrapping
; requests in context when no web request is available. Used in ishell, or
; SSH calls. Set this for events to receive a proper url for SSH calls.
app.base_url = http://rhodecode.local

; Host at which the Service API is running.
app.service_api.host = http://rhodecode.local:10020

; Secret for Service API authentication.
app.service_api.token =

; Unique application ID. Should be a random unique string for security.
app_instance_uuid = rc-production

; Cut off limit for large diffs (size in bytes). If the overall diff size of a
; commit or pull request exceeds this limit, the diff will be displayed
; partially. E.g 512000 == 512Kb
cut_off_limit_diff = 512000

; Cut off limit for large files inside diffs (size in bytes). Each individual
; file inside a diff which exceeds this limit will be displayed partially.
; E.g 128000 == 128Kb
cut_off_limit_file = 128000

; Use cached version of vcs repositories everywhere. Recommended to be `true`
vcs_full_cache = true

; Force https in RhodeCode, fixes https redirects, assumes it's always https.
; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
force_https = false

; use Strict-Transport-Security headers
use_htsts = false

; Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

; RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

; gist URL alias, used to create nicer urls for gist. This should be an
; url that rewrites to _admin/gists/{gistid},
; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
; RhodeCode url, i.e. http[s]://rhodecode.server/_admin/gists/{gistid}
gist_alias_url =

; List of views (using glob pattern syntax) that AUTH TOKENS could be
; used for access.
; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
; came from the logged-in user who owns this authentication token.
; Additionally, the @TOKEN syntax can be used to bind a view to a specific
; authentication token. Such a view would only be accessible when used together
; with this authentication token.
; A list of all views can be found under `/_admin/permissions/auth_token_access`
; The list should be "," separated and on a single line.
; Most common views to enable:

# RepoCommitsView:repo_commit_download
# RepoCommitsView:repo_commit_patch
# RepoCommitsView:repo_commit_raw
# RepoCommitsView:repo_commit_raw@TOKEN
# RepoFilesView:repo_files_diff
# RepoFilesView:repo_archivefile
# RepoFilesView:repo_file_raw
# GistView:*
api_access_controllers_whitelist =
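; e.g. to allow token access to raw files and all gist views (illustration):
;api_access_controllers_whitelist = RepoFilesView:repo_file_raw, GistView:*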

; Default encoding used to convert from and to unicode
; can also be a comma separated list of encodings in case of mixed encodings
default_encoding = UTF-8

; instance-id prefix
; a prefix key for this instance used for cache invalidation when running
; multiple instances of RhodeCode, make sure it's globally unique for
; all running RhodeCode instances. Leave empty if you don't use it
instance_id =

; Fallback authentication plugin. Set this to a plugin ID to force the usage
; of an authentication plugin even if it is disabled by its settings.
; This could be useful if you are unable to log in to the system due to broken
; authentication settings. Then you can enable e.g. the internal RhodeCode auth
; module to log in again and fix the settings.
; Available builtin plugin IDs (hash is part of the ID):
; egg:rhodecode-enterprise-ce#rhodecode
; egg:rhodecode-enterprise-ce#pam
; egg:rhodecode-enterprise-ce#ldap
; egg:rhodecode-enterprise-ce#jasig_cas
; egg:rhodecode-enterprise-ce#headers
; egg:rhodecode-enterprise-ce#crowd

#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode

; Flag to control loading of legacy plugins in py:/path format
auth_plugin.import_legacy_plugins = true

; alternative return HTTP header for failed authentication. The default HTTP
; response is 401 HTTPUnauthorized. Currently HG clients have trouble
; handling that, causing a series of failed authentication calls.
; Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
; This will be served instead of the default 401 on bad authentication.
auth_ret_code =

; use special detection method when serving auth_ret_code; instead of serving
; ret_code directly, use 401 initially (which triggers a credentials prompt)
; and then serve auth_ret_code to clients
auth_ret_code_detection = false

; locking return code. When a repository is locked, return this HTTP code. 2XX
; codes don't break the transactions while 4XX codes do
lock_ret_code = 423

; allows changing the repository location on the settings page
allow_repo_location_change = true

; allows setting up custom hooks on the settings page
allow_custom_hooks_settings = true

; Generated license token, required for the EE edition license.
; A newly generated token value can be found in Admin > settings > license page.
license_token =

; This flag hides sensitive information on the license page such as token, and license data
license.hide_license_info = false

; supervisor connection uri, for managing supervisor and logs.
supervisor.uri =

; supervisord group name/id we only want this RC instance to handle
supervisor.group_id = dev

; Display extended labs settings
labs_settings_active = true

; Custom exception store path, defaults to TMPDIR
; This is used to store exceptions from RhodeCode in a shared directory
#exception_tracker.store_path =

; Send email with exception details when it happens
#exception_tracker.send_email = false

; Comma separated list of recipients for exception emails,
; e.g admin@rhodecode.com,devops@rhodecode.com
; Can be left empty, then emails will be sent to ALL super-admins
#exception_tracker.send_email_recipients =

; optional prefix to add to the email subject
#exception_tracker.email_prefix = [RHODECODE ERROR]

; File store configuration. This is used to store and serve uploaded files
file_store.enabled = true

; Storage backend, available options are: local
file_store.backend = local

; path to store the uploaded binaries
file_store.storage_path = %(here)s/data/file_store

; Uncomment and set this path to control settings for archive download cache.
; Generated repo archives will be cached at this location
; and served from the cache during subsequent requests for the same archive of
; the repository. It is important that this path is shared across filesystems
; and accessible to both RhodeCode and vcsserver

; Default is $cache_dir/archive_cache if not set
archive_cache.store_dir = %(here)s/data/archive_cache

; The limit in GB sets how much data we cache before recycling the least recently used entries; defaults to 10 GB
archive_cache.cache_size_gb = 10

; By default the cache uses a sharding technique; this specifies how many shards there are
archive_cache.cache_shards = 10
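; as a rough rule, capacity per shard is cache_size_gb / cache_shards, so the
; values above give each shard about 1 GB (illustration)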

; #############
; CELERY CONFIG
; #############

; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini

use_celery = false

; path to store schedule database
#celerybeat-schedule.path =

; connection url to the message broker (default redis)
celery.broker_url = redis://redis:6379/8

; results backend to get results for (default redis)
celery.result_backend = redis://redis:6379/8

; rabbitmq example
#celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost

; maximum tasks to execute before worker restart
celery.max_tasks_per_child = 20

; tasks will never be sent to the queue, but executed locally instead.
celery.task_always_eager = false
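; e.g. for local debugging without a running celery worker (illustration):
#celery.task_always_eager = true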

; #############
; DOGPILE CACHE
; #############

; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
cache_dir = %(here)s/data

; *********************************************
; `sql_cache_short` cache for heavy SQL queries
; Only supported backend is `memory_lru`
; *********************************************
rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
rc_cache.sql_cache_short.expiration_time = 30


; *****************************************************
; `cache_repo_longterm` cache for repo object instances
; Only supported backend is `memory_lru`
; *****************************************************
rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
; by default we use 30 days (2592000 seconds); the cache is still invalidated on push
rc_cache.cache_repo_longterm.expiration_time = 2592000
; max items in LRU cache; set to a smaller number to save memory and expire least recently used caches
rc_cache.cache_repo_longterm.max_size = 10000


; *********************************************
; `cache_general` cache for general purpose use
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *********************************************
rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_general.expiration_time = 43200
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_general.arguments.filename = /tmp/cache_general_db

; alternative `cache_general` redis backend with distributed lock
#rc_cache.cache_general.backend = dogpile.cache.rc.redis
#rc_cache.cache_general.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_general.arguments.redis_expiration_time = 7200

#rc_cache.cache_general.arguments.host = localhost
#rc_cache.cache_general.arguments.port = 6379
#rc_cache.cache_general.arguments.db = 0
#rc_cache.cache_general.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_general.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_general.arguments.lock_auto_renewal = true

; *************************************************
; `cache_perms` cache for permission tree, auth TTL
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; *************************************************
rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_perms.expiration_time = 3600
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db

; alternative `cache_perms` redis backend with distributed lock
#rc_cache.cache_perms.backend = dogpile.cache.rc.redis
#rc_cache.cache_perms.expiration_time = 300

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_perms.arguments.redis_expiration_time = 7200

#rc_cache.cache_perms.arguments.host = localhost
#rc_cache.cache_perms.arguments.port = 6379
#rc_cache.cache_perms.arguments.db = 0
#rc_cache.cache_perms.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_perms.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_perms.arguments.lock_auto_renewal = true

; ***************************************************
; `cache_repo` cache for file tree, Readme, RSS FEEDS
; for simplicity use rc.file_namespace backend,
; for performance and scale use rc.redis
; ***************************************************
rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_repo.expiration_time = 2592000
; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
#rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db

; alternative `cache_repo` redis backend with distributed lock
#rc_cache.cache_repo.backend = dogpile.cache.rc.redis
#rc_cache.cache_repo.expiration_time = 2592000

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.cache_repo.arguments.redis_expiration_time = 2678400

#rc_cache.cache_repo.arguments.host = localhost
#rc_cache.cache_repo.arguments.port = 6379
#rc_cache.cache_repo.arguments.db = 1
#rc_cache.cache_repo.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.cache_repo.arguments.distributed_lock = true

; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
#rc_cache.cache_repo.arguments.lock_auto_renewal = true

; ##############
; BEAKER SESSION
; ##############

; beaker.session.type is the type of storage used for the logged-in users' sessions. Currently
; allowed types are file, ext:redis, ext:database, ext:memcached
; Fastest ones are ext:redis and ext:database; DO NOT use the memory type for sessions
beaker.session.type = file
beaker.session.data_dir = %(here)s/data/sessions

; Redis based sessions
#beaker.session.type = ext:redis
#beaker.session.url = redis://127.0.0.1:6379/2
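; if your Redis requires authentication, the standard password form of the
; redis URL should work here (illustration, hypothetical password):
#beaker.session.url = redis://:yourpassword@127.0.0.1:6379/2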

; DB based session, fast, and allows easy management over logged in users
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = develop-rc-uytcxaz
beaker.session.lock_dir = %(here)s/data/sessions/lock

; Secure encrypted cookie. Requires AES and AES python libraries
; you must disable beaker.session.secret to use this
#beaker.session.encrypt_key = key_for_encryption
#beaker.session.validate_key = validation_key

; Sets session as invalid (also logging out the user) if it has not been
; accessed for the given amount of time in seconds
; 2592000 seconds == 30 days
beaker.session.timeout = 2592000
beaker.session.httponly = true

; Path to use for the cookie. Set to prefix if you use prefix middleware
#beaker.session.cookie_path = /custom_prefix

; Set https secure cookie
beaker.session.secure = false

; default cookie expiration time in seconds, set to `true` to set expire
; at browser close
#beaker.session.cookie_expires = 3600

; #############################
; SEARCH INDEXING CONFIGURATION
; #############################

; Full text search indexer is available in rhodecode-tools under
; the `rhodecode-tools index` command

; WHOOSH Backend, doesn't require additional services to run
; it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index

; ####################
; CHANNELSTREAM CONFIG
; ####################

; channelstream enables persistent connections and live notifications
; in the system. It's also used by the chat system

channelstream.enabled = false

; server address for channelstream server on the backend
channelstream.server = 127.0.0.1:9800

; location of the channelstream server from the outside world
; use ws:// for http or wss:// for https. This address needs to be handled
; by an external HTTP server such as Nginx or Apache
; see Nginx/Apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
channelstream.secret = secret
channelstream.history.location = %(here)s/channelstream_history

; Internal application path that Javascript uses to connect to.
; If you use proxy-prefix, the prefix should be added before /_channelstream
channelstream.proxy_path = /_channelstream
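; e.g. with `prefix = /custom_prefix` this becomes (illustration):
#channelstream.proxy_path = /custom_prefix/_channelstream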


; ##############################
; MAIN RHODECODE DATABASE CONFIG
; ##############################

#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
; pymysql is an alternative driver for MySQL, use it in case of problems with the default one
#sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode

sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30

; see sqlalchemy docs for other advanced settings
; print the sql statements to output
sqlalchemy.db1.echo = false

; recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600

; the number of connections to keep open inside the connection pool.
; 0 indicates no limit.
; the general calculus with gevent is:
; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
; then increase pool size + max overflow so that they add up to 500.
#sqlalchemy.db1.pool_size = 5

; The number of connections to allow in connection pool "overflow", that is
; connections that can be opened above and beyond the pool_size setting,
; which defaults to five.
#sqlalchemy.db1.max_overflow = 10
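; e.g. for the 500-greenlet case above, any split that sums to 500 works
; (illustration):
#sqlalchemy.db1.pool_size = 100
#sqlalchemy.db1.max_overflow = 400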

; Connection check ping, used to detect broken database connections
; could be enabled to better handle cases of MySQL "has gone away" errors
#sqlalchemy.db1.ping_connection = true

; ##########
; VCS CONFIG
; ##########
vcs.server.enable = true
vcs.server = localhost:9900

; Web server connectivity protocol, responsible for web based VCS operations
; Available protocols are:
; `http` - use http-rpc backend (default)
vcs.server.protocol = http

; Push/Pull operations protocol, available options are:
; `http` - use http-rpc backend (default)
vcs.scm_app_implementation = http

; Push/Pull operations hooks protocol, available options are:
; `http` - use http-rpc backend (default)
vcs.hooks.protocol = http

; Host on which this instance is listening for hooks. vcsserver will call this host to run
; pull/push hooks, so it should be accessible via the network.
; Use vcs.hooks.host = "*" to bind to the current hostname (for Docker)
vcs.hooks.host = *

; Start VCSServer with this instance as a subprocess, useful for development
vcs.start_server = false

; List of enabled VCS backends, available options are:
; `hg` - mercurial
; `git` - git
; `svn` - subversion
vcs.backends = hg, git, svn

; Wait this number of seconds before killing the connection to the vcsserver
vcs.connection_timeout = 3600

; Compatibility version when creating SVN repositories. Defaults to the newest version when commented out.
; Set a numeric version for your current SVN, e.g 1.8 or 1.12
; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
#vcs.svn.compatible_version = 1.8

; Enable SVN proxy of requests over HTTP
vcs.svn.proxy.enabled = true

; host to connect to the running SVN subsystem
vcs.svn.proxy.host = http://svn:8090

; Cache flag to cache vcsserver remote calls locally
; It uses the cache_region `cache_repo`
vcs.methods.cache = true

; ####################################################
; Subversion proxy support (mod_dav_svn)
; Maps RhodeCode repo groups into SVN paths for Apache
; ####################################################

; Enable or disable the config file generation.
svn.proxy.generate_config = false

; Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true

; Set location and file name of the generated config file.
svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf

; alternative mod_dav config template. This needs to be a valid mako template
; An example template can be found in the source code:
; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
#svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako

; Used as a prefix to the `Location` block in the generated config file.
; In most cases it should be set to `/`.
svn.proxy.location_root = /

; Command to reload the mod dav svn configuration on change.
; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
; Make sure the user who runs the RhodeCode process is allowed to reload Apache
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload

; If the timeout expires before the reload command finishes, the command will
; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10

; ####################
; SSH Support Settings
; ####################

; Defines if a custom authorized_keys file should be created and written on
; any change of user SSH keys. Setting this to false also disables the
; possibility of adding SSH keys by users from the web interface. Super-admins
; can still manage SSH keys.
ssh.generate_authorized_keyfile = false

; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
#ssh.authorized_keys_ssh_opts =

; Path to the authorized_keys file where the generated entries are placed.
; It is possible to have multiple key files specified in `sshd_config` e.g.
; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode

; Command to execute the SSH wrapper. The binary is available in the
; RhodeCode installation directory.
; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper

; Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false

; Enables logging and detailed output sent back to the client during SSH
; operations. Useful for debugging; shouldn't be used in production.
ssh.enable_debug_logging = true

; Paths to binary executables; by default these are just the names, but they
; can be overridden to use custom ones
ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve

; Enables SSH key generator web interface. Disabling this still allows users
; to add their own keys.
ssh.enable_ui_key_generator = true


; #################
; APPENLIGHT CONFIG
; #################

; Appenlight is tailored to work with RhodeCode, see
; http://appenlight.rhodecode.com for details on how to obtain an account

; Appenlight integration enabled
#appenlight = false

#appenlight.server_url = https://api.appenlight.com
#appenlight.api_key = YOUR_API_KEY
#appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5

; used for JS client
#appenlight.api_public_key = YOUR_API_PUBLIC_KEY

; TWEAK AMOUNT OF INFO SENT HERE

; enables 404 error logging (default False)
#appenlight.report_404 = false

; time in seconds after which a request is considered slow (default 1)
#appenlight.slow_request_time = 1

; record slow requests in application
; (needs to be enabled for slow datastore recording and time tracking)
#appenlight.slow_requests = true

; enable hooking to application loggers
#appenlight.logging = true

; minimum log level for log capture
#appenlight.logging.level = WARNING

; send logs only from erroneous/slow requests
; (saves API quota for intensive logging)
#appenlight.logging_on_error = false

; list of additional keywords that should be grabbed from the environ object
; can be a string with a comma separated list of words in lowercase
; (by default the client will always send the following info:
; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
; start with HTTP*); this list can be extended with additional keywords here
#appenlight.environ_keys_whitelist =

; list of keywords that should be blanked from the request object
; can be a string with a comma separated list of words in lowercase
; (by default the client will always blank keys that contain the following words:
; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');
; this list can be extended with additional keywords set here
#appenlight.request_keys_blacklist =

; list of namespaces that should be ignored when gathering log entries
; can be a string with a comma separated list of namespaces
; (by default the client ignores its own entries: appenlight_client.client)
#appenlight.log_namespace_blacklist =

; Statsd client config, this is used to send metrics to statsd
; We recommend setting statsd_exported and scraping the metrics with Prometheus
#statsd.enabled = false
#statsd.statsd_host = 0.0.0.0
#statsd.statsd_port = 8125
#statsd.statsd_prefix =
#statsd.statsd_ipv6 = false

; configure logging automatically at server startup; set to false
; to use the custom logging config below.
; RC_LOGGING_FORMATTER
; RC_LOGGING_LEVEL
; env variables can control the settings for logging in case of autoconfigure

#logging.autoconfigure = true
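; e.g. (illustration):
;export RC_LOGGING_LEVEL=DEBUG
;export RC_LOGGING_FORMATTER=json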

; specify your own custom logging config file to configure logging
#logging.logging_conf_file = /path/to/custom_logging.ini

; Dummy marker to add new entries after.
; Add any custom entries below. Please don't remove this marker.
custom.conf = 1


; #####################
; LOGGING CONFIGURATION
; #####################

[loggers]
keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper

[handlers]
keys = console, console_sql

[formatters]
keys = generic, json, color_formatter, color_formatter_sql

; #######
; LOGGERS
; #######
[logger_root]
level = NOTSET
handlers = console

[logger_sqlalchemy]
level = INFO
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_ssh_wrapper]
level = DEBUG
handlers =
qualname = ssh_wrapper
propagate = 1

[logger_celery]
level = DEBUG
handlers =
qualname = celery


; ########
; HANDLERS
; ########

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
; This allows sending properly formatted logs to grafana loki or elasticsearch
formatter = color_formatter
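; e.g. to switch this handler to JSON logs: formatter = json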

[handler_console_sql]
; "level = DEBUG" logs SQL queries and results.
; "level = INFO" logs SQL queries.
; "level = WARN" logs neither. (Recommended for production systems.)
class = StreamHandler
args = (sys.stderr, )
level = WARN
; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
; This allows sending properly formatted logs to grafana loki or elasticsearch
formatter = color_formatter_sql

; ##########
; FORMATTERS
; ##########

[formatter_generic]
class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_json]
format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,822 +1,828 @@

; #########################################
; RHODECODE COMMUNITY EDITION CONFIGURATION
; #########################################

[DEFAULT]
; Debug flag sets all loggers to debug, and enables request tracking
debug = false

; ########################################################################
; EMAIL CONFIGURATION
; These settings will be used by the RhodeCode mailing system
; ########################################################################

; prefix all email subjects with given prefix, helps filtering out emails
#email_prefix = [RhodeCode]

; email FROM address all mails will be sent from
#app_email_from = rhodecode-noreply@localhost

#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true

[server:main]
; COMMON HOST/IP CONFIG, this applies mostly to the development setup.
; Host and port for gunicorn are controlled by gunicorn_conf.py
host = 127.0.0.1
port = 10020


; ###########################
; GUNICORN APPLICATION SERVER
; ###########################

; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py

; Module to use, this setting shouldn't be changed
use = egg:gunicorn#main

; Prefix middleware for RhodeCode.
; recommended when using a proxy setup.
; allows serving RhodeCode under a prefix on the server,
; e.g. https://server.com/custom_prefix. Enable the `filter-with =` option below as well,
; and set your prefix like: `prefix = /custom_prefix`.
; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
; your cookies to only work on the prefix url
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /

[app:main]
; The %(here)s variable will be replaced with the absolute path of the parent
; directory of this file.
; Each option in app:main can be overridden by an environment variable.
;
; To override an option:
;
; RC_<KeyName>
; Everything should be uppercase; . and - should be replaced by _.
; For example, if you have this configuration setting:
; rc_cache.repo_object.backend = foo
; it can be overridden by
; export RC_CACHE_REPO_OBJECT_BACKEND=foo

use = egg:rhodecode-enterprise-ce

; enable proxy prefix middleware, defined above
#filter-with = proxy-prefix

; encryption key used to encrypt social plugin tokens,
; remote_urls with credentials etc, if not set it defaults to
; `beaker.session.secret`
#rhodecode.encrypted_values.secret =

; decryption strict mode (enabled by default). It controls if decryption raises
; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
#rhodecode.encrypted_values.strict = false

; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
; fernet is safer, and we strongly recommend switching to it.
; Due to backward compatibility aes is used as default.
#rhodecode.encrypted_values.algorithm = fernet

; Return gzipped responses from RhodeCode (static files/application)
gzip_responses = false

; Auto-generate javascript routes file on startup
generate_js_files = false

; System global default language.
; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

; Perform a full repository scan and import on each server start.
; Setting this to true could lead to very long startup time.
startup.import_repos = false

; URL at which the application is running. This is used for bootstrapping
; requests in context when no web request is available. Used in ishell, or
; SSH calls. Set this for events to receive a proper url for SSH calls.
app.base_url = http://rhodecode.local

; Host at which the Service API is running.
app.service_api.host = http://rhodecode.local:10020

; Secret for Service API authentication.
app.service_api.token =

; Unique application ID. Should be a random unique string for security.
app_instance_uuid = rc-production

; Cut off limit for large diffs (size in bytes). If the overall diff size of a
; commit or pull request exceeds this limit, the diff will be displayed
; partially. E.g 512000 == 512Kb
cut_off_limit_diff = 512000

; Cut off limit for large files inside diffs (size in bytes). Each individual
; file inside a diff which exceeds this limit will be displayed partially.
; E.g 128000 == 128Kb
cut_off_limit_file = 128000

; Use cached version of vcs repositories everywhere. Recommended to be `true`
vcs_full_cache = true

; Force https in RhodeCode, fixes https redirects, assumes it's always https.
; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
force_https = false

; use Strict-Transport-Security headers
use_htsts = false

; Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

; RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

; gist URL alias, used to create nicer urls for gist. This should be an
; url that rewrites to _admin/gists/{gistid},
; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
; RhodeCode url, i.e. http[s]://rhodecode.server/_admin/gists/{gistid}
gist_alias_url =

; List of views (using glob pattern syntax) that AUTH TOKENS could be
; used for access.
; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
; came from the logged-in user who owns this authentication token.
; Additionally, the @TOKEN syntax can be used to bind a view to a specific
; authentication token. Such a view would only be accessible when used together
; with this authentication token.
; A list of all views can be found under `/_admin/permissions/auth_token_access`
; The list should be "," separated and on a single line.
; Most common views to enable:

# RepoCommitsView:repo_commit_download
# RepoCommitsView:repo_commit_patch
# RepoCommitsView:repo_commit_raw
# RepoCommitsView:repo_commit_raw@TOKEN
# RepoFilesView:repo_files_diff
# RepoFilesView:repo_archivefile
# RepoFilesView:repo_file_raw
# GistView:*
api_access_controllers_whitelist =
|
170 | 170 | |
|
171 | 171 | ; Default encoding used to convert from and to unicode |
|
172 | 172 | ; can be also a comma separated list of encoding in case of mixed encodings |
|
173 | 173 | default_encoding = UTF-8 |
|
174 | 174 | |
|
175 | 175 | ; instance-id prefix |
|
176 | 176 | ; a prefix key for this instance used for cache invalidation when running |
|
177 | 177 | ; multiple instances of RhodeCode, make sure it's globally unique for |
|
178 | 178 | ; all running RhodeCode instances. Leave empty if you don't use it |
|
179 | 179 | instance_id = |
|
180 | 180 | |
|
181 | 181 | ; Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
182 | 182 | ; of an authentication plugin also if it is disabled by it's settings. |
|
183 | 183 | ; This could be useful if you are unable to log in to the system due to broken |
|
184 | 184 | ; authentication settings. Then you can enable e.g. the internal RhodeCode auth |
|
185 | 185 | ; module to log in again and fix the settings. |
|
186 | 186 | ; Available builtin plugin IDs (hash is part of the ID): |
|
187 | 187 | ; egg:rhodecode-enterprise-ce#rhodecode |
|
188 | 188 | ; egg:rhodecode-enterprise-ce#pam |
|
189 | 189 | ; egg:rhodecode-enterprise-ce#ldap |
|
190 | 190 | ; egg:rhodecode-enterprise-ce#jasig_cas |
|
191 | 191 | ; egg:rhodecode-enterprise-ce#headers |
|
192 | 192 | ; egg:rhodecode-enterprise-ce#crowd |
|
193 | 193 | |
|
194 | 194 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
195 | 195 | |
|
196 | 196 | ; Flag to control loading of legacy plugins in py:/path format |
|
197 | 197 | auth_plugin.import_legacy_plugins = true |
|
198 | 198 | |
|
199 | 199 | ; alternative return HTTP header for failed authentication. Default HTTP |
|
200 | 200 | ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
201 | 201 | ; handling that causing a series of failed authentication calls. |
|
202 | 202 | ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
203 | 203 | ; This will be served instead of default 401 on bad authentication |
|
204 | 204 | auth_ret_code = |
|
205 | 205 | |
|
206 | 206 | ; use special detection method when serving auth_ret_code, instead of serving |
|
207 | 207 | ; ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
208 | 208 | ; and then serve auth_ret_code to clients |
|
209 | 209 | auth_ret_code_detection = false |
|
210 | 210 | |
|
211 | 211 | ; locking return code. When repository is locked return this HTTP code. 2XX |
|
212 | 212 | ; codes don't break the transactions while 4XX codes do |
|
213 | 213 | lock_ret_code = 423 |
|
214 | 214 | |
|
215 | 215 | ; allows to change the repository location in settings page |
|
216 | 216 | allow_repo_location_change = true |
|
217 | 217 | |
|
218 | 218 | ; allows to setup custom hooks in settings page |
|
219 | 219 | allow_custom_hooks_settings = true |
|
220 | 220 | |
|
221 | 221 | ; Generated license token required for EE edition license. |
|
222 | 222 | ; New generated token value can be found in Admin > settings > license page. |
|
223 | 223 | license_token = |
|
224 | 224 | |
|
225 | 225 | ; This flag hides sensitive information on the license page such as token, and license data |
|
226 | 226 | license.hide_license_info = false |
|
227 | 227 | |
|
228 | 228 | ; supervisor connection uri, for managing supervisor and logs. |
|
229 | 229 | supervisor.uri = |
|
230 | 230 | |
|
231 | 231 | ; supervisord group name/id we only want this RC instance to handle |
|
232 | 232 | supervisor.group_id = prod |
|
233 | 233 | |
|
234 | 234 | ; Display extended labs settings |
|
235 | 235 | labs_settings_active = true |
|
236 | 236 | |
|
237 | 237 | ; Custom exception store path, defaults to TMPDIR |
|
238 | 238 | ; This is used to store exceptions from RhodeCode in a shared directory
|
239 | 239 | #exception_tracker.store_path = |
|
240 | 240 | |
|
241 | 241 | ; Send an email with exception details when an exception happens
|
242 | 242 | #exception_tracker.send_email = false |
|
243 | 243 | |
|
244 | 244 | ; Comma separated list of recipients for exception emails, |
|
245 | 245 | ; e.g. admin@rhodecode.com,devops@rhodecode.com
|
246 | 246 | ; Can be left empty; emails will then be sent to ALL super-admins
|
247 | 247 | #exception_tracker.send_email_recipients = |
|
248 | 248 | |
|
249 | 249 | ; optional prefix to add to the email subject
|
250 | 250 | #exception_tracker.email_prefix = [RHODECODE ERROR] |
|
251 | 251 | |
|
252 | 252 | ; File store configuration. This is used to store and serve uploaded files |
|
253 | 253 | file_store.enabled = true |
|
254 | 254 | |
|
255 | 255 | ; Storage backend, available options are: local |
|
256 | 256 | file_store.backend = local |
|
257 | 257 | |
|
258 | 258 | ; path to store the uploaded binaries |
|
259 | 259 | file_store.storage_path = %(here)s/data/file_store |
|
260 | 260 | |
|
261 | 261 | ; Uncomment and set this path to control settings for archive download cache. |
|
262 | 262 | ; Generated repo archives will be cached at this location |
|
263 | 263 | ; and served from the cache during subsequent requests for the same archive of |
|
264 | 264 | ; the repository. It is important that this path is shared across filesystems and
|
265 | 265 | ; available to both RhodeCode and vcsserver
|
266 | 266 | |
|
267 | 267 | ; Default is $cache_dir/archive_cache if not set |
|
268 | 268 | archive_cache.store_dir = %(here)s/data/archive_cache |
|
269 | 269 | |
|
270 | 270 | ; The limit in GB sets how much data we cache before recycling the least recently used entries; defaults to 10 GB
|
271 | 271 | archive_cache.cache_size_gb = 40 |
|
272 | 272 | |
|
273 | 273 | ; By default the cache uses a sharding technique; this specifies how many shards there are
|
274 | 274 | archive_cache.cache_shards = 4 |
|
275 | 275 | |
|
276 | 276 | ; ############# |
|
277 | 277 | ; CELERY CONFIG |
|
278 | 278 | ; ############# |
|
279 | 279 | |
|
280 | 280 | ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini |
|
281 | 281 | |
|
282 | 282 | use_celery = false |
|
283 | 283 | |
|
284 | 284 | ; path to store schedule database |
|
285 | 285 | #celerybeat-schedule.path = |
|
286 | 286 | |
|
287 | 287 | ; connection url to the message broker (default redis) |
|
288 | 288 | celery.broker_url = redis://redis:6379/8 |
|
289 | 289 | |
|
290 | 290 | ; results backend to get results for (default redis) |
|
291 | 291 | celery.result_backend = redis://redis:6379/8 |
|
292 | 292 | |
|
293 | 293 | ; rabbitmq example |
|
294 | 294 | #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost |
|
295 | 295 | |
|
296 | 296 | ; maximum number of tasks to execute before a worker restart
|
297 | 297 | celery.max_tasks_per_child = 20 |
|
298 | 298 | |
|
299 | 299 | ; tasks will never be sent to the queue, but executed locally instead. |
|
300 | 300 | celery.task_always_eager = false |
|
301 | 301 | |
|
302 | 302 | ; ############# |
|
303 | 303 | ; DOGPILE CACHE |
|
304 | 304 | ; ############# |
|
305 | 305 | |
|
306 | 306 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. |
|
307 | 307 | ; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
|
308 | 308 | cache_dir = %(here)s/data |
|
309 | 309 | |
|
310 | 310 | ; ********************************************* |
|
311 | 311 | ; `sql_cache_short` cache for heavy SQL queries |
|
312 | 312 | ; Only supported backend is `memory_lru` |
|
313 | 313 | ; ********************************************* |
|
314 | 314 | rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru |
|
315 | 315 | rc_cache.sql_cache_short.expiration_time = 30 |
|
316 | 316 | |
|
317 | 317 | |
|
318 | 318 | ; ***************************************************** |
|
319 | 319 | ; `cache_repo_longterm` cache for repo object instances |
|
320 | 320 | ; Only supported backend is `memory_lru` |
|
321 | 321 | ; ***************************************************** |
|
322 | 322 | rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru |
|
323 | 323 | ; by default we use 30 days; the cache is still invalidated on push
|
324 | 324 | rc_cache.cache_repo_longterm.expiration_time = 2592000 |
|
325 | 325 | ; max items in the LRU cache; set to a smaller number to save memory and expire the least recently used caches
|
326 | 326 | rc_cache.cache_repo_longterm.max_size = 10000 |
|
327 | 327 | |
|
328 | 328 | |
|
329 | 329 | ; ********************************************* |
|
330 | 330 | ; `cache_general` cache for general purpose use |
|
331 | 331 | ; for simplicity use rc.file_namespace backend, |
|
332 | 332 | ; for performance and scale use rc.redis |
|
333 | 333 | ; ********************************************* |
|
334 | 334 | rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace |
|
335 | 335 | rc_cache.cache_general.expiration_time = 43200 |
|
336 | 336 | ; file cache store path. Defaults to the `cache_dir =` value, or tempdir if neither is set
|
337 | 337 | #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db |
|
338 | 338 | |
|
339 | 339 | ; alternative `cache_general` redis backend with distributed lock |
|
340 | 340 | #rc_cache.cache_general.backend = dogpile.cache.rc.redis |
|
341 | 341 | #rc_cache.cache_general.expiration_time = 300 |
|
342 | 342 | |
|
343 | 343 | ; redis_expiration_time needs to be greater than expiration_time
|
344 | 344 | #rc_cache.cache_general.arguments.redis_expiration_time = 7200 |
|
345 | 345 | |
|
346 | 346 | #rc_cache.cache_general.arguments.host = localhost |
|
347 | 347 | #rc_cache.cache_general.arguments.port = 6379 |
|
348 | 348 | #rc_cache.cache_general.arguments.db = 0 |
|
349 | 349 | #rc_cache.cache_general.arguments.socket_timeout = 30 |
|
350 | 350 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends |
|
351 | 351 | #rc_cache.cache_general.arguments.distributed_lock = true |
|
352 | 352 | |
|
353 | 353 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
354 | 354 | #rc_cache.cache_general.arguments.lock_auto_renewal = true |
|
355 | 355 | |
|
356 | 356 | ; ************************************************* |
|
357 | 357 | ; `cache_perms` cache for permission tree, auth TTL |
|
358 | 358 | ; for simplicity use rc.file_namespace backend, |
|
359 | 359 | ; for performance and scale use rc.redis |
|
360 | 360 | ; ************************************************* |
|
361 | 361 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace |
|
362 | 362 | rc_cache.cache_perms.expiration_time = 3600 |
|
363 | 363 | ; file cache store path. Defaults to the `cache_dir =` value, or tempdir if neither is set
|
364 | 364 | #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db |
|
365 | 365 | |
|
366 | 366 | ; alternative `cache_perms` redis backend with distributed lock |
|
367 | 367 | #rc_cache.cache_perms.backend = dogpile.cache.rc.redis |
|
368 | 368 | #rc_cache.cache_perms.expiration_time = 300 |
|
369 | 369 | |
|
370 | 370 | ; redis_expiration_time needs to be greater than expiration_time
|
371 | 371 | #rc_cache.cache_perms.arguments.redis_expiration_time = 7200 |
|
372 | 372 | |
|
373 | 373 | #rc_cache.cache_perms.arguments.host = localhost |
|
374 | 374 | #rc_cache.cache_perms.arguments.port = 6379 |
|
375 | 375 | #rc_cache.cache_perms.arguments.db = 0 |
|
376 | 376 | #rc_cache.cache_perms.arguments.socket_timeout = 30 |
|
377 | 377 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends |
|
378 | 378 | #rc_cache.cache_perms.arguments.distributed_lock = true |
|
379 | 379 | |
|
380 | 380 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
381 | 381 | #rc_cache.cache_perms.arguments.lock_auto_renewal = true |
|
382 | 382 | |
|
383 | 383 | ; *************************************************** |
|
384 | 384 | ; `cache_repo` cache for file tree, Readme, RSS FEEDS |
|
385 | 385 | ; for simplicity use rc.file_namespace backend, |
|
386 | 386 | ; for performance and scale use rc.redis |
|
387 | 387 | ; *************************************************** |
|
388 | 388 | rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace |
|
389 | 389 | rc_cache.cache_repo.expiration_time = 2592000 |
|
390 | 390 | ; file cache store path. Defaults to the `cache_dir =` value, or tempdir if neither is set
|
391 | 391 | #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db |
|
392 | 392 | |
|
393 | 393 | ; alternative `cache_repo` redis backend with distributed lock |
|
394 | 394 | #rc_cache.cache_repo.backend = dogpile.cache.rc.redis |
|
395 | 395 | #rc_cache.cache_repo.expiration_time = 2592000 |
|
396 | 396 | |
|
397 | 397 | ; redis_expiration_time needs to be greater than expiration_time
|
398 | 398 | #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400 |
|
399 | 399 | |
|
400 | 400 | #rc_cache.cache_repo.arguments.host = localhost |
|
401 | 401 | #rc_cache.cache_repo.arguments.port = 6379 |
|
402 | 402 | #rc_cache.cache_repo.arguments.db = 1 |
|
403 | 403 | #rc_cache.cache_repo.arguments.socket_timeout = 30 |
|
404 | 404 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends |
|
405 | 405 | #rc_cache.cache_repo.arguments.distributed_lock = true |
|
406 | 406 | |
|
407 | 407 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
408 | 408 | #rc_cache.cache_repo.arguments.lock_auto_renewal = true |
|
409 | 409 | |
|
410 | 410 | ; ############## |
|
411 | 411 | ; BEAKER SESSION |
|
412 | 412 | ; ############## |
|
413 | 413 | |
|
414 | 414 | ; beaker.session.type is the storage type for logged-in users' sessions. Currently allowed
|
415 | 415 | ; types are file, ext:redis, ext:database, ext:memcached
|
416 | 416 | ; The fastest ones are ext:redis and ext:database; DO NOT use the memory type for sessions
|
417 | 417 | beaker.session.type = file |
|
418 | 418 | beaker.session.data_dir = %(here)s/data/sessions |
|
419 | 419 | |
|
420 | 420 | ; Redis based sessions |
|
421 | 421 | #beaker.session.type = ext:redis |
|
422 | 422 | #beaker.session.url = redis://127.0.0.1:6379/2 |
|
423 | 423 | |
|
424 | 424 | ; DB based session, fast, and allows easy management of logged-in users
|
425 | 425 | #beaker.session.type = ext:database |
|
426 | 426 | #beaker.session.table_name = db_session |
|
427 | 427 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
428 | 428 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
429 | 429 | #beaker.session.sa.pool_recycle = 3600 |
|
430 | 430 | #beaker.session.sa.echo = false |
|
431 | 431 | |
|
432 | 432 | beaker.session.key = rhodecode |
|
433 | 433 | beaker.session.secret = production-rc-uytcxaz |
|
434 | 434 | beaker.session.lock_dir = %(here)s/data/sessions/lock |
|
435 | 435 | |
|
436 | 436 | ; Secure encrypted cookie. Requires the AES python libraries
|
437 | 437 | ; you must disable beaker.session.secret to use this |
|
438 | 438 | #beaker.session.encrypt_key = key_for_encryption |
|
439 | 439 | #beaker.session.validate_key = validation_key |
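|
; random key values can be generated with e.g. `openssl rand -hex 32` (illustrative command; any strong random string works)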
|
440 | 440 | |
|
441 | 441 | ; Sets the session as invalid (also logging out the user) if it has not been
|
442 | 442 | ; accessed for the given amount of time, in seconds
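|
; e.g. 2592000 seconds = 30 days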
|
443 | 443 | beaker.session.timeout = 2592000 |
|
444 | 444 | beaker.session.httponly = true |
|
445 | 445 | |
|
446 | 446 | ; Path to use for the cookie. Set to prefix if you use prefix middleware |
|
447 | 447 | #beaker.session.cookie_path = /custom_prefix |
|
448 | 448 | |
|
449 | 449 | ; Set https secure cookie |
|
450 | 450 | beaker.session.secure = false |
|
451 | 451 | |
|
452 | 452 | ; default cookie expiration time in seconds; set to `true` to expire
|
453 | 453 | ; at browser close |
|
454 | 454 | #beaker.session.cookie_expires = 3600 |
|
455 | 455 | |
|
456 | 456 | ; ############################# |
|
457 | 457 | ; SEARCH INDEXING CONFIGURATION |
|
458 | 458 | ; ############################# |
|
459 | 459 | |
|
460 | 460 | ; Full text search indexer is available in rhodecode-tools under |
|
461 | 461 | ; `rhodecode-tools index` command |
|
462 | 462 | |
|
463 | 463 | ; WHOOSH Backend, doesn't require additional services to run |
|
464 | 464 | ; it works well with a few dozen repos
|
465 | 465 | search.module = rhodecode.lib.index.whoosh |
|
466 | 466 | search.location = %(here)s/data/index |
|
467 | 467 | |
|
468 | 468 | ; #################### |
|
469 | 469 | ; CHANNELSTREAM CONFIG |
|
470 | 470 | ; #################### |
|
471 | 471 | |
|
472 | 472 | ; channelstream enables persistent connections and live notifications
|
473 | 473 | ; in the system. It's also used by the chat system |
|
474 | 474 | |
|
475 | 475 | channelstream.enabled = false |
|
476 | 476 | |
|
477 | 477 | ; server address for channelstream server on the backend |
|
478 | 478 | channelstream.server = 127.0.0.1:9800 |
|
479 | 479 | |
|
480 | 480 | ; location of the channelstream server from the outside world
|
481 | 481 | ; use ws:// for http or wss:// for https. This address needs to be handled |
|
482 | 482 | ; by external HTTP server such as Nginx or Apache |
|
483 | 483 | ; see Nginx/Apache configuration examples in our docs |
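|
; a minimal Nginx sketch for illustration only (see the docs for the complete example):
;   location /_channelstream {
;       proxy_pass http://127.0.0.1:9800;
;       proxy_http_version 1.1;
;       proxy_set_header Upgrade $http_upgrade;
;       proxy_set_header Connection "upgrade";
;   }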
|
484 | 484 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
485 | 485 | channelstream.secret = secret |
|
486 | 486 | channelstream.history.location = %(here)s/channelstream_history |
|
487 | 487 | |
|
488 | 488 | ; Internal application path that Javascript uses to connect to.
|
489 | 489 | ; If you use proxy-prefix, the prefix should be added before /_channelstream
|
490 | 490 | channelstream.proxy_path = /_channelstream |
|
491 | 491 | |
|
492 | 492 | |
|
493 | 493 | ; ############################## |
|
494 | 494 | ; MAIN RHODECODE DATABASE CONFIG |
|
495 | 495 | ; ############################## |
|
496 | 496 | |
|
497 | 497 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
498 | 498 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
499 | 499 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8 |
|
500 | 500 | ; pymysql is an alternative driver for MySQL; use it in case of problems with the default one
|
501 | 501 | #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode |
|
502 | 502 | |
|
503 | 503 | sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
504 | 504 | |
|
505 | 505 | ; see sqlalchemy docs for other advanced settings |
|
506 | 506 | ; print the sql statements to output |
|
507 | 507 | sqlalchemy.db1.echo = false |
|
508 | 508 | |
|
509 | 509 | ; recycle the connections after this amount of seconds |
|
510 | 510 | sqlalchemy.db1.pool_recycle = 3600 |
|
511 | 511 | |
|
512 | 512 | ; the number of connections to keep open inside the connection pool. |
|
513 | 513 | ; 0 indicates no limit |
|
514 | 514 | ; the general calculation with gevent is:
|
515 | 515 | ; if your system allows 500 concurrent greenlets (max_connections) that all do database access, |
|
516 | 516 | ; then increase pool size + max overflow so that they add up to 500. |
|
517 | 517 | #sqlalchemy.db1.pool_size = 5 |
|
518 | 518 | |
|
519 | 519 | ; The number of connections to allow in connection pool "overflow", that is |
|
520 | 520 | ; connections that can be opened above and beyond the pool_size setting, |
|
521 | 521 | ; which defaults to five. |
|
522 | 522 | #sqlalchemy.db1.max_overflow = 10 |
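|
; e.g. for 500 concurrent greenlets: pool_size = 100 and max_overflow = 400 (100 + 400 = 500)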
|
523 | 523 | |
|
524 | 524 | ; Connection check ping, used to detect broken database connections |
|
525 | 525 | ; could be enabled to better handle 'MySQL has gone away' errors
|
526 | 526 | #sqlalchemy.db1.ping_connection = true |
|
527 | 527 | |
|
528 | 528 | ; ########## |
|
529 | 529 | ; VCS CONFIG |
|
530 | 530 | ; ########## |
|
531 | 531 | vcs.server.enable = true |
|
532 | 532 | vcs.server = localhost:9900 |
|
533 | 533 | |
|
534 | 534 | ; Web server connectivity protocol, responsible for web based VCS operations |
|
535 | 535 | ; Available protocols are: |
|
536 | 536 | ; `http` - use http-rpc backend (default) |
|
537 | 537 | vcs.server.protocol = http |
|
538 | 538 | |
|
539 | 539 | ; Push/Pull operations protocol, available options are: |
|
540 | 540 | ; `http` - use http-rpc backend (default) |
|
541 | 541 | vcs.scm_app_implementation = http |
|
542 | 542 | |
|
543 | 543 | ; Push/Pull operations hooks protocol, available options are: |
|
544 | 544 | ; `http` - use http-rpc backend (default) |
|
545 | 545 | vcs.hooks.protocol = http |
|
546 | 546 | |
|
547 | 547 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be |
|
548 | 548 | ; accessible via the network.
|
549 | 549 | ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker) |
|
550 | 550 | vcs.hooks.host = * |
|
551 | 551 | |
|
552 | 552 | ; Start VCSServer with this instance as a subprocess, useful for development |
|
553 | 553 | vcs.start_server = false |
|
554 | 554 | |
|
555 | 555 | ; List of enabled VCS backends, available options are: |
|
556 | 556 | ; `hg` - mercurial |
|
557 | 557 | ; `git` - git |
|
558 | 558 | ; `svn` - subversion |
|
559 | 559 | vcs.backends = hg, git, svn |
|
560 | 560 | |
|
561 | 561 | ; Wait this number of seconds before killing the connection to the vcsserver
|
562 | 562 | vcs.connection_timeout = 3600 |
|
563 | 563 | |
|
564 | 564 | ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
565 | 565 | ; Set a numeric version for your current SVN, e.g. 1.8 or 1.12
|
566 | 566 | ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible |
|
567 | 567 | #vcs.svn.compatible_version = 1.8 |
|
568 | 568 | |
|
569 | ; Enable SVN proxy of requests over HTTP | |
|
570 | vcs.svn.proxy.enabled = true | |
|
571 | ||
|
572 | ; host to connect to the running SVN subsystem | |
|
573 | vcs.svn.proxy.host = http://svn:8090 | |
|
574 | ||
|
569 | 575 | ; Cache flag to cache vcsserver remote calls locally |
|
570 | 576 | ; It uses cache_region `cache_repo` |
|
571 | 577 | vcs.methods.cache = true |
|
572 | 578 | |
|
573 | 579 | ; #################################################### |
|
574 | 580 | ; Subversion proxy support (mod_dav_svn) |
|
575 | 581 | ; Maps RhodeCode repo groups into SVN paths for Apache |
|
576 | 582 | ; #################################################### |
|
577 | 583 | |
|
578 | 584 | ; Enable or disable the config file generation. |
|
579 | 585 | svn.proxy.generate_config = false |
|
580 | 586 | |
|
581 | 587 | ; Generate config file with `SVNListParentPath` set to `On`. |
|
582 | 588 | svn.proxy.list_parent_path = true |
|
583 | 589 | |
|
584 | 590 | ; Set location and file name of generated config file. |
|
585 | 591 | svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf |
|
586 | 592 | |
|
587 | 593 | ; alternative mod_dav config template. This needs to be a valid mako template |
|
588 | 594 | ; Example template can be found in the source code: |
|
589 | 595 | ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako |
|
590 | 596 | #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako |
|
591 | 597 | |
|
592 | 598 | ; Used as a prefix to the `Location` block in the generated config file. |
|
593 | 599 | ; In most cases it should be set to `/`. |
|
594 | 600 | svn.proxy.location_root = / |
|
595 | 601 | |
|
596 | 602 | ; Command to reload the mod dav svn configuration on change. |
|
597 | 603 | ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh |
|
598 | 604 | ; Make sure the user who runs the RhodeCode process is allowed to reload Apache
|
599 | 605 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload |
|
600 | 606 | |
|
601 | 607 | ; If the timeout expires before the reload command finishes, the command will |
|
602 | 608 | ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds. |
|
603 | 609 | #svn.proxy.reload_timeout = 10 |
|
604 | 610 | |
|
605 | 611 | ; #################### |
|
606 | 612 | ; SSH Support Settings |
|
607 | 613 | ; #################### |
|
608 | 614 | |
|
609 | 615 | ; Defines if a custom authorized_keys file should be created and written on |
|
610 | 616 | ; any change of user SSH keys. Setting this to false also disables the possibility
|
611 | 617 | ; of users adding SSH keys from the web interface. Super admins can still
|
612 | 618 | ; manage SSH Keys. |
|
613 | 619 | ssh.generate_authorized_keyfile = false |
|
614 | 620 | |
|
615 | 621 | ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding` |
|
616 | 622 | # ssh.authorized_keys_ssh_opts = |
|
617 | 623 | |
|
618 | 624 | ; Path to the authorized_keys file where the generated entries are placed.
|
619 | 625 | ; It is possible to have multiple key files specified in `sshd_config` e.g. |
|
620 | 626 | ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode |
|
621 | 627 | ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode |
|
622 | 628 | |
|
623 | 629 | ; Command to execute the SSH wrapper. The binary is available in the |
|
624 | 630 | ; RhodeCode installation directory. |
|
625 | 631 | ; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper |
|
626 | 632 | ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper |
|
627 | 633 | |
|
628 | 634 | ; Allow shell when executing the ssh-wrapper command |
|
629 | 635 | ssh.wrapper_cmd_allow_shell = false |
|
630 | 636 | |
|
631 | 637 | ; Enables logging and detailed output sent back to the client during SSH
|
632 | 638 | ; operations. Useful for debugging, shouldn't be used in production. |
|
633 | 639 | ssh.enable_debug_logging = false |
|
634 | 640 | |
|
635 | 641 | ; Paths to binary executables; by default these are just the binary names, but we can
|
636 | 642 | ; override them if we want to use custom ones
|
637 | 643 | ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg |
|
638 | 644 | ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git |
|
639 | 645 | ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve |
|
640 | 646 | |
|
641 | 647 | ; Enables SSH key generator web interface. Disabling this still allows users |
|
642 | 648 | ; to add their own keys. |
|
643 | 649 | ssh.enable_ui_key_generator = true |
|
644 | 650 | |
|
645 | 651 | |
|
646 | 652 | ; ################# |
|
647 | 653 | ; APPENLIGHT CONFIG |
|
648 | 654 | ; ################# |
|
649 | 655 | |
|
650 | 656 | ; Appenlight is tailored to work with RhodeCode, see |
|
651 | 657 | ; http://appenlight.rhodecode.com for details on how to obtain an account
|
652 | 658 | |
|
653 | 659 | ; Appenlight integration enabled |
|
654 | 660 | #appenlight = false |
|
655 | 661 | |
|
656 | 662 | #appenlight.server_url = https://api.appenlight.com |
|
657 | 663 | #appenlight.api_key = YOUR_API_KEY |
|
658 | 664 | #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 |
|
659 | 665 | |
|
660 | 666 | ; used for JS client |
|
661 | 667 | #appenlight.api_public_key = YOUR_API_PUBLIC_KEY |
|
662 | 668 | |
|
663 | 669 | ; TWEAK AMOUNT OF INFO SENT HERE |
|
664 | 670 | |
|
665 | 671 | ; enables 404 error logging (default False) |
|
666 | 672 | #appenlight.report_404 = false |
|
667 | 673 | |
|
668 | 674 | ; time in seconds after which a request is considered slow (default 1)
|
669 | 675 | #appenlight.slow_request_time = 1 |
|
670 | 676 | |
|
671 | 677 | ; record slow requests in application |
|
672 | 678 | ; (needs to be enabled for slow datastore recording and time tracking) |
|
673 | 679 | #appenlight.slow_requests = true |
|
674 | 680 | |
|
675 | 681 | ; enable hooking to application loggers |
|
676 | 682 | #appenlight.logging = true |
|
677 | 683 | |
|
678 | 684 | ; minimum log level for log capture |
|
679 | 685 | #appenlight.logging.level = WARNING
|
680 | 686 | |
|
681 | 687 | ; send logs only from erroneous/slow requests |
|
682 | 688 | ; (saves API quota for intensive logging) |
|
683 | 689 | #appenlight.logging_on_error = false |
|
684 | 690 | |
|
685 | 691 | ; list of additional keywords that should be grabbed from environ object |
|
686 | 692 | ; can be a string with a comma separated list of words in lowercase
|
687 | 693 | ; (by default the client will always send the following info:
|
688 | 694 | ; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
|
689 | 695 | ; start with HTTP*; this list can be extended with additional keywords here)
|
690 | 696 | #appenlight.environ_keys_whitelist = |
|
691 | 697 | |
|
692 | 698 | ; list of keywords that should be blanked from request object |
|
693 | 699 | ; can be a string with a comma separated list of words in lowercase
|
694 | 700 | ; (by default the client will always blank keys that contain the following words:
|
695 | 701 | ; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf';
|
696 | 702 | ; this list can be extended with additional keywords set here)
|
697 | 703 | #appenlight.request_keys_blacklist = |
|
698 | 704 | |
|
699 | 705 | ; list of namespaces that should be ignored when gathering log entries
|
700 | 706 | ; can be a string with a comma separated list of namespaces
|
701 | 707 | ; (by default the client ignores its own entries: appenlight_client.client)
|
702 | 708 | #appenlight.log_namespace_blacklist = |
|
703 | 709 | |
|
704 | 710 | ; Statsd client config; this is used to send metrics to statsd
|
705 | 711 | ; We recommend setting up statsd_exporter and scraping the metrics with Prometheus
|
706 | 712 | #statsd.enabled = false |
|
707 | 713 | #statsd.statsd_host = 0.0.0.0 |
|
708 | 714 | #statsd.statsd_port = 8125 |
|
709 | 715 | #statsd.statsd_prefix = |
|
710 | 716 | #statsd.statsd_ipv6 = false |
|
711 | 717 | |
|
712 | 718 | ; configure logging automatically at server startup; set to false
|
713 | 719 | ; to use the custom logging config below.
|
714 | 720 | ; RC_LOGGING_FORMATTER |
|
715 | 721 | ; RC_LOGGING_LEVEL |
|
716 | 722 | ; these env variables can control the logging settings when autoconfigure is used
|
717 | 723 | |
|
718 | 724 | #logging.autoconfigure = true |
|
719 | 725 | |
|
720 | 726 | ; specify your own custom logging config file to configure logging |
|
721 | 727 | #logging.logging_conf_file = /path/to/custom_logging.ini |
|
722 | 728 | |
|
723 | 729 | ; Dummy marker to add new entries after. |
|
724 | 730 | ; Add any custom entries below. Please don't remove this marker. |
|
725 | 731 | custom.conf = 1 |
|
726 | 732 | |
|
727 | 733 | |
|
728 | 734 | ; ##################### |
|
729 | 735 | ; LOGGING CONFIGURATION |
|
730 | 736 | ; ##################### |
|
731 | 737 | |
|
732 | 738 | [loggers] |
|
733 | 739 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper |
|
734 | 740 | |
|
735 | 741 | [handlers] |
|
736 | 742 | keys = console, console_sql |
|
737 | 743 | |
|
738 | 744 | [formatters] |
|
739 | 745 | keys = generic, json, color_formatter, color_formatter_sql |
|
740 | 746 | |
|
741 | 747 | ; ####### |
|
742 | 748 | ; LOGGERS |
|
743 | 749 | ; ####### |
|
744 | 750 | [logger_root] |
|
745 | 751 | level = NOTSET |
|
746 | 752 | handlers = console |
|
747 | 753 | |
|
748 | 754 | [logger_sqlalchemy] |
|
749 | 755 | level = INFO |
|
750 | 756 | handlers = console_sql |
|
751 | 757 | qualname = sqlalchemy.engine |
|
752 | 758 | propagate = 0 |
|
753 | 759 | |
|
754 | 760 | [logger_beaker] |
|
755 | 761 | level = DEBUG |
|
756 | 762 | handlers = |
|
757 | 763 | qualname = beaker.container |
|
758 | 764 | propagate = 1 |
|
759 | 765 | |
|
760 | 766 | [logger_rhodecode] |
|
761 | 767 | level = DEBUG |
|
762 | 768 | handlers = |
|
763 | 769 | qualname = rhodecode |
|
764 | 770 | propagate = 1 |
|
765 | 771 | |
|
766 | 772 | [logger_ssh_wrapper] |
|
767 | 773 | level = DEBUG |
|
768 | 774 | handlers = |
|
769 | 775 | qualname = ssh_wrapper |
|
770 | 776 | propagate = 1 |
|
771 | 777 | |
|
772 | 778 | [logger_celery] |
|
773 | 779 | level = DEBUG |
|
774 | 780 | handlers = |
|
775 | 781 | qualname = celery |
|
776 | 782 | |
|
777 | 783 | |
|
778 | 784 | ; ######## |
|
779 | 785 | ; HANDLERS |
|
780 | 786 | ; ######## |
|
781 | 787 | |
|
782 | 788 | [handler_console] |
|
783 | 789 | class = StreamHandler |
|
784 | 790 | args = (sys.stderr, ) |
|
785 | 791 | level = INFO |
|
786 | 792 | ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json' |
|
787 | 793 | ; This allows sending properly formatted logs to grafana loki or elasticsearch |
|
788 | 794 | formatter = generic |
|
789 | 795 | |
|
790 | 796 | [handler_console_sql] |
|
791 | 797 | ; "level = DEBUG" logs SQL queries and results. |
|
792 | 798 | ; "level = INFO" logs SQL queries. |
|
793 | 799 | ; "level = WARN" logs neither. (Recommended for production systems.) |
|
794 | 800 | class = StreamHandler |
|
795 | 801 | args = (sys.stderr, ) |
|
796 | 802 | level = WARN |
|
797 | 803 | ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json' |
|
798 | 804 | ; This allows sending properly formatted logs to grafana loki or elasticsearch |
|
799 | 805 | formatter = generic |
|
800 | 806 | |
|
801 | 807 | ; ########## |
|
802 | 808 | ; FORMATTERS |
|
803 | 809 | ; ########## |
|
804 | 810 | |
|
805 | 811 | [formatter_generic] |
|
806 | 812 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
807 | 813 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
808 | 814 | datefmt = %Y-%m-%d %H:%M:%S |
|
809 | 815 | |
|
810 | 816 | [formatter_color_formatter] |
|
811 | 817 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
812 | 818 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
813 | 819 | datefmt = %Y-%m-%d %H:%M:%S |
|
814 | 820 | |
|
815 | 821 | [formatter_color_formatter_sql] |
|
816 | 822 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
817 | 823 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
|
818 | 824 | datefmt = %Y-%m-%d %H:%M:%S |
|
819 | 825 | |
|
820 | 826 | [formatter_json] |
|
821 | 827 | format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s |
|
822 | 828 | class = rhodecode.lib._vendor.jsonlogger.JsonFormatter |
@@ -1,508 +1,513 b'' | |||
|
1 | ||
|
2 | 1 |
|
|
3 | 2 | # |
|
4 | 3 | # This program is free software: you can redistribute it and/or modify |
|
5 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | 5 | # (only), as published by the Free Software Foundation. |
|
7 | 6 | # |
|
8 | 7 | # This program is distributed in the hope that it will be useful, |
|
9 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | 10 | # GNU General Public License for more details. |
|
12 | 11 | # |
|
13 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
14 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | 14 | # |
|
16 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | 18 | |
|
20 | 19 | import re |
|
21 | 20 | |
|
22 | 21 | import mock |
|
23 | 22 | import pytest |
|
24 | 23 | |
|
25 | 24 | from rhodecode.apps.repository.views.repo_summary import RepoSummaryView |
|
26 | 25 | from rhodecode.lib import helpers as h |
|
27 | 26 | from collections import OrderedDict |
|
28 | 27 | from rhodecode.lib.utils2 import AttributeDict, safe_str |
|
29 | 28 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
30 | 29 | from rhodecode.model.db import Repository |
|
31 | 30 | from rhodecode.model.meta import Session |
|
32 | 31 | from rhodecode.model.repo import RepoModel |
|
33 | 32 | from rhodecode.model.scm import ScmModel |
|
34 | 33 | from rhodecode.tests import assert_session_flash |
|
35 | 34 | from rhodecode.tests.fixture import Fixture |
|
36 | 35 | from rhodecode.tests.utils import AssertResponse, repo_on_filesystem |
|
37 | 36 | from rhodecode.tests.routes import route_path |
|
38 | 37 | |
|
39 | 38 | |
|
40 | 39 | fixture = Fixture() |
|
41 | 40 | |
|
42 | 41 | |
|
43 | 42 | def assert_clone_url(response, server, repo, disabled=False): |
|
44 | 43 | |
|
45 | 44 | response.mustcontain( |
|
46 | 45 | '<input type="text" class="input-monospace clone_url_input" ' |
|
47 | 46 | '{disabled}readonly="readonly" ' |
|
48 | 47 | 'value="http://test_admin@{server}/{repo}"/>'.format( |
|
49 | 48 | server=server, repo=repo, disabled='disabled ' if disabled else ' ') |
|
50 | 49 | ) |
|
51 | 50 | |
|
52 | 51 | |
|
53 | 52 | @pytest.mark.usefixtures('app') |
|
54 | 53 | class TestSummaryView(object): |
|
54 | ||
|
55 | 55 | def test_index(self, autologin_user, backend, http_host_only_stub): |
|
56 | 56 | repo_id = backend.repo.repo_id |
|
57 | 57 | repo_name = backend.repo_name |
|
58 | with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy', | |
|
59 | return_value=False): | |
|
60 | response = self.app.get( | |
|
61 | route_path('repo_summary', repo_name=repo_name)) | |
|
58 | ||
|
59 | response = self.app.get( | |
|
60 | route_path('repo_summary', repo_name=repo_name)) | |
|
62 | 61 | |
|
63 | 62 | # repo type |
|
64 | 63 | response.mustcontain( |
|
65 | 64 | '<i class="icon-%s">' % (backend.alias, ) |
|
66 | 65 | ) |
|
67 | 66 | # public/private |
|
68 | 67 | response.mustcontain( |
|
69 | 68 | """<i class="icon-unlock-alt">""" |
|
70 | 69 | ) |
|
71 | 70 | |
|
72 | 71 | # clone url... |
|
73 | 72 | assert_clone_url(response, http_host_only_stub, repo_name) |
|
74 | assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id)) | |
|
73 | assert_clone_url(response, http_host_only_stub, f'_{repo_id}') | |
|
75 | 74 | |
|
76 | 75 | def test_index_svn_without_proxy( |
|
77 | 76 | self, autologin_user, backend_svn, http_host_only_stub): |
|
77 | ||
|
78 | 78 | repo_id = backend_svn.repo.repo_id |
|
79 | 79 | repo_name = backend_svn.repo_name |
|
80 | response = self.app.get(route_path('repo_summary', repo_name=repo_name)) | |
|
81 | # clone url... | |
|
80 | ||
|
81 | # by default the SVN is enabled now, this is how inputs look when it's disabled | |
|
82 | with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy', return_value=True): | |
|
82 | 83 | |
|
84 | response = self.app.get( | |
|
85 | route_path('repo_summary', repo_name=repo_name), | |
|
86 | status=200) | |
|
87 | ||
|
88 | # clone url test... | |
|
83 | 89 | assert_clone_url(response, http_host_only_stub, repo_name, disabled=True) |
|
84 | assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id), disabled=True) | |
|
90 | assert_clone_url(response, http_host_only_stub, f'_{repo_id}', disabled=True) | |
|
85 | 91 | |
|
86 | 92 | def test_index_with_trailing_slash( |
|
87 | 93 | self, autologin_user, backend, http_host_only_stub): |
|
88 | 94 | |
|
89 | 95 | repo_id = backend.repo.repo_id |
|
90 | 96 | repo_name = backend.repo_name |
|
91 | with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy', | |
|
92 | return_value=False): | |
|
93 | response = self.app.get( | |
|
94 | route_path('repo_summary', repo_name=repo_name) + '/', | |
|
95 | status=200) | |
|
97 | trailing_slash = '/' | |
|
98 | response = self.app.get( | |
|
99 | route_path('repo_summary', repo_name=repo_name) + trailing_slash, | |
|
100 | status=200) | |
|
96 | 101 | |
|
97 | 102 | # clone url... |
|
98 | 103 | assert_clone_url(response, http_host_only_stub, repo_name) |
|
99 | assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id)) | |
|
104 | assert_clone_url(response, http_host_only_stub, f'_{repo_id}') | |
|
100 | 105 | |
|
101 | 106 | def test_index_by_id(self, autologin_user, backend): |
|
102 | 107 | repo_id = backend.repo.repo_id |
|
103 | 108 | response = self.app.get( |
|
104 | route_path('repo_summary', repo_name='_{}'.format(repo_id))) | |
|
109 | route_path('repo_summary', repo_name=f'_{repo_id}')) | |
|
105 | 110 | |
|
106 | 111 | # repo type |
|
107 | 112 | response.mustcontain( |
|
108 | 113 | '<i class="icon-%s">' % (backend.alias, ) |
|
109 | 114 | ) |
|
110 | 115 | # public/private |
|
111 | 116 | response.mustcontain( |
|
112 | 117 | """<i class="icon-unlock-alt">""" |
|
113 | 118 | ) |
|
114 | 119 | |
|
115 | 120 | def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user): |
|
116 | 121 | fixture.create_repo(name='repo_1') |
|
117 | 122 | response = self.app.get(route_path('repo_summary', repo_name='repo_1')) |
|
118 | 123 | |
|
119 | 124 | try: |
|
120 | 125 | response.mustcontain("repo_1") |
|
121 | 126 | finally: |
|
122 | 127 | RepoModel().delete(Repository.get_by_repo_name('repo_1')) |
|
123 | 128 | Session().commit() |
|
124 | 129 | |
|
125 | 130 | def test_index_with_anonymous_access_disabled( |
|
126 | 131 | self, backend, disable_anonymous_user): |
|
127 | 132 | response = self.app.get( |
|
128 | 133 | route_path('repo_summary', repo_name=backend.repo_name), status=302) |
|
129 | 134 | assert 'login' in response.location |
|
130 | 135 | |
|
131 | 136 | def _enable_stats(self, repo): |
|
132 | 137 | r = Repository.get_by_repo_name(repo) |
|
133 | 138 | r.enable_statistics = True |
|
134 | 139 | Session().add(r) |
|
135 | 140 | Session().commit() |
|
136 | 141 | |
|
137 | 142 | expected_trending = { |
|
138 | 143 | 'hg': { |
|
139 | 144 | "py": {"count": 68, "desc": ["Python"]}, |
|
140 | 145 | "rst": {"count": 16, "desc": ["Rst"]}, |
|
141 | 146 | "css": {"count": 2, "desc": ["Css"]}, |
|
142 | 147 | "sh": {"count": 2, "desc": ["Bash"]}, |
|
143 | 148 | "bat": {"count": 1, "desc": ["Batch"]}, |
|
144 | 149 | "cfg": {"count": 1, "desc": ["Ini"]}, |
|
145 | 150 | "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]}, |
|
146 | 151 | "ini": {"count": 1, "desc": ["Ini"]}, |
|
147 | 152 | "js": {"count": 1, "desc": ["Javascript"]}, |
|
148 | 153 | "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]} |
|
149 | 154 | }, |
|
150 | 155 | 'git': { |
|
151 | 156 | "py": {"count": 68, "desc": ["Python"]}, |
|
152 | 157 | "rst": {"count": 16, "desc": ["Rst"]}, |
|
153 | 158 | "css": {"count": 2, "desc": ["Css"]}, |
|
154 | 159 | "sh": {"count": 2, "desc": ["Bash"]}, |
|
155 | 160 | "bat": {"count": 1, "desc": ["Batch"]}, |
|
156 | 161 | "cfg": {"count": 1, "desc": ["Ini"]}, |
|
157 | 162 | "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]}, |
|
158 | 163 | "ini": {"count": 1, "desc": ["Ini"]}, |
|
159 | 164 | "js": {"count": 1, "desc": ["Javascript"]}, |
|
160 | 165 | "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]} |
|
161 | 166 | }, |
|
162 | 167 | 'svn': { |
|
163 | 168 | "py": {"count": 75, "desc": ["Python"]}, |
|
164 | 169 | "rst": {"count": 16, "desc": ["Rst"]}, |
|
165 | 170 | "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]}, |
|
166 | 171 | "css": {"count": 2, "desc": ["Css"]}, |
|
167 | 172 | "bat": {"count": 1, "desc": ["Batch"]}, |
|
168 | 173 | "cfg": {"count": 1, "desc": ["Ini"]}, |
|
169 | 174 | "ini": {"count": 1, "desc": ["Ini"]}, |
|
170 | 175 | "js": {"count": 1, "desc": ["Javascript"]}, |
|
171 | 176 | "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}, |
|
172 | 177 | "sh": {"count": 1, "desc": ["Bash"]} |
|
173 | 178 | }, |
|
174 | 179 | } |
|
175 | 180 | |
|
176 | 181 | def test_repo_stats(self, autologin_user, backend, xhr_header): |
|
177 | 182 | response = self.app.get( |
|
178 | 183 | route_path( |
|
179 | 184 | 'repo_stats', repo_name=backend.repo_name, commit_id='tip'), |
|
180 | 185 | extra_environ=xhr_header, |
|
181 | 186 | status=200) |
|
182 | 187 | assert re.match(r'6[\d\.]+ KiB', response.json['size']) |
|
183 | 188 | |
|
184 | 189 | def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header): |
|
185 | 190 | repo_name = backend.repo_name |
|
186 | 191 | |
|
187 | 192 | # codes stats |
|
188 | 193 | self._enable_stats(repo_name) |
|
189 | 194 | ScmModel().mark_for_invalidation(repo_name) |
|
190 | 195 | |
|
191 | 196 | response = self.app.get( |
|
192 | 197 | route_path( |
|
193 | 198 | 'repo_stats', repo_name=backend.repo_name, commit_id='tip'), |
|
194 | 199 | extra_environ=xhr_header, |
|
195 | 200 | status=200) |
|
196 | 201 | |
|
197 | 202 | expected_data = self.expected_trending[backend.alias] |
|
198 | 203 | returned_stats = response.json['code_stats'] |
|
199 | 204 | for k, v in expected_data.items(): |
|
200 | 205 | assert v == returned_stats[k] |
|
201 | 206 | |
|
202 | 207 | def test_repo_refs_data(self, backend): |
|
203 | 208 | response = self.app.get( |
|
204 | 209 | route_path('repo_refs_data', repo_name=backend.repo_name), |
|
205 | 210 | status=200) |
|
206 | 211 | |
|
207 | 212 | # Ensure that there is the correct amount of items in the result |
|
208 | 213 | repo = backend.repo.scm_instance() |
|
209 | 214 | data = response.json['results'] |
|
210 | 215 | items = sum(len(section['children']) for section in data) |
|
211 | 216 | repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks) |
|
212 | 217 | assert items == repo_refs |
|
213 | 218 | |
|
214 | 219 | def test_index_shows_missing_requirements_message( |
|
215 | 220 | self, backend, autologin_user): |
|
216 | 221 | repo_name = backend.repo_name |
|
217 | 222 | scm_patcher = mock.patch.object( |
|
218 | 223 | Repository, 'scm_instance', side_effect=RepositoryRequirementError) |
|
219 | 224 | |
|
220 | 225 | with scm_patcher: |
|
221 | 226 | response = self.app.get( |
|
222 | 227 | route_path('repo_summary', repo_name=repo_name)) |
|
223 | 228 | assert_response = response.assert_response() |
|
224 | 229 | assert_response.element_contains( |
|
225 | 230 | '.main .alert-warning strong', 'Missing requirements') |
|
226 | 231 | assert_response.element_contains( |
|
227 | 232 | '.main .alert-warning', |
|
228 | 233 | 'Commits cannot be displayed, because this repository ' |
|
229 | 234 | 'uses one or more extensions, which was not enabled.') |
|
230 | 235 | |
|
231 | 236 | def test_missing_requirements_page_does_not_contains_switch_to( |
|
232 | 237 | self, autologin_user, backend): |
|
233 | 238 | repo_name = backend.repo_name |
|
234 | 239 | scm_patcher = mock.patch.object( |
|
235 | 240 | Repository, 'scm_instance', side_effect=RepositoryRequirementError) |
|
236 | 241 | |
|
237 | 242 | with scm_patcher: |
|
238 | 243 | response = self.app.get(route_path('repo_summary', repo_name=repo_name)) |
|
239 | 244 | response.mustcontain(no='Switch To') |
|
240 | 245 | |
|
241 | 246 | |
|
242 | 247 | @pytest.mark.usefixtures('app') |
|
243 | 248 | class TestRepoLocation(object): |
|
244 | 249 | |
|
245 | 250 | @pytest.mark.parametrize("suffix", [u'', u'Δ ΔΕ'], ids=['', 'non-ascii']) |
|
246 | 251 | def test_missing_filesystem_repo( |
|
247 | 252 | self, autologin_user, backend, suffix, csrf_token): |
|
248 | 253 | repo = backend.create_repo(name_suffix=suffix) |
|
249 | 254 | repo_name = repo.repo_name |
|
250 | 255 | |
|
251 | 256 | # delete from file system |
|
252 | 257 | RepoModel()._delete_filesystem_repo(repo) |
|
253 | 258 | |
|
254 | 259 | # test if the repo is still in the database |
|
255 | 260 | new_repo = RepoModel().get_by_repo_name(repo_name) |
|
256 | 261 | assert new_repo.repo_name == repo_name |
|
257 | 262 | |
|
258 | 263 | # check if repo is not in the filesystem |
|
259 | 264 | assert not repo_on_filesystem(repo_name) |
|
260 | 265 | |
|
261 | 266 | response = self.app.get( |
|
262 | 267 | route_path('repo_summary', repo_name=safe_str(repo_name)), status=302) |
|
263 | 268 | |
|
264 | 269 | msg = f'The repository `{repo_name}` cannot be loaded in filesystem. ' \ |
|
265 | 270 | f'Please check if it exist, or is not damaged.' |
|
266 | 271 | assert_session_flash(response, msg) |
|
267 | 272 | |
|
268 | 273 | @pytest.mark.parametrize("suffix", [u'', u'Δ ΔΕ'], ids=['', 'non-ascii']) |
|
269 | 274 | def test_missing_filesystem_repo_on_repo_check( |
|
270 | 275 | self, autologin_user, backend, suffix, csrf_token): |
|
271 | 276 | repo = backend.create_repo(name_suffix=suffix) |
|
272 | 277 | repo_name = repo.repo_name |
|
273 | 278 | |
|
274 | 279 | # delete from file system |
|
275 | 280 | RepoModel()._delete_filesystem_repo(repo) |
|
276 | 281 | |
|
277 | 282 | # test if the repo is still in the database |
|
278 | 283 | new_repo = RepoModel().get_by_repo_name(repo_name) |
|
279 | 284 | assert new_repo.repo_name == repo_name |
|
280 | 285 | |
|
281 | 286 | # check if repo is not in the filesystem |
|
282 | 287 | assert not repo_on_filesystem(repo_name) |
|
283 | 288 | |
|
284 | 289 | # flush the session |
|
285 | 290 | self.app.get( |
|
286 | 291 | route_path('repo_summary', repo_name=safe_str(repo_name)), |
|
287 | 292 | status=302) |
|
288 | 293 | |
|
289 | 294 | response = self.app.get( |
|
290 | 295 | route_path('repo_creating_check', repo_name=safe_str(repo_name)), |
|
291 | 296 | status=200) |
|
292 | 297 | msg = 'The repository `%s` cannot be loaded in filesystem. ' \ |
|
293 | 298 | 'Please check if it exist, or is not damaged.' % repo_name |
|
294 | 299 | assert_session_flash(response, msg)
|
295 | 300 | |
|
296 | 301 | |
|
297 | 302 | @pytest.fixture() |
|
298 | 303 | def summary_view(context_stub, request_stub, user_util): |
|
299 | 304 | """ |
|
300 | 305 | Bootstrap view to test the view functions |
|
301 | 306 | """ |
|
302 | 307 | request_stub.matched_route = AttributeDict(name='test_view') |
|
303 | 308 | |
|
304 | 309 | request_stub.user = user_util.create_user().AuthUser() |
|
305 | 310 | request_stub.db_repo = user_util.create_repo() |
|
306 | 311 | |
|
307 | 312 | view = RepoSummaryView(context=context_stub, request=request_stub) |
|
308 | 313 | return view |
|
309 | 314 | |
|
310 | 315 | |
|
311 | 316 | @pytest.mark.usefixtures('app') |
|
312 | 317 | class TestCreateReferenceData(object): |
|
313 | 318 | |
|
314 | 319 | @pytest.fixture() |
|
315 | 320 | def example_refs(self): |
|
316 | 321 | section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id'))) |
|
317 | 322 | example_refs = [ |
|
318 | 323 | ('section_1', section_1_refs, 't1'), |
|
319 | 324 | ('section_2', {'c': 'c_id'}, 't2'), |
|
320 | 325 | ] |
|
321 | 326 | return example_refs |
|
322 | 327 | |
|
323 | 328 | def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view): |
|
324 | 329 | repo = mock.Mock() |
|
325 | 330 | repo.name = 'test-repo' |
|
326 | 331 | repo.alias = 'git' |
|
327 | 332 | full_repo_name = 'pytest-repo-group/' + repo.name |
|
328 | 333 | |
|
329 | 334 | result = summary_view._create_reference_data( |
|
330 | 335 | repo, full_repo_name, example_refs) |
|
331 | 336 | |
|
332 | 337 | expected_files_url = '/{}/files/'.format(full_repo_name) |
|
333 | 338 | expected_result = [ |
|
334 | 339 | { |
|
335 | 340 | 'children': [ |
|
336 | 341 | { |
|
337 | 342 | 'id': 'a', 'idx': 0, 'raw_id': 'a_id', 'text': 'a', 'type': 't1', |
|
338 | 343 | 'files_url': expected_files_url + 'a/?at=a', |
|
339 | 344 | }, |
|
340 | 345 | { |
|
341 | 346 | 'id': 'b', 'idx': 0, 'raw_id': 'b_id', 'text': 'b', 'type': 't1', |
|
342 | 347 | 'files_url': expected_files_url + 'b/?at=b', |
|
343 | 348 | } |
|
344 | 349 | ], |
|
345 | 350 | 'text': 'section_1' |
|
346 | 351 | }, |
|
347 | 352 | { |
|
348 | 353 | 'children': [ |
|
349 | 354 | { |
|
350 | 355 | 'id': 'c', 'idx': 0, 'raw_id': 'c_id', 'text': 'c', 'type': 't2', |
|
351 | 356 | 'files_url': expected_files_url + 'c/?at=c', |
|
352 | 357 | } |
|
353 | 358 | ], |
|
354 | 359 | 'text': 'section_2' |
|
355 | 360 | }] |
|
356 | 361 | assert result == expected_result |
|
357 | 362 | |
|
358 | 363 | def test_generates_refs_with_path_for_svn(self, example_refs, summary_view): |
|
359 | 364 | repo = mock.Mock() |
|
360 | 365 | repo.name = 'test-repo' |
|
361 | 366 | repo.alias = 'svn' |
|
362 | 367 | full_repo_name = 'pytest-repo-group/' + repo.name |
|
363 | 368 | |
|
364 | 369 | result = summary_view._create_reference_data( |
|
365 | 370 | repo, full_repo_name, example_refs) |
|
366 | 371 | |
|
367 | 372 | expected_files_url = '/{}/files/'.format(full_repo_name) |
|
368 | 373 | expected_result = [ |
|
369 | 374 | { |
|
370 | 375 | 'children': [ |
|
371 | 376 | { |
|
372 | 377 | 'id': 'a@a_id', 'idx': 0, 'raw_id': 'a_id', |
|
373 | 378 | 'text': 'a', 'type': 't1', |
|
374 | 379 | 'files_url': expected_files_url + 'a_id/a?at=a', |
|
375 | 380 | }, |
|
376 | 381 | { |
|
377 | 382 | 'id': 'b@b_id', 'idx': 0, 'raw_id': 'b_id', |
|
378 | 383 | 'text': 'b', 'type': 't1', |
|
379 | 384 | 'files_url': expected_files_url + 'b_id/b?at=b', |
|
380 | 385 | } |
|
381 | 386 | ], |
|
382 | 387 | 'text': 'section_1' |
|
383 | 388 | }, |
|
384 | 389 | { |
|
385 | 390 | 'children': [ |
|
386 | 391 | { |
|
387 | 392 | 'id': 'c@c_id', 'idx': 0, 'raw_id': 'c_id', |
|
388 | 393 | 'text': 'c', 'type': 't2', |
|
389 | 394 | 'files_url': expected_files_url + 'c_id/c?at=c', |
|
390 | 395 | } |
|
391 | 396 | ], |
|
392 | 397 | 'text': 'section_2' |
|
393 | 398 | } |
|
394 | 399 | ] |
|
395 | 400 | assert result == expected_result |
|
396 | 401 | |
|
397 | 402 | |
|
398 | 403 | class TestCreateFilesUrl(object): |
|
399 | 404 | |
|
400 | 405 | def test_creates_non_svn_url(self, app, summary_view): |
|
401 | 406 | repo = mock.Mock() |
|
402 | 407 | repo.name = 'abcde' |
|
403 | 408 | full_repo_name = 'test-repo-group/' + repo.name |
|
404 | 409 | ref_name = 'branch1' |
|
405 | 410 | raw_id = 'deadbeef0123456789' |
|
406 | 411 | is_svn = False |
|
407 | 412 | |
|
408 | 413 | with mock.patch('rhodecode.lib.helpers.route_path') as url_mock: |
|
409 | 414 | result = summary_view._create_files_url( |
|
410 | 415 | repo, full_repo_name, ref_name, raw_id, is_svn) |
|
411 | 416 | url_mock.assert_called_once_with( |
|
412 | 417 | 'repo_files', repo_name=full_repo_name, commit_id=ref_name, |
|
413 | 418 | f_path='', _query=dict(at=ref_name)) |
|
414 | 419 | assert result == url_mock.return_value |
|
415 | 420 | |
|
416 | 421 | def test_creates_svn_url(self, app, summary_view): |
|
417 | 422 | repo = mock.Mock() |
|
418 | 423 | repo.name = 'abcde' |
|
419 | 424 | full_repo_name = 'test-repo-group/' + repo.name |
|
420 | 425 | ref_name = 'branch1' |
|
421 | 426 | raw_id = 'deadbeef0123456789' |
|
422 | 427 | is_svn = True |
|
423 | 428 | |
|
424 | 429 | with mock.patch('rhodecode.lib.helpers.route_path') as url_mock: |
|
425 | 430 | result = summary_view._create_files_url( |
|
426 | 431 | repo, full_repo_name, ref_name, raw_id, is_svn) |
|
427 | 432 | url_mock.assert_called_once_with( |
|
428 | 433 | 'repo_files', repo_name=full_repo_name, f_path=ref_name, |
|
429 | 434 | commit_id=raw_id, _query=dict(at=ref_name)) |
|
430 | 435 | assert result == url_mock.return_value |
|
431 | 436 | |
|
432 | 437 | def test_name_has_slashes(self, app, summary_view): |
|
433 | 438 | repo = mock.Mock() |
|
434 | 439 | repo.name = 'abcde' |
|
435 | 440 | full_repo_name = 'test-repo-group/' + repo.name |
|
436 | 441 | ref_name = 'branch1/branch2' |
|
437 | 442 | raw_id = 'deadbeef0123456789' |
|
438 | 443 | is_svn = False |
|
439 | 444 | |
|
440 | 445 | with mock.patch('rhodecode.lib.helpers.route_path') as url_mock: |
|
441 | 446 | result = summary_view._create_files_url( |
|
442 | 447 | repo, full_repo_name, ref_name, raw_id, is_svn) |
|
443 | 448 | url_mock.assert_called_once_with( |
|
444 | 449 | 'repo_files', repo_name=full_repo_name, commit_id=raw_id, |
|
445 | 450 | f_path='', _query=dict(at=ref_name)) |
|
446 | 451 | assert result == url_mock.return_value |
|
447 | 452 | |
|
448 | 453 | |
|
449 | 454 | class TestReferenceItems(object): |
|
450 | 455 | repo = mock.Mock() |
|
451 | 456 | repo.name = 'pytest-repo' |
|
452 | 457 | repo_full_name = 'pytest-repo-group/' + repo.name |
|
453 | 458 | ref_type = 'branch' |
|
454 | 459 | fake_url = '/abcde/' |
|
455 | 460 | |
|
456 | 461 | @staticmethod |
|
457 | 462 | def _format_function(name, id_): |
|
458 | 463 | return 'format_function_{}_{}'.format(name, id_) |
|
459 | 464 | |
|
460 | 465 | def test_creates_required_amount_of_items(self, summary_view): |
|
461 | 466 | amount = 100 |
|
462 | 467 | refs = { |
|
463 | 468 | 'ref{}'.format(i): '{0:040d}'.format(i) |
|
464 | 469 | for i in range(amount) |
|
465 | 470 | } |
|
466 | 471 | |
|
467 | 472 | url_patcher = mock.patch.object(summary_view, '_create_files_url') |
|
468 | 473 | svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn', |
|
469 | 474 | return_value=False) |
|
470 | 475 | |
|
471 | 476 | with url_patcher as url_mock, svn_patcher: |
|
472 | 477 | result = summary_view._create_reference_items( |
|
473 | 478 | self.repo, self.repo_full_name, refs, self.ref_type, |
|
474 | 479 | self._format_function) |
|
475 | 480 | assert len(result) == amount |
|
476 | 481 | assert url_mock.call_count == amount |
|
477 | 482 | |
|
478 | 483 | def test_single_item_details(self, summary_view): |
|
479 | 484 | ref_name = 'ref1' |
|
480 | 485 | ref_id = 'deadbeef' |
|
481 | 486 | refs = { |
|
482 | 487 | ref_name: ref_id |
|
483 | 488 | } |
|
484 | 489 | |
|
485 | 490 | svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn', |
|
486 | 491 | return_value=False) |
|
487 | 492 | |
|
488 | 493 | url_patcher = mock.patch.object( |
|
489 | 494 | summary_view, '_create_files_url', return_value=self.fake_url) |
|
490 | 495 | |
|
491 | 496 | with url_patcher as url_mock, svn_patcher: |
|
492 | 497 | result = summary_view._create_reference_items( |
|
493 | 498 | self.repo, self.repo_full_name, refs, self.ref_type, |
|
494 | 499 | self._format_function) |
|
495 | 500 | |
|
496 | 501 | url_mock.assert_called_once_with( |
|
497 | 502 | self.repo, self.repo_full_name, ref_name, ref_id, False) |
|
498 | 503 | expected_result = [ |
|
499 | 504 | { |
|
500 | 505 | 'text': ref_name, |
|
501 | 506 | 'id': self._format_function(ref_name, ref_id), |
|
502 | 507 | 'raw_id': ref_id, |
|
503 | 508 | 'idx': 0, |
|
504 | 509 | 'type': self.ref_type, |
|
505 | 510 | 'files_url': self.fake_url |
|
506 | 511 | } |
|
507 | 512 | ] |
|
508 | 513 | assert result == expected_result |
@@ -1,256 +1,257 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import os |
|
20 | 20 | import re |
|
21 | 21 | import sys |
|
22 | 22 | import logging |
|
23 | 23 | import signal |
|
24 | 24 | import tempfile |
|
25 | 25 | from subprocess import Popen, PIPE |
|
26 | 26 | import urllib.parse |
|
27 | 27 | |
|
28 | 28 | from .base import VcsServer |
|
29 | 29 | |
|
30 | 30 | log = logging.getLogger(__name__) |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | class SubversionTunnelWrapper(object): |
|
34 | 34 | process = None |
|
35 | 35 | |
|
36 | 36 | def __init__(self, server): |
|
37 | 37 | self.server = server |
|
38 | 38 | self.timeout = 30 |
|
39 | 39 | self.stdin = sys.stdin |
|
40 | 40 | self.stdout = sys.stdout |
|
41 | 41 | self.svn_conf_fd, self.svn_conf_path = tempfile.mkstemp() |
|
42 | 42 | self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp() |
|
43 | 43 | |
|
44 | 44 | self.read_only = True # flag that we set to make the hooks readonly |
|
45 | 45 | |
|
46 | 46 | def create_svn_config(self): |
|
47 | 47 | content = ( |
|
48 | 48 | '[general]\n' |
|
49 | 49 | 'hooks-env = {}\n').format(self.hooks_env_path) |
|
50 | 50 | with os.fdopen(self.svn_conf_fd, 'w') as config_file: |
|
51 | 51 | config_file.write(content) |
|
52 | 52 | |
|
53 | 53 | def create_hooks_env(self): |
|
54 | 54 | content = ( |
|
55 | 55 | '[default]\n' |
|
56 | 56 | 'LANG = en_US.UTF-8\n') |
|
57 | 57 | if self.read_only: |
|
58 | 58 | content += 'SSH_READ_ONLY = 1\n' |
|
59 | 59 | with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file: |
|
60 | 60 | hooks_env_file.write(content) |
|
61 | 61 | |
|
62 | 62 | def remove_configs(self): |
|
63 | 63 | os.remove(self.svn_conf_path) |
|
64 | 64 | os.remove(self.hooks_env_path) |
|
65 | 65 | |
|
66 | 66 | def command(self): |
|
67 | 67 | root = self.server.get_root_store() |
|
68 | 68 | username = self.server.user.username |
|
69 | 69 | |
|
70 | 70 | command = [ |
|
71 | 71 | self.server.svn_path, '-t', |
|
72 | 72 | '--config-file', self.svn_conf_path, |
|
73 | 73 | '--tunnel-user', username, |
|
74 | 74 | '-r', root] |
|
75 | 75 | log.debug("Final CMD: %s", ' '.join(command)) |
|
76 | 76 | return command |
|
77 | 77 | |
|
78 | 78 | def start(self): |
|
79 | 79 | command = self.command() |
|
80 | 80 | self.process = Popen(' '.join(command), stdin=PIPE, shell=True) |
|
81 | 81 | |
|
82 | 82 | def sync(self): |
|
83 | 83 | while self.process.poll() is None: |
|
84 | 84 | next_byte = self.stdin.read(1) |
|
85 | 85 | if not next_byte: |
|
86 | 86 | break |
|
87 | 87 | self.process.stdin.write(next_byte) |
|
88 | 88 | self.remove_configs() |
|
89 | 89 | |
|
90 | 90 | @property |
|
91 | 91 | def return_code(self): |
|
92 | 92 | return self.process.returncode |
|
93 | 93 | |
|
94 | 94 | def get_first_client_response(self): |
|
95 | 95 | signal.signal(signal.SIGALRM, self.interrupt) |
|
96 | 96 | signal.alarm(self.timeout) |
|
97 | 97 | first_response = self._read_first_client_response() |
|
98 | 98 | signal.alarm(0) |
|
99 | 99 | return (self._parse_first_client_response(first_response) |
|
100 | 100 | if first_response else None) |
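
The timeout above relies on the classic SIGALRM pattern: arm a one-shot alarm before a blocking read, and disarm it once the read returns. A minimal standalone sketch of the same pattern (POSIX-only; the handler name and 30-second budget are illustrative, not taken from this module, which reports failure via `fail()` instead of raising):

    import signal

    def on_timeout(signum, frame):
        raise TimeoutError('no handshake received from client')

    signal.signal(signal.SIGALRM, on_timeout)
    signal.alarm(30)       # deliver SIGALRM in 30s unless cancelled
    try:
        line = input()     # stand-in for the blocking stdin read
    finally:
        signal.alarm(0)    # cancel the pending alarm either way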
|
101 | 101 | |
|
102 | 102 | def patch_first_client_response(self, response, **kwargs): |
|
103 | 103 | self.create_hooks_env() |
|
104 | 104 | data = response.copy() |
|
105 | 105 | data.update(kwargs) |
|
106 | 106 | data['url'] = self._svn_string(data['url']) |
|
107 | 107 | data['ra_client'] = self._svn_string(data['ra_client']) |
|
108 | 108 | data['client'] = data['client'] or '' |
|
109 | 109 | buffer_ = ( |
|
110 | 110 | "( {version} ( {capabilities} ) {url}{ra_client}" |
|
111 | 111 | "( {client}) ) ".format(**data)) |
|
112 | 112 | self.process.stdin.write(buffer_) |
|
113 | 113 | |
|
114 | 114 | def fail(self, message): |
|
115 | 115 | print("( failure ( ( 210005 {message} 0: 0 ) ) )".format( |
|
116 | 116 | message=self._svn_string(message))) |
|
117 | 117 | self.remove_configs() |
|
118 | 118 | self.process.kill() |
|
119 | 119 | return 1 |
|
120 | 120 | |
|
121 | 121 | def interrupt(self, signum, frame): |
|
122 | 122 | self.fail("Exited by timeout") |
|
123 | 123 | |
|
124 | 124 | def _svn_string(self, str_): |
|
125 | 125 | if not str_: |
|
126 | 126 | return '' |
|
127 | 127 | return f'{len(str_)}:{str_} ' |
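
`_svn_string` emits the length-prefixed string encoding used on the svn wire, `<length>:<value> `, with empty input collapsing to an empty string. A standalone illustration:

    def svn_string(value):
        # length-prefixed svn protocol string: '<len>:<value> '
        return f'{len(value)}:{value} ' if value else ''

    assert svn_string('my-repo') == '7:my-repo '
    assert svn_string('') == ''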
|
128 | 128 | |
|
129 | 129 | def _read_first_client_response(self): |
|
130 | 130 | buffer_ = "" |
|
131 | 131 | brackets_stack = [] |
|
132 | 132 | while True: |
|
133 | 133 | next_byte = self.stdin.read(1) |
|
134 | 134 | buffer_ += next_byte |
|
135 | 135 | if next_byte == "(": |
|
136 | 136 | brackets_stack.append(next_byte) |
|
137 | 137 | elif next_byte == ")": |
|
138 | 138 | brackets_stack.pop() |
|
139 | 139 | elif next_byte == " " and not brackets_stack: |
|
140 | 140 | break |
|
141 | 141 | |
|
142 | 142 | return buffer_ |
|
143 | 143 | |
|
144 | 144 | def _parse_first_client_response(self, buffer_): |
|
145 | 145 | """ |
|
146 | 146 | According to the Subversion RA protocol, the first request |
|
147 | 147 | should look like: |
|
148 | 148 | |
|
149 | 149 | ( version:number ( cap:word ... ) url:string ? ra-client:string |
|
150 | 150 | ( ? client:string ) ) |
|
151 | 151 | |
|
152 | 152 | Please check https://svn.apache.org/repos/asf/subversion/trunk/subversion/libsvn_ra_svn/protocol |
|
153 | 153 | """ |
|
154 | 154 | version_re = r'(?P<version>\d+)' |
|
155 | 155 | capabilities_re = r'\(\s(?P<capabilities>[\w\d\-\ ]+)\s\)' |
|
156 | 156 | url_re = r'\d+\:(?P<url>[\W\w]+)' |
|
157 | 157 | ra_client_re = r'(\d+\:(?P<ra_client>[\W\w]+)\s)' |
|
158 | 158 | client_re = r'(\d+\:(?P<client>[\W\w]+)\s)*' |
|
159 | 159 | regex = re.compile( |
|
160 | 160 | r'^\(\s{version}\s{capabilities}\s{url}\s{ra_client}' |
|
161 | 161 | r'\(\s{client}\)\s\)\s*$'.format( |
|
162 | 162 | version=version_re, capabilities=capabilities_re, |
|
163 | 163 | url=url_re, ra_client=ra_client_re, client=client_re)) |
|
164 | 164 | matcher = regex.match(buffer_) |
|
165 | 165 | |
|
166 | 166 | return matcher.groupdict() if matcher else None |
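
For illustration, a hypothetical first client response and the fields the regex above extracts from it (the sample values are invented; only the shape follows the protocol):

    # '( version ( capabilities ) url ra-client ( client ) )'
    sample = ('( 2 ( edit-pipeline svndiff2 ) '
              '26:svn+ssh://host/my-svn-repo 10:SVN/1.14.0 ( ) ) ')
    # _parse_first_client_response(sample) would return approximately:
    expected = {
        'version': '2',
        'capabilities': 'edit-pipeline svndiff2',
        'url': 'svn+ssh://host/my-svn-repo',
        'ra_client': 'SVN/1.14.0',
        'client': None,  # the optional client part is absent here
    }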
|
167 | 167 | |
|
168 | 168 | def _match_repo_name(self, url): |
|
169 | 169 | """ |
|
170 | 170 | Given a server url, try to match it against ALL known repository names. |
|
171 | 171 | This handles a tricky SVN case for SSH and subdir commits. |
|
172 | 172 | E.g. if our repo name is my-svn-repo, an svn commit on a file in a subdir would |
|
173 | 173 | result in the url with this subdir added. |
|
174 | 174 | """ |
|
175 | 175 | # case 1 direct match, we don't do any "heavy" lookups |
|
176 | 176 | if url in self.server.user_permissions: |
|
177 | 177 | return url |
|
178 | 178 | |
|
179 | 179 | log.debug('Extracting repository name from subdir path %s', url) |
|
180 | 180 | # case 2: we check all permissions, and match the closest possible case... |
|
181 | 181 | # NOTE(dan): In this case we only know that the url has subdir parts, it's safe |
|
182 | 182 | # to assume that it will have the repo name as prefix, we ensure the prefix |
|
183 | 183 | # for similar repositories isn't matched by adding a / |
|
184 | 184 | # e.g. subgroup/repo-name/ and subgroup/repo-name-1/ would work correctly. |
|
185 | 185 | for repo_name in self.server.user_permissions: |
|
186 | 186 | repo_name_prefix = repo_name + '/' |
|
187 | 187 | if url.startswith(repo_name_prefix): |
|
188 | 188 | log.debug('Found prefix %s match, returning proper repository name', |
|
189 | 189 | repo_name_prefix) |
|
190 | 190 | return repo_name |
|
191 | 191 | |
|
192 | 192 | return |
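
The trailing slash is what keeps sibling repositories apart during the prefix scan. A standalone sketch of the same lookup, with a hypothetical permission map:

    permissions = {'subgroup/repo-name': 'write', 'subgroup/repo-name-1': 'read'}

    def match_repo_name(url, permissions):
        if url in permissions:            # case 1: direct hit
            return url
        for repo_name in permissions:     # case 2: prefix match with '/'
            if url.startswith(repo_name + '/'):
                return repo_name

    assert match_repo_name('subgroup/repo-name/trunk/docs', permissions) == 'subgroup/repo-name'
    assert match_repo_name('subgroup/repo-name-1', permissions) == 'subgroup/repo-name-1'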
|
193 | 193 | |
|
194 | 194 | def run(self, extras): |
|
195 | 195 | action = 'pull' |
|
196 | 196 | self.create_svn_config() |
|
197 | 197 | self.start() |
|
198 | 198 | |
|
199 | 199 | first_response = self.get_first_client_response() |
|
200 | 200 | if not first_response: |
|
201 | 201 | return self.fail("Repository name cannot be extracted") |
|
202 | 202 | |
|
203 | 203 | url_parts = urllib.parse.urlparse(first_response['url']) |
|
204 | 204 | |
|
205 | 205 | self.server.repo_name = self._match_repo_name(url_parts.path.strip('/')) |
|
206 | 206 | |
|
207 | 207 | exit_code = self.server._check_permissions(action) |
|
208 | 208 | if exit_code: |
|
209 | 209 | return exit_code |
|
210 | 210 | |
|
211 | 211 | # set the readonly flag to False if we have proper permissions |
|
212 | 212 | if self.server.has_write_perm(): |
|
213 | 213 | self.read_only = False |
|
214 | 214 | self.server.update_environment(action=action, extras=extras) |
|
215 | 215 | |
|
216 | 216 | self.patch_first_client_response(first_response) |
|
217 | 217 | self.sync() |
|
218 | 218 | return self.return_code |
|
219 | 219 | |
|
220 | 220 | |
|
221 | 221 | class SubversionServer(VcsServer): |
|
222 | 222 | backend = 'svn' |
|
223 | 223 | repo_user_agent = 'svn' |
|
224 | 224 | |
|
225 | 225 | def __init__(self, store, ini_path, repo_name, |
|
226 | 226 | user, user_permissions, config, env): |
|
227 | 227 | super()\ |
|
228 | 228 | .__init__(user, user_permissions, config, env) |
|
229 | 229 | self.store = store |
|
230 | 230 | self.ini_path = ini_path |
|
231 | 231 | # NOTE(dan): repo_name at this point is empty, |
|
232 | 232 | # this is set later in .run() based on the parsed input stream |
|
233 | 233 | self.repo_name = repo_name |
|
234 | 234 | self._path = self.svn_path = config.get('app:main', 'ssh.executable.svn') |
|
235 | 235 | |
|
236 | 236 | self.tunnel = SubversionTunnelWrapper(server=self) |
|
237 | 237 | |
|
238 | 238 | def _handle_tunnel(self, extras): |
|
239 | 239 | |
|
240 | 240 | # pre-auth |
|
241 | 241 | action = 'pull' |
|
242 | 242 | # Special case for SVN, we extract repo name at later stage |
|
243 | 243 | # exit_code = self._check_permissions(action) |
|
244 | 244 | # if exit_code: |
|
245 | 245 | # return exit_code, False |
|
246 | 246 | |
|
247 | req = self.env | |

248 | server_url = req.host_url + req.script_name | |
|
249 | extras['server_url'] = server_url | |
|
247 | req = self.env.get('request') | |
|
248 | if req: | |
|
249 | server_url = req.host_url + req.script_name | |
|
250 | extras['server_url'] = server_url | |
|
250 | 251 | |
|
251 | 252 | log.debug('Using %s binaries from path %s', self.backend, self._path) |
|
252 | 253 | exit_code = self.tunnel.run(extras) |
|
253 | 254 | |
|
254 | 255 | return exit_code, action == "push" |
|
255 | 256 | |
|
256 | 257 |
@@ -1,637 +1,639 b'' | |||
|
1 | 1 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import os |
|
20 | 20 | import sys |
|
21 | 21 | import collections |
|
22 | 22 | import tempfile |
|
23 | 23 | import time |
|
24 | 24 | import logging.config |
|
25 | 25 | |
|
26 | 26 | from paste.gzipper import make_gzip_middleware |
|
27 | 27 | import pyramid.events |
|
28 | 28 | from pyramid.wsgi import wsgiapp |
|
29 | 29 | from pyramid.authorization import ACLAuthorizationPolicy |
|
30 | 30 | from pyramid.config import Configurator |
|
31 | 31 | from pyramid.settings import asbool, aslist |
|
32 | 32 | from pyramid.httpexceptions import ( |
|
33 | 33 | HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound) |
|
34 | 34 | from pyramid.renderers import render_to_response |
|
35 | 35 | |
|
36 | 36 | from rhodecode.model import meta |
|
37 | 37 | from rhodecode.config import patches |
|
38 | 38 | from rhodecode.config import utils as config_utils |
|
39 | 39 | from rhodecode.config.settings_maker import SettingsMaker |
|
40 | 40 | from rhodecode.config.environment import load_pyramid_environment |
|
41 | 41 | |
|
42 | 42 | import rhodecode.events |
|
43 | 43 | from rhodecode.lib.middleware.vcs import VCSMiddleware |
|
44 | 44 | from rhodecode.lib.request import Request |
|
45 | 45 | from rhodecode.lib.vcs import VCSCommunicationError |
|
46 | 46 | from rhodecode.lib.exceptions import VCSServerUnavailable |
|
47 | 47 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
48 | 48 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
49 | 49 | from rhodecode.lib.plugins.utils import register_rhodecode_plugin |
|
50 | 50 | from rhodecode.lib.utils2 import AttributeDict |
|
51 | 51 | from rhodecode.lib.exc_tracking import store_exception, format_exc |
|
52 | 52 | from rhodecode.subscribers import ( |
|
53 | 53 | scan_repositories_if_enabled, write_js_routes_if_enabled, |
|
54 | 54 | write_metadata_if_needed, write_usage_data) |
|
55 | 55 | from rhodecode.lib.statsd_client import StatsdClient |
|
56 | 56 | |
|
57 | 57 | log = logging.getLogger(__name__) |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | def is_http_error(response): |
|
61 | 61 | # error which should have traceback |
|
62 | 62 | return response.status_code > 499 |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | def should_load_all(): |
|
66 | 66 | """ |
|
67 | 67 | Returns True if all application components should be loaded. In some cases it's |

68 | 68 | desirable to skip app loading, for faster shell script execution |
|
69 | 69 | """ |
|
70 | 70 | ssh_cmd = os.environ.get('RC_CMD_SSH_WRAPPER') |
|
71 | 71 | if ssh_cmd: |
|
72 | 72 | return False |
|
73 | 73 | |
|
74 | 74 | return True |
|
75 | 75 | |
|
76 | 76 | |
|
77 | 77 | def make_pyramid_app(global_config, **settings): |
|
78 | 78 | """ |
|
79 | 79 | Constructs the WSGI application based on Pyramid. |
|
80 | 80 | |
|
81 | 81 | Specials: |
|
82 | 82 | |
|
83 | 83 | * The application can also be integrated like a plugin via the call to |
|
84 | 84 | `includeme`. This is accompanied by the other utility functions which |

85 | 85 | are called. Changing this should be done with great care, to not break |
|
86 | 86 | cases when these fragments are assembled from another place. |
|
87 | 87 | |
|
88 | 88 | """ |
|
89 | 89 | start_time = time.time() |
|
90 | 90 | log.info('Pyramid app config starting') |
|
91 | 91 | |
|
92 | 92 | sanitize_settings_and_apply_defaults(global_config, settings) |
|
93 | 93 | |
|
94 | 94 | # init and bootstrap StatsdClient |
|
95 | 95 | StatsdClient.setup(settings) |
|
96 | 96 | |
|
97 | 97 | config = Configurator(settings=settings) |
|
98 | 98 | # Init our statsd at very start |
|
99 | 99 | config.registry.statsd = StatsdClient.statsd |
|
100 | 100 | |
|
101 | 101 | # Apply compatibility patches |
|
102 | 102 | patches.inspect_getargspec() |
|
103 | 103 | |
|
104 | 104 | load_pyramid_environment(global_config, settings) |
|
105 | 105 | |
|
106 | 106 | # Static file view comes first |
|
107 | 107 | includeme_first(config) |
|
108 | 108 | |
|
109 | 109 | includeme(config) |
|
110 | 110 | |
|
111 | 111 | pyramid_app = config.make_wsgi_app() |
|
112 | 112 | pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config) |
|
113 | 113 | pyramid_app.config = config |
|
114 | 114 | |
|
115 | 115 | celery_settings = get_celery_config(settings) |
|
116 | 116 | config.configure_celery(celery_settings) |
|
117 | 117 | |
|
118 | 118 | # creating the app uses a connection - return it after we are done |
|
119 | 119 | meta.Session.remove() |
|
120 | 120 | |
|
121 | 121 | total_time = time.time() - start_time |
|
122 | 122 | log.info('Pyramid app created and configured in %.2fs', total_time) |
|
123 | 123 | return pyramid_app |
|
124 | 124 | |
|
125 | 125 | |
|
126 | 126 | def get_celery_config(settings): |
|
127 | 127 | """ |
|
128 | 128 | Converts basic ini configuration into celery 4.X options |
|
129 | 129 | """ |
|
130 | 130 | |
|
131 | 131 | def key_converter(key_name): |
|
132 | 132 | pref = 'celery.' |
|
133 | 133 | if key_name.startswith(pref): |
|
134 | 134 | return key_name[len(pref):].replace('.', '_').lower() |
|
135 | 135 | |
|
136 | 136 | def type_converter(parsed_key, value): |
|
137 | 137 | # cast to int |
|
138 | 138 | if value.isdigit(): |
|
139 | 139 | return int(value) |
|
140 | 140 | |
|
141 | 141 | # cast to bool |
|
142 | 142 | if value.lower() in ['true', 'false']: |
|
143 | 143 | return value.lower() == 'true' |
|
144 | 144 | return value |
|
145 | 145 | |
|
146 | 146 | celery_config = {} |
|
147 | 147 | for k, v in settings.items(): |
|
148 | 148 | pref = 'celery.' |
|
149 | 149 | if k.startswith(pref): |
|
150 | 150 | celery_config[key_converter(k)] = type_converter(key_converter(k), v) |
|
151 | 151 | |
|
152 | 152 | # TODO: rethink if we want to support celerybeat-based file config, probably NOT |
|
153 | 153 | # beat_config = {} |
|
154 | 154 | # for section in parser.sections(): |
|
155 | 155 | # if section.startswith('celerybeat:'): |
|
156 | 156 | # name = section.split(':', 1)[1] |
|
157 | 157 | # beat_config[name] = get_beat_config(parser, section) |
|
158 | 158 | |
|
159 | 159 | # final compose of settings |
|
160 | 160 | celery_settings = {} |
|
161 | 161 | |
|
162 | 162 | if celery_config: |
|
163 | 163 | celery_settings.update(celery_config) |
|
164 | 164 | # if beat_config: |
|
165 | 165 | # celery_settings.update({'beat_schedule': beat_config}) |
|
166 | 166 | |
|
167 | 167 | return celery_settings |
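
In effect, `celery.`-prefixed ini keys are renamed and type-coerced while all other keys are dropped. Roughly, with hypothetical sample settings:

    settings = {
        'celery.broker_url': 'redis://redis:6379/8',
        'celery.task_always_eager': 'false',
        'celery.worker_concurrency': '4',
        'use': 'egg:gunicorn#main',  # non-celery key, ignored
    }
    # get_celery_config(settings) would yield approximately:
    # {'broker_url': 'redis://redis:6379/8',
    #  'task_always_eager': False,
    #  'worker_concurrency': 4}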
|
168 | 168 | |
|
169 | 169 | |
|
170 | 170 | def not_found_view(request): |
|
171 | 171 | """ |
|
172 | 172 | This creates the view which should be registered as not-found-view to |
|
173 | 173 | pyramid. |
|
174 | 174 | """ |
|
175 | 175 | |
|
176 | 176 | if not getattr(request, 'vcs_call', None): |
|
177 | 177 | # handle like regular case with our error_handler |
|
178 | 178 | return error_handler(HTTPNotFound(), request) |
|
179 | 179 | |
|
180 | 180 | # handle not found view as a vcs call |
|
181 | 181 | settings = request.registry.settings |
|
182 | 182 | ae_client = getattr(request, 'ae_client', None) |
|
183 | 183 | vcs_app = VCSMiddleware( |
|
184 | 184 | HTTPNotFound(), request.registry, settings, |
|
185 | 185 | appenlight_client=ae_client) |
|
186 | 186 | |
|
187 | 187 | return wsgiapp(vcs_app)(None, request) |
|
188 | 188 | |
|
189 | 189 | |
|
190 | 190 | def error_handler(exception, request): |
|
191 | 191 | import rhodecode |
|
192 | 192 | from rhodecode.lib import helpers |
|
193 | 193 | |
|
194 | 194 | rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode' |
|
195 | 195 | |
|
196 | 196 | base_response = HTTPInternalServerError() |
|
197 | 197 | # prefer original exception for the response since it may have headers set |
|
198 | 198 | if isinstance(exception, HTTPException): |
|
199 | 199 | base_response = exception |
|
200 | 200 | elif isinstance(exception, VCSCommunicationError): |
|
201 | 201 | base_response = VCSServerUnavailable() |
|
202 | 202 | |
|
203 | 203 | if is_http_error(base_response): |
|
204 | 204 | traceback_info = format_exc(request.exc_info) |
|
205 | 205 | log.error( |
|
206 | 206 | 'error occurred handling this request for path: %s, \n%s', |
|
207 | 207 | request.path, traceback_info) |
|
208 | 208 | |
|
209 | 209 | error_explanation = base_response.explanation or str(base_response) |
|
210 | 210 | if base_response.status_code == 404: |
|
211 | 211 | error_explanation += " Possibly you don't have permission to access this page." |
|
212 | 212 | c = AttributeDict() |
|
213 | 213 | c.error_message = base_response.status |
|
214 | 214 | c.error_explanation = error_explanation |
|
215 | 215 | c.visual = AttributeDict() |
|
216 | 216 | |
|
217 | 217 | c.visual.rhodecode_support_url = ( |
|
218 | 218 | request.registry.settings.get('rhodecode_support_url') or |
|
219 | 219 | request.route_url('rhodecode_support') |
|
220 | 220 | ) |
|
221 | 221 | c.redirect_time = 0 |
|
222 | 222 | c.rhodecode_name = rhodecode_title |
|
223 | 223 | if not c.rhodecode_name: |
|
224 | 224 | c.rhodecode_name = 'Rhodecode' |
|
225 | 225 | |
|
226 | 226 | c.causes = [] |
|
227 | 227 | if is_http_error(base_response): |
|
228 | 228 | c.causes.append('Server is overloaded.') |
|
229 | 229 | c.causes.append('Server database connection is lost.') |
|
230 | 230 | c.causes.append('Server encountered an unhandled error.') |
|
231 | 231 | |
|
232 | 232 | if hasattr(base_response, 'causes'): |
|
233 | 233 | c.causes = base_response.causes |
|
234 | 234 | |
|
235 | 235 | c.messages = helpers.flash.pop_messages(request=request) |
|
236 | 236 | exc_info = sys.exc_info() |
|
237 | 237 | c.exception_id = id(exc_info) |
|
238 | 238 | c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \ |
|
239 | 239 | or base_response.status_code > 499 |
|
240 | 240 | c.exception_id_url = request.route_url( |
|
241 | 241 | 'admin_settings_exception_tracker_show', exception_id=c.exception_id) |
|
242 | 242 | |
|
243 | 243 | debug_mode = rhodecode.ConfigGet().get_bool('debug') |
|
244 | 244 | if c.show_exception_id: |
|
245 | 245 | store_exception(c.exception_id, exc_info) |
|
246 | 246 | c.exception_debug = debug_mode |
|
247 | 247 | c.exception_config_ini = rhodecode.CONFIG.get('__file__') |
|
248 | 248 | |
|
249 | 249 | if debug_mode: |
|
250 | 250 | try: |
|
251 | 251 | from rich.traceback import install |
|
252 | 252 | install(show_locals=True) |
|
253 | 253 | log.debug('Installing rich tracebacks...') |
|
254 | 254 | except ImportError: |
|
255 | 255 | pass |
|
256 | 256 | |
|
257 | 257 | response = render_to_response( |
|
258 | 258 | '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request, |
|
259 | 259 | response=base_response) |
|
260 | 260 | |
|
261 | 261 | response.headers["X-RC-Exception-Id"] = str(c.exception_id) |
|
262 | 262 | |
|
263 | 263 | statsd = request.registry.statsd |
|
264 | 264 | if statsd and base_response.status_code > 499: |
|
265 | 265 | exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}" |
|
266 | 266 | statsd.incr('rhodecode_exception_total', |
|
267 | 267 | tags=["exc_source:web", |
|
268 | 268 | f"http_code:{base_response.status_code}", |
|
269 | 269 | f"type:{exc_type}"]) |
|
270 | 270 | |
|
271 | 271 | return response |
|
272 | 272 | |
|
273 | 273 | |
|
274 | 274 | def includeme_first(config): |
|
275 | 275 | # redirect automatic browser favicon.ico requests to correct place |
|
276 | 276 | def favicon_redirect(context, request): |
|
277 | 277 | return HTTPFound( |
|
278 | 278 | request.static_path('rhodecode:public/images/favicon.ico')) |
|
279 | 279 | |
|
280 | 280 | config.add_view(favicon_redirect, route_name='favicon') |
|
281 | 281 | config.add_route('favicon', '/favicon.ico') |
|
282 | 282 | |
|
283 | 283 | def robots_redirect(context, request): |
|
284 | 284 | return HTTPFound( |
|
285 | 285 | request.static_path('rhodecode:public/robots.txt')) |
|
286 | 286 | |
|
287 | 287 | config.add_view(robots_redirect, route_name='robots') |
|
288 | 288 | config.add_route('robots', '/robots.txt') |
|
289 | 289 | |
|
290 | 290 | config.add_static_view( |
|
291 | 291 | '_static/deform', 'deform:static') |
|
292 | 292 | config.add_static_view( |
|
293 | 293 | '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24) |
|
294 | 294 | |
|
295 | 295 | |
|
296 | 296 | ce_auth_resources = [ |
|
297 | 297 | 'rhodecode.authentication.plugins.auth_crowd', |
|
298 | 298 | 'rhodecode.authentication.plugins.auth_headers', |
|
299 | 299 | 'rhodecode.authentication.plugins.auth_jasig_cas', |
|
300 | 300 | 'rhodecode.authentication.plugins.auth_ldap', |
|
301 | 301 | 'rhodecode.authentication.plugins.auth_pam', |
|
302 | 302 | 'rhodecode.authentication.plugins.auth_rhodecode', |
|
303 | 303 | 'rhodecode.authentication.plugins.auth_token', |
|
304 | 304 | ] |
|
305 | 305 | |
|
306 | 306 | |
|
307 | 307 | def includeme(config, auth_resources=None): |
|
308 | 308 | from rhodecode.lib.celerylib.loader import configure_celery |
|
309 | 309 | log.debug('Initializing main includeme from %s', os.path.basename(__file__)) |
|
310 | 310 | settings = config.registry.settings |
|
311 | 311 | config.set_request_factory(Request) |
|
312 | 312 | |
|
313 | 313 | # plugin information |
|
314 | 314 | config.registry.rhodecode_plugins = collections.OrderedDict() |
|
315 | 315 | |
|
316 | 316 | config.add_directive( |
|
317 | 317 | 'register_rhodecode_plugin', register_rhodecode_plugin) |
|
318 | 318 | |
|
319 | 319 | config.add_directive('configure_celery', configure_celery) |
|
320 | 320 | |
|
321 | 321 | if settings.get('appenlight', False): |
|
322 | 322 | config.include('appenlight_client.ext.pyramid_tween') |
|
323 | 323 | |
|
324 | 324 | load_all = should_load_all() |
|
325 | 325 | |
|
326 | 326 | # Includes which are required. The application would fail without them. |
|
327 | 327 | config.include('pyramid_mako') |
|
328 | 328 | config.include('rhodecode.lib.rc_beaker') |
|
329 | 329 | config.include('rhodecode.lib.rc_cache') |
|
330 | 330 | config.include('rhodecode.lib.rc_cache.archive_cache') |
|
331 | 331 | |
|
332 | 332 | config.include('rhodecode.apps._base.navigation') |
|
333 | 333 | config.include('rhodecode.apps._base.subscribers') |
|
334 | 334 | config.include('rhodecode.tweens') |
|
335 | 335 | config.include('rhodecode.authentication') |
|
336 | 336 | |
|
337 | 337 | if load_all: |
|
338 | 338 | |
|
339 | 339 | # load CE authentication plugins |
|
340 | 340 | |
|
341 | 341 | if auth_resources: |
|
342 | 342 | ce_auth_resources.extend(auth_resources) |
|
343 | 343 | |
|
344 | 344 | for resource in ce_auth_resources: |
|
345 | 345 | config.include(resource) |
|
346 | 346 | |
|
347 | 347 | # Auto discover authentication plugins and include their configuration. |
|
348 | 348 | if asbool(settings.get('auth_plugin.import_legacy_plugins', 'true')): |
|
349 | 349 | from rhodecode.authentication import discover_legacy_plugins |
|
350 | 350 | discover_legacy_plugins(config) |
|
351 | 351 | |
|
352 | 352 | # apps |
|
353 | 353 | if load_all: |
|
354 | 354 | log.debug('Starting config.include() calls') |
|
355 | 355 | config.include('rhodecode.api.includeme') |
|
356 | 356 | config.include('rhodecode.apps._base.includeme') |
|
357 | 357 | config.include('rhodecode.apps._base.navigation.includeme') |
|
358 | 358 | config.include('rhodecode.apps._base.subscribers.includeme') |
|
359 | 359 | config.include('rhodecode.apps.hovercards.includeme') |
|
360 | 360 | config.include('rhodecode.apps.ops.includeme') |
|
361 | 361 | config.include('rhodecode.apps.channelstream.includeme') |
|
362 | 362 | config.include('rhodecode.apps.file_store.includeme') |
|
363 | 363 | config.include('rhodecode.apps.admin.includeme') |
|
364 | 364 | config.include('rhodecode.apps.login.includeme') |
|
365 | 365 | config.include('rhodecode.apps.home.includeme') |
|
366 | 366 | config.include('rhodecode.apps.journal.includeme') |
|
367 | 367 | |
|
368 | 368 | config.include('rhodecode.apps.repository.includeme') |
|
369 | 369 | config.include('rhodecode.apps.repo_group.includeme') |
|
370 | 370 | config.include('rhodecode.apps.user_group.includeme') |
|
371 | 371 | config.include('rhodecode.apps.search.includeme') |
|
372 | 372 | config.include('rhodecode.apps.user_profile.includeme') |
|
373 | 373 | config.include('rhodecode.apps.user_group_profile.includeme') |
|
374 | 374 | config.include('rhodecode.apps.my_account.includeme') |
|
375 | 375 | config.include('rhodecode.apps.gist.includeme') |
|
376 | 376 | |
|
377 | 377 | config.include('rhodecode.apps.svn_support.includeme') |
|
378 | 378 | config.include('rhodecode.apps.ssh_support.includeme') |
|
379 | 379 | config.include('rhodecode.apps.debug_style') |
|
380 | 380 | |
|
381 | 381 | if load_all: |
|
382 | 382 | config.include('rhodecode.integrations.includeme') |
|
383 | 383 | config.include('rhodecode.integrations.routes.includeme') |
|
384 | 384 | |
|
385 | 385 | config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True) |
|
386 | 386 | settings['default_locale_name'] = settings.get('lang', 'en') |
|
387 | 387 | config.add_translation_dirs('rhodecode:i18n/') |
|
388 | 388 | |
|
389 | 389 | # Add subscribers. |
|
390 | 390 | if load_all: |
|
391 | 391 | log.debug('Adding subscribers...') |
|
392 | 392 | config.add_subscriber(scan_repositories_if_enabled, |
|
393 | 393 | pyramid.events.ApplicationCreated) |
|
394 | 394 | config.add_subscriber(write_metadata_if_needed, |
|
395 | 395 | pyramid.events.ApplicationCreated) |
|
396 | 396 | config.add_subscriber(write_usage_data, |
|
397 | 397 | pyramid.events.ApplicationCreated) |
|
398 | 398 | config.add_subscriber(write_js_routes_if_enabled, |
|
399 | 399 | pyramid.events.ApplicationCreated) |
|
400 | 400 | |
|
401 | 401 | |
|
402 | 402 | # Set the default renderer for HTML templates to mako. |
|
403 | 403 | config.add_mako_renderer('.html') |
|
404 | 404 | |
|
405 | 405 | config.add_renderer( |
|
406 | 406 | name='json_ext', |
|
407 | 407 | factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json') |
|
408 | 408 | |
|
409 | 409 | config.add_renderer( |
|
410 | 410 | name='string_html', |
|
411 | 411 | factory='rhodecode.lib.string_renderer.html') |
|
412 | 412 | |
|
413 | 413 | # include RhodeCode plugins |
|
414 | 414 | includes = aslist(settings.get('rhodecode.includes', [])) |
|
415 | 415 | log.debug('processing rhodecode.includes data...') |
|
416 | 416 | for inc in includes: |
|
417 | 417 | config.include(inc) |
|
418 | 418 | |
|
419 | 419 | # custom not found view, if our pyramid app doesn't know how to handle |
|
420 | 420 | # the request, pass it to the potential VCS handling app |
|
421 | 421 | config.add_notfound_view(not_found_view) |
|
422 | 422 | if not settings.get('debugtoolbar.enabled', False): |
|
423 | 423 | # with debugtoolbar disabled, handle all exceptions via the error_handlers |
|
424 | 424 | config.add_view(error_handler, context=Exception) |
|
425 | 425 | |
|
426 | 426 | # all errors including 403/404/50X |
|
427 | 427 | config.add_view(error_handler, context=HTTPError) |
|
428 | 428 | |
|
429 | 429 | |
|
430 | 430 | def wrap_app_in_wsgi_middlewares(pyramid_app, config): |
|
431 | 431 | """ |
|
432 | 432 | Apply outer WSGI middlewares around the application. |
|
433 | 433 | """ |
|
434 | 434 | registry = config.registry |
|
435 | 435 | settings = registry.settings |
|
436 | 436 | |
|
437 | 437 | # enable https redirects based on HTTP_X_URL_SCHEME set by proxy |
|
438 | 438 | pyramid_app = HttpsFixup(pyramid_app, settings) |
|
439 | 439 | |
|
440 | 440 | pyramid_app, _ae_client = wrap_in_appenlight_if_enabled( |
|
441 | 441 | pyramid_app, settings) |
|
442 | 442 | registry.ae_client = _ae_client |
|
443 | 443 | |
|
444 | 444 | if settings['gzip_responses']: |
|
445 | 445 | pyramid_app = make_gzip_middleware( |
|
446 | 446 | pyramid_app, settings, compress_level=1) |
|
447 | 447 | |
|
448 | 448 | # this should be the outer most middleware in the wsgi stack since |
|
449 | 449 | # middleware like Routes make database calls |
|
450 | 450 | def pyramid_app_with_cleanup(environ, start_response): |
|
451 | 451 | start = time.time() |
|
452 | 452 | try: |
|
453 | 453 | return pyramid_app(environ, start_response) |
|
454 | 454 | finally: |
|
455 | 455 | # Dispose current database session and rollback uncommitted |
|
456 | 456 | # transactions. |
|
457 | 457 | meta.Session.remove() |
|
458 | 458 | |
|
459 | 459 | # In single-threaded server mode, on a non-sqlite db we should have |
|
460 | 460 | # '0 Current Checked out connections' at the end of a request, |
|
461 | 461 | # if not, then something, somewhere is leaving a connection open |
|
462 | 462 | pool = meta.get_engine().pool |
|
463 | 463 | log.debug('sa pool status: %s', pool.status()) |
|
464 | 464 | total = time.time() - start |
|
465 | 465 | log.debug('Request processing finalized: %.4fs', total) |
|
466 | 466 | |
|
467 | 467 | return pyramid_app_with_cleanup |
|
468 | 468 | |
|
469 | 469 | |
|
470 | 470 | def sanitize_settings_and_apply_defaults(global_config, settings): |
|
471 | 471 | """ |
|
472 | 472 | Applies settings defaults and does all type conversion. |
|
473 | 473 | |
|
474 | 474 | The goal is to move all settings parsing and preparation into this place, so that |

475 | 475 | we have only one place that deals with this part. The remaining parts |
|
476 | 476 | of the application would start to rely fully on well prepared settings. |
|
477 | 477 | |
|
478 | 478 | This piece would later be split up per topic to avoid a big fat monster |
|
479 | 479 | function. |
|
480 | 480 | """ |
|
481 | 481 | jn = os.path.join |
|
482 | 482 | |
|
483 | 483 | global_settings_maker = SettingsMaker(global_config) |
|
484 | 484 | global_settings_maker.make_setting('debug', default=False, parser='bool') |
|
485 | 485 | debug_enabled = asbool(global_config.get('debug')) |
|
486 | 486 | |
|
487 | 487 | settings_maker = SettingsMaker(settings) |
|
488 | 488 | |
|
489 | 489 | settings_maker.make_setting( |
|
490 | 490 | 'logging.autoconfigure', |
|
491 | 491 | default=False, |
|
492 | 492 | parser='bool') |
|
493 | 493 | |
|
494 | 494 | logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini') |
|
495 | 495 | settings_maker.enable_logging(logging_conf, level='DEBUG' if debug_enabled else 'INFO') |
|
496 | 496 | |
|
497 | 497 | # Default includes, possible to change as a user |
|
498 | 498 | pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline') |
|
499 | 499 | log.debug( |
|
500 | 500 | "Using the following pyramid.includes: %s", |
|
501 | 501 | pyramid_includes) |
|
502 | 502 | |
|
503 | 503 | settings_maker.make_setting('rhodecode.edition', 'Community Edition') |
|
504 | 504 | settings_maker.make_setting('rhodecode.edition_id', 'CE') |
|
505 | 505 | |
|
506 | 506 | if 'mako.default_filters' not in settings: |
|
507 | 507 | # set custom default filters if we don't have it defined |
|
508 | 508 | settings['mako.imports'] = 'from rhodecode.lib.base import h_filter' |
|
509 | 509 | settings['mako.default_filters'] = 'h_filter' |
|
510 | 510 | |
|
511 | 511 | if 'mako.directories' not in settings: |
|
512 | 512 | mako_directories = settings.setdefault('mako.directories', [ |
|
513 | 513 | # Base templates of the original application |
|
514 | 514 | 'rhodecode:templates', |
|
515 | 515 | ]) |
|
516 | 516 | log.debug( |
|
517 | 517 | "Using the following Mako template directories: %s", |
|
518 | 518 | mako_directories) |
|
519 | 519 | |
|
520 | 520 | # NOTE(marcink): fix redis requirement for schema of connection since 3.X |
|
521 | 521 | if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis': |
|
522 | 522 | raw_url = settings['beaker.session.url'] |
|
523 | 523 | if not raw_url.startswith(('redis://', 'rediss://', 'unix://')): |
|
524 | 524 | settings['beaker.session.url'] = 'redis://' + raw_url |
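
So a legacy bare `host:port` value gains an explicit scheme. For example, with hypothetical values:

    settings = {'beaker.session.type': 'ext:redis',
                'beaker.session.url': 'localhost:6379'}
    raw_url = settings['beaker.session.url']
    if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
        settings['beaker.session.url'] = 'redis://' + raw_url
    assert settings['beaker.session.url'] == 'redis://localhost:6379'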
|
525 | 525 | |
|
526 | 526 | settings_maker.make_setting('__file__', global_config.get('__file__')) |
|
527 | 527 | |
|
528 | 528 | # TODO: johbo: Re-think this, usually the call to config.include |
|
529 | 529 | # should allow to pass in a prefix. |
|
530 | 530 | settings_maker.make_setting('rhodecode.api.url', '/_admin/api') |
|
531 | 531 | |
|
532 | 532 | # Sanitize generic settings. |
|
533 | 533 | settings_maker.make_setting('default_encoding', 'UTF-8', parser='list') |
|
534 | 534 | settings_maker.make_setting('is_test', False, parser='bool') |
|
535 | 535 | settings_maker.make_setting('gzip_responses', False, parser='bool') |
|
536 | 536 | |
|
537 | 537 | # statsd |
|
538 | 538 | settings_maker.make_setting('statsd.enabled', False, parser='bool') |
|
539 | 539 | settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string') |
|
540 | 540 | settings_maker.make_setting('statsd.statsd_port', 9125, parser='int') |
|
541 | 541 | settings_maker.make_setting('statsd.statsd_prefix', '') |
|
542 | 542 | settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool') |
|
543 | 543 | |
|
544 | 544 | settings_maker.make_setting('vcs.svn.compatible_version', '') |
|
545 | settings_maker.make_setting('vcs.svn.proxy.enabled', 'true', parser='bool') | |
|
546 | settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090') | |
|
545 | 547 | settings_maker.make_setting('vcs.hooks.protocol', 'http') |
|
546 | 548 | settings_maker.make_setting('vcs.hooks.host', '*') |
|
547 | 549 | settings_maker.make_setting('vcs.scm_app_implementation', 'http') |
|
548 | 550 | settings_maker.make_setting('vcs.server', '') |
|
549 | 551 | settings_maker.make_setting('vcs.server.protocol', 'http') |
|
550 | 552 | settings_maker.make_setting('vcs.server.enable', 'true', parser='bool') |
|
551 | 553 | settings_maker.make_setting('startup.import_repos', 'false', parser='bool') |
|
552 | 554 | settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool') |
|
553 | 555 | settings_maker.make_setting('vcs.start_server', 'false', parser='bool') |
|
554 | 556 | settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list') |
|
555 | 557 | settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int') |
|
556 | 558 | |
|
557 | 559 | settings_maker.make_setting('vcs.methods.cache', True, parser='bool') |
|
558 | 560 | |
|
559 | 561 | # Support legacy values of vcs.scm_app_implementation. Legacy |
|
560 | 562 | # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or |
|
561 | 563 | # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'. |
|
562 | 564 | scm_app_impl = settings['vcs.scm_app_implementation'] |
|
563 | 565 | if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']: |
|
564 | 566 | settings['vcs.scm_app_implementation'] = 'http' |
|
565 | 567 | |
|
566 | 568 | settings_maker.make_setting('appenlight', False, parser='bool') |
|
567 | 569 | |
|
568 | 570 | temp_store = tempfile.gettempdir() |
|
569 | 571 | tmp_cache_dir = jn(temp_store, 'rc_cache') |
|
570 | 572 | |
|
571 | 573 | # save the default cache dir, and use it for all backends later. |
|
572 | 574 | default_cache_dir = settings_maker.make_setting( |
|
573 | 575 | 'cache_dir', |
|
574 | 576 | default=tmp_cache_dir, default_when_empty=True, |
|
575 | 577 | parser='dir:ensured') |
|
576 | 578 | |
|
577 | 579 | # exception store cache |
|
578 | 580 | settings_maker.make_setting( |
|
579 | 581 | 'exception_tracker.store_path', |
|
580 | 582 | default=jn(default_cache_dir, 'exc_store'), default_when_empty=True, |
|
581 | 583 | parser='dir:ensured' |
|
582 | 584 | ) |
|
583 | 585 | |
|
584 | 586 | settings_maker.make_setting( |
|
585 | 587 | 'celerybeat-schedule.path', |
|
586 | 588 | default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True, |
|
587 | 589 | parser='file:ensured' |
|
588 | 590 | ) |
|
589 | 591 | |
|
590 | 592 | settings_maker.make_setting('exception_tracker.send_email', False, parser='bool') |
|
591 | 593 | settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True) |
|
592 | 594 | |
|
593 | 595 | # sessions, ensure file since no-value is memory |
|
594 | 596 | settings_maker.make_setting('beaker.session.type', 'file') |
|
595 | 597 | settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data')) |
|
596 | 598 | |
|
597 | 599 | # cache_general |
|
598 | 600 | settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace') |
|
599 | 601 | settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int') |
|
600 | 602 | settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db')) |
|
601 | 603 | |
|
602 | 604 | # cache_perms |
|
603 | 605 | settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace') |
|
604 | 606 | settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int') |
|
605 | 607 | settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db')) |
|
606 | 608 | |
|
607 | 609 | # cache_repo |
|
608 | 610 | settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace') |
|
609 | 611 | settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int') |
|
610 | 612 | settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db')) |
|
611 | 613 | |
|
612 | 614 | # cache_license |
|
613 | 615 | settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace') |
|
614 | 616 | settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int') |
|
615 | 617 | settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db')) |
|
616 | 618 | |
|
617 | 619 | # cache_repo_longterm memory, 96H |
|
618 | 620 | settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru') |
|
619 | 621 | settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int') |
|
620 | 622 | settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int') |
|
621 | 623 | |
|
622 | 624 | # sql_cache_short |
|
623 | 625 | settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru') |
|
624 | 626 | settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int') |
|
625 | 627 | settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int') |
|
626 | 628 | |
|
627 | 629 | # archive_cache |
|
628 | 630 | settings_maker.make_setting('archive_cache.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,) |
|
629 | 631 | settings_maker.make_setting('archive_cache.cache_size_gb', 10, parser='float') |
|
630 | 632 | settings_maker.make_setting('archive_cache.cache_shards', 10, parser='int') |
|
631 | 633 | |
|
632 | 634 | settings_maker.env_expand() |
|
633 | 635 | |
|
634 | 636 | # configure instance id |
|
635 | 637 | config_utils.set_instance_id(settings) |
|
636 | 638 | |
|
637 | 639 | return settings |
@@ -1,2199 +1,2198 b'' | |||
|
1 | 1 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | """ |
|
20 | 20 | Helper functions |
|
21 | 21 | |
|
22 | 22 | Consists of functions typically used within templates, but also |

23 | 23 | available to Controllers. This module is available to both as 'h'. |
|
24 | 24 | """ |
|
25 | 25 | import base64 |
|
26 | 26 | import collections |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import random |
|
30 | 30 | import hashlib |
|
31 | 31 | import io |
|
32 | 32 | import textwrap |
|
33 | 33 | import urllib.request |
|
34 | 34 | import urllib.parse |
|
35 | 35 | import urllib.error |
|
36 | 36 | import math |
|
37 | 37 | import logging |
|
38 | 38 | import re |
|
39 | 39 | import time |
|
40 | 40 | import string |
|
41 | 41 | import regex |
|
42 | 42 | from collections import OrderedDict |
|
43 | 43 | |
|
44 | 44 | import pygments |
|
45 | 45 | import itertools |
|
46 | 46 | import fnmatch |
|
47 | 47 | |
|
48 | 48 | from datetime import datetime |
|
49 | 49 | from functools import partial |
|
50 | 50 | from pygments.formatters.html import HtmlFormatter |
|
51 | 51 | from pygments.lexers import ( |
|
52 | 52 | get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype) |
|
53 | 53 | |
|
54 | 54 | from pyramid.threadlocal import get_current_request |
|
55 | 55 | from tempita import looper |
|
56 | 56 | from webhelpers2.html import literal, HTML, escape |
|
57 | 57 | from webhelpers2.html._autolink import _auto_link_urls |
|
58 | 58 | from webhelpers2.html.tools import ( |
|
59 | 59 | button_to, highlight, js_obfuscate, strip_links, strip_tags) |
|
60 | 60 | |
|
61 | 61 | from webhelpers2.text import ( |
|
62 | 62 | chop_at, collapse, convert_accented_entities, |
|
63 | 63 | convert_misc_entities, lchop, plural, rchop, remove_formatting, |
|
64 | 64 | replace_whitespace, urlify, truncate, wrap_paragraphs) |
|
65 | 65 | from webhelpers2.date import time_ago_in_words |
|
66 | 66 | |
|
67 | 67 | from webhelpers2.html.tags import ( |
|
68 | 68 | _input, NotGiven, _make_safe_id_component as safeid, |
|
69 | 69 | form as insecure_form, |
|
70 | 70 | auto_discovery_link, checkbox, end_form, file, |
|
71 | 71 | hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol, |
|
72 | 72 | stylesheet_link, submit, text, password, textarea, |
|
73 | 73 | ul, radio, Options) |
|
74 | 74 | |
|
75 | 75 | from webhelpers2.number import format_byte_size |
|
76 | 76 | # python3.11 backport fixes for webhelpers2 |
|
77 | from rhodecode import ConfigGet | |
|
77 | 78 | from rhodecode.lib._vendor.webhelpers_backports import raw_select |
|
78 | 79 | |
|
79 | 80 | from rhodecode.lib.action_parser import action_parser |
|
80 | 81 | from rhodecode.lib.html_filters import sanitize_html |
|
81 | 82 | from rhodecode.lib.pagination import Page, RepoPage, SqlPage |
|
82 | 83 | from rhodecode.lib import ext_json |
|
83 | 84 | from rhodecode.lib.ext_json import json |
|
84 | 85 | from rhodecode.lib.str_utils import safe_bytes, convert_special_chars, base64_to_str |
|
85 | 86 | from rhodecode.lib.utils import repo_name_slug, get_custom_lexer |
|
86 | 87 | from rhodecode.lib.str_utils import safe_str |
|
87 | 88 | from rhodecode.lib.utils2 import ( |
|
88 | 89 | str2bool, |
|
89 | 90 | get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, |
|
90 | 91 | AttributeDict, safe_int, md5, md5_safe, get_host_info) |
|
91 | 92 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links |
|
92 | 93 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
93 | 94 | from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit |
|
94 | 95 | from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS |
|
95 | 96 | from rhodecode.lib.index.search_utils import get_matching_line_offsets |
|
96 | 97 | from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT |
|
97 | 98 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
98 | 99 | from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore |
|
99 | 100 | from rhodecode.model.repo_group import RepoGroupModel |
|
100 | 101 | from rhodecode.model.settings import IssueTrackerSettingsModel |
|
101 | 102 | |
|
102 | 103 | |
|
103 | 104 | log = logging.getLogger(__name__) |
|
104 | 105 | |
|
105 | 106 | |
|
106 | 107 | DEFAULT_USER = User.DEFAULT_USER |
|
107 | 108 | DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL |
|
108 | 109 | |
|
109 | 110 | |
|
110 | 111 | def asset(path, ver=None, **kwargs): |
|
111 | 112 | """ |
|
112 | 113 | Helper to generate a static asset file path for rhodecode assets |
|
113 | 114 | |
|
114 | 115 | eg. h.asset('images/image.png', ver='3923') |
|
115 | 116 | |
|
116 | 117 | :param path: path of asset |
|
117 | 118 | :param ver: optional version query param to append as ?ver= |
|
118 | 119 | """ |
|
119 | 120 | request = get_current_request() |
|
120 | 121 | query = {} |
|
121 | 122 | query.update(kwargs) |
|
122 | 123 | if ver: |
|
123 | 124 | query = {'ver': ver} |
|
124 | 125 | return request.static_path( |
|
125 | 126 | f'rhodecode:public/{path}', _query=query) |
|
126 | 127 | |
|
127 | 128 | |
|
128 | 129 | default_html_escape_table = { |
|
129 | 130 | ord('&'): '&', |
|
130 | 131 | ord('<'): '<', |
|
131 | 132 | ord('>'): '>', |
|
132 | 133 | ord('"'): '"', |
|
133 | 134 | ord("'"): ''', |
|
134 | 135 | } |
|
135 | 136 | |
|
136 | 137 | |
|
137 | 138 | def html_escape(text, html_escape_table=default_html_escape_table): |
|
138 | 139 | """Produce entities within text.""" |
|
139 | 140 | return text.translate(html_escape_table) |
|
140 | 141 | |
|
141 | 142 | |
|
142 | 143 | def str_json(*args, **kwargs): |
|
143 | 144 | return ext_json.str_json(*args, **kwargs) |
|
144 | 145 | |
|
145 | 146 | |
|
146 | 147 | def formatted_str_json(*args, **kwargs): |
|
147 | 148 | return ext_json.formatted_str_json(*args, **kwargs) |
|
148 | 149 | |
|
149 | 150 | |
|
150 | 151 | def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None): |
|
151 | 152 | """ |
|
152 | 153 | Truncate string ``s`` at the first occurrence of ``sub``. |
|
153 | 154 | |
|
154 | 155 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. |
|
155 | 156 | """ |
|
156 | 157 | suffix_if_chopped = suffix_if_chopped or '' |
|
157 | 158 | pos = s.find(sub) |
|
158 | 159 | if pos == -1: |
|
159 | 160 | return s |
|
160 | 161 | |
|
161 | 162 | if inclusive: |
|
162 | 163 | pos += len(sub) |
|
163 | 164 | |
|
164 | 165 | chopped = s[:pos] |
|
165 | 166 | left = s[pos:].strip() |
|
166 | 167 | |
|
167 | 168 | if left and suffix_if_chopped: |
|
168 | 169 | chopped += suffix_if_chopped |
|
169 | 170 | |
|
170 | 171 | return chopped |
|
171 | 172 | |
|
172 | 173 | |
|
173 | 174 | def shorter(text, size=20, prefix=False): |
|
174 | 175 | postfix = '...' |
|
175 | 176 | if len(text) > size: |
|
176 | 177 | if prefix: |
|
177 | 178 | # shorten in front |
|
178 | 179 | return postfix + text[-(size - len(postfix)):] |
|
179 | 180 | else: |
|
180 | 181 | return text[:size - len(postfix)] + postfix |
|
181 | 182 | return text |
|
182 | 183 | |
|
183 | 184 | |
|
184 | 185 | def reset(name, value=None, id=NotGiven, type="reset", **attrs): |
|
185 | 186 | """ |
|
186 | 187 | Reset button |
|
187 | 188 | """ |
|
188 | 189 | return _input(type, name, value, id, attrs) |
|
189 | 190 | |
|
190 | 191 | |
|
191 | 192 | def select(name, selected_values, options, id=NotGiven, **attrs): |
|
192 | 193 | |
|
193 | 194 | if isinstance(options, (list, tuple)): |
|
194 | 195 | options_iter = options |
|
195 | 196 | # Handle old value,label lists ... where value also can be value,label lists |
|
196 | 197 | options = Options() |
|
197 | 198 | for opt in options_iter: |
|
198 | 199 | if isinstance(opt, tuple) and len(opt) == 2: |
|
199 | 200 | value, label = opt |
|
200 | 201 | elif isinstance(opt, str): |
|
201 | 202 | value = label = opt |
|
202 | 203 | else: |
|
203 | 204 | raise ValueError('invalid select option type %r' % type(opt)) |
|
204 | 205 | |
|
205 | 206 | if isinstance(value, (list, tuple)): |
|
206 | 207 | option_group = options.add_optgroup(label) |
|
207 | 208 | for opt2 in value: |
|
208 | 209 | if isinstance(opt2, tuple) and len(opt2) == 2: |
|
209 | 210 | group_value, group_label = opt2 |
|
210 | 211 | elif isinstance(opt2, str): |
|
211 | 212 | group_value = group_label = opt2 |
|
212 | 213 | else: |
|
213 | 214 | raise ValueError('invalid select option type %r' % type(opt2)) |
|
214 | 215 | |
|
215 | 216 | option_group.add_option(group_label, group_value) |
|
216 | 217 | else: |
|
217 | 218 | options.add_option(label, value) |
|
218 | 219 | |
|
219 | 220 | return raw_select(name, selected_values, options, id=id, **attrs) |
|
220 | 221 | |
|
221 | 222 | |
|
222 | 223 | def branding(name, length=40): |
|
223 | 224 | return truncate(name, length, indicator="") |
|
224 | 225 | |
|
225 | 226 | |
|
226 | 227 | def FID(raw_id, path): |
|
227 | 228 | """ |
|
228 | 229 | Creates a unique ID for filenode based on it's hash of path and commit |
|
229 | 230 | it's safe to use in urls |
|
230 | 231 | |
|
231 | 232 | :param raw_id: |
|
232 | 233 | :param path: |
|
233 | 234 | """ |
|
234 | 235 | |
|
235 | 236 | return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12]) |
|
236 | 237 | |
|
237 | 238 | |
|
238 | 239 | class _GetError(object): |
|
239 | 240 | """Get error from form_errors, and represent it as span wrapped error |
|
240 | 241 | message |
|
241 | 242 | |
|
242 | 243 | :param field_name: field to fetch errors for |
|
243 | 244 | :param form_errors: form errors dict |
|
244 | 245 | """ |
|
245 | 246 | |
|
246 | 247 | def __call__(self, field_name, form_errors): |
|
247 | 248 | tmpl = """<span class="error_msg">%s</span>""" |
|
248 | 249 | if form_errors and field_name in form_errors: |
|
249 | 250 | return literal(tmpl % form_errors.get(field_name)) |
|
250 | 251 | |
|
251 | 252 | |
|
252 | 253 | get_error = _GetError() |
|
253 | 254 | |
|
254 | 255 | |
|
255 | 256 | class _ToolTip(object): |
|
256 | 257 | |
|
257 | 258 | def __call__(self, tooltip_title, trim_at=50): |
|
258 | 259 | """ |
|
259 | 260 | Special function just to wrap our text into nice formatted |
|
260 | 261 | autowrapped text |
|
261 | 262 | |
|
262 | 263 | :param tooltip_title: |
|
263 | 264 | """ |
|
264 | 265 | tooltip_title = escape(tooltip_title) |
|
265 | 266 | tooltip_title = tooltip_title.replace('<', '<').replace('>', '>') |
|
266 | 267 | return tooltip_title |
|
267 | 268 | |
|
268 | 269 | |
|
269 | 270 | tooltip = _ToolTip() |
|
270 | 271 | |
|
271 | 272 | files_icon = '<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>' |
|
272 | 273 | |
|
273 | 274 | |
|
274 | 275 | def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None, |
|
275 | 276 | limit_items=False, linkify_last_item=False, hide_last_item=False, |
|
276 | 277 | copy_path_icon=True): |
|
277 | 278 | |
|
278 | 279 | if at_ref: |
|
279 | 280 | route_qry = {'at': at_ref} |
|
280 | 281 | default_landing_ref = at_ref or landing_ref_name or commit_id |
|
281 | 282 | else: |
|
282 | 283 | route_qry = None |
|
283 | 284 | default_landing_ref = commit_id |
|
284 | 285 | |
|
285 | 286 | # first segment is a `HOME` link to repo files root location |
|
286 | 287 | root_name = literal('<i class="icon-home"></i>') |
|
287 | 288 | |
|
288 | 289 | url_segments = [ |
|
289 | 290 | link_to( |
|
290 | 291 | root_name, |
|
291 | 292 | repo_files_by_ref_url( |
|
292 | 293 | repo_name, |
|
293 | 294 | repo_type, |
|
294 | 295 | f_path=None, # None here is a special case for SVN repos, |
|
295 | 296 | # that won't prefix with a ref |
|
296 | 297 | ref_name=default_landing_ref, |
|
297 | 298 | commit_id=commit_id, |
|
298 | 299 | query=route_qry |
|
299 | 300 | ) |
|
300 | 301 | )] |
|
301 | 302 | |
|
302 | 303 | path_segments = file_path.split('/') |
|
303 | 304 | last_cnt = len(path_segments) - 1 |
|
304 | 305 | for cnt, segment in enumerate(path_segments): |
|
305 | 306 | if not segment: |
|
306 | 307 | continue |
|
307 | 308 | segment_html = escape(segment) |
|
308 | 309 | |
|
309 | 310 | last_item = cnt == last_cnt |
|
310 | 311 | |
|
311 | 312 | if last_item and hide_last_item: |
|
312 | 313 | # iterate over and hide last element |
|
313 | 314 | continue |
|
314 | 315 | |
|
315 | 316 | if last_item and linkify_last_item is False: |
|
316 | 317 | # plain version |
|
317 | 318 | url_segments.append(segment_html) |
|
318 | 319 | else: |
|
319 | 320 | url_segments.append( |
|
320 | 321 | link_to( |
|
321 | 322 | segment_html, |
|
322 | 323 | repo_files_by_ref_url( |
|
323 | 324 | repo_name, |
|
324 | 325 | repo_type, |
|
325 | 326 | f_path='/'.join(path_segments[:cnt + 1]), |
|
326 | 327 | ref_name=default_landing_ref, |
|
327 | 328 | commit_id=commit_id, |
|
328 | 329 | query=route_qry |
|
329 | 330 | ), |
|
330 | 331 | )) |
|
331 | 332 | |
|
332 | 333 | limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:] |
|
333 | 334 | if limit_items and len(limited_url_segments) < len(url_segments): |
|
334 | 335 | url_segments = limited_url_segments |
|
335 | 336 | |
|
336 | 337 | full_path = file_path |
|
337 | 338 | if copy_path_icon: |
|
338 | 339 | icon = files_icon.format(escape(full_path)) |
|
339 | 340 | else: |
|
340 | 341 | icon = '' |
|
341 | 342 | |
|
342 | 343 | if file_path == '': |
|
343 | 344 | return root_name |
|
344 | 345 | else: |
|
345 | 346 | return literal(' / '.join(url_segments) + icon) |
|
346 | 347 | |
|
347 | 348 | |
|
348 | 349 | def files_url_data(request): |
|
349 | 350 | matchdict = request.matchdict |
|
350 | 351 | |
|
351 | 352 | if 'f_path' not in matchdict: |
|
352 | 353 | matchdict['f_path'] = '' |
|
353 | 354 | else: |
|
354 | 355 | matchdict['f_path'] = urllib.parse.quote(safe_str(matchdict['f_path'])) |
|
355 | 356 | if 'commit_id' not in matchdict: |
|
356 | 357 | matchdict['commit_id'] = 'tip' |
|
357 | 358 | |
|
358 | 359 | return ext_json.str_json(matchdict) |
|
359 | 360 | |
|
360 | 361 | |
|
361 | 362 | def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ): |
|
362 | 363 | _is_svn = is_svn(db_repo_type) |
|
363 | 364 | final_f_path = f_path |
|
364 | 365 | |
|
365 | 366 | if _is_svn: |
|
366 | 367 | """ |
|
367 | 368 | For SVN the ref_name cannot be used as a commit_id; it needs to be prefixed

368 | 369 | with the actual commit_id followed by the ref_name. This should be done only

369 | 370 | in case this is an initial landing URL, without additional paths.
|
370 | 371 | |
|
371 | 372 | like: /1000/tags/1.0.0/?at=tags/1.0.0 |
|
372 | 373 | """ |
|
373 | 374 | |
|
374 | 375 | if ref_name and ref_name != 'tip': |
|
375 | 376 | # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it |
|
376 | 377 | # for SVN we only do this magic prefix if it's root, e.g. the landing revision
|
377 | 378 | # of files link. If we are in the tree we don't need this since we traverse the url |
|
378 | 379 | # that has everything stored |
|
379 | 380 | if f_path in ['', '/']: |
|
380 | 381 | final_f_path = '/'.join([ref_name, f_path]) |
|
381 | 382 | |
|
382 | 383 | # SVN always needs a commit_id explicitly, without a named REF |
|
383 | 384 | default_commit_id = commit_id |
|
384 | 385 | else: |
|
385 | 386 | """ |
|
386 | 387 | For git and mercurial we construct a new URL using the names instead of commit_id |
|
387 | 388 | like: /master/some_path?at=master |
|
388 | 389 | """ |
|
389 | 390 | # We currently do not support branches with slashes |
|
390 | 391 | if '/' in ref_name: |
|
391 | 392 | default_commit_id = commit_id |
|
392 | 393 | else: |
|
393 | 394 | default_commit_id = ref_name |
|
394 | 395 | |
|
395 | 396 | # sometimes we pass f_path as None, to indicate an explicit no-prefix;

396 | 397 | # we translate it to an empty string so we never render None
|
397 | 398 | final_f_path = final_f_path or '' |
|
398 | 399 | |
|
399 | 400 | files_url = route_path( |
|
400 | 401 | 'repo_files', |
|
401 | 402 | repo_name=db_repo_name, |
|
402 | 403 | commit_id=default_commit_id, |
|
403 | 404 | f_path=final_f_path, |
|
404 | 405 | _query=query |
|
405 | 406 | ) |
|
406 | 407 | return files_url |
|
407 | 408 | |
|
408 | 409 | |
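# Illustration of the ref rules above (schematic, not executed; paths depend
# on the `repo_files` route):
#
#   repo_files_by_ref_url('repo', 'svn', f_path='', ref_name='tags/1.0.0',
#                         commit_id='1000', query={'at': 'tags/1.0.0'})
#   # -> /repo/files/1000/tags/1.0.0/?at=tags/1.0.0   (svn: commit + ref path)
#
#   repo_files_by_ref_url('repo', 'git', f_path='src', ref_name='master',
#                         commit_id='abc123', query={'at': 'master'})
#   # -> /repo/files/master/src?at=master             (git/hg: ref as commit)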
|
409 | 410 | def code_highlight(code, lexer, formatter, use_hl_filter=False): |
|
410 | 411 | """ |
|
411 | 412 | Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``. |
|
412 | 413 | |
|
413 | 414 | The result is returned as a string. When ``use_hl_filter`` is set, the

414 | 415 | ElasticSearch highlight filter is added to the lexer so that search

415 | 416 | matches get marked up in the output.
|
416 | 417 | """ |
|
417 | 418 | if use_hl_filter: |
|
418 | 419 | # add HL filter |
|
419 | 420 | from rhodecode.lib.index import search_utils |
|
420 | 421 | lexer.add_filter(search_utils.ElasticSearchHLFilter()) |
|
421 | 422 | return pygments.format(pygments.lex(code, lexer), formatter) |
|
422 | 423 | |
|
423 | 424 | |
|
424 | 425 | class CodeHtmlFormatter(HtmlFormatter): |
|
425 | 426 | """ |
|
426 | 427 | Custom HTML formatter for source code; wraps each line in a div with an #L<n> anchor
|
427 | 428 | """ |
|
428 | 429 | |
|
429 | 430 | def wrap(self, source): |
|
430 | 431 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) |
|
431 | 432 | |
|
432 | 433 | def _wrap_code(self, source): |
|
433 | 434 | for cnt, it in enumerate(source): |
|
434 | 435 | i, t = it |
|
435 | 436 | t = f'<div id="L{cnt+1}">{t}</div>' |
|
436 | 437 | yield i, t |
|
437 | 438 | |
|
438 | 439 | def _wrap_tablelinenos(self, inner): |
|
439 | 440 | dummyoutfile = io.StringIO() |
|
440 | 441 | lncount = 0 |
|
441 | 442 | for t, line in inner: |
|
442 | 443 | if t: |
|
443 | 444 | lncount += 1 |
|
444 | 445 | dummyoutfile.write(line) |
|
445 | 446 | |
|
446 | 447 | fl = self.linenostart |
|
447 | 448 | mw = len(str(lncount + fl - 1)) |
|
448 | 449 | sp = self.linenospecial |
|
449 | 450 | st = self.linenostep |
|
450 | 451 | la = self.lineanchors |
|
451 | 452 | aln = self.anchorlinenos |
|
452 | 453 | nocls = self.noclasses |
|
453 | 454 | if sp: |
|
454 | 455 | lines = [] |
|
455 | 456 | |
|
456 | 457 | for i in range(fl, fl + lncount): |
|
457 | 458 | if i % st == 0: |
|
458 | 459 | if i % sp == 0: |
|
459 | 460 | if aln: |
|
460 | 461 | lines.append('<a href="#%s%d" class="special">%*d</a>' % |
|
461 | 462 | (la, i, mw, i)) |
|
462 | 463 | else: |
|
463 | 464 | lines.append('<span class="special">%*d</span>' % (mw, i)) |
|
464 | 465 | else: |
|
465 | 466 | if aln: |
|
466 | 467 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
467 | 468 | else: |
|
468 | 469 | lines.append('%*d' % (mw, i)) |
|
469 | 470 | else: |
|
470 | 471 | lines.append('') |
|
471 | 472 | ls = '\n'.join(lines) |
|
472 | 473 | else: |
|
473 | 474 | lines = [] |
|
474 | 475 | for i in range(fl, fl + lncount): |
|
475 | 476 | if i % st == 0: |
|
476 | 477 | if aln: |
|
477 | 478 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
478 | 479 | else: |
|
479 | 480 | lines.append('%*d' % (mw, i)) |
|
480 | 481 | else: |
|
481 | 482 | lines.append('') |
|
482 | 483 | ls = '\n'.join(lines) |
|
483 | 484 | |
|
484 | 485 | # in case you wonder about the seemingly redundant <div> here: since the |
|
485 | 486 | # content in the other cell also is wrapped in a div, some browsers in |
|
486 | 487 | # some configurations seem to mess up the formatting... |
|
487 | 488 | if nocls: |
|
488 | 489 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
489 | 490 | '<tr><td><div class="linenodiv" ' |
|
490 | 491 | 'style="background-color: #f0f0f0; padding-right: 10px">' |
|
491 | 492 | '<pre style="line-height: 125%">' + |
|
492 | 493 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
493 | 494 | else: |
|
494 | 495 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
495 | 496 | '<tr><td class="linenos"><div class="linenodiv"><pre>' + |
|
496 | 497 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
497 | 498 | yield 0, dummyoutfile.getvalue() |
|
498 | 499 | yield 0, '</td></tr></table>' |
|
499 | 500 | |
|
500 | 501 | |
|
501 | 502 | class SearchContentCodeHtmlFormatter(CodeHtmlFormatter): |
|
502 | 503 | def __init__(self, **kw): |
|
503 | 504 | # only show these line numbers if set |
|
504 | 505 | self.only_lines = kw.pop('only_line_numbers', []) |
|
505 | 506 | self.query_terms = kw.pop('query_terms', []) |
|
506 | 507 | self.max_lines = kw.pop('max_lines', 5) |
|
507 | 508 | self.line_context = kw.pop('line_context', 3) |
|
508 | 509 | self.url = kw.pop('url', None) |
|
509 | 510 | |
|
510 | 511 | super(CodeHtmlFormatter, self).__init__(**kw) |
|
511 | 512 | |
|
512 | 513 | def _wrap_code(self, source): |
|
513 | 514 | for cnt, it in enumerate(source): |
|
514 | 515 | i, t = it |
|
515 | 516 | t = '<pre>%s</pre>' % t |
|
516 | 517 | yield i, t |
|
517 | 518 | |
|
518 | 519 | def _wrap_tablelinenos(self, inner): |
|
519 | 520 | yield 0, '<table class="code-highlight %stable">' % self.cssclass |
|
520 | 521 | |
|
521 | 522 | last_shown_line_number = 0 |
|
522 | 523 | current_line_number = 1 |
|
523 | 524 | |
|
524 | 525 | for t, line in inner: |
|
525 | 526 | if not t: |
|
526 | 527 | yield t, line |
|
527 | 528 | continue |
|
528 | 529 | |
|
529 | 530 | if current_line_number in self.only_lines: |
|
530 | 531 | if last_shown_line_number + 1 != current_line_number: |
|
531 | 532 | yield 0, '<tr>' |
|
532 | 533 | yield 0, '<td class="line">...</td>' |
|
533 | 534 | yield 0, '<td id="hlcode" class="code"></td>' |
|
534 | 535 | yield 0, '</tr>' |
|
535 | 536 | |
|
536 | 537 | yield 0, '<tr>' |
|
537 | 538 | if self.url: |
|
538 | 539 | yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % ( |
|
539 | 540 | self.url, current_line_number, current_line_number) |
|
540 | 541 | else: |
|
541 | 542 | yield 0, '<td class="line"><a href="">%i</a></td>' % ( |
|
542 | 543 | current_line_number) |
|
543 | 544 | yield 0, '<td id="hlcode" class="code">' + line + '</td>' |
|
544 | 545 | yield 0, '</tr>' |
|
545 | 546 | |
|
546 | 547 | last_shown_line_number = current_line_number |
|
547 | 548 | |
|
548 | 549 | current_line_number += 1 |
|
549 | 550 | |
|
550 | 551 | yield 0, '</table>' |
|
551 | 552 | |
|
552 | 553 | |
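# Usage sketch (hypothetical values): render only the matched lines of a
# search hit, each linked back to the file view via the `url` kwarg:
#
#   formatter = SearchContentCodeHtmlFormatter(
#       only_line_numbers=[10, 42], query_terms=['foo'],
#       url='/myrepo/files/tip/src/app.py')
#   html = code_highlight(content, lexer, formatter)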
|
553 | 554 | def hsv_to_rgb(h, s, v): |
|
554 | 555 | """ Convert hsv color values to rgb """ |
|
555 | 556 | |
|
556 | 557 | if s == 0.0: |
|
557 | 558 | return v, v, v |
|
558 | 559 | i = int(h * 6.0) # XXX assume int() truncates! |
|
559 | 560 | f = (h * 6.0) - i |
|
560 | 561 | p = v * (1.0 - s) |
|
561 | 562 | q = v * (1.0 - s * f) |
|
562 | 563 | t = v * (1.0 - s * (1.0 - f)) |
|
563 | 564 | i = i % 6 |
|
564 | 565 | if i == 0: |
|
565 | 566 | return v, t, p |
|
566 | 567 | if i == 1: |
|
567 | 568 | return q, v, p |
|
568 | 569 | if i == 2: |
|
569 | 570 | return p, v, t |
|
570 | 571 | if i == 3: |
|
571 | 572 | return p, q, v |
|
572 | 573 | if i == 4: |
|
573 | 574 | return t, p, v |
|
574 | 575 | if i == 5: |
|
575 | 576 | return v, p, q |
|
576 | 577 | |
|
577 | 578 | |
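# Quick sanity checks of the conversion above (all channels in the 0..1 range):
#
#   hsv_to_rgb(0.0, 1.0, 1.0)      # -> (1.0, 0.0, 0.0)    pure red
#   hsv_to_rgb(1 / 3.0, 1.0, 1.0)  # -> (0.0, 1.0, 0.0)    pure green
#   hsv_to_rgb(0.5, 0.0, 0.75)     # -> (0.75, 0.75, 0.75) grey (s == 0 path)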
|
578 | 579 | def unique_color_generator(n=10000, saturation=0.10, lightness=0.95): |
|
579 | 580 | """ |
|
580 | 581 | Generator yielding n evenly distributed colors using the HSV color

581 | 582 | space and the golden ratio. It always returns the same order of colors.
|
582 | 583 | |
|
583 | 584 | :param n: number of colors to generate |
|
584 | 585 | :param saturation: saturation of returned colors |
|
585 | 586 | :param lightness: lightness of returned colors |
|
586 | 587 | :returns: generator of [r, g, b] channel values as strings
|
587 | 588 | """ |
|
588 | 589 | |
|
589 | 590 | golden_ratio = 0.618033988749895 |
|
590 | 591 | h = 0.22717784590367374 |
|
591 | 592 | |
|
592 | 593 | for _ in range(n): |
|
593 | 594 | h += golden_ratio |
|
594 | 595 | h %= 1 |
|
595 | 596 | HSV_tuple = [h, saturation, lightness] |
|
596 | 597 | RGB_tuple = hsv_to_rgb(*HSV_tuple) |
|
597 | 598 | yield [str(int(x * 256)) for x in RGB_tuple] |
|
598 | 599 | |
|
599 | 600 | |
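# Usage sketch: the start hue and golden-ratio step are fixed, so the
# sequence is deterministic across runs:
#
#   gen = unique_color_generator(n=2)
#   first = next(gen)  # a stable ['r', 'g', 'b'] triplet of string values
#   'rgb({})'.format(', '.join(first))  # CSS-ready, as color_hasher does below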
|
600 | 601 | def color_hasher(n=10000, saturation=0.10, lightness=0.95): |
|
601 | 602 | """ |
|
602 | 603 | Returns a function which, when called with an argument, returns a unique

603 | 604 | color for that argument, e.g.:
|
604 | 605 | |
|
605 | 606 | :param n: number of colors to generate |
|
606 | 607 | :param saturation: saturation of returned colors |
|
607 | 608 | :param lightness: lightness of returned colors |
|
608 | 609 | :returns: css RGB string |
|
609 | 610 | |
|
610 | 611 | >>> color_hash = color_hasher() |
|
611 | 612 | >>> color_hash('hello') |
|
612 | 613 | 'rgb(34, 12, 59)' |
|
613 | 614 | >>> color_hash('hello') |
|
614 | 615 | 'rgb(34, 12, 59)' |
|
615 | 616 | >>> color_hash('other') |
|
616 | 617 | 'rgb(90, 224, 159)' |
|
617 | 618 | """ |
|
618 | 619 | |
|
619 | 620 | color_dict = {} |
|
620 | 621 | cgenerator = unique_color_generator( |
|
621 | 622 | saturation=saturation, lightness=lightness) |
|
622 | 623 | |
|
623 | 624 | def get_color_string(thing): |
|
624 | 625 | if thing in color_dict: |
|
625 | 626 | col = color_dict[thing] |
|
626 | 627 | else: |
|
627 | 628 | col = color_dict[thing] = next(cgenerator) |
|
628 | 629 | return "rgb(%s)" % (', '.join(col)) |
|
629 | 630 | |
|
630 | 631 | return get_color_string |
|
631 | 632 | |
|
632 | 633 | |
|
633 | 634 | def get_lexer_safe(mimetype=None, filepath=None): |
|
634 | 635 | """ |
|
635 | 636 | Tries to return a relevant pygments lexer using mimetype/filepath name, |
|
636 | 637 | defaulting to plain text if none could be found |
|
637 | 638 | """ |
|
638 | 639 | lexer = None |
|
639 | 640 | try: |
|
640 | 641 | if mimetype: |
|
641 | 642 | lexer = get_lexer_for_mimetype(mimetype) |
|
642 | 643 | if not lexer: |
|
643 | 644 | lexer = get_lexer_for_filename(filepath) |
|
644 | 645 | except pygments.util.ClassNotFound: |
|
645 | 646 | pass |
|
646 | 647 | |
|
647 | 648 | if not lexer: |
|
648 | 649 | lexer = get_lexer_by_name('text') |
|
649 | 650 | |
|
650 | 651 | return lexer |
|
651 | 652 | |
|
652 | 653 | |
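# Usage sketch (lexer classes are pygments' own):
#
#   get_lexer_safe(mimetype='text/x-python')  # -> PythonLexer
#   get_lexer_safe(filepath='setup.cfg')      # -> IniLexer
#   get_lexer_safe(mimetype='bogus/type')     # -> TextLexer fallback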
|
653 | 654 | def get_lexer_for_filenode(filenode): |
|
654 | 655 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer |
|
655 | 656 | return lexer |
|
656 | 657 | |
|
657 | 658 | |
|
658 | 659 | def pygmentize(filenode, **kwargs): |
|
659 | 660 | """ |
|
660 | 661 | Highlight the content of a given file node using pygments.

661 | 662 |

662 | 663 | :param filenode: file node providing ``content`` and lexer information
|
663 | 664 | """ |
|
664 | 665 | lexer = get_lexer_for_filenode(filenode) |
|
665 | 666 | return literal(code_highlight(filenode.content, lexer, |
|
666 | 667 | CodeHtmlFormatter(**kwargs))) |
|
667 | 668 | |
|
668 | 669 | |
|
669 | 670 | def is_following_repo(repo_name, user_id): |
|
670 | 671 | from rhodecode.model.scm import ScmModel |
|
671 | 672 | return ScmModel().is_following_repo(repo_name, user_id) |
|
672 | 673 | |
|
673 | 674 | |
|
674 | 675 | class _Message(object): |
|
675 | 676 | """A message returned by ``Flash.pop_messages()``. |
|
676 | 677 | |
|
677 | 678 | Converting the message to a string returns the message text. Instances |
|
678 | 679 | also have the following attributes: |
|
679 | 680 | |
|
680 | 681 | * ``message``: the message text. |
|
681 | 682 | * ``category``: the category specified when the message was created. |
|
682 | 683 | """ |
|
683 | 684 | |
|
684 | 685 | def __init__(self, category, message, sub_data=None): |
|
685 | 686 | self.category = category |
|
686 | 687 | self.message = message |
|
687 | 688 | self.sub_data = sub_data or {} |
|
688 | 689 | |
|
689 | 690 | def __str__(self): |
|
690 | 691 | return self.message |
|
691 | 692 | |
|
692 | 693 | __unicode__ = __str__ |
|
693 | 694 | |
|
694 | 695 | def __html__(self): |
|
695 | 696 | return escape(safe_str(self.message)) |
|
696 | 697 | |
|
697 | 698 | |
|
698 | 699 | class Flash(object): |
|
699 | 700 | # List of allowed categories. If None, allow any category. |
|
700 | 701 | categories = ["warning", "notice", "error", "success"] |
|
701 | 702 | |
|
702 | 703 | # Default category if none is specified. |
|
703 | 704 | default_category = "notice" |
|
704 | 705 | |
|
705 | 706 | def __init__(self, session_key="flash", categories=None, |
|
706 | 707 | default_category=None): |
|
707 | 708 | """ |
|
708 | 709 | Instantiate a ``Flash`` object. |
|
709 | 710 | |
|
710 | 711 | ``session_key`` is the key to save the messages under in the user's |
|
711 | 712 | session. |
|
712 | 713 | |
|
713 | 714 | ``categories`` is an optional list which overrides the default list |
|
714 | 715 | of categories. |
|
715 | 716 | |
|
716 | 717 | ``default_category`` overrides the default category used for messages |
|
717 | 718 | when none is specified. |
|
718 | 719 | """ |
|
719 | 720 | self.session_key = session_key |
|
720 | 721 | if categories is not None: |
|
721 | 722 | self.categories = categories |
|
722 | 723 | if default_category is not None: |
|
723 | 724 | self.default_category = default_category |
|
724 | 725 | if self.categories and self.default_category not in self.categories: |
|
725 | 726 | raise ValueError( |
|
726 | 727 | "unrecognized default category %r" % (self.default_category,)) |
|
727 | 728 | |
|
728 | 729 | def pop_messages(self, session=None, request=None): |
|
729 | 730 | """ |
|
730 | 731 | Return all accumulated messages and delete them from the session. |
|
731 | 732 | |
|
732 | 733 | The return value is a list of ``Message`` objects. |
|
733 | 734 | """ |
|
734 | 735 | messages = [] |
|
735 | 736 | |
|
736 | 737 | if not session: |
|
737 | 738 | if not request: |
|
738 | 739 | request = get_current_request() |
|
739 | 740 | session = request.session |
|
740 | 741 | |
|
741 | 742 | # Pop the 'old' pylons flash messages. They are tuples of the form |
|
742 | 743 | # (category, message) |
|
743 | 744 | for cat, msg in session.pop(self.session_key, []): |
|
744 | 745 | messages.append(_Message(cat, msg)) |
|
745 | 746 | |
|
746 | 747 | # Pop the 'new' pyramid flash messages for each category as list |
|
747 | 748 | # of strings. |
|
748 | 749 | for cat in self.categories: |
|
749 | 750 | for msg in session.pop_flash(queue=cat): |
|
750 | 751 | sub_data = {} |
|
751 | 752 | if hasattr(msg, 'rsplit'): |
|
752 | 753 | flash_data = msg.rsplit('|DELIM|', 1) |
|
753 | 754 | org_message = flash_data[0] |
|
754 | 755 | if len(flash_data) > 1: |
|
755 | 756 | sub_data = json.loads(flash_data[1]) |
|
756 | 757 | else: |
|
757 | 758 | org_message = msg |
|
758 | 759 | |
|
759 | 760 | messages.append(_Message(cat, org_message, sub_data=sub_data)) |
|
760 | 761 | |
|
761 | 762 | # Map messages from the default queue to the 'notice' category. |
|
762 | 763 | for msg in session.pop_flash(): |
|
763 | 764 | messages.append(_Message('notice', msg)) |
|
764 | 765 | |
|
765 | 766 | session.save() |
|
766 | 767 | return messages |
|
767 | 768 | |
|
768 | 769 | def json_alerts(self, session=None, request=None): |
|
769 | 770 | payloads = [] |
|
770 | 771 | messages = self.pop_messages(session=session, request=request) or []
|
771 | 772 | for message in messages: |
|
772 | 773 | payloads.append({ |
|
773 | 774 | 'message': { |
|
774 | 775 | 'message': '{}'.format(message.message), |
|
775 | 776 | 'level': message.category, |
|
776 | 777 | 'force': True, |
|
777 | 778 | 'subdata': message.sub_data |
|
778 | 779 | } |
|
779 | 780 | }) |
|
780 | 781 | return safe_str(json.dumps(payloads)) |
|
781 | 782 | |
|
782 | 783 | def __call__(self, message, category=None, ignore_duplicate=True, |
|
783 | 784 | session=None, request=None): |
|
784 | 785 | |
|
785 | 786 | if not session: |
|
786 | 787 | if not request: |
|
787 | 788 | request = get_current_request() |
|
788 | 789 | session = request.session |
|
789 | 790 | |
|
790 | 791 | session.flash( |
|
791 | 792 | message, queue=category, allow_duplicate=not ignore_duplicate) |
|
792 | 793 | |
|
793 | 794 | |
|
794 | 795 | flash = Flash() |
|
795 | 796 | |
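# Usage sketch (inside a view with a pyramid request; this helpers module is
# conventionally imported as `h`):
#
#   h.flash('Repository created', category='success')
#   # structured sub-data rides along via the |DELIM| convention that
#   # pop_messages() splits and json-decodes:
#   h.flash('Commit failed|DELIM|{"reason": "hook"}', category='error')
#
#   for msg in h.flash.pop_messages(request=request):
#       print(msg.category, str(msg), msg.sub_data)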
|
796 | 797 | #============================================================================== |
|
797 | 798 | # SCM FILTERS available via h. |
|
798 | 799 | #============================================================================== |
|
799 | 800 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
800 | 801 | from rhodecode.lib.utils2 import age, age_from_seconds |
|
801 | 802 | from rhodecode.model.db import User, ChangesetStatus |
|
802 | 803 | |
|
803 | 804 | |
|
804 | 805 | email = author_email |
|
805 | 806 | |
|
806 | 807 | |
|
807 | 808 | def capitalize(raw_text): |
|
808 | 809 | return raw_text.capitalize() |
|
809 | 810 | |
|
810 | 811 | |
|
811 | 812 | def short_id(long_id): |
|
812 | 813 | return long_id[:12] |
|
813 | 814 | |
|
814 | 815 | |
|
815 | 816 | def hide_credentials(url): |
|
816 | 817 | from rhodecode.lib.utils2 import credentials_filter |
|
817 | 818 | return credentials_filter(url) |
|
818 | 819 | |
|
819 | 820 | import zoneinfo |
|
820 | 821 | import tzlocal |
|
821 | 822 | local_timezone = tzlocal.get_localzone() |
|
822 | 823 | |
|
823 | 824 | |
|
824 | 825 | def get_timezone(datetime_iso, time_is_local=False): |
|
825 | 826 | tzinfo = '+00:00' |
|
826 | 827 | |
|
827 | 828 | # detect if we have a timezone info, otherwise, add it |
|
828 | 829 | if time_is_local and isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo: |
|
829 | 830 | force_timezone = os.environ.get('RC_TIMEZONE', '') |
|
830 | 831 | if force_timezone: |
|
831 | 832 | force_timezone = zoneinfo.ZoneInfo(force_timezone) |
|
832 | 833 | timezone = force_timezone or local_timezone |
|
833 | 834 | |
|
834 | 835 | offset = datetime_iso.replace(tzinfo=timezone).strftime('%z') |
|
835 | 836 | tzinfo = '{}:{}'.format(offset[:-2], offset[-2:]) |
|
836 | 837 | return tzinfo |
|
837 | 838 | |
|
838 | 839 | |
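# Illustration: naive local datetimes get the server offset (or the one forced
# via RC_TIMEZONE) attached; anything else keeps the +00:00 default:
#
#   get_timezone(datetime(2024, 1, 1, 12, 0), time_is_local=True)
#   # -> e.g. '+01:00' on a CET host
#   get_timezone('2024-01-01T12:00:00')  # -> '+00:00'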
|
839 | 840 | def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True): |
|
840 | 841 | title = value or format_date(datetime_iso) |
|
841 | 842 | tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local) |
|
842 | 843 | |
|
843 | 844 | return literal( |
|
844 | 845 | '<time class="timeago {cls}" title="{tt_title}" datetime="{dt}{tzinfo}">{title}</time>'.format( |
|
845 | 846 | cls='tooltip' if tooltip else '', |
|
846 | 847 | tt_title=('{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)) if tooltip else '', |
|
847 | 848 | title=title, dt=datetime_iso, tzinfo=tzinfo |
|
848 | 849 | )) |
|
849 | 850 | |
|
850 | 851 | |
|
851 | 852 | def _shorten_commit_id(commit_id, commit_len=None): |
|
852 | 853 | if commit_len is None: |
|
853 | 854 | request = get_current_request() |
|
854 | 855 | commit_len = request.call_context.visual.show_sha_length |
|
855 | 856 | return commit_id[:commit_len] |
|
856 | 857 | |
|
857 | 858 | |
|
858 | 859 | def show_id(commit, show_idx=None, commit_len=None): |
|
859 | 860 | """ |
|
860 | 861 | Configurable function that shows the commit ID;
|
861 | 862 | by default it's r123:fffeeefffeee |
|
862 | 863 | |
|
863 | 864 | :param commit: commit instance |
|
864 | 865 | """ |
|
865 | 866 | if show_idx is None: |
|
866 | 867 | request = get_current_request() |
|
867 | 868 | show_idx = request.call_context.visual.show_revision_number |
|
868 | 869 | |
|
869 | 870 | raw_id = _shorten_commit_id(commit.raw_id, commit_len=commit_len) |
|
870 | 871 | if show_idx: |
|
871 | 872 | return 'r%s:%s' % (commit.idx, raw_id) |
|
872 | 873 | else: |
|
873 | 874 | return '%s' % (raw_id, ) |
|
874 | 875 | |
|
875 | 876 | |
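# Illustration of the two output styles above, with a hypothetical commit
# instance and assuming show_sha_length == 12:
#
#   show_id(commit, show_idx=True)   # -> 'r123:fffeeefffeee'
#   show_id(commit, show_idx=False)  # -> 'fffeeefffeee'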
|
876 | 877 | def format_date(date): |
|
877 | 878 | """ |
|
878 | 879 | use a standardized formatting for dates used in RhodeCode |
|
879 | 880 | |
|
880 | 881 | :param date: date/datetime object |
|
881 | 882 | :return: formatted date |
|
882 | 883 | """ |
|
883 | 884 | |
|
884 | 885 | if date: |
|
885 | 886 | _fmt = "%a, %d %b %Y %H:%M:%S" |
|
886 | 887 | return safe_str(date.strftime(_fmt)) |
|
887 | 888 | |
|
888 | 889 | return "" |
|
889 | 890 | |
|
890 | 891 | |
|
891 | 892 | class _RepoChecker(object): |
|
892 | 893 | |
|
893 | 894 | def __init__(self, backend_alias): |
|
894 | 895 | self._backend_alias = backend_alias |
|
895 | 896 | |
|
896 | 897 | def __call__(self, repository): |
|
897 | 898 | if hasattr(repository, 'alias'): |
|
898 | 899 | _type = repository.alias |
|
899 | 900 | elif hasattr(repository, 'repo_type'): |
|
900 | 901 | _type = repository.repo_type |
|
901 | 902 | else: |
|
902 | 903 | _type = repository |
|
903 | 904 | return _type == self._backend_alias |
|
904 | 905 | |
|
905 | 906 | |
|
906 | 907 | is_git = _RepoChecker('git') |
|
907 | 908 | is_hg = _RepoChecker('hg') |
|
908 | 909 | is_svn = _RepoChecker('svn') |
|
909 | 910 | |
|
910 | 911 | |
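# The checkers accept a Repository instance, anything exposing `alias` or
# `repo_type`, or a plain backend alias string:
#
#   is_git('git')          # -> True
#   is_svn(repo_instance)  # -> True for SVN-backed repos (hypothetical instance)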
|
911 | 912 | def get_repo_type_by_name(repo_name): |
|
912 | 913 | repo = Repository.get_by_repo_name(repo_name) |
|
913 | 914 | if repo: |
|
914 | 915 | return repo.repo_type |
|
915 | 916 | |
|
916 | 917 | |
|
917 | 918 | def is_svn_without_proxy(repository): |
|
918 | 919 | if is_svn(repository): |
|
919 | from rhodecode.model.settings import VcsSettingsModel | |
|
920 | conf = VcsSettingsModel().get_ui_settings_as_config_obj() | |
|
921 | return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled')) | |
|
920 | return not ConfigGet().get_bool('vcs.svn.proxy.enabled') | |
|
922 | 921 | return False |
|
923 | 922 | |
|
924 | 923 | |
|
925 | 924 | def discover_user(author): |
|
926 | 925 | """ |
|
927 | 926 | Tries to discover RhodeCode User based on the author string. Author string |
|
928 | 927 | is typically `FirstName LastName <email@address.com>` |
|
929 | 928 | """ |
|
930 | 929 | |
|
931 | 930 | # if author is already an instance use it for extraction |
|
932 | 931 | if isinstance(author, User): |
|
933 | 932 | return author |
|
934 | 933 | |
|
935 | 934 | # Valid email in the passed attribute; see if that user is in the system
|
936 | 935 | _email = author_email(author) |
|
937 | 936 | if _email != '': |
|
938 | 937 | user = User.get_by_email(_email, case_insensitive=True, cache=True) |
|
939 | 938 | if user is not None: |
|
940 | 939 | return user |
|
941 | 940 | |
|
942 | 941 | # Maybe it's a username? Try to extract it and fetch the user by username.
|
943 | 942 | _author = author_name(author) |
|
944 | 943 | user = User.get_by_username(_author, case_insensitive=True, cache=True) |
|
945 | 944 | if user is not None: |
|
946 | 945 | return user |
|
947 | 946 | |
|
948 | 947 | return None |
|
949 | 948 | |
|
950 | 949 | |
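# Resolution order sketch (hypothetical users; returns None when no match):
#
#   discover_user('Jane Doe <jane@example.com>')  # matched by email, if known
#   discover_user('jane')                         # fallback: match by username
#   discover_user('Nobody <x@nowhere.invalid>')   # -> None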
|
951 | 950 | def email_or_none(author): |
|
952 | 951 | # extract email from the commit string |
|
953 | 952 | _email = author_email(author) |
|
954 | 953 | |
|
955 | 954 | # If we have an email, use it, otherwise |
|
956 | 955 | # see if it contains a username we can get an email from |
|
957 | 956 | if _email != '': |
|
958 | 957 | return _email |
|
959 | 958 | else: |
|
960 | 959 | user = User.get_by_username( |
|
961 | 960 | author_name(author), case_insensitive=True, cache=True) |
|
962 | 961 | |
|
963 | 962 | if user is not None: |
|
964 | 963 | return user.email |
|
965 | 964 | |
|
966 | 965 | # No valid email, not a valid user in the system, none! |
|
967 | 966 | return None |
|
968 | 967 | |
|
969 | 968 | |
|
970 | 969 | def link_to_user(author, length=0, **kwargs): |
|
971 | 970 | user = discover_user(author) |
|
972 | 971 | # user can be None, but if we have it already it means we can re-use it |
|
973 | 972 | # in the person() function, so we save one expensive query
|
974 | 973 | if user: |
|
975 | 974 | author = user |
|
976 | 975 | |
|
977 | 976 | display_person = person(author, 'username_or_name_or_email') |
|
978 | 977 | if length: |
|
979 | 978 | display_person = shorter(display_person, length) |
|
980 | 979 | |
|
981 | 980 | if user and user.username != user.DEFAULT_USER: |
|
982 | 981 | return link_to( |
|
983 | 982 | escape(display_person), |
|
984 | 983 | route_path('user_profile', username=user.username), |
|
985 | 984 | **kwargs) |
|
986 | 985 | else: |
|
987 | 986 | return escape(display_person) |
|
988 | 987 | |
|
989 | 988 | |
|
990 | 989 | def link_to_group(users_group_name, **kwargs): |
|
991 | 990 | return link_to( |
|
992 | 991 | escape(users_group_name), |
|
993 | 992 | route_path('user_group_profile', user_group_name=users_group_name), |
|
994 | 993 | **kwargs) |
|
995 | 994 | |
|
996 | 995 | |
|
997 | 996 | def person(author, show_attr="username_and_name"): |
|
998 | 997 | user = discover_user(author) |
|
999 | 998 | if user: |
|
1000 | 999 | return getattr(user, show_attr) |
|
1001 | 1000 | else: |
|
1002 | 1001 | _author = author_name(author) |
|
1003 | 1002 | _email = email(author) |
|
1004 | 1003 | return _author or _email |
|
1005 | 1004 | |
|
1006 | 1005 | |
|
1007 | 1006 | def author_string(email): |
|
1008 | 1007 | if email: |
|
1009 | 1008 | user = User.get_by_email(email, case_insensitive=True, cache=True) |
|
1010 | 1009 | if user: |
|
1011 | 1010 | if user.first_name or user.last_name: |
|
1012 | 1011 | return '%s %s <%s>' % ( |
|
1013 | 1012 | user.first_name, user.last_name, email) |
|
1014 | 1013 | else: |
|
1015 | 1014 | return email |
|
1016 | 1015 | else: |
|
1017 | 1016 | return email |
|
1018 | 1017 | else: |
|
1019 | 1018 | return None |
|
1020 | 1019 | |
|
1021 | 1020 | |
|
1022 | 1021 | def person_by_id(id_, show_attr="username_and_name"): |
|
1023 | 1022 | # attr to return from fetched user |
|
1024 | 1023 | def person_getter(usr): |
|
1025 | 1024 | return getattr(usr, show_attr) |
|
1026 | 1025 | |
|
1027 | 1026 | # maybe it's an ID?
|
1028 | 1027 | if str(id_).isdigit() or isinstance(id_, int): |
|
1029 | 1028 | id_ = int(id_) |
|
1030 | 1029 | user = User.get(id_) |
|
1031 | 1030 | if user is not None: |
|
1032 | 1031 | return person_getter(user) |
|
1033 | 1032 | return id_ |
|
1034 | 1033 | |
|
1035 | 1034 | |
|
1036 | 1035 | def gravatar_with_user(request, author, show_disabled=False, tooltip=False): |
|
1037 | 1036 | _render = request.get_partial_renderer('rhodecode:templates/base/base.mako') |
|
1038 | 1037 | return _render('gravatar_with_user', author, show_disabled=show_disabled, tooltip=tooltip) |
|
1039 | 1038 | |
|
1040 | 1039 | |
|
1041 | 1040 | tags_patterns = OrderedDict( |
|
1042 | 1041 | ( |
|
1043 | 1042 | ( |
|
1044 | 1043 | "lang", |
|
1045 | 1044 | ( |
|
1046 | 1045 | re.compile(r"\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+\.]*)\]"), |
|
1047 | 1046 | '<div class="metatag" tag="lang">\\2</div>', |
|
1048 | 1047 | ), |
|
1049 | 1048 | ), |
|
1050 | 1049 | ( |
|
1051 | 1050 | "see", |
|
1052 | 1051 | ( |
|
1053 | 1052 | re.compile(r"\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]"), |
|
1054 | 1053 | '<div class="metatag" tag="see">see: \\1 </div>', |
|
1055 | 1054 | ), |
|
1056 | 1055 | ), |
|
1057 | 1056 | ( |
|
1058 | 1057 | "url", |
|
1059 | 1058 | ( |
|
1060 | 1059 | re.compile( |
|
1061 | 1060 | r"\[url\ \=\>\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]" |
|
1062 | 1061 | ), |
|
1063 | 1062 | '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>', |
|
1064 | 1063 | ), |
|
1065 | 1064 | ), |
|
1066 | 1065 | ( |
|
1067 | 1066 | "license", |
|
1068 | 1067 | ( |
|
1069 | 1068 | re.compile( |
|
1070 | 1069 | r"\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]" |
|
1071 | 1070 | ), |
|
1072 | 1071 | # plain string: re.sub leaves unknown escapes like \/ verbatim in the output, so use a plain URL; \\1 is the backreference

1073 | 1072 | '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>',
|
1074 | 1073 | ), |
|
1075 | 1074 | ), |
|
1076 | 1075 | ( |
|
1077 | 1076 | "ref", |
|
1078 | 1077 | ( |
|
1079 | 1078 | re.compile( |
|
1080 | 1079 | r"\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]" |
|
1081 | 1080 | ), |
|
1082 | 1081 | '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>', |
|
1083 | 1082 | ), |
|
1084 | 1083 | ), |
|
1085 | 1084 | ( |
|
1086 | 1085 | "state", |
|
1087 | 1086 | ( |
|
1088 | 1087 | re.compile(r"\[(stable|featured|stale|dead|dev|deprecated)\]"), |
|
1089 | 1088 | '<div class="metatag" tag="state \\1">\\1</div>', |
|
1090 | 1089 | ), |
|
1091 | 1090 | ), |
|
1092 | 1091 | # label in grey |
|
1093 | 1092 | ( |
|
1094 | 1093 | "label", |
|
1095 | 1094 | (re.compile(r"\[([a-z]+)\]"), '<div class="metatag" tag="label">\\1</div>'), |
|
1096 | 1095 | ), |
|
1097 | 1096 | # generic catch all in grey |
|
1098 | 1097 | ( |
|
1099 | 1098 | "generic", |
|
1100 | 1099 | ( |
|
1101 | 1100 | re.compile(r"\[([a-zA-Z0-9\.\-\_]+)\]"), |
|
1102 | 1101 | '<div class="metatag" tag="generic">\\1</div>', |
|
1103 | 1102 | ), |
|
1104 | 1103 | ), |
|
1105 | 1104 | ) |
|
1106 | 1105 | ) |
|
1107 | 1106 | |
|
1108 | 1107 | |
|
1109 | 1108 | def extract_metatags(value): |
|
1110 | 1109 | """ |
|
1111 | 1110 | Extract supported meta-tags from given text value |
|
1112 | 1111 | """ |
|
1113 | 1112 | tags = [] |
|
1114 | 1113 | if not value: |
|
1115 | 1114 | return tags, '' |
|
1116 | 1115 | |
|
1117 | 1116 | for key, val in list(tags_patterns.items()): |
|
1118 | 1117 | pat, replace_html = val |
|
1119 | 1118 | tags.extend([(key, x.group()) for x in pat.finditer(value)]) |
|
1120 | 1119 | value = pat.sub('', value) |
|
1121 | 1120 | |
|
1122 | 1121 | return tags, value |
|
1123 | 1122 | |
|
1124 | 1123 | |
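# Example run over a repository description (patterns apply in dict order,
# so `license` is extracted before `state`):
#
#   tags, rest = extract_metatags('[stable] [license => MIT] my repo')
#   # tags -> [('license', '[license => MIT]'), ('state', '[stable]')]
#   # rest -> 'my repo' (modulo leftover whitespace where tags were removed)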
|
1125 | 1124 | def style_metatag(tag_type, value): |
|
1126 | 1125 | """ |
|
1127 | 1126 | converts tags from value into html equivalent |
|
1128 | 1127 | """ |
|
1129 | 1128 | if not value: |
|
1130 | 1129 | return '' |
|
1131 | 1130 | |
|
1132 | 1131 | html_value = value |
|
1133 | 1132 | tag_data = tags_patterns.get(tag_type) |
|
1134 | 1133 | if tag_data: |
|
1135 | 1134 | pat, replace_html = tag_data |
|
1136 | 1135 | # convert to plain `str` instead of a markup tag to be used in |
|
1137 | 1136 | # regex expressions. safe_str doesn't work here |
|
1138 | 1137 | html_value = pat.sub(replace_html, value) |
|
1139 | 1138 | |
|
1140 | 1139 | return html_value |
|
1141 | 1140 | |
|
1142 | 1141 | |
|
1143 | 1142 | def bool2icon(value, show_at_false=True): |
|
1144 | 1143 | """ |
|
1145 | 1144 | Returns the boolean value of a given value, represented as an html

1146 | 1145 | element with icon classes
|
1147 | 1146 | |
|
1148 | 1147 | :param value: given value to convert to html node |
|
1149 | 1148 | """ |
|
1150 | 1149 | |
|
1151 | 1150 | if value: # does bool conversion |
|
1152 | 1151 | return HTML.tag('i', class_="icon-true", title='True') |
|
1153 | 1152 | else: # not true as bool |
|
1154 | 1153 | if show_at_false: |
|
1155 | 1154 | return HTML.tag('i', class_="icon-false", title='False') |
|
1156 | 1155 | return HTML.tag('i') |
|
1157 | 1156 | |
|
1158 | 1157 | |
|
1159 | 1158 | def b64(inp): |
|
1160 | 1159 | return base64.b64encode(safe_bytes(inp)) |
|
1161 | 1160 | |
|
1162 | 1161 | #============================================================================== |
|
1163 | 1162 | # PERMS |
|
1164 | 1163 | #============================================================================== |
|
1165 | 1164 | from rhodecode.lib.auth import ( |
|
1166 | 1165 | HasPermissionAny, HasPermissionAll, |
|
1167 | 1166 | HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, |
|
1168 | 1167 | HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, |
|
1169 | 1168 | csrf_token_key, AuthUser) |
|
1170 | 1169 | |
|
1171 | 1170 | |
|
1172 | 1171 | #============================================================================== |
|
1173 | 1172 | # GRAVATAR URL |
|
1174 | 1173 | #============================================================================== |
|
1175 | 1174 | class InitialsGravatar(object): |
|
1176 | 1175 | def __init__(self, email_address, first_name, last_name, size=30, |
|
1177 | 1176 | background=None, text_color='#fff'): |
|
1178 | 1177 | self.size = size |
|
1179 | 1178 | self.first_name = first_name |
|
1180 | 1179 | self.last_name = last_name |
|
1181 | 1180 | self.email_address = email_address |
|
1182 | 1181 | self.background = background or self.str2color(email_address) |
|
1183 | 1182 | self.text_color = text_color |
|
1184 | 1183 | |
|
1185 | 1184 | def get_color_bank(self): |
|
1186 | 1185 | """ |
|
1187 | 1186 | returns a predefined list of colors that gravatars can use. |
|
1188 | 1187 | Those are randomized distinct colors that guarantee readability and |
|
1189 | 1188 | uniqueness. |
|
1190 | 1189 | |
|
1191 | 1190 | generated with: http://phrogz.net/css/distinct-colors.html |
|
1192 | 1191 | """ |
|
1193 | 1192 | return [ |
|
1194 | 1193 | '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000', |
|
1195 | 1194 | '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320', |
|
1196 | 1195 | '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300', |
|
1197 | 1196 | '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140', |
|
1198 | 1197 | '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c', |
|
1199 | 1198 | '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020', |
|
1200 | 1199 | '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039', |
|
1201 | 1200 | '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f', |
|
1202 | 1201 | '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340', |
|
1203 | 1202 | '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98', |
|
1204 | 1203 | '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c', |
|
1205 | 1204 | '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200', |
|
1206 | 1205 | '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a', |
|
1207 | 1206 | '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959', |
|
1208 | 1207 | '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3', |
|
1209 | 1208 | '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626', |
|
1210 | 1209 | '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000', |
|
1211 | 1210 | '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362', |
|
1212 | 1211 | '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3', |
|
1213 | 1212 | '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a', |
|
1214 | 1213 | '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939', |
|
1215 | 1214 | '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39', |
|
1216 | 1215 | '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953', |
|
1217 | 1216 | '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9', |
|
1218 | 1217 | '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1', |
|
1219 | 1218 | '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900', |
|
1220 | 1219 | '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00', |
|
1221 | 1220 | '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3', |
|
1222 | 1221 | '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59', |
|
1223 | 1222 | '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079', |
|
1224 | 1223 | '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700', |
|
1225 | 1224 | '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d', |
|
1226 | 1225 | '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2', |
|
1227 | 1226 | '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff', |
|
1228 | 1227 | '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20', |
|
1229 | 1228 | '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626', |
|
1230 | 1229 | '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23', |
|
1231 | 1230 | '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff', |
|
1232 | 1231 | '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6', |
|
1233 | 1232 | '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a', |
|
1234 | 1233 | '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c', |
|
1235 | 1234 | '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600', |
|
1236 | 1235 | '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff', |
|
1237 | 1236 | '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539', |
|
1238 | 1237 | '#4f8c46', '#368dd9', '#5c0073' |
|
1239 | 1238 | ] |
|
1240 | 1239 | |
|
1241 | 1240 | def rgb_to_hex_color(self, rgb_tuple): |
|
1242 | 1241 | """ |
|
1243 | 1242 | Converts an rgb_tuple passed to an hex color. |
|
1244 | 1243 | |
|
1245 | 1244 | :param rgb_tuple: tuple with 3 ints represents rgb color space |
|
1246 | 1245 | """ |
|
1247 | 1246 | # str.encode('hex') is python2-only; format each channel as two hex digits
1247 | 1246 | return '#' + ''.join(f'{c:02x}' for c in rgb_tuple)  # e.g. (191, 48, 48) -> '#bf3030'
|
1248 | 1247 | |
|
1249 | 1248 | def email_to_int_list(self, email_str): |
|
1250 | 1249 | """ |
|
1251 | 1250 | Get every byte of the hex digest value of email and turn it to integer. |
|
1252 | 1251 | It's going to be always between 0-255 |
|
1253 | 1252 | """ |
|
1254 | 1253 | digest = md5_safe(email_str.lower()) |
|
1255 | 1254 | return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)] |
|
1256 | 1255 | |
|
1257 | 1256 | def pick_color_bank_index(self, email_str, color_bank): |
|
1258 | 1257 | return self.email_to_int_list(email_str)[0] % len(color_bank) |
|
1259 | 1258 | |
|
1260 | 1259 | def str2color(self, email_str): |
|
1261 | 1260 | """ |
|
1262 | 1261 | Tries to map an email to a color using a stable algorithm
|
1263 | 1262 | |
|
1264 | 1263 | :param email_str: |
|
1265 | 1264 | """ |
|
1266 | 1265 | color_bank = self.get_color_bank() |
|
1267 | 1266 | # pick position (modulo its length so we always find it in the

1268 | 1267 | # bank even if it's smaller than 256 values)
|
1269 | 1268 | pos = self.pick_color_bank_index(email_str, color_bank) |
|
1270 | 1269 | return color_bank[pos] |
|
1271 | 1270 | |
|
1272 | 1271 | def normalize_email(self, email_address): |
|
1273 | 1272 | # default host used to fill in the fake/missing email |
|
1274 | 1273 | default_host = 'localhost' |
|
1275 | 1274 | |
|
1276 | 1275 | if not email_address: |
|
1277 | 1276 | email_address = f'{User.DEFAULT_USER}@{default_host}' |
|
1278 | 1277 | |
|
1279 | 1278 | email_address = safe_str(email_address) |
|
1280 | 1279 | |
|
1281 | 1280 | if '@' not in email_address: |
|
1282 | 1281 | email_address = f'{email_address}@{default_host}' |
|
1283 | 1282 | |
|
1284 | 1283 | if email_address.endswith('@'): |
|
1285 | 1284 | email_address = f'{email_address}{default_host}' |
|
1286 | 1285 | |
|
1287 | 1286 | email_address = convert_special_chars(email_address) |
|
1288 | 1287 | |
|
1289 | 1288 | return email_address |
|
1290 | 1289 | |
|
1291 | 1290 | def get_initials(self): |
|
1292 | 1291 | """ |
|
1293 | 1292 | Returns 2 letter initials calculated based on the input. |
|
1294 | 1293 | The algorithm picks the first given email address, and takes the first

1295 | 1294 | letter of the part before @, then the first letter of the server name. In case
|
1296 | 1295 | the part before @ is in a format of `somestring.somestring2` it replaces |
|
1297 | 1296 | the server letter with first letter of somestring2 |
|
1298 | 1297 | |
|
1299 | 1298 | In case the function was initialized with both first and last name, this

1300 | 1299 | overrides the extraction from email with the first letter of the first and

1301 | 1300 | last name. We add special logic to that functionality: in case the full name

1302 | 1301 | is compound, like Guido Von Rossum, we use the last part of the last name

1303 | 1302 | (Von Rossum), picking `R`.
|
1304 | 1303 | |
|
1305 | 1304 | The function also normalizes non-ascii characters to their ascii

1306 | 1305 | representation, e.g. Δ => A
|
1307 | 1306 | """ |
|
1308 | 1307 | # replace non-ascii to ascii |
|
1309 | 1308 | first_name = convert_special_chars(self.first_name) |
|
1310 | 1309 | last_name = convert_special_chars(self.last_name) |
|
1311 | 1310 | # multi word last names, Guido Von Rossum, we take the last part only |
|
1312 | 1311 | last_name = last_name.split(' ', 1)[-1] |
|
1313 | 1312 | |
|
1314 | 1313 | # do NFKD encoding, and also make sure email has proper format |
|
1315 | 1314 | email_address = self.normalize_email(self.email_address) |
|
1316 | 1315 | |
|
1317 | 1316 | # first push the email initials |
|
1318 | 1317 | prefix, server = email_address.split('@', 1) |
|
1319 | 1318 | |
|
1320 | 1319 | # check if prefix is maybe a 'first_name.last_name' syntax |
|
1321 | 1320 | _dot_split = prefix.rsplit('.', 1) |
|
1322 | 1321 | if len(_dot_split) == 2 and _dot_split[1]: |
|
1323 | 1322 | initials = [_dot_split[0][0], _dot_split[1][0]] |
|
1324 | 1323 | else: |
|
1325 | 1324 | initials = [prefix[0], server[0]] |
|
1326 | 1325 | |
|
1327 | 1326 | # get first letter of first and last names to create initials |
|
1328 | 1327 | fn_letter = (first_name or " ")[0].strip() |
|
1329 | 1328 | ln_letter = (last_name or " ")[0].strip() |
|
1330 | 1329 | |
|
1331 | 1330 | if fn_letter: |
|
1332 | 1331 | initials[0] = fn_letter |
|
1333 | 1332 | |
|
1334 | 1333 | if ln_letter: |
|
1335 | 1334 | initials[1] = ln_letter |
|
1336 | 1335 | |
|
1337 | 1336 | return ''.join(initials).upper() |
|
1338 | 1337 | |
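# Examples of the rules above (illustrative addresses):
#
#   InitialsGravatar('john.doe@example.com', '', '').get_initials()    # 'JD'
#   InitialsGravatar('john@example.com', '', '').get_initials()        # 'JE'
#   InitialsGravatar('x@y.com', 'Guido', 'Von Rossum').get_initials()  # 'GR'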
|
1339 | 1338 | def get_img_data_by_type(self, font_family, img_type): |
|
1340 | 1339 | default_user = """ |
|
1341 | 1340 | <svg xmlns="http://www.w3.org/2000/svg" |
|
1342 | 1341 | version="1.1" x="0px" y="0px" width="{size}" height="{size}" |
|
1343 | 1342 | viewBox="-15 -10 439.165 429.164" |
|
1344 | 1343 | |
|
1345 | 1344 | xml:space="preserve" |
|
1346 | 1345 | font-family="{font_family}" |
|
1347 | 1346 | style="background:{background};" > |
|
1348 | 1347 | |
|
1349 | 1348 | <path d="M204.583,216.671c50.664,0,91.74-48.075, |
|
1350 | 1349 | 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377 |
|
1351 | 1350 | c-50.668,0-91.74,25.14-91.74,107.377C112.844, |
|
1352 | 1351 | 168.596,153.916,216.671, |
|
1353 | 1352 | 204.583,216.671z" fill="{text_color}"/> |
|
1354 | 1353 | <path d="M407.164,374.717L360.88, |
|
1355 | 1354 | 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392 |
|
1356 | 1355 | c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316, |
|
1357 | 1356 | 15.366-44.203,23.488-69.076,23.488c-24.877, |
|
1358 | 1357 | 0-48.762-8.122-69.078-23.488 |
|
1359 | 1358 | c-1.428-1.078-3.346-1.238-4.93-0.415L58.75, |
|
1360 | 1359 | 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717 |
|
1361 | 1360 | c-3.191,7.188-2.537,15.412,1.75,22.005c4.285, |
|
1362 | 1361 | 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936, |
|
1363 | 1362 | 19.402-10.527 C409.699,390.129, |
|
1364 | 1363 | 410.355,381.902,407.164,374.717z" fill="{text_color}"/> |
|
1365 | 1364 | </svg>""".format( |
|
1366 | 1365 | size=self.size, |
|
1367 | 1366 | background='#979797', # @grey4 |
|
1368 | 1367 | text_color=self.text_color, |
|
1369 | 1368 | font_family=font_family) |
|
1370 | 1369 | |
|
1371 | 1370 | return { |
|
1372 | 1371 | "default_user": default_user |
|
1373 | 1372 | }[img_type] |
|
1374 | 1373 | |
|
1375 | 1374 | def get_img_data(self, svg_type=None): |
|
1376 | 1375 | """ |
|
1377 | 1376 | generates the svg metadata for image |
|
1378 | 1377 | """ |
|
1379 | 1378 | fonts = [ |
|
1380 | 1379 | '-apple-system', |
|
1381 | 1380 | 'BlinkMacSystemFont', |
|
1382 | 1381 | 'Segoe UI', |
|
1383 | 1382 | 'Roboto', |
|
1384 | 1383 | 'Oxygen-Sans', |
|
1385 | 1384 | 'Ubuntu', |
|
1386 | 1385 | 'Cantarell', |
|
1387 | 1386 | 'Helvetica Neue', |
|
1388 | 1387 | 'sans-serif' |
|
1389 | 1388 | ] |
|
1390 | 1389 | font_family = ','.join(fonts) |
|
1391 | 1390 | if svg_type: |
|
1392 | 1391 | return self.get_img_data_by_type(font_family, svg_type) |
|
1393 | 1392 | |
|
1394 | 1393 | initials = self.get_initials() |
|
1395 | 1394 | img_data = """ |
|
1396 | 1395 | <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none" |
|
1397 | 1396 | width="{size}" height="{size}" |
|
1398 | 1397 | style="width: 100%; height: 100%; background-color: {background}" |
|
1399 | 1398 | viewBox="0 0 {size} {size}"> |
|
1400 | 1399 | <text text-anchor="middle" y="50%" x="50%" dy="0.35em" |
|
1401 | 1400 | pointer-events="auto" fill="{text_color}" |
|
1402 | 1401 | font-family="{font_family}" |
|
1403 | 1402 | style="font-weight: 400; font-size: {f_size}px;">{text} |
|
1404 | 1403 | </text> |
|
1405 | 1404 | </svg>""".format( |
|
1406 | 1405 | size=self.size, |
|
1407 | 1406 | f_size=self.size/2.05, # scale the text inside the box nicely |
|
1408 | 1407 | background=self.background, |
|
1409 | 1408 | text_color=self.text_color, |
|
1410 | 1409 | text=initials.upper(), |
|
1411 | 1410 | font_family=font_family) |
|
1412 | 1411 | |
|
1413 | 1412 | return img_data |
|
1414 | 1413 | |
|
1415 | 1414 | def generate_svg(self, svg_type=None): |
|
1416 | 1415 | img_data = base64_to_str(self.get_img_data(svg_type)) |
|
1417 | 1416 | return "data:image/svg+xml;base64,{}".format(img_data) |
|
1418 | 1417 | |
|
1419 | 1418 | |
|
1420 | 1419 | def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False): |
|
1421 | 1420 | |
|
1422 | 1421 | svg_type = None |
|
1423 | 1422 | if email_address == User.DEFAULT_USER_EMAIL: |
|
1424 | 1423 | svg_type = 'default_user' |
|
1425 | 1424 | |
|
1426 | 1425 | klass = InitialsGravatar(email_address, first_name, last_name, size) |
|
1427 | 1426 | |
|
1428 | 1427 | if store_on_disk: |
|
1429 | 1428 | from rhodecode.apps.file_store import utils as store_utils |
|
1430 | 1429 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \ |
|
1431 | 1430 | FileOverSizeException |
|
1432 | 1431 | from rhodecode.model.db import Session |
|
1433 | 1432 | |
|
1434 | 1433 | image_key = md5_safe(email_address.lower() |
|
1435 | 1434 | + first_name.lower() + last_name.lower()) |
|
1436 | 1435 | |
|
1437 | 1436 | storage = store_utils.get_file_storage(request.registry.settings) |
|
1438 | 1437 | filename = '{}.svg'.format(image_key) |
|
1439 | 1438 | subdir = 'gravatars' |
|
1440 | 1439 | # since final name has a counter, we apply the 0 |
|
1441 | 1440 | uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False)) |
|
1442 | 1441 | store_uid = os.path.join(subdir, uid) |
|
1443 | 1442 | |
|
1444 | 1443 | db_entry = FileStore.get_by_store_uid(store_uid) |
|
1445 | 1444 | if db_entry: |
|
1446 | 1445 | return request.route_path('download_file', fid=store_uid) |
|
1447 | 1446 | |
|
1448 | 1447 | img_data = klass.get_img_data(svg_type=svg_type) |
|
1449 | 1448 | img_file = store_utils.bytes_to_file_obj(img_data) |
|
1450 | 1449 | |
|
1451 | 1450 | try: |
|
1452 | 1451 | store_uid, metadata = storage.save_file( |
|
1453 | 1452 | img_file, filename, directory=subdir, |
|
1454 | 1453 | extensions=['.svg'], randomized_name=False) |
|
1455 | 1454 | except (FileNotAllowedException, FileOverSizeException): |
|
1456 | 1455 | raise |
|
1457 | 1456 | |
|
1458 | 1457 | try: |
|
1459 | 1458 | entry = FileStore.create( |
|
1460 | 1459 | file_uid=store_uid, filename=metadata["filename"], |
|
1461 | 1460 | file_hash=metadata["sha256"], file_size=metadata["size"], |
|
1462 | 1461 | file_display_name=filename, |
|
1463 | 1462 | file_description=f'user gravatar `{safe_str(filename)}`', |
|
1464 | 1463 | hidden=True, check_acl=False, user_id=1 |
|
1465 | 1464 | ) |
|
1466 | 1465 | Session().add(entry) |
|
1467 | 1466 | Session().commit() |
|
1468 | 1467 | log.debug('Stored upload in DB as %s', entry) |
|
1469 | 1468 | except Exception: |
|
1470 | 1469 | raise |
|
1471 | 1470 | |
|
1472 | 1471 | return request.route_path('download_file', fid=store_uid) |
|
1473 | 1472 | |
|
1474 | 1473 | else: |
|
1475 | 1474 | return klass.generate_svg(svg_type=svg_type) |
|
1476 | 1475 | |
|
1477 | 1476 | |
|
1478 | 1477 | def gravatar_external(request, gravatar_url_tmpl, email_address, size=30): |
|
1479 | 1478 | return safe_str(gravatar_url_tmpl)\ |
|
1480 | 1479 | .replace('{email}', email_address) \ |
|
1481 | 1480 | .replace('{md5email}', md5_safe(email_address.lower())) \ |
|
1482 | 1481 | .replace('{netloc}', request.host) \ |
|
1483 | 1482 | .replace('{scheme}', request.scheme) \ |
|
1484 | 1483 | .replace('{size}', safe_str(size)) |
|
1485 | 1484 | |
|
1486 | 1485 | |
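# Illustration with a hypothetical template:
#
#   gravatar_external(request,
#       'https://gravatars.example/{md5email}?s={size}',
#       'jane@example.com', size=30)
#   # -> 'https://gravatars.example/<md5 of lowercased email>?s=30'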
|
1487 | 1486 | def gravatar_url(email_address, size=30, request=None): |
|
1488 | 1487 | request = request or get_current_request() |
|
1489 | 1488 | _use_gravatar = request.call_context.visual.use_gravatar |
|
1490 | 1489 | |
|
1491 | 1490 | email_address = email_address or User.DEFAULT_USER_EMAIL |
|
1492 | 1491 | if isinstance(email_address, str): |
|
1493 | 1492 | # hashlib crashes on unicode items |
|
1494 | 1493 | email_address = safe_str(email_address) |
|
1495 | 1494 | |
|
1496 | 1495 | # empty email or default user |
|
1497 | 1496 | if not email_address or email_address == User.DEFAULT_USER_EMAIL: |
|
1498 | 1497 | return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size) |
|
1499 | 1498 | |
|
1500 | 1499 | if _use_gravatar: |
|
1501 | 1500 | gravatar_url_tmpl = request.call_context.visual.gravatar_url \ |
|
1502 | 1501 | or User.DEFAULT_GRAVATAR_URL |
|
1503 | 1502 | return gravatar_external(request, gravatar_url_tmpl, email_address, size=size) |
|
1504 | 1503 | |
|
1505 | 1504 | else: |
|
1506 | 1505 | return initials_gravatar(request, email_address, '', '', size=size) |
|
1507 | 1506 | |
|
1508 | 1507 | |
|
1509 | 1508 | def breadcrumb_repo_link(repo): |
|
1510 | 1509 | """ |
|
1511 | 1510 | Makes a breadcrumbs path link to a repository
|
1512 | 1511 | |
|
1513 | 1512 | ex:: |
|
1514 | 1513 | group >> subgroup >> repo |
|
1515 | 1514 | |
|
1516 | 1515 | :param repo: a Repository instance |
|
1517 | 1516 | """ |
|
1518 | 1517 | |
|
1519 | 1518 | path = [ |
|
1520 | 1519 | link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name), |
|
1521 | 1520 | title='last change:{}'.format(format_date(group.last_commit_change))) |
|
1522 | 1521 | for group in repo.groups_with_parents |
|
1523 | 1522 | ] + [ |
|
1524 | 1523 | link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name), |
|
1525 | 1524 | title='last change:{}'.format(format_date(repo.last_commit_change))) |
|
1526 | 1525 | ] |
|
1527 | 1526 | |
|
1528 | 1527 | return literal(' » '.join(path)) |
|
1529 | 1528 | |
|
1530 | 1529 | |
|
1531 | 1530 | def breadcrumb_repo_group_link(repo_group): |
|
1532 | 1531 | """ |
|
1533 | 1532 | Makes a breadcrumbs path link to a repository group
|
1534 | 1533 | |
|
1535 | 1534 | ex:: |
|
1536 | 1535 | group >> subgroup |
|
1537 | 1536 | |
|
1538 | 1537 | :param repo_group: a Repository Group instance |
|
1539 | 1538 | """ |
|
1540 | 1539 | |
|
1541 | 1540 | path = [ |
|
1542 | 1541 | link_to(group.name, |
|
1543 | 1542 | route_path('repo_group_home', repo_group_name=group.group_name), |
|
1544 | 1543 | title='last change:{}'.format(format_date(group.last_commit_change))) |
|
1545 | 1544 | for group in repo_group.parents |
|
1546 | 1545 | ] + [ |
|
1547 | 1546 | link_to(repo_group.name, |
|
1548 | 1547 | route_path('repo_group_home', repo_group_name=repo_group.group_name), |
|
1549 | 1548 | title='last change:{}'.format(format_date(repo_group.last_commit_change))) |
|
1550 | 1549 | ] |
|
1551 | 1550 | |
|
1552 | 1551 | return literal(' » '.join(path)) |
|
1553 | 1552 | |
|
1554 | 1553 | |
|
1555 | 1554 | def format_byte_size_binary(file_size): |
|
1556 | 1555 | """ |
|
1557 | 1556 | Formats file/folder sizes using standard binary units (KiB, MiB, ...).
|
1558 | 1557 | """ |
|
1559 | 1558 | if file_size is None: |
|
1560 | 1559 | file_size = 0 |
|
1561 | 1560 | |
|
1562 | 1561 | formatted_size = format_byte_size(file_size, binary=True) |
|
1563 | 1562 | return formatted_size |
|
1564 | 1563 | |
|
1565 | 1564 | |
|
1566 | 1565 | def urlify_text(text_, safe=True, **href_attrs): |
|
1567 | 1566 | """ |
|
1568 | 1567 | Extract urls from text and make html links out of them |
|
1569 | 1568 | """ |
|
1570 | 1569 | |
|
1571 | 1570 | url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]''' |
|
1572 | 1571 | r'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''') |
|
1573 | 1572 | |
|
1574 | 1573 | def url_func(match_obj): |
|
1575 | 1574 | url_full = match_obj.groups()[0] |
|
1576 | 1575 | a_options = dict(href_attrs) |
|
1577 | 1576 | a_options['href'] = url_full |
|
1578 | 1577 | a_text = url_full |
|
1579 | 1578 | return HTML.tag("a", a_text, **a_options) |
|
1580 | 1579 | |
|
1581 | 1580 | _new_text = url_pat.sub(url_func, text_) |
|
1582 | 1581 | |
|
1583 | 1582 | if safe: |
|
1584 | 1583 | return literal(_new_text) |
|
1585 | 1584 | return _new_text |
|
1586 | 1585 | |
|
1587 | 1586 | |
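# Illustration:
#
#   urlify_text('docs at https://example.com/guide')
#   # -> literal('docs at <a href="https://example.com/guide">'
#   #            'https://example.com/guide</a>')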
|
1588 | 1587 | def urlify_commits(text_, repo_name): |
|
1589 | 1588 | """ |
|
1590 | 1589 | Extract commit ids from text and make link from them |
|
1591 | 1590 | |
|
1592 | 1591 | :param text_: |
|
1593 | 1592 | :param repo_name: repo name to build the URL with |
|
1594 | 1593 | """ |
|
1595 | 1594 | |
|
1596 | 1595 | url_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)') |
|
1597 | 1596 | |
|
1598 | 1597 | def url_func(match_obj): |
|
1599 | 1598 | commit_id = match_obj.groups()[1] |
|
1600 | 1599 | pref = match_obj.groups()[0] |
|
1601 | 1600 | suf = match_obj.groups()[2] |
|
1602 | 1601 | |
|
1603 | 1602 | tmpl = ( |
|
1604 | 1603 | '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">' |
|
1605 | 1604 | '%(commit_id)s</a>%(suf)s' |
|
1606 | 1605 | ) |
|
1607 | 1606 | return tmpl % { |
|
1608 | 1607 | 'pref': pref, |
|
1609 | 1608 | 'cls': 'revision-link', |
|
1610 | 1609 | 'url': route_url( |
|
1611 | 1610 | 'repo_commit', repo_name=repo_name, commit_id=commit_id), |
|
1612 | 1611 | 'commit_id': commit_id, |
|
1613 | 1612 | 'suf': suf, |
|
1614 | 1613 | 'hovercard_alt': 'Commit: {}'.format(commit_id), |
|
1615 | 1614 | 'hovercard_url': route_url( |
|
1616 | 1615 | 'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id) |
|
1617 | 1616 | } |
|
1618 | 1617 | |
|
1619 | 1618 | new_text = url_pat.sub(url_func, text_) |
|
1620 | 1619 | |
|
1621 | 1620 | return new_text |
|
1622 | 1621 | |
|
1623 | 1622 | |
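# Illustration: a whitespace-delimited 12-40 char hex string becomes a
# hovercard link into the given repo (href shape is schematic):
#
#   urlify_commits('fixed in deadbeefcafe', 'myrepo')
#   # -> "fixed in <a class='tooltip-hovercard revision-link'
#   #     href='<repo_commit url>' ...>deadbeefcafe</a>"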
|
1624 | 1623 | def _process_url_func(match_obj, repo_name, uid, entry, |
|
1625 | 1624 | return_raw_data=False, link_format='html'): |
|
1626 | 1625 | pref = '' |
|
1627 | 1626 | if match_obj.group().startswith(' '): |
|
1628 | 1627 | pref = ' ' |
|
1629 | 1628 | |
|
1630 | 1629 | issue_id = ''.join(match_obj.groups()) |
|
1631 | 1630 | |
|
1632 | 1631 | if link_format == 'html': |
|
1633 | 1632 | tmpl = ( |
|
1634 | 1633 | '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">' |
|
1635 | 1634 | '%(issue-prefix)s%(id-repr)s' |
|
1636 | 1635 | '</a>') |
|
1637 | 1636 | elif link_format == 'html+hovercard': |
|
1638 | 1637 | tmpl = ( |
|
1639 | 1638 | '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">' |
|
1640 | 1639 | '%(issue-prefix)s%(id-repr)s' |
|
1641 | 1640 | '</a>') |
|
1642 | 1641 | elif link_format in ['rst', 'rst+hovercard']: |
|
1643 | 1642 | tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_' |
|
1644 | 1643 | elif link_format in ['markdown', 'markdown+hovercard']: |
|
1645 | 1644 | tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)' |
|
1646 | 1645 | else: |
|
1647 | 1646 | raise ValueError('Bad link_format:{}'.format(link_format)) |
|
1648 | 1647 | |
|
1649 | 1648 | (repo_name_cleaned, |
|
1650 | 1649 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name) |
|
1651 | 1650 | |
|
1652 | 1651 | # variables replacement |
|
1653 | 1652 | named_vars = { |
|
1654 | 1653 | 'id': issue_id, |
|
1655 | 1654 | 'repo': repo_name, |
|
1656 | 1655 | 'repo_name': repo_name_cleaned, |
|
1657 | 1656 | 'group_name': parent_group_name, |
|
1658 | 1657 | # set dummy keys so we always have them |
|
1659 | 1658 | 'hostname': '', |
|
1660 | 1659 | 'netloc': '', |
|
1661 | 1660 | 'scheme': '' |
|
1662 | 1661 | } |
|
1663 | 1662 | |
|
1664 | 1663 | request = get_current_request() |
|
1665 | 1664 | if request: |
|
1666 | 1665 | # exposes hostname, netloc, scheme |
|
1667 | 1666 | host_data = get_host_info(request) |
|
1668 | 1667 | named_vars.update(host_data) |
|
1669 | 1668 | |
|
1670 | 1669 | # named regex variables |
|
1671 | 1670 | named_vars.update(match_obj.groupdict()) |
|
1672 | 1671 | _url = string.Template(entry['url']).safe_substitute(**named_vars) |
|
1673 | 1672 | desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars) |
|
1674 | 1673 | hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars) |
|
1675 | 1674 | |
|
1676 | 1675 | def quote_cleaner(input_str): |
|
1677 | 1676 | """Remove quotes as it's HTML""" |
|
1678 | 1677 | return input_str.replace('"', '') |
|
1679 | 1678 | |
|
1680 | 1679 | data = { |
|
1681 | 1680 | 'pref': pref, |
|
1682 | 1681 | 'cls': quote_cleaner('issue-tracker-link'), |
|
1683 | 1682 | 'url': quote_cleaner(_url), |
|
1684 | 1683 | 'id-repr': issue_id, |
|
1685 | 1684 | 'issue-prefix': entry['pref'], |
|
1686 | 1685 | 'serv': entry['url'], |
|
1687 | 1686 | 'title': sanitize_html(desc, strip=True), |
|
1688 | 1687 | 'hovercard_url': hovercard_url |
|
1689 | 1688 | } |
|
1690 | 1689 | |
|
1691 | 1690 | if return_raw_data: |
|
1692 | 1691 | return { |
|
1693 | 1692 | 'id': issue_id, |
|
1694 | 1693 | 'url': _url |
|
1695 | 1694 | } |
|
1696 | 1695 | return tmpl % data |
|
1697 | 1696 | |
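The ${var} replacement above relies on string.Template.safe_substitute; a small sketch with invented pattern and values:

    import string

    entry_url = 'https://issues.example.com/${repo}/issue/${id}'  # hypothetical pattern
    named_vars = {'id': '42', 'repo': 'my-repo'}

    print(string.Template(entry_url).safe_substitute(**named_vars))
    # -> https://issues.example.com/my-repo/issue/42
    # safe_substitute leaves unknown ${...} placeholders intact instead of raising,
    # which is why the dummy hostname/netloc/scheme keys above are harmless defaults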
|
1698 | 1697 | |
|
1699 | 1698 | def get_active_pattern_entries(repo_name): |
|
1700 | 1699 | repo = None |
|
1701 | 1700 | if repo_name: |
|
1702 | 1701 | # Retrieve the repo up front so an invalid repo_name does not explode in |
|
1703 | 1702 | # IssueTrackerSettingsModel, while still passing the (possibly invalid) name further down |
|
1704 | 1703 | repo = Repository.get_by_repo_name(repo_name, cache=True) |
|
1705 | 1704 | |
|
1706 | 1705 | settings_model = IssueTrackerSettingsModel(repo=repo) |
|
1707 | 1706 | active_entries = settings_model.get_settings(cache=True) |
|
1708 | 1707 | return active_entries |
|
1709 | 1708 | |
|
1710 | 1709 | |
|
1711 | 1710 | pr_pattern_re = regex.compile(r'(?:(?:^!)|(?: !))(\d+)') |
|
1712 | 1711 | |
|
1713 | 1712 | allowed_link_formats = [ |
|
1714 | 1713 | 'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard'] |
|
1715 | 1714 | |
|
1716 | 1715 | compile_cache = { |
|
1717 | 1716 | |
|
1718 | 1717 | } |
|
1719 | 1718 | |
|
1720 | 1719 | |
|
1721 | 1720 | def process_patterns(text_string, repo_name, link_format='html', active_entries=None): |
|
1722 | 1721 | |
|
1723 | 1722 | if link_format not in allowed_link_formats: |
|
1724 | 1723 | raise ValueError('Link format can only be one of:{}, got {}'.format( |
|
1725 | 1724 | allowed_link_formats, link_format)) |
|
1726 | 1725 | issues_data = [] |
|
1727 | 1726 | errors = [] |
|
1728 | 1727 | new_text = text_string |
|
1729 | 1728 | |
|
1730 | 1729 | if active_entries is None: |
|
1731 | 1730 | log.debug('Fetch active issue tracker patterns for repo: %s', repo_name) |
|
1732 | 1731 | active_entries = get_active_pattern_entries(repo_name) |
|
1733 | 1732 | |
|
1734 | 1733 | log.debug('Got %s pattern entries to process', len(active_entries)) |
|
1735 | 1734 | |
|
1736 | 1735 | for uid, entry in list(active_entries.items()): |
|
1737 | 1736 | |
|
1738 | 1737 | if not (entry['pat'] and entry['url']): |
|
1739 | 1738 | log.debug('skipping entry %s due to missing pat/url data', uid) |
|
1740 | 1739 | continue |
|
1741 | 1740 | |
|
1742 | 1741 | log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s', |
|
1743 | 1742 | uid, entry['pat'], entry['url'], entry['pref']) |
|
1744 | 1743 | |
|
1745 | 1744 | if entry.get('pat_compiled'): |
|
1746 | 1745 | pattern = entry['pat_compiled'] |
|
1747 | 1746 | elif entry['pat'] in compile_cache: |
|
1748 | 1747 | pattern = compile_cache[entry['pat']] |
|
1749 | 1748 | else: |
|
1750 | 1749 | try: |
|
1751 | 1750 | pattern = regex.compile(r'%s' % entry['pat']) |
|
1752 | 1751 | except regex.error as e: |
|
1753 | 1752 | regex_err = ValueError('{}:{}'.format(entry['pat'], e)) |
|
1754 | 1753 | log.exception('issue tracker pattern: `%s` failed to compile', regex_err) |
|
1755 | 1754 | errors.append(regex_err) |
|
1756 | 1755 | continue |
|
1757 | 1756 | compile_cache[entry['pat']] = pattern |
|
1758 | 1757 | |
|
1759 | 1758 | data_func = partial( |
|
1760 | 1759 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1761 | 1760 | return_raw_data=True) |
|
1762 | 1761 | |
|
1763 | 1762 | for match_obj in pattern.finditer(text_string): |
|
1764 | 1763 | issues_data.append(data_func(match_obj)) |
|
1765 | 1764 | |
|
1766 | 1765 | url_func = partial( |
|
1767 | 1766 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1768 | 1767 | link_format=link_format) |
|
1769 | 1768 | |
|
1770 | 1769 | new_text = pattern.sub(url_func, new_text) |
|
1771 | 1770 | log.debug('processed prefix:uid `%s`', uid) |
|
1772 | 1771 | |
|
1773 | 1772 | # finally use a global replace, e.g. !123 -> pr-link; these will not trigger |
|
1774 | 1773 | # if a similar custom pattern already matched above |
|
1775 | 1774 | server_url = '${scheme}://${netloc}' |
|
1776 | 1775 | pr_entry = { |
|
1777 | 1776 | 'pref': '!', |
|
1778 | 1777 | 'url': server_url + '/_admin/pull-requests/${id}', |
|
1779 | 1778 | 'desc': 'Pull Request !${id}', |
|
1780 | 1779 | 'hovercard_url': server_url + '/_hovercard/pull_request/${id}' |
|
1781 | 1780 | } |
|
1782 | 1781 | pr_url_func = partial( |
|
1783 | 1782 | _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None, |
|
1784 | 1783 | link_format=link_format+'+hovercard') |
|
1785 | 1784 | new_text = pr_pattern_re.sub(pr_url_func, new_text) |
|
1786 | 1785 | log.debug('processed !pr pattern') |
|
1787 | 1786 | |
|
1788 | 1787 | return new_text, issues_data, errors |
|
1789 | 1788 | |
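A hedged usage sketch showing the shape of the three return values (text and repo name invented):

    new_text, issues_data, errors = process_patterns(
        'Fixes #42, see also !7', repo_name='my-repo', link_format='markdown')

    # new_text    -> input with issue refs and !7 rewritten as markdown links
    # issues_data -> [{'id': ..., 'url': ...}, ...], one dict per matched issue
    # errors      -> ValueError instances for any patterns that failed to compile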
|
1790 | 1789 | |
|
1791 | 1790 | def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None, |
|
1792 | 1791 | issues_container_callback=None, error_container=None): |
|
1793 | 1792 | """ |
|
1794 | 1793 | Parses given text message and makes proper links. |
|
1795 | 1794 | Issues are linked to the configured issue tracker, and bare commit ids become commit links. |
|
1796 | 1795 | """ |
|
1797 | 1796 | |
|
1798 | 1797 | def escaper(_text): |
|
1799 | 1798 | return _text.replace('<', '&lt;').replace('>', '&gt;') |
|
1800 | 1799 | |
|
1801 | 1800 | new_text = escaper(commit_text) |
|
1802 | 1801 | |
|
1803 | 1802 | # extract http/https links and make them real urls |
|
1804 | 1803 | new_text = urlify_text(new_text, safe=False) |
|
1805 | 1804 | |
|
1806 | 1805 | # urlify commits - extract commit ids and make link out of them, if we have |
|
1807 | 1806 | # the scope of repository present. |
|
1808 | 1807 | if repository: |
|
1809 | 1808 | new_text = urlify_commits(new_text, repository) |
|
1810 | 1809 | |
|
1811 | 1810 | # process issue tracker patterns |
|
1812 | 1811 | new_text, issues, errors = process_patterns( |
|
1813 | 1812 | new_text, repository or '', active_entries=active_pattern_entries) |
|
1814 | 1813 | |
|
1815 | 1814 | if issues_container_callback is not None: |
|
1816 | 1815 | for issue in issues: |
|
1817 | 1816 | issues_container_callback(issue) |
|
1818 | 1817 | |
|
1819 | 1818 | if error_container is not None: |
|
1820 | 1819 | error_container.extend(errors) |
|
1821 | 1820 | |
|
1822 | 1821 | return literal(new_text) |
|
1823 | 1822 | |
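A sketch of the callback hooks (message and repo name invented); any callable accepting one issue dict works:

    found_issues, errs = [], []

    html = urlify_commit_message(
        'fix #10 in deadbeefcafe', repository='my-repo',
        issues_container_callback=found_issues.append,
        error_container=errs)

    # found_issues now holds one dict per issue reference matched by a pattern,
    # errs collects any regex compile failures; html is a markup-safe literal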
|
1824 | 1823 | |
|
1825 | 1824 | def render_binary(repo_name, file_obj): |
|
1826 | 1825 | """ |
|
1827 | 1826 | Choose how to render a binary file |
|
1828 | 1827 | """ |
|
1829 | 1828 | |
|
1830 | 1829 | # unicode |
|
1831 | 1830 | filename = file_obj.name |
|
1832 | 1831 | |
|
1833 | 1832 | # images |
|
1834 | 1833 | for ext in ['*.png', '*.jpeg', '*.jpg', '*.ico', '*.gif']: |
|
1835 | 1834 | if fnmatch.fnmatch(filename, pat=ext): |
|
1836 | 1835 | src = route_path( |
|
1837 | 1836 | 'repo_file_raw', repo_name=repo_name, |
|
1838 | 1837 | commit_id=file_obj.commit.raw_id, |
|
1839 | 1838 | f_path=file_obj.path) |
|
1840 | 1839 | |
|
1841 | 1840 | return literal( |
|
1842 | 1841 | '<img class="rendered-binary" alt="rendered-image" src="{}">'.format(src)) |
|
1843 | 1842 | |
|
1844 | 1843 | |
|
1845 | 1844 | def renderer_from_filename(filename, exclude=None): |
|
1846 | 1845 | """ |
|
1847 | 1846 | choose a renderer based on filename; this works only for text-based files |
|
1848 | 1847 | """ |
|
1849 | 1848 | |
|
1850 | 1849 | # ipython |
|
1851 | 1850 | for ext in ['*.ipynb']: |
|
1852 | 1851 | if fnmatch.fnmatch(filename, pat=ext): |
|
1853 | 1852 | return 'jupyter' |
|
1854 | 1853 | |
|
1855 | 1854 | is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude) |
|
1856 | 1855 | if is_markup: |
|
1857 | 1856 | return is_markup |
|
1858 | 1857 | return None |
|
1859 | 1858 | |
|
1860 | 1859 | |
|
1861 | 1860 | def render(source, renderer='rst', mentions=False, relative_urls=None, |
|
1862 | 1861 | repo_name=None, active_pattern_entries=None, issues_container_callback=None): |
|
1863 | 1862 | |
|
1864 | 1863 | def maybe_convert_relative_links(html_source): |
|
1865 | 1864 | if relative_urls: |
|
1866 | 1865 | return relative_links(html_source, relative_urls) |
|
1867 | 1866 | return html_source |
|
1868 | 1867 | |
|
1869 | 1868 | if renderer == 'plain': |
|
1870 | 1869 | return literal( |
|
1871 | 1870 | MarkupRenderer.plain(source, leading_newline=False)) |
|
1872 | 1871 | |
|
1873 | 1872 | elif renderer == 'rst': |
|
1874 | 1873 | if repo_name: |
|
1875 | 1874 | # process issue tracker patterns if a repo name was passed in |
|
1876 | 1875 | source, issues, errors = process_patterns( |
|
1877 | 1876 | source, repo_name, link_format='rst', |
|
1878 | 1877 | active_entries=active_pattern_entries) |
|
1879 | 1878 | if issues_container_callback is not None: |
|
1880 | 1879 | for issue in issues: |
|
1881 | 1880 | issues_container_callback(issue) |
|
1882 | 1881 | |
|
1883 | 1882 | rendered_block = maybe_convert_relative_links( |
|
1884 | 1883 | MarkupRenderer.rst(source, mentions=mentions)) |
|
1885 | 1884 | |
|
1886 | 1885 | return literal(f'<div class="rst-block">{rendered_block}</div>') |
|
1887 | 1886 | |
|
1888 | 1887 | elif renderer == 'markdown': |
|
1889 | 1888 | if repo_name: |
|
1890 | 1889 | # process issue tracker patterns if a repo name was passed in |
|
1891 | 1890 | source, issues, errors = process_patterns( |
|
1892 | 1891 | source, repo_name, link_format='markdown', |
|
1893 | 1892 | active_entries=active_pattern_entries) |
|
1894 | 1893 | if issues_container_callback is not None: |
|
1895 | 1894 | for issue in issues: |
|
1896 | 1895 | issues_container_callback(issue) |
|
1897 | 1896 | |
|
1898 | 1897 | rendered_block = maybe_convert_relative_links( |
|
1899 | 1898 | MarkupRenderer.markdown(source, flavored=True, mentions=mentions)) |
|
1900 | 1899 | return literal(f'<div class="markdown-block">{rendered_block}</div>') |
|
1901 | 1900 | |
|
1902 | 1901 | elif renderer == 'jupyter': |
|
1903 | 1902 | rendered_block = maybe_convert_relative_links( |
|
1904 | 1903 | MarkupRenderer.jupyter(source)) |
|
1905 | 1904 | return literal(f'<div class="ipynb">{rendered_block}</div>') |
|
1906 | 1905 | |
|
1907 | 1906 | # None means just show the file-source |
|
1908 | 1907 | return None |
|
1909 | 1908 | |
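A hedged dispatch example; return values are shown schematically:

    render('# Title', renderer='markdown')
    # -> literal('<div class="markdown-block">...rendered html...</div>')

    render('some file content', renderer=None)
    # -> None: no renderer matched, so the caller shows the raw file source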
|
1910 | 1909 | |
|
1911 | 1910 | def commit_status(repo, commit_id): |
|
1912 | 1911 | return ChangesetStatusModel().get_status(repo, commit_id) |
|
1913 | 1912 | |
|
1914 | 1913 | |
|
1915 | 1914 | def commit_status_lbl(commit_status): |
|
1916 | 1915 | return dict(ChangesetStatus.STATUSES).get(commit_status) |
|
1917 | 1916 | |
|
1918 | 1917 | |
|
1919 | 1918 | def commit_time(repo_name, commit_id): |
|
1920 | 1919 | repo = Repository.get_by_repo_name(repo_name) |
|
1921 | 1920 | commit = repo.get_commit(commit_id=commit_id) |
|
1922 | 1921 | return commit.date |
|
1923 | 1922 | |
|
1924 | 1923 | |
|
1925 | 1924 | def get_permission_name(key): |
|
1926 | 1925 | return dict(Permission.PERMS).get(key) |
|
1927 | 1926 | |
|
1928 | 1927 | |
|
1929 | 1928 | def journal_filter_help(request): |
|
1930 | 1929 | _ = request.translate |
|
1931 | 1930 | from rhodecode.lib.audit_logger import ACTIONS |
|
1932 | 1931 | actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80)) |
|
1933 | 1932 | |
|
1934 | 1933 | return _( |
|
1935 | 1934 | 'Example filter terms:\n' + |
|
1936 | 1935 | ' repository:vcs\n' + |
|
1937 | 1936 | ' username:marcin\n' + |
|
1938 | 1937 | ' username:(NOT marcin)\n' + |
|
1939 | 1938 | ' action:*push*\n' + |
|
1940 | 1939 | ' ip:127.0.0.1\n' + |
|
1941 | 1940 | ' date:20120101\n' + |
|
1942 | 1941 | ' date:[20120101100000 TO 20120102]\n' + |
|
1943 | 1942 | '\n' + |
|
1944 | 1943 | 'Actions: {actions}\n' + |
|
1945 | 1944 | '\n' + |
|
1946 | 1945 | 'Generate wildcards using \'*\' character:\n' + |
|
1947 | 1946 | ' "repository:vcs*" - search everything starting with \'vcs\'\n' + |
|
1948 | 1947 | ' "repository:*vcs*" - search for repository containing \'vcs\'\n' + |
|
1949 | 1948 | '\n' + |
|
1950 | 1949 | 'Optional AND / OR operators in queries\n' + |
|
1951 | 1950 | ' "repository:vcs OR repository:test"\n' + |
|
1952 | 1951 | ' "username:test AND repository:test*"\n' |
|
1953 | 1952 | ).format(actions=actions) |
|
1954 | 1953 | |
|
1955 | 1954 | |
|
1956 | 1955 | def not_mapped_error(repo_name): |
|
1957 | 1956 | from rhodecode.translation import _ |
|
1958 | 1957 | flash(_('%s repository is not mapped to db; perhaps' |
|
1959 | 1958 | ' it was created or renamed on the filesystem.' |
|
1960 | 1959 | ' Please run the application again' |
|
1961 | 1960 | ' in order to rescan repositories') % repo_name, category='error') |
|
1962 | 1961 | |
|
1963 | 1962 | |
|
1964 | 1963 | def ip_range(ip_addr): |
|
1965 | 1964 | from rhodecode.model.db import UserIpMap |
|
1966 | 1965 | s, e = UserIpMap._get_ip_range(ip_addr) |
|
1967 | 1966 | return '%s - %s' % (s, e) |
|
1968 | 1967 | |
|
1969 | 1968 | |
|
1970 | 1969 | def form(url, method='post', needs_csrf_token=True, **attrs): |
|
1971 | 1970 | """Wrapper around webhelpers.tags.form to prevent CSRF attacks.""" |
|
1972 | 1971 | if method.lower() != 'get' and needs_csrf_token: |
|
1973 | 1972 | raise Exception( |
|
1974 | 1973 | 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' + |
|
1975 | 1974 | 'CSRF token. If the endpoint does not require such token you can ' + |
|
1976 | 1975 | 'explicitly set the parameter needs_csrf_token to false.') |
|
1977 | 1976 | |
|
1978 | 1977 | return insecure_form(url, method=method, **attrs) |
|
1979 | 1978 | |
|
1980 | 1979 | |
|
1981 | 1980 | def secure_form(form_url, method="POST", multipart=False, **attrs): |
|
1982 | 1981 | """Start a form tag that points the action to an url. This |
|
1983 | 1982 | form tag will also include the hidden field containing |
|
1984 | 1983 | the auth token. |
|
1985 | 1984 | |
|
1986 | 1985 | The url options should be given either as a string, or as a |
|
1987 | 1986 | ``url()`` function. The method for the form defaults to POST. |
|
1988 | 1987 | |
|
1989 | 1988 | Options: |
|
1990 | 1989 | |
|
1991 | 1990 | ``multipart`` |
|
1992 | 1991 | If set to True, the enctype is set to "multipart/form-data". |
|
1993 | 1992 | ``method`` |
|
1994 | 1993 | The method to use when submitting the form, usually either |
|
1995 | 1994 | "GET" or "POST". If "PUT", "DELETE", or another verb is used, a |
|
1996 | 1995 | hidden input with name _method is added to simulate the verb |
|
1997 | 1996 | over POST. |
|
1998 | 1997 | |
|
1999 | 1998 | """ |
|
2000 | 1999 | |
|
2001 | 2000 | if 'request' in attrs: |
|
2002 | 2001 | session = attrs['request'].session |
|
2003 | 2002 | del attrs['request'] |
|
2004 | 2003 | else: |
|
2005 | 2004 | raise ValueError( |
|
2006 | 2005 | 'Calling this form requires request= to be passed as argument') |
|
2007 | 2006 | |
|
2008 | 2007 | _form = insecure_form(form_url, method, multipart, **attrs) |
|
2009 | 2008 | token = literal( |
|
2010 | 2009 | '<input type="hidden" name="{}" value="{}">'.format( |
|
2011 | 2010 | csrf_token_key, get_csrf_token(session))) |
|
2012 | 2011 | |
|
2013 | 2012 | return literal("%s\n%s" % (_form, token)) |
|
2014 | 2013 | |
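A hedged usage sketch; the route name is invented, and the token value comes from the session of the passed-in request:

    form_html = secure_form(
        route_path('my_account_password'),  # hypothetical route name
        method='POST', request=request)

    # -> the <form ...> tag followed by a hidden CSRF token <input>;
    #    omitting request= raises ValueError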
|
2015 | 2014 | |
|
2016 | 2015 | def dropdownmenu(name, selected, options, enable_filter=False, **attrs): |
|
2017 | 2016 | select_html = select(name, selected, options, **attrs) |
|
2018 | 2017 | |
|
2019 | 2018 | select2 = """ |
|
2020 | 2019 | <script> |
|
2021 | 2020 | $(document).ready(function() { |
|
2022 | 2021 | $('#%s').select2({ |
|
2023 | 2022 | containerCssClass: 'drop-menu %s', |
|
2024 | 2023 | dropdownCssClass: 'drop-menu-dropdown', |
|
2025 | 2024 | dropdownAutoWidth: true%s |
|
2026 | 2025 | }); |
|
2027 | 2026 | }); |
|
2028 | 2027 | </script> |
|
2029 | 2028 | """ |
|
2030 | 2029 | |
|
2031 | 2030 | filter_option = """, |
|
2032 | 2031 | minimumResultsForSearch: -1 |
|
2033 | 2032 | """ |
|
2034 | 2033 | input_id = attrs.get('id') or name |
|
2035 | 2034 | extra_classes = ' '.join(attrs.pop('extra_classes', [])) |
|
2036 | 2035 | filter_enabled = "" if enable_filter else filter_option |
|
2037 | 2036 | select_script = literal(select2 % (input_id, extra_classes, filter_enabled)) |
|
2038 | 2037 | |
|
2039 | 2038 | return literal(select_html+select_script) |
|
2040 | 2039 | |
|
2041 | 2040 | |
|
2042 | 2041 | def get_visual_attr(tmpl_context_var, attr_name): |
|
2043 | 2042 | """ |
|
2044 | 2043 | A safe way to get an attribute from the visual settings on the template context |
|
2045 | 2044 | |
|
2046 | 2045 | :param tmpl_context_var: instance of tmpl_context, usually present as `c` |
|
2047 | 2046 | :param attr_name: name of the attribute we fetch from the c.visual |
|
2048 | 2047 | """ |
|
2049 | 2048 | visual = getattr(tmpl_context_var, 'visual', None) |
|
2050 | 2049 | if not visual: |
|
2051 | 2050 | return |
|
2052 | 2051 | else: |
|
2053 | 2052 | return getattr(visual, attr_name, None) |
|
2054 | 2053 | |
|
2055 | 2054 | |
|
2056 | 2055 | def get_last_path_part(file_node): |
|
2057 | 2056 | if not file_node.path: |
|
2058 | 2057 | return '/' |
|
2059 | 2058 | |
|
2060 | 2059 | path = safe_str(file_node.path.split('/')[-1]) |
|
2061 | 2060 | return '../' + path |
|
2062 | 2061 | |
|
2063 | 2062 | |
|
2064 | 2063 | def route_url(*args, **kwargs): |
|
2065 | 2064 | """ |
|
2066 | 2065 | Wrapper around Pyramid's `route_url` (fully qualified URL) function. |
|
2067 | 2066 | """ |
|
2068 | 2067 | req = get_current_request() |
|
2069 | 2068 | return req.route_url(*args, **kwargs) |
|
2070 | 2069 | |
|
2071 | 2070 | |
|
2072 | 2071 | def route_path(*args, **kwargs): |
|
2073 | 2072 | """ |
|
2074 | 2073 | Wrapper around Pyramid's `route_path` function. |
|
2075 | 2074 | """ |
|
2076 | 2075 | req = get_current_request() |
|
2077 | 2076 | return req.route_path(*args, **kwargs) |
|
2078 | 2077 | |
|
2079 | 2078 | |
|
2080 | 2079 | def route_path_or_none(*args, **kwargs): |
|
2081 | 2080 | try: |
|
2082 | 2081 | return route_path(*args, **kwargs) |
|
2083 | 2082 | except KeyError: |
|
2084 | 2083 | return None |
|
2085 | 2084 | |
|
2086 | 2085 | |
|
2087 | 2086 | def current_route_path(request, **kw): |
|
2088 | 2087 | new_args = request.GET.mixed() |
|
2089 | 2088 | new_args.update(kw) |
|
2090 | 2089 | return request.current_route_path(_query=new_args) |
|
2091 | 2090 | |
|
2092 | 2091 | |
|
2093 | 2092 | def curl_api_example(method, args): |
|
2094 | 2093 | args_json = json.dumps(OrderedDict([ |
|
2095 | 2094 | ('id', 1), |
|
2096 | 2095 | ('auth_token', 'SECRET'), |
|
2097 | 2096 | ('method', method), |
|
2098 | 2097 | ('args', args) |
|
2099 | 2098 | ])) |
|
2100 | 2099 | |
|
2101 | 2100 | return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format( |
|
2102 | 2101 | api_url=route_url('apiv2'), |
|
2103 | 2102 | args_json=args_json |
|
2104 | 2103 | ) |
|
2105 | 2104 | |
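Illustrative output; the API URL is a placeholder for whatever route_url('apiv2') resolves to:

    curl_api_example('get_repo', {'repoid': 'my-repo'})
    # -> curl https://server/_admin/api -X POST -H 'content-type:text/plain' \
    #      --data-binary '{"id": 1, "auth_token": "SECRET", "method": "get_repo",
    #                      "args": {"repoid": "my-repo"}}'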
|
2106 | 2105 | |
|
2107 | 2106 | def api_call_example(method, args): |
|
2108 | 2107 | """ |
|
2109 | 2108 | Generates an API call example via CURL |
|
2110 | 2109 | """ |
|
2111 | 2110 | curl_call = curl_api_example(method, args) |
|
2112 | 2111 | |
|
2113 | 2112 | return literal( |
|
2114 | 2113 | curl_call + |
|
2115 | 2114 | "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, " |
|
2116 | 2115 | "and needs to be of `api calls` role." |
|
2117 | 2116 | .format(token_url=route_url('my_account_auth_tokens'))) |
|
2118 | 2117 | |
|
2119 | 2118 | |
|
2120 | 2119 | def notification_description(notification, request): |
|
2121 | 2120 | """ |
|
2122 | 2121 | Generate notification human readable description based on notification type |
|
2123 | 2122 | """ |
|
2124 | 2123 | from rhodecode.model.notification import NotificationModel |
|
2125 | 2124 | return NotificationModel().make_description( |
|
2126 | 2125 | notification, translate=request.translate) |
|
2127 | 2126 | |
|
2128 | 2127 | |
|
2129 | 2128 | def go_import_header(request, db_repo=None): |
|
2130 | 2129 | """ |
|
2131 | 2130 | Creates a go-import meta header for the Go toolchain (go get) |
|
2132 | 2131 | """ |
|
2133 | 2132 | |
|
2134 | 2133 | if not db_repo: |
|
2135 | 2134 | return |
|
2136 | 2135 | if 'go-get' not in request.GET: |
|
2137 | 2136 | return |
|
2138 | 2137 | |
|
2139 | 2138 | clone_url = db_repo.clone_url() |
|
2140 | 2139 | prefix = re.split(r'^https?:\/\/', clone_url)[-1] |
|
2141 | 2140 | # we have a repo and the go-get flag, return the go-import meta tag |
|
2142 | 2141 | return literal('<meta name="go-import" content="{} {} {}">'.format( |
|
2143 | 2142 | prefix, db_repo.repo_type, clone_url)) |
|
2144 | 2143 | |
|
2145 | 2144 | |
|
2146 | 2145 | def reviewer_as_json(*args, **kwargs): |
|
2147 | 2146 | from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json |
|
2148 | 2147 | return _reviewer_as_json(*args, **kwargs) |
|
2149 | 2148 | |
|
2150 | 2149 | |
|
2151 | 2150 | def get_repo_view_type(request): |
|
2152 | 2151 | route_name = request.matched_route.name |
|
2153 | 2152 | route_to_view_type = { |
|
2154 | 2153 | 'repo_changelog': 'commits', |
|
2155 | 2154 | 'repo_commits': 'commits', |
|
2156 | 2155 | 'repo_files': 'files', |
|
2157 | 2156 | 'repo_summary': 'summary', |
|
2158 | 2157 | 'repo_commit': 'commit' |
|
2159 | 2158 | } |
|
2160 | 2159 | |
|
2161 | 2160 | return route_to_view_type.get(route_name) |
|
2162 | 2161 | |
|
2163 | 2162 | |
|
2164 | 2163 | def is_active(menu_entry, selected): |
|
2165 | 2164 | """ |
|
2166 | 2165 | Returns active class for selecting menus in templates |
|
2167 | 2166 | <li class=${h.is_active('settings', current_active)}></li> |
|
2168 | 2167 | """ |
|
2169 | 2168 | if not isinstance(menu_entry, list): |
|
2170 | 2169 | menu_entry = [menu_entry] |
|
2171 | 2170 | |
|
2172 | 2171 | if selected in menu_entry: |
|
2173 | 2172 | return "active" |
|
2174 | 2173 | |
|
2175 | 2174 | |
|
2176 | 2175 | class IssuesRegistry(object): |
|
2177 | 2176 | """ |
|
2178 | 2177 | issue_registry = IssuesRegistry() |
|
2179 | 2178 | some_func(issues_callback=issues_registry(...)) |
|
2180 | 2179 | """ |
|
2181 | 2180 | |
|
2182 | 2181 | def __init__(self): |
|
2183 | 2182 | self.issues = [] |
|
2184 | 2183 | self.unique_issues = collections.defaultdict(lambda: []) |
|
2185 | 2184 | |
|
2186 | 2185 | def __call__(self, commit_dict=None): |
|
2187 | 2186 | def callback(issue): |
|
2188 | 2187 | if commit_dict and issue: |
|
2189 | 2188 | issue['commit'] = commit_dict |
|
2190 | 2189 | self.issues.append(issue) |
|
2191 | 2190 | self.unique_issues[issue['id']].append(issue) |
|
2192 | 2191 | return callback |
|
2193 | 2192 | |
|
2194 | 2193 | def get_issues(self): |
|
2195 | 2194 | return self.issues |
|
2196 | 2195 | |
|
2197 | 2196 | @property |
|
2198 | 2197 | def issues_unique_count(self): |
|
2199 | 2198 | return len(set(i['id'] for i in self.issues)) |
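Expanding the docstring sketch above (ids and URL invented):

    registry = IssuesRegistry()
    callback = registry(commit_dict={'raw_id': 'deadbeefcafe'})

    callback({'id': '42', 'url': 'https://issues.example.com/42'})
    callback({'id': '42', 'url': 'https://issues.example.com/42'})

    print(len(registry.get_issues()))    # 2 - every mention is recorded
    print(registry.issues_unique_count)  # 1 - de-duplicated by issue id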
@@ -1,244 +1,243 b'' | |||
|
1 | 1 | |
|
2 | 2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
3 | 3 | # |
|
4 | 4 | # This program is free software: you can redistribute it and/or modify |
|
5 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | 6 | # (only), as published by the Free Software Foundation. |
|
7 | 7 | # |
|
8 | 8 | # This program is distributed in the hope that it will be useful, |
|
9 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | 11 | # GNU General Public License for more details. |
|
12 | 12 | # |
|
13 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
14 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | 15 | # |
|
16 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | 19 | |
|
20 | 20 | import base64 |
|
21 | 21 | import logging |
|
22 | 22 | import urllib.request |
|
23 | 23 | import urllib.parse |
|
24 | 24 | import urllib.error |
|
25 | 25 | import urllib.parse |
|
26 | 26 | |
|
27 | 27 | import requests |
|
28 | 28 | from pyramid.httpexceptions import HTTPNotAcceptable |
|
29 | 29 | |
|
30 | from rhodecode import ConfigGet | |
|
30 | 31 | from rhodecode.lib import rc_cache |
|
31 | 32 | from rhodecode.lib.middleware import simplevcs |
|
32 | 33 | from rhodecode.lib.middleware.utils import get_path_info |
|
33 | 34 | from rhodecode.lib.utils import is_valid_repo |
|
34 | 35 | from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes |
|
35 | 36 | from rhodecode.lib.type_utils import str2bool |
|
36 | 37 | from rhodecode.lib.ext_json import json |
|
37 | 38 | from rhodecode.lib.hooks_daemon import store_txn_id_data |
|
38 | 39 | |
|
39 | 40 | |
|
40 | 41 | log = logging.getLogger(__name__) |
|
41 | 42 | |
|
42 | 43 | |
|
43 | 44 | class SimpleSvnApp(object): |
|
44 | 45 | IGNORED_HEADERS = [ |
|
45 | 46 | 'connection', 'keep-alive', 'content-encoding', |
|
46 | 47 | 'transfer-encoding', 'content-length'] |
|
47 | 48 | rc_extras = {} |
|
48 | 49 | |
|
49 | 50 | def __init__(self, config): |
|
50 | 51 | self.config = config |
|
51 | 52 | self.session = requests.Session() |
|
52 | 53 | |
|
53 | 54 | def __call__(self, environ, start_response): |
|
54 | 55 | request_headers = self._get_request_headers(environ) |
|
55 | 56 | data_io = environ['wsgi.input'] |
|
56 | 57 | req_method: str = environ['REQUEST_METHOD'] |
|
57 | 58 | has_content_length = 'CONTENT_LENGTH' in environ |
|
58 | 59 | |
|
59 | 60 | path_info = self._get_url( |
|
60 | 61 | self.config.get('subversion_http_server_url', ''), get_path_info(environ)) |
|
61 | 62 | transfer_encoding = environ.get('HTTP_TRANSFER_ENCODING', '') |
|
62 | 63 | log.debug('Handling: %s method via `%s`', req_method, path_info) |
|
63 | 64 | |
|
64 | 65 | # stream control flag, based on request and content type... |
|
65 | 66 | stream = False |
|
66 | 67 | |
|
67 | 68 | if req_method in ['MKCOL'] or has_content_length: |
|
68 | 69 | data_processed = False |
|
69 | 70 | # read chunk to check if we have txn-with-props |
|
70 | 71 | initial_data: bytes = data_io.read(1024) |
|
71 | 72 | if initial_data.startswith(b'(create-txn-with-props'): |
|
72 | 73 | data_io = initial_data + data_io.read() |
|
73 | 74 | # store on-the-fly our rc_extra using svn revision properties |
|
74 | 75 | # those can be read later on in hooks executed so we have a way |
|
75 | 76 | # to pass in the data into svn hooks |
|
76 | 77 | rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras)) |
|
77 | 78 | rc_data_len = str(len(rc_data)) |
|
78 | 79 | # header defines data length, and serialized data |
|
79 | 80 | skel = b' rc-scm-extras %b %b' % (safe_bytes(rc_data_len), safe_bytes(rc_data)) |
|
80 | 81 | data_io = data_io[:-2] + skel + b'))' |
|
81 | 82 | data_processed = True |
|
82 | 83 | |
|
83 | 84 | if not data_processed: |
|
84 | 85 | # NOTE(johbo): Avoid that we end up with sending the request in chunked |
|
85 | 86 | # transfer encoding (mainly on Gunicorn). If we know the content |
|
86 | 87 | # length, then we should transfer the payload in one request. |
|
87 | 88 | data_io = initial_data + data_io.read() |
|
88 | 89 | |
|
89 | 90 | if req_method in ['GET', 'PUT'] or transfer_encoding == 'chunked': |
|
90 | 91 | # NOTE(marcink): when getting/uploading files, we want to STREAM content |
|
91 | 92 | # back to the client/proxy instead of buffering it here... |
|
92 | 93 | stream = True |
|
93 | 94 | |
|
94 | 95 | stream = stream |
|
95 | 96 | log.debug('Calling SVN PROXY at `%s`, using method:%s. Stream: %s', |
|
96 | 97 | path_info, req_method, stream) |
|
97 | 98 | |
|
98 | 99 | call_kwargs = dict( |
|
99 | 100 | data=data_io, |
|
100 | 101 | headers=request_headers, |
|
101 | 102 | stream=stream |
|
102 | 103 | ) |
|
103 | 104 | if req_method in ['HEAD', 'DELETE']: |
|
104 | 105 | del call_kwargs['data'] |
|
105 | 106 | |
|
106 | 107 | try: |
|
107 | 108 | response = self.session.request( |
|
108 | 109 | req_method, path_info, **call_kwargs) |
|
109 | 110 | except requests.ConnectionError: |
|
110 | 111 | log.exception('ConnectionError occurred for endpoint %s', path_info) |
|
111 | 112 | raise |
|
112 | 113 | |
|
113 | 114 | if response.status_code not in [200, 401]: |
|
114 | 115 | text = '\n{}'.format(safe_str(response.text)) if response.text else '' |
|
115 | 116 | if response.status_code >= 500: |
|
116 | 117 | log.error('Got SVN response:%s with text:`%s`', response, text) |
|
117 | 118 | else: |
|
118 | 119 | log.debug('Got SVN response:%s with text:`%s`', response, text) |
|
119 | 120 | else: |
|
120 | 121 | log.debug('got response code: %s', response.status_code) |
|
121 | 122 | |
|
122 | 123 | response_headers = self._get_response_headers(response.headers) |
|
123 | 124 | |
|
124 | 125 | if response.headers.get('SVN-Txn-name'): |
|
125 | 126 | svn_tx_id = response.headers.get('SVN-Txn-name') |
|
126 | 127 | txn_id = rc_cache.utils.compute_key_from_params( |
|
127 | 128 | self.config['repository'], svn_tx_id) |
|
128 | 129 | port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1]) |
|
129 | 130 | store_txn_id_data(txn_id, {'port': port}) |
|
130 | 131 | |
|
131 | 132 | start_response(f'{response.status_code} {response.reason}', response_headers) |
|
132 | 133 | return response.iter_content(chunk_size=1024) |
|
133 | 134 | |
|
134 | 135 | def _get_url(self, svn_http_server, path): |
|
135 | 136 | svn_http_server_url = (svn_http_server or '').rstrip('/') |
|
136 | 137 | url_path = urllib.parse.urljoin(svn_http_server_url + '/', (path or '').lstrip('/')) |
|
137 | 138 | url_path = urllib.parse.quote(url_path, safe="/:=~+!$,;'") |
|
138 | 139 | return url_path |
|
139 | 140 | |
|
140 | 141 | def _get_request_headers(self, environ): |
|
141 | 142 | headers = {} |
|
142 | 143 | whitelist = { |
|
143 | 144 | 'Authorization': {} |
|
144 | 145 | } |
|
145 | 146 | for key in environ: |
|
146 | 147 | if key in whitelist: |
|
147 | 148 | headers[key] = environ[key] |
|
148 | 149 | elif not key.startswith('HTTP_'): |
|
149 | 150 | continue |
|
150 | 151 | else: |
|
151 | 152 | new_key = key.split('_') |
|
152 | 153 | new_key = [k.capitalize() for k in new_key[1:]] |
|
153 | 154 | new_key = '-'.join(new_key) |
|
154 | 155 | headers[new_key] = environ[key] |
|
155 | 156 | |
|
156 | 157 | if 'CONTENT_TYPE' in environ: |
|
157 | 158 | headers['Content-Type'] = environ['CONTENT_TYPE'] |
|
158 | 159 | |
|
159 | 160 | if 'CONTENT_LENGTH' in environ: |
|
160 | 161 | headers['Content-Length'] = environ['CONTENT_LENGTH'] |
|
161 | 162 | |
|
162 | 163 | return headers |
|
163 | 164 | |
|
164 | 165 | def _get_response_headers(self, headers): |
|
165 | 166 | headers = [ |
|
166 | 167 | (h, headers[h]) |
|
167 | 168 | for h in headers |
|
168 | 169 | if h.lower() not in self.IGNORED_HEADERS |
|
169 | 170 | ] |
|
170 | 171 | |
|
171 | 172 | return headers |
|
172 | 173 | |
|
173 | 174 | |
|
174 | 175 | class DisabledSimpleSvnApp(object): |
|
175 | 176 | def __init__(self, config): |
|
176 | 177 | self.config = config |
|
177 | 178 | |
|
178 | 179 | def __call__(self, environ, start_response): |
|
179 | 180 | reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled' |
|
180 | 181 | log.warning(reason) |
|
181 | 182 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
182 | 183 | |
|
183 | 184 | |
|
184 | 185 | class SimpleSvn(simplevcs.SimpleVCS): |
|
185 | 186 | |
|
186 | 187 | SCM = 'svn' |
|
187 | 188 | READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT') |
|
188 | 189 | DEFAULT_HTTP_SERVER = 'http://localhost:8090' |
|
189 | 190 | |
|
190 | 191 | def _get_repository_name(self, environ): |
|
191 | 192 | """ |
|
192 | 193 | Gets repository name out of PATH_INFO header |
|
193 | 194 | |
|
194 | 195 | :param environ: environ where PATH_INFO is stored |
|
195 | 196 | """ |
|
196 | 197 | path = get_path_info(environ).split('!') |
|
197 | 198 | repo_name = path[0].strip('/') |
|
198 | 199 | |
|
199 | 200 | # SVN includes the whole path in it's requests, including |
|
200 | 201 | # subdirectories inside the repo. Therefore we have to search for |
|
201 | 202 | # the repo root directory. |
|
202 | 203 | if not is_valid_repo( |
|
203 | 204 | repo_name, self.base_path, explicit_scm=self.SCM): |
|
204 | 205 | current_path = '' |
|
205 | 206 | for component in repo_name.split('/'): |
|
206 | 207 | current_path += component |
|
207 | 208 | if is_valid_repo( |
|
208 | 209 | current_path, self.base_path, explicit_scm=self.SCM): |
|
209 | 210 | return current_path |
|
210 | 211 | current_path += '/' |
|
211 | 212 | |
|
212 | 213 | return repo_name |
|
213 | 214 | |
|
214 | 215 | def _get_action(self, environ): |
|
215 | 216 | return ( |
|
216 | 217 | 'pull' |
|
217 | 218 | if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS |
|
218 | 219 | else 'push') |
|
219 | 220 | |
|
220 | 221 | def _should_use_callback_daemon(self, extras, environ, action): |
|
221 | 222 | # only MERGE command triggers hooks, so we don't want to start |
|
222 | 223 | # hooks server too many times. POST however starts the svn transaction |
|
223 | 224 | # so we also need to run the init of callback daemon of POST |
|
224 | 225 | if environ['REQUEST_METHOD'] in ['MERGE', 'POST']: |
|
225 | 226 | return True |
|
226 | 227 | return False |
|
227 | 228 | |
|
228 | 229 | def _create_wsgi_app(self, repo_path, repo_name, config): |
|
229 | 230 | if self._is_svn_enabled(): |
|
230 | 231 | return SimpleSvnApp(config) |
|
231 | 232 | # http proxy is not enabled, so return a dummy request handler |
|
232 | 233 | return DisabledSimpleSvnApp(config) |
|
233 | 234 | |
|
234 | 235 | def _is_svn_enabled(self): |
|
235 | conf = self.repo_vcs_config | |
|
236 | return str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled')) | |
|
236 | return ConfigGet().get_bool('vcs.svn.proxy.enabled') | |
|
237 | 237 | |
|
238 | 238 | def _create_config(self, extras, repo_name, scheme='http'): |
|
239 | conf = self.repo_vcs_config | |
|
240 | server_url = conf.get('vcs_svn_proxy', 'http_server_url') | |
|
239 | server_url = ConfigGet().get_str('vcs.svn.proxy.host') | |
|
241 | 240 | server_url = server_url or self.DEFAULT_HTTP_SERVER |
|
242 | 241 | |
|
243 | 242 | extras['subversion_http_server_url'] = server_url |
|
244 | 243 | return extras |
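The new ConfigGet calls read flat .ini keys instead of the per-repo vcs settings; a hedged sketch using only the calls visible in the diff above:

    enabled = ConfigGet().get_bool('vcs.svn.proxy.enabled')
    server_url = ConfigGet().get_str('vcs.svn.proxy.host') or SimpleSvn.DEFAULT_HTTP_SERVER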
@@ -1,638 +1,634 b'' | |||
|
1 | 1 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | """ |
|
20 | 20 | these are the forms validation classes |
|
21 | 21 | http://formencode.org/module-formencode.validators.html |
|
22 | 22 | for a list of all available validators |
|
23 | 23 | |
|
24 | 24 | we can create our own validators |
|
25 | 25 | |
|
26 | 26 | The table below outlines the options which can be used in a schema in addition to the validators themselves |
|
27 | 27 | pre_validators [] These validators will be applied before the schema |
|
28 | 28 | chained_validators [] These validators will be applied after the schema |
|
29 | 29 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present |
|
30 | 30 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed |
|
31 | 31 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value. |
|
32 | 32 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | <name> = formencode.validators.<name of validator> |
|
36 | 36 | <name> must equal form name |
|
37 | 37 | list=[1,2,3,4,5] |
|
38 | 38 | for SELECT use formencode.All(OneOf(list), Int()) |
|
39 | 39 | |
|
40 | 40 | """ |
|
41 | 41 | |
|
42 | 42 | import deform |
|
43 | 43 | import logging |
|
44 | 44 | import formencode |
|
45 | 45 | |
|
46 | 46 | from pkg_resources import resource_filename |
|
47 | 47 | from formencode import All, Pipe |
|
48 | 48 | |
|
49 | 49 | from pyramid.threadlocal import get_current_request |
|
50 | 50 | |
|
51 | 51 | from rhodecode import BACKENDS |
|
52 | 52 | from rhodecode.lib import helpers |
|
53 | 53 | from rhodecode.model import validators as v |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | deform_templates = resource_filename('deform', 'templates') |
|
59 | 59 | rhodecode_templates = resource_filename('rhodecode', 'templates/forms') |
|
60 | 60 | search_path = (rhodecode_templates, deform_templates) |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory): |
|
64 | 64 | """ Subclass of ZPTRendererFactory to add rhodecode context variables """ |
|
65 | 65 | def __call__(self, template_name, **kw): |
|
66 | 66 | kw['h'] = helpers |
|
67 | 67 | kw['request'] = get_current_request() |
|
68 | 68 | return self.load(template_name)(**kw) |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | form_renderer = RhodecodeFormZPTRendererFactory(search_path) |
|
72 | 72 | deform.Form.set_default_renderer(form_renderer) |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | def LoginForm(localizer): |
|
76 | 76 | _ = localizer |
|
77 | 77 | |
|
78 | 78 | class _LoginForm(formencode.Schema): |
|
79 | 79 | allow_extra_fields = True |
|
80 | 80 | filter_extra_fields = True |
|
81 | 81 | username = v.UnicodeString( |
|
82 | 82 | strip=True, |
|
83 | 83 | min=1, |
|
84 | 84 | not_empty=True, |
|
85 | 85 | messages={ |
|
86 | 86 | 'empty': _('Please enter a login'), |
|
87 | 87 | 'tooShort': _('Enter a value %(min)i characters long or more') |
|
88 | 88 | } |
|
89 | 89 | ) |
|
90 | 90 | |
|
91 | 91 | password = v.UnicodeString( |
|
92 | 92 | strip=False, |
|
93 | 93 | min=3, |
|
94 | 94 | max=72, |
|
95 | 95 | not_empty=True, |
|
96 | 96 | messages={ |
|
97 | 97 | 'empty': _('Please enter a password'), |
|
98 | 98 | 'tooShort': _('Enter %(min)i characters or more')} |
|
99 | 99 | ) |
|
100 | 100 | |
|
101 | 101 | remember = v.StringBoolean(if_missing=False) |
|
102 | 102 | |
|
103 | 103 | chained_validators = [v.ValidAuth(localizer)] |
|
104 | 104 | return _LoginForm |
|
105 | 105 | |
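A hedged validation sketch; note the chained ValidAuth validator will also attempt authentication once the field-level checks pass:

    import formencode

    schema = LoginForm(localizer=_)()  # _ is the request.translate localizer
    try:
        clean = schema.to_python({'username': '', 'password': 'secret123'})
    except formencode.Invalid as e:
        print(e.unpack_errors())       # e.g. {'username': 'Please enter a login'}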
|
106 | 106 | |
|
107 | 107 | def UserForm(localizer, edit=False, available_languages=None, old_data=None): |
|
108 | 108 | old_data = old_data or {} |
|
109 | 109 | available_languages = available_languages or [] |
|
110 | 110 | _ = localizer |
|
111 | 111 | |
|
112 | 112 | class _UserForm(formencode.Schema): |
|
113 | 113 | allow_extra_fields = True |
|
114 | 114 | filter_extra_fields = True |
|
115 | 115 | username = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
116 | 116 | v.ValidUsername(localizer, edit, old_data)) |
|
117 | 117 | if edit: |
|
118 | 118 | new_password = All( |
|
119 | 119 | v.ValidPassword(localizer), |
|
120 | 120 | v.UnicodeString(strip=False, min=6, max=72, not_empty=False) |
|
121 | 121 | ) |
|
122 | 122 | password_confirmation = All( |
|
123 | 123 | v.ValidPassword(localizer), |
|
124 | 124 | v.UnicodeString(strip=False, min=6, max=72, not_empty=False), |
|
125 | 125 | ) |
|
126 | 126 | admin = v.StringBoolean(if_missing=False) |
|
127 | 127 | else: |
|
128 | 128 | password = All( |
|
129 | 129 | v.ValidPassword(localizer), |
|
130 | 130 | v.UnicodeString(strip=False, min=6, max=72, not_empty=True) |
|
131 | 131 | ) |
|
132 | 132 | password_confirmation = All( |
|
133 | 133 | v.ValidPassword(localizer), |
|
134 | 134 | v.UnicodeString(strip=False, min=6, max=72, not_empty=False) |
|
135 | 135 | ) |
|
136 | 136 | |
|
137 | 137 | password_change = v.StringBoolean(if_missing=False) |
|
138 | 138 | create_repo_group = v.StringBoolean(if_missing=False) |
|
139 | 139 | |
|
140 | 140 | active = v.StringBoolean(if_missing=False) |
|
141 | 141 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
142 | 142 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
143 | 143 | email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True)) |
|
144 | 144 | description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False, |
|
145 | 145 | if_missing='') |
|
146 | 146 | extern_name = v.UnicodeString(strip=True) |
|
147 | 147 | extern_type = v.UnicodeString(strip=True) |
|
148 | 148 | language = v.OneOf(available_languages, hideList=False, |
|
149 | 149 | testValueList=True, if_missing=None) |
|
150 | 150 | chained_validators = [v.ValidPasswordsMatch(localizer)] |
|
151 | 151 | return _UserForm |
|
152 | 152 | |
|
153 | 153 | |
|
154 | 154 | def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False): |
|
155 | 155 | old_data = old_data or {} |
|
156 | 156 | _ = localizer |
|
157 | 157 | |
|
158 | 158 | class _UserGroupForm(formencode.Schema): |
|
159 | 159 | allow_extra_fields = True |
|
160 | 160 | filter_extra_fields = True |
|
161 | 161 | |
|
162 | 162 | users_group_name = All( |
|
163 | 163 | v.UnicodeString(strip=True, min=1, not_empty=True), |
|
164 | 164 | v.ValidUserGroup(localizer, edit, old_data) |
|
165 | 165 | ) |
|
166 | 166 | user_group_description = v.UnicodeString(strip=True, min=1, |
|
167 | 167 | not_empty=False) |
|
168 | 168 | |
|
169 | 169 | users_group_active = v.StringBoolean(if_missing=False) |
|
170 | 170 | |
|
171 | 171 | if edit: |
|
172 | 172 | # this is user group owner |
|
173 | 173 | user = All( |
|
174 | 174 | v.UnicodeString(not_empty=True), |
|
175 | 175 | v.ValidRepoUser(localizer, allow_disabled)) |
|
176 | 176 | return _UserGroupForm |
|
177 | 177 | |
|
178 | 178 | |
|
179 | 179 | def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None, |
|
180 | 180 | can_create_in_root=False, allow_disabled=False): |
|
181 | 181 | _ = localizer |
|
182 | 182 | old_data = old_data or {} |
|
183 | 183 | available_groups = available_groups or [] |
|
184 | 184 | |
|
185 | 185 | class _RepoGroupForm(formencode.Schema): |
|
186 | 186 | allow_extra_fields = True |
|
187 | 187 | filter_extra_fields = False |
|
188 | 188 | |
|
189 | 189 | group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
190 | 190 | v.SlugifyName(localizer),) |
|
191 | 191 | group_description = v.UnicodeString(strip=True, min=1, |
|
192 | 192 | not_empty=False) |
|
193 | 193 | group_copy_permissions = v.StringBoolean(if_missing=False) |
|
194 | 194 | |
|
195 | 195 | group_parent_id = v.OneOf(available_groups, hideList=False, |
|
196 | 196 | testValueList=True, not_empty=True) |
|
197 | 197 | enable_locking = v.StringBoolean(if_missing=False) |
|
198 | 198 | chained_validators = [ |
|
199 | 199 | v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)] |
|
200 | 200 | |
|
201 | 201 | if edit: |
|
202 | 202 | # this is repo group owner |
|
203 | 203 | user = All( |
|
204 | 204 | v.UnicodeString(not_empty=True), |
|
205 | 205 | v.ValidRepoUser(localizer, allow_disabled)) |
|
206 | 206 | return _RepoGroupForm |
|
207 | 207 | |
|
208 | 208 | |
|
209 | 209 | def RegisterForm(localizer, edit=False, old_data=None): |
|
210 | 210 | _ = localizer |
|
211 | 211 | old_data = old_data or {} |
|
212 | 212 | |
|
213 | 213 | class _RegisterForm(formencode.Schema): |
|
214 | 214 | allow_extra_fields = True |
|
215 | 215 | filter_extra_fields = True |
|
216 | 216 | username = All( |
|
217 | 217 | v.ValidUsername(localizer, edit, old_data), |
|
218 | 218 | v.UnicodeString(strip=True, min=1, not_empty=True) |
|
219 | 219 | ) |
|
220 | 220 | password = All( |
|
221 | 221 | v.ValidPassword(localizer), |
|
222 | 222 | v.UnicodeString(strip=False, min=6, max=72, not_empty=True) |
|
223 | 223 | ) |
|
224 | 224 | password_confirmation = All( |
|
225 | 225 | v.ValidPassword(localizer), |
|
226 | 226 | v.UnicodeString(strip=False, min=6, max=72, not_empty=True) |
|
227 | 227 | ) |
|
228 | 228 | active = v.StringBoolean(if_missing=False) |
|
229 | 229 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
230 | 230 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
231 | 231 | email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True)) |
|
232 | 232 | |
|
233 | 233 | chained_validators = [v.ValidPasswordsMatch(localizer)] |
|
234 | 234 | return _RegisterForm |
|
235 | 235 | |
|
236 | 236 | |
|
237 | 237 | def PasswordResetForm(localizer): |
|
238 | 238 | _ = localizer |
|
239 | 239 | |
|
240 | 240 | class _PasswordResetForm(formencode.Schema): |
|
241 | 241 | allow_extra_fields = True |
|
242 | 242 | filter_extra_fields = True |
|
243 | 243 | email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True)) |
|
244 | 244 | return _PasswordResetForm |
|
245 | 245 | |
|
246 | 246 | |
|
247 | 247 | def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False): |
|
248 | 248 | _ = localizer |
|
249 | 249 | old_data = old_data or {} |
|
250 | 250 | repo_groups = repo_groups or [] |
|
251 | 251 | supported_backends = BACKENDS.keys() |
|
252 | 252 | |
|
253 | 253 | class _RepoForm(formencode.Schema): |
|
254 | 254 | allow_extra_fields = True |
|
255 | 255 | filter_extra_fields = False |
|
256 | 256 | repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
257 | 257 | v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer)) |
|
258 | 258 | repo_group = All(v.CanWriteGroup(localizer, old_data), |
|
259 | 259 | v.OneOf(repo_groups, hideList=True)) |
|
260 | 260 | repo_type = v.OneOf(supported_backends, required=False, |
|
261 | 261 | if_missing=old_data.get('repo_type')) |
|
262 | 262 | repo_description = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
263 | 263 | repo_private = v.StringBoolean(if_missing=False) |
|
264 | 264 | repo_copy_permissions = v.StringBoolean(if_missing=False) |
|
265 | 265 | clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False)) |
|
266 | 266 | |
|
267 | 267 | repo_enable_statistics = v.StringBoolean(if_missing=False) |
|
268 | 268 | repo_enable_downloads = v.StringBoolean(if_missing=False) |
|
269 | 269 | repo_enable_locking = v.StringBoolean(if_missing=False) |
|
270 | 270 | |
|
271 | 271 | if edit: |
|
272 | 272 | # this is repo owner |
|
273 | 273 | user = All( |
|
274 | 274 | v.UnicodeString(not_empty=True), |
|
275 | 275 | v.ValidRepoUser(localizer, allow_disabled)) |
|
276 | 276 | clone_uri_change = v.UnicodeString( |
|
277 | 277 | not_empty=False, if_missing=v.Missing) |
|
278 | 278 | |
|
279 | 279 | chained_validators = [v.ValidCloneUri(localizer), |
|
280 | 280 | v.ValidRepoName(localizer, edit, old_data)] |
|
281 | 281 | return _RepoForm |
|
282 | 282 | |
|
283 | 283 | |
|
284 | 284 | def RepoPermsForm(localizer): |
|
285 | 285 | _ = localizer |
|
286 | 286 | |
|
287 | 287 | class _RepoPermsForm(formencode.Schema): |
|
288 | 288 | allow_extra_fields = True |
|
289 | 289 | filter_extra_fields = False |
|
290 | 290 | chained_validators = [v.ValidPerms(localizer, type_='repo')] |
|
291 | 291 | return _RepoPermsForm |
|
292 | 292 | |
|
293 | 293 | |
|
294 | 294 | def RepoGroupPermsForm(localizer, valid_recursive_choices): |
|
295 | 295 | _ = localizer |
|
296 | 296 | |
|
297 | 297 | class _RepoGroupPermsForm(formencode.Schema): |
|
298 | 298 | allow_extra_fields = True |
|
299 | 299 | filter_extra_fields = False |
|
300 | 300 | recursive = v.OneOf(valid_recursive_choices) |
|
301 | 301 | chained_validators = [v.ValidPerms(localizer, type_='repo_group')] |
|
302 | 302 | return _RepoGroupPermsForm |
|
303 | 303 | |
|
304 | 304 | |
|
305 | 305 | def UserGroupPermsForm(localizer): |
|
306 | 306 | _ = localizer |
|
307 | 307 | |
|
308 | 308 | class _UserPermsForm(formencode.Schema): |
|
309 | 309 | allow_extra_fields = True |
|
310 | 310 | filter_extra_fields = False |
|
311 | 311 | chained_validators = [v.ValidPerms(localizer, type_='user_group')] |
|
312 | 312 | return _UserPermsForm |
|
313 | 313 | |
|
314 | 314 | |
|
315 | 315 | def RepoFieldForm(localizer): |
|
316 | 316 | _ = localizer |
|
317 | 317 | |
|
318 | 318 | class _RepoFieldForm(formencode.Schema): |
|
319 | 319 | filter_extra_fields = True |
|
320 | 320 | allow_extra_fields = True |
|
321 | 321 | |
|
322 | 322 | new_field_key = All(v.FieldKey(localizer), |
|
323 | 323 | v.UnicodeString(strip=True, min=3, not_empty=True)) |
|
324 | 324 | new_field_value = v.UnicodeString(not_empty=False, if_missing='') |
|
325 | 325 | new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'], |
|
326 | 326 | if_missing='str') |
|
327 | 327 | new_field_label = v.UnicodeString(not_empty=False) |
|
328 | 328 | new_field_desc = v.UnicodeString(not_empty=False) |
|
329 | 329 | return _RepoFieldForm |
|
330 | 330 | |
|
331 | 331 | |
|
332 | 332 | def RepoForkForm(localizer, edit=False, old_data=None, |
|
333 | 333 | supported_backends=BACKENDS.keys(), repo_groups=None): |
|
334 | 334 | _ = localizer |
|
335 | 335 | old_data = old_data or {} |
|
336 | 336 | repo_groups = repo_groups or [] |
|
337 | 337 | |
|
338 | 338 | class _RepoForkForm(formencode.Schema): |
|
339 | 339 | allow_extra_fields = True |
|
340 | 340 | filter_extra_fields = False |
|
341 | 341 | repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
342 | 342 | v.SlugifyName(localizer)) |
|
343 | 343 | repo_group = All(v.CanWriteGroup(localizer, ), |
|
344 | 344 | v.OneOf(repo_groups, hideList=True)) |
|
345 | 345 | repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends)) |
|
346 | 346 | description = v.UnicodeString(strip=True, min=1, not_empty=True) |
|
347 | 347 | private = v.StringBoolean(if_missing=False) |
|
348 | 348 | copy_permissions = v.StringBoolean(if_missing=False) |
|
349 | 349 | fork_parent_id = v.UnicodeString() |
|
350 | 350 | chained_validators = [v.ValidForkName(localizer, edit, old_data)] |
|
351 | 351 | return _RepoForkForm |
|
352 | 352 | |
|
353 | 353 | |
|
354 | 354 | def ApplicationSettingsForm(localizer): |
|
355 | 355 | _ = localizer |
|
356 | 356 | |
|
357 | 357 | class _ApplicationSettingsForm(formencode.Schema): |
|
358 | 358 | allow_extra_fields = True |
|
359 | 359 | filter_extra_fields = False |
|
360 | 360 | rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False) |
|
361 | 361 | rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True) |
|
362 | 362 | rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
363 | 363 | rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
364 | 364 | rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
365 | 365 | rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
366 | 366 | rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False) |
|
367 | 367 | rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
368 | 368 | return _ApplicationSettingsForm |
|
369 | 369 | |
|
370 | 370 | |
|
371 | 371 | def ApplicationVisualisationForm(localizer): |
|
372 | 372 | from rhodecode.model.db import Repository |
|
373 | 373 | _ = localizer |
|
374 | 374 | |
|
375 | 375 | class _ApplicationVisualisationForm(formencode.Schema): |
|
376 | 376 | allow_extra_fields = True |
|
377 | 377 | filter_extra_fields = False |
|
378 | 378 | rhodecode_show_public_icon = v.StringBoolean(if_missing=False) |
|
379 | 379 | rhodecode_show_private_icon = v.StringBoolean(if_missing=False) |
|
380 | 380 | rhodecode_stylify_metatags = v.StringBoolean(if_missing=False) |
|
381 | 381 | |
|
382 | 382 | rhodecode_repository_fields = v.StringBoolean(if_missing=False) |
|
383 | 383 | rhodecode_lightweight_journal = v.StringBoolean(if_missing=False) |
|
384 | 384 | rhodecode_dashboard_items = v.Int(min=5, not_empty=True) |
|
385 | 385 | rhodecode_admin_grid_items = v.Int(min=5, not_empty=True) |
|
386 | 386 | rhodecode_show_version = v.StringBoolean(if_missing=False) |
|
387 | 387 | rhodecode_use_gravatar = v.StringBoolean(if_missing=False) |
|
388 | 388 | rhodecode_markup_renderer = v.OneOf(['markdown', 'rst']) |
|
389 | 389 | rhodecode_gravatar_url = v.UnicodeString(min=3) |
|
390 | 390 | rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI) |
|
391 | 391 | rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID) |
|
392 | 392 | rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH) |
|
393 | 393 | rhodecode_support_url = v.UnicodeString() |
|
394 | 394 | rhodecode_show_revision_number = v.StringBoolean(if_missing=False) |
|
395 | 395 | rhodecode_show_sha_length = v.Int(min=4, not_empty=True) |
|
396 | 396 | return _ApplicationVisualisationForm |
|
397 | 397 | |
|
398 | 398 | |
|
399 | 399 | class _BaseVcsSettingsForm(formencode.Schema): |
|
400 | 400 | |
|
401 | 401 | allow_extra_fields = True |
|
402 | 402 | filter_extra_fields = False |
|
403 | 403 | hooks_changegroup_repo_size = v.StringBoolean(if_missing=False) |
|
404 | 404 | hooks_changegroup_push_logger = v.StringBoolean(if_missing=False) |
|
405 | 405 | hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False) |
|
406 | 406 | |
|
407 | 407 | # PR/Code-review |
|
408 | 408 | rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False) |
|
409 | 409 | rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False) |
|
410 | 410 | |
|
411 | 411 | # hg |
|
412 | 412 | extensions_largefiles = v.StringBoolean(if_missing=False) |
|
413 | 413 | extensions_evolve = v.StringBoolean(if_missing=False) |
|
414 | 414 | phases_publish = v.StringBoolean(if_missing=False) |
|
415 | 415 | |
|
416 | 416 | rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False) |
|
417 | 417 | rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False) |
|
418 | 418 | |
|
419 | 419 | # git |
|
420 | 420 | vcs_git_lfs_enabled = v.StringBoolean(if_missing=False) |
|
421 | 421 | rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False) |
|
422 | 422 | rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False) |
|
423 | 423 | |
|
424 | # svn | |
|
425 | vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False) | |
|
426 | vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None) | |
|
427 | ||
|
428 | 424 | # cache |
|
429 | 425 | rhodecode_diff_cache = v.StringBoolean(if_missing=False) |
|
430 | 426 | |
|
431 | 427 | |
|
432 | 428 | def ApplicationUiSettingsForm(localizer): |
|
433 | 429 | _ = localizer |
|
434 | 430 | |
|
435 | 431 | class _ApplicationUiSettingsForm(_BaseVcsSettingsForm): |
|
436 | 432 | web_push_ssl = v.StringBoolean(if_missing=False) |
|
437 | 433 | paths_root_path = All( |
|
438 | 434 | v.ValidPath(localizer), |
|
439 | 435 | v.UnicodeString(strip=True, min=1, not_empty=True) |
|
440 | 436 | ) |
|
441 | 437 | largefiles_usercache = All( |
|
442 | 438 | v.ValidPath(localizer), |
|
443 | 439 | v.UnicodeString(strip=True, min=2, not_empty=True)) |
|
444 | 440 | vcs_git_lfs_store_location = All( |
|
445 | 441 | v.ValidPath(localizer), |
|
446 | 442 | v.UnicodeString(strip=True, min=2, not_empty=True)) |
|
447 | 443 | extensions_hggit = v.StringBoolean(if_missing=False) |
|
448 | 444 | new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch') |
|
449 | 445 | new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag') |
|
450 | 446 | return _ApplicationUiSettingsForm |
|
451 | 447 | |
|
452 | 448 | |
|
453 | 449 | def RepoVcsSettingsForm(localizer, repo_name): |
|
454 | 450 | _ = localizer |
|
455 | 451 | |
|
456 | 452 | class _RepoVcsSettingsForm(_BaseVcsSettingsForm): |
|
457 | 453 | inherit_global_settings = v.StringBoolean(if_missing=False) |
|
458 | 454 | new_svn_branch = v.ValidSvnPattern(localizer, |
|
459 | 455 | section='vcs_svn_branch', repo_name=repo_name) |
|
460 | 456 | new_svn_tag = v.ValidSvnPattern(localizer, |
|
461 | 457 | section='vcs_svn_tag', repo_name=repo_name) |
|
462 | 458 | return _RepoVcsSettingsForm |
|
463 | 459 | |
|
464 | 460 | |
|
465 | 461 | def LabsSettingsForm(localizer): |
|
466 | 462 | _ = localizer |
|
467 | 463 | |
|
468 | 464 | class _LabSettingsForm(formencode.Schema): |
|
469 | 465 | allow_extra_fields = True |
|
470 | 466 | filter_extra_fields = False |
|
471 | 467 | return _LabSettingsForm |
|
472 | 468 | |
|
473 | 469 | |
|
474 | 470 | def ApplicationPermissionsForm( |
|
475 | 471 | localizer, register_choices, password_reset_choices, |
|
476 | 472 | extern_activate_choices): |
|
477 | 473 | _ = localizer |
|
478 | 474 | |
|
479 | 475 | class _DefaultPermissionsForm(formencode.Schema): |
|
480 | 476 | allow_extra_fields = True |
|
481 | 477 | filter_extra_fields = True |
|
482 | 478 | |
|
483 | 479 | anonymous = v.StringBoolean(if_missing=False) |
|
484 | 480 | default_register = v.OneOf(register_choices) |
|
485 | 481 | default_register_message = v.UnicodeString() |
|
486 | 482 | default_password_reset = v.OneOf(password_reset_choices) |
|
487 | 483 | default_extern_activate = v.OneOf(extern_activate_choices) |
|
488 | 484 | return _DefaultPermissionsForm |
|
489 | 485 | |
|
490 | 486 | |
|
491 | 487 | def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices, |
|
492 | 488 | user_group_perms_choices): |
|
493 | 489 | _ = localizer |
|
494 | 490 | |
|
495 | 491 | class _ObjectPermissionsForm(formencode.Schema): |
|
496 | 492 | allow_extra_fields = True |
|
497 | 493 | filter_extra_fields = True |
|
498 | 494 | overwrite_default_repo = v.StringBoolean(if_missing=False) |
|
499 | 495 | overwrite_default_group = v.StringBoolean(if_missing=False) |
|
500 | 496 | overwrite_default_user_group = v.StringBoolean(if_missing=False) |
|
501 | 497 | |
|
502 | 498 | default_repo_perm = v.OneOf(repo_perms_choices) |
|
503 | 499 | default_group_perm = v.OneOf(group_perms_choices) |
|
504 | 500 | default_user_group_perm = v.OneOf(user_group_perms_choices) |
|
505 | 501 | |
|
506 | 502 | return _ObjectPermissionsForm |
|
507 | 503 | |
|
508 | 504 | |
|
509 | 505 | def BranchPermissionsForm(localizer, branch_perms_choices): |
|
510 | 506 | _ = localizer |
|
511 | 507 | |
|
512 | 508 | class _BranchPermissionsForm(formencode.Schema): |
|
513 | 509 | allow_extra_fields = True |
|
514 | 510 | filter_extra_fields = True |
|
515 | 511 | overwrite_default_branch = v.StringBoolean(if_missing=False) |
|
516 | 512 | default_branch_perm = v.OneOf(branch_perms_choices) |
|
517 | 513 | |
|
518 | 514 | return _BranchPermissionsForm |
|
519 | 515 | |
|
520 | 516 | |
|
521 | 517 | def UserPermissionsForm(localizer, create_choices, create_on_write_choices, |
|
522 | 518 | repo_group_create_choices, user_group_create_choices, |
|
523 | 519 | fork_choices, inherit_default_permissions_choices): |
|
524 | 520 | _ = localizer |
|
525 | 521 | |
|
526 | 522 | class _DefaultPermissionsForm(formencode.Schema): |
|
527 | 523 | allow_extra_fields = True |
|
528 | 524 | filter_extra_fields = True |
|
529 | 525 | |
|
530 | 526 | anonymous = v.StringBoolean(if_missing=False) |
|
531 | 527 | |
|
532 | 528 | default_repo_create = v.OneOf(create_choices) |
|
533 | 529 | default_repo_create_on_write = v.OneOf(create_on_write_choices) |
|
534 | 530 | default_user_group_create = v.OneOf(user_group_create_choices) |
|
535 | 531 | default_repo_group_create = v.OneOf(repo_group_create_choices) |
|
536 | 532 | default_fork_create = v.OneOf(fork_choices) |
|
537 | 533 | default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices) |
|
538 | 534 | return _DefaultPermissionsForm |
|
539 | 535 | |
|
540 | 536 | |
|
541 | 537 | def UserIndividualPermissionsForm(localizer): |
|
542 | 538 | _ = localizer |
|
543 | 539 | |
|
544 | 540 | class _DefaultPermissionsForm(formencode.Schema): |
|
545 | 541 | allow_extra_fields = True |
|
546 | 542 | filter_extra_fields = True |
|
547 | 543 | |
|
548 | 544 | inherit_default_permissions = v.StringBoolean(if_missing=False) |
|
549 | 545 | return _DefaultPermissionsForm |
|
550 | 546 | |
|
551 | 547 | |
|
552 | 548 | def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()): |
|
553 | 549 | _ = localizer |
|
554 | 550 | old_data = old_data or {} |
|
555 | 551 | |
|
556 | 552 | class _DefaultsForm(formencode.Schema): |
|
557 | 553 | allow_extra_fields = True |
|
558 | 554 | filter_extra_fields = True |
|
559 | 555 | default_repo_type = v.OneOf(supported_backends) |
|
560 | 556 | default_repo_private = v.StringBoolean(if_missing=False) |
|
561 | 557 | default_repo_enable_statistics = v.StringBoolean(if_missing=False) |
|
562 | 558 | default_repo_enable_downloads = v.StringBoolean(if_missing=False) |
|
563 | 559 | default_repo_enable_locking = v.StringBoolean(if_missing=False) |
|
564 | 560 | return _DefaultsForm |
|
565 | 561 | |
|
566 | 562 | |
|
567 | 563 | def AuthSettingsForm(localizer): |
|
568 | 564 | _ = localizer |
|
569 | 565 | |
|
570 | 566 | class _AuthSettingsForm(formencode.Schema): |
|
571 | 567 | allow_extra_fields = True |
|
572 | 568 | filter_extra_fields = True |
|
573 | 569 | auth_plugins = All(v.ValidAuthPlugins(localizer), |
|
574 | 570 | v.UniqueListFromString(localizer)(not_empty=True)) |
|
575 | 571 | return _AuthSettingsForm |
|
576 | 572 | |
|
577 | 573 | |
|
578 | 574 | def UserExtraEmailForm(localizer): |
|
579 | 575 | _ = localizer |
|
580 | 576 | |
|
581 | 577 | class _UserExtraEmailForm(formencode.Schema): |
|
582 | 578 | email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True)) |
|
583 | 579 | return _UserExtraEmailForm |
|
584 | 580 | |
|
585 | 581 | |
|
586 | 582 | def UserExtraIpForm(localizer): |
|
587 | 583 | _ = localizer |
|
588 | 584 | |
|
589 | 585 | class _UserExtraIpForm(formencode.Schema): |
|
590 | 586 | ip = v.ValidIp(localizer)(not_empty=True) |
|
591 | 587 | return _UserExtraIpForm |
|
592 | 588 | |
|
593 | 589 | |
|
594 | 590 | def PullRequestForm(localizer, repo_id): |
|
595 | 591 | _ = localizer |
|
596 | 592 | |
|
597 | 593 | class ReviewerForm(formencode.Schema): |
|
598 | 594 | user_id = v.Int(not_empty=True) |
|
599 | 595 | reasons = All() |
|
600 | 596 | rules = All(v.UniqueList(localizer, convert=int)()) |
|
601 | 597 | mandatory = v.StringBoolean() |
|
602 | 598 | role = v.String(if_missing='reviewer') |
|
603 | 599 | |
|
604 | 600 | class ObserverForm(formencode.Schema): |
|
605 | 601 | user_id = v.Int(not_empty=True) |
|
606 | 602 | reasons = All() |
|
607 | 603 | rules = All(v.UniqueList(localizer, convert=int)()) |
|
608 | 604 | mandatory = v.StringBoolean() |
|
609 | 605 | role = v.String(if_missing='observer') |
|
610 | 606 | |
|
611 | 607 | class _PullRequestForm(formencode.Schema): |
|
612 | 608 | allow_extra_fields = True |
|
613 | 609 | filter_extra_fields = True |
|
614 | 610 | |
|
615 | 611 | common_ancestor = v.UnicodeString(strip=True, required=True) |
|
616 | 612 | source_repo = v.UnicodeString(strip=True, required=True) |
|
617 | 613 | source_ref = v.UnicodeString(strip=True, required=True) |
|
618 | 614 | target_repo = v.UnicodeString(strip=True, required=True) |
|
619 | 615 | target_ref = v.UnicodeString(strip=True, required=True) |
|
620 | 616 | revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(), |
|
621 | 617 | v.UniqueList(localizer)(not_empty=True)) |
|
622 | 618 | review_members = formencode.ForEach(ReviewerForm()) |
|
623 | 619 | observer_members = formencode.ForEach(ObserverForm()) |
|
624 | 620 | pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255) |
|
625 | 621 | pullrequest_desc = v.UnicodeString(strip=True, required=False) |
|
626 | 622 | description_renderer = v.UnicodeString(strip=True, required=False) |
|
627 | 623 | |
|
628 | 624 | return _PullRequestForm |
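
The nested ReviewerForm/ObserverForm schemas above are applied to each submitted list element through formencode.ForEach. A small sketch of that mechanism with an illustrative payload:

    import formencode
    from formencode import validators as v

    class ReviewerForm(formencode.Schema):
        user_id = v.Int(not_empty=True)
        mandatory = v.StringBoolean(if_missing=False)
        role = v.String(if_missing='reviewer')

    # ForEach runs the schema against every element of the submitted list
    review_members = formencode.ForEach(ReviewerForm())
    print(review_members.to_python([
        {'user_id': '2', 'mandatory': 'true'},
        {'user_id': '3'},
    ]))
    # -> [{'user_id': 2, 'mandatory': True, 'role': 'reviewer'},
    #     {'user_id': 3, 'mandatory': False, 'role': 'reviewer'}]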
|
629 | 625 | |
|
630 | 626 | |
|
631 | 627 | def IssueTrackerPatternsForm(localizer): |
|
632 | 628 | _ = localizer |
|
633 | 629 | |
|
634 | 630 | class _IssueTrackerPatternsForm(formencode.Schema): |
|
635 | 631 | allow_extra_fields = True |
|
636 | 632 | filter_extra_fields = False |
|
637 | 633 | chained_validators = [v.ValidPattern(localizer)] |
|
638 | 634 | return _IssueTrackerPatternsForm |
@@ -1,925 +1,909 b'' | |||
|
1 | 1 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import os |
|
20 | 20 | import re |
|
21 | 21 | import logging |
|
22 | 22 | import time |
|
23 | 23 | import functools |
|
24 | 24 | from collections import namedtuple |
|
25 | 25 | |
|
26 | 26 | from pyramid.threadlocal import get_current_request |
|
27 | 27 | |
|
28 | 28 | from rhodecode.lib import rc_cache |
|
29 | 29 | from rhodecode.lib.hash_utils import sha1_safe |
|
30 | 30 | from rhodecode.lib.html_filters import sanitize_html |
|
31 | 31 | from rhodecode.lib.utils2 import ( |
|
32 | 32 | Optional, AttributeDict, safe_str, remove_prefix, str2bool) |
|
33 | 33 | from rhodecode.lib.vcs.backends import base |
|
34 | 34 | from rhodecode.lib.statsd_client import StatsdClient |
|
35 | 35 | from rhodecode.model import BaseModel |
|
36 | 36 | from rhodecode.model.db import ( |
|
37 | 37 | RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting) |
|
38 | 38 | from rhodecode.model.meta import Session |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | log = logging.getLogger(__name__) |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | UiSetting = namedtuple( |
|
45 | 45 | 'UiSetting', ['section', 'key', 'value', 'active']) |
|
46 | 46 | |
|
47 | 47 | SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google'] |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | class SettingNotFound(Exception): |
|
51 | 51 | def __init__(self, setting_id): |
|
52 | 52 | msg = f'Setting `{setting_id}` is not found' |
|
53 | 53 | super().__init__(msg) |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | class SettingsModel(BaseModel): |
|
57 | 57 | BUILTIN_HOOKS = ( |
|
58 | 58 | RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH, |
|
59 | 59 | RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH, |
|
60 | 60 | RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL, |
|
61 | 61 | RhodeCodeUi.HOOK_PUSH_KEY,) |
|
62 | 62 | HOOKS_SECTION = 'hooks' |
|
63 | 63 | |
|
64 | 64 | def __init__(self, sa=None, repo=None): |
|
65 | 65 | self.repo = repo |
|
66 | 66 | self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi |
|
67 | 67 | self.SettingsDbModel = ( |
|
68 | 68 | RepoRhodeCodeSetting if repo else RhodeCodeSetting) |
|
69 | 69 | super().__init__(sa) |
|
70 | 70 | |
|
71 | 71 | def get_keyname(self, key_name, prefix='rhodecode_'): |
|
72 | 72 | return f'{prefix}{key_name}' |
|
73 | 73 | |
|
74 | 74 | def get_ui_by_key(self, key): |
|
75 | 75 | q = self.UiDbModel.query() |
|
76 | 76 | q = q.filter(self.UiDbModel.ui_key == key) |
|
77 | 77 | q = self._filter_by_repo(RepoRhodeCodeUi, q) |
|
78 | 78 | return q.scalar() |
|
79 | 79 | |
|
80 | 80 | def get_ui_by_section(self, section): |
|
81 | 81 | q = self.UiDbModel.query() |
|
82 | 82 | q = q.filter(self.UiDbModel.ui_section == section) |
|
83 | 83 | q = self._filter_by_repo(RepoRhodeCodeUi, q) |
|
84 | 84 | return q.all() |
|
85 | 85 | |
|
86 | 86 | def get_ui_by_section_and_key(self, section, key): |
|
87 | 87 | q = self.UiDbModel.query() |
|
88 | 88 | q = q.filter(self.UiDbModel.ui_section == section) |
|
89 | 89 | q = q.filter(self.UiDbModel.ui_key == key) |
|
90 | 90 | q = self._filter_by_repo(RepoRhodeCodeUi, q) |
|
91 | 91 | return q.scalar() |
|
92 | 92 | |
|
93 | 93 | def get_ui(self, section=None, key=None): |
|
94 | 94 | q = self.UiDbModel.query() |
|
95 | 95 | q = self._filter_by_repo(RepoRhodeCodeUi, q) |
|
96 | 96 | |
|
97 | 97 | if section: |
|
98 | 98 | q = q.filter(self.UiDbModel.ui_section == section) |
|
99 | 99 | if key: |
|
100 | 100 | q = q.filter(self.UiDbModel.ui_key == key) |
|
101 | 101 | |
|
102 | 102 | # TODO: mikhail: add caching |
|
103 | 103 | result = [ |
|
104 | 104 | UiSetting( |
|
105 | 105 | section=safe_str(r.ui_section), key=safe_str(r.ui_key), |
|
106 | 106 | value=safe_str(r.ui_value), active=r.ui_active |
|
107 | 107 | ) |
|
108 | 108 | for r in q.all() |
|
109 | 109 | ] |
|
110 | 110 | return result |
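
get_ui() deliberately returns plain UiSetting namedtuples rather than ORM rows, so callers can keep using the values after the session is gone. A sketch with illustrative entries (not real database rows):

    from collections import namedtuple

    UiSetting = namedtuple('UiSetting', ['section', 'key', 'value', 'active'])

    # illustrative entries; real values come from the (Repo)RhodeCodeUi tables
    ui_settings = [
        UiSetting('hooks', 'changegroup.repo_size', 'python:my_hook', True),
        UiSetting('extensions', 'largefiles', '', False),
    ]
    for s in ui_settings:
        print(f"[{s.section}] {s.key} = {s.value!r} ({'on' if s.active else 'off'})")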
|
111 | 111 | |
|
112 | 112 | def get_builtin_hooks(self): |
|
113 | 113 | q = self.UiDbModel.query() |
|
114 | 114 | q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS)) |
|
115 | 115 | return self._get_hooks(q) |
|
116 | 116 | |
|
117 | 117 | def get_custom_hooks(self): |
|
118 | 118 | q = self.UiDbModel.query() |
|
119 | 119 | q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS)) |
|
120 | 120 | return self._get_hooks(q) |
|
121 | 121 | |
|
122 | 122 | def create_ui_section_value(self, section, val, key=None, active=True): |
|
123 | 123 | new_ui = self.UiDbModel() |
|
124 | 124 | new_ui.ui_section = section |
|
125 | 125 | new_ui.ui_value = val |
|
126 | 126 | new_ui.ui_active = active |
|
127 | 127 | |
|
128 | 128 | repository_id = '' |
|
129 | 129 | if self.repo: |
|
130 | 130 | repo = self._get_repo(self.repo) |
|
131 | 131 | repository_id = repo.repo_id |
|
132 | 132 | new_ui.repository_id = repository_id |
|
133 | 133 | |
|
134 | 134 | if not key: |
|
135 | 135 | # keys are unique, so they need repo-specific info appended
|
136 | 136 | if self.repo: |
|
137 | 137 | key = sha1_safe(f'{section}{val}{repository_id}') |
|
138 | 138 | else: |
|
139 | 139 | key = sha1_safe(f'{section}{val}') |
|
140 | 140 | |
|
141 | 141 | new_ui.ui_key = key |
|
142 | 142 | |
|
143 | 143 | Session().add(new_ui) |
|
144 | 144 | return new_ui |
|
145 | 145 | |
|
146 | 146 | def create_or_update_hook(self, key, value): |
|
147 | 147 | ui = ( |
|
148 | 148 | self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or |
|
149 | 149 | self.UiDbModel()) |
|
150 | 150 | ui.ui_section = self.HOOKS_SECTION |
|
151 | 151 | ui.ui_active = True |
|
152 | 152 | ui.ui_key = key |
|
153 | 153 | ui.ui_value = value |
|
154 | 154 | |
|
155 | 155 | if self.repo: |
|
156 | 156 | repo = self._get_repo(self.repo) |
|
157 | 157 | repository_id = repo.repo_id |
|
158 | 158 | ui.repository_id = repository_id |
|
159 | 159 | |
|
160 | 160 | Session().add(ui) |
|
161 | 161 | return ui |
|
162 | 162 | |
|
163 | 163 | def delete_ui(self, id_): |
|
164 | 164 | ui = self.UiDbModel.get(id_) |
|
165 | 165 | if not ui: |
|
166 | 166 | raise SettingNotFound(id_) |
|
167 | 167 | Session().delete(ui) |
|
168 | 168 | |
|
169 | 169 | def get_setting_by_name(self, name): |
|
170 | 170 | q = self._get_settings_query() |
|
171 | 171 | q = q.filter(self.SettingsDbModel.app_settings_name == name) |
|
172 | 172 | return q.scalar() |
|
173 | 173 | |
|
174 | 174 | def create_or_update_setting( |
|
175 | 175 | self, name, val: Optional | str = Optional(''), type_: Optional | str = Optional('unicode')): |
|
176 | 176 | """ |
|
177 | 177 | Creates or updates a RhodeCode setting. If an update is triggered, only

178 | 178 | parameters that are explicitly set are updated; any Optional instance

179 | 179 | is skipped.
|
180 | 180 | |
|
181 | 181 | :param name: |
|
182 | 182 | :param val: |
|
183 | 183 | :param type_: |
|
184 | 184 | :return: |
|
185 | 185 | """ |
|
186 | 186 | |
|
187 | 187 | res = self.get_setting_by_name(name) |
|
188 | 188 | repo = self._get_repo(self.repo) if self.repo else None |
|
189 | 189 | |
|
190 | 190 | if not res: |
|
191 | 191 | val = Optional.extract(val) |
|
192 | 192 | type_ = Optional.extract(type_) |
|
193 | 193 | |
|
194 | 194 | args = ( |
|
195 | 195 | (repo.repo_id, name, val, type_) |
|
196 | 196 | if repo else (name, val, type_)) |
|
197 | 197 | res = self.SettingsDbModel(*args) |
|
198 | 198 | |
|
199 | 199 | else: |
|
200 | 200 | if self.repo: |
|
201 | 201 | res.repository_id = repo.repo_id |
|
202 | 202 | |
|
203 | 203 | res.app_settings_name = name |
|
204 | 204 | if not isinstance(type_, Optional): |
|
205 | 205 | # update if set |
|
206 | 206 | res.app_settings_type = type_ |
|
207 | 207 | if not isinstance(val, Optional): |
|
208 | 208 | # update if set |
|
209 | 209 | res.app_settings_value = val |
|
210 | 210 | |
|
211 | 211 | Session().add(res) |
|
212 | 212 | return res |
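
The Optional wrapper (imported from rhodecode.lib.utils2) is what lets create_or_update_setting() distinguish "caller passed nothing" from an explicit value. A stand-alone sketch of those semantics; this is an illustration, not the real implementation:

    class Optional:
        """Marker for a default that only applies on creation, never on update."""
        def __init__(self, default):
            self.default = default

        @staticmethod
        def extract(val):
            return val.default if isinstance(val, Optional) else val

    val = Optional('')            # caller did not pass a value
    print(Optional.extract(val))  # '' -> used only when creating a new setting
    print(Optional.extract('x'))  # 'x' -> explicit value, applied on update too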
|
213 | 213 | |
|
214 | 214 | def get_cache_region(self): |
|
215 | 215 | repo = self._get_repo(self.repo) if self.repo else None |
|
216 | 216 | cache_key = f"repo.v1.{repo.repo_id}" if repo else "repo.v1.ALL" |
|
217 | 217 | cache_namespace_uid = f'cache_settings.{cache_key}' |
|
218 | 218 | region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid) |
|
219 | 219 | return region, cache_namespace_uid |
|
220 | 220 | |
|
221 | 221 | def invalidate_settings_cache(self, hard=False): |
|
222 | 222 | region, namespace_key = self.get_cache_region() |
|
223 | 223 | log.debug('Invalidating cache [%s] region %s for cache_key: %s',
|
224 | 224 | 'invalidate_settings_cache', region, namespace_key) |
|
225 | 225 | |
|
226 | 226 | # we always use a hard (delete) cleanup when invalidation is requested
|
227 | 227 | rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE) |
|
228 | 228 | |
|
229 | 229 | def get_cache_call_method(self, cache=True): |
|
230 | 230 | region, cache_key = self.get_cache_region() |
|
231 | 231 | |
|
232 | 232 | @region.conditional_cache_on_arguments(condition=cache) |
|
233 | 233 | def _get_all_settings(name, key): |
|
234 | 234 | q = self._get_settings_query() |
|
235 | 235 | if not q: |
|
236 | 236 | raise Exception('Could not get application settings!')
|
237 | 237 | |
|
238 | 238 | settings = { |
|
239 | 239 | self.get_keyname(res.app_settings_name): res.app_settings_value |
|
240 | 240 | for res in q |
|
241 | 241 | } |
|
242 | 242 | return settings |
|
243 | 243 | return _get_all_settings |
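
conditional_cache_on_arguments is RhodeCode's wrapper around dogpile.cache that can skip caching when `condition` is falsy (an assumption based on its usage here). The underlying shape, sketched with plain dogpile.cache and a stand-in payload:

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory')

    @region.cache_on_arguments(namespace='cache_settings.repo.v1.ALL')
    def _get_all_settings(name, key):
        # stand-in for the SettingsDbModel query above
        return {'rhodecode_title': 'RhodeCode'}

    # the first call computes and stores; the second is served from the region
    print(_get_all_settings('rhodecode_settings', 'repo.v1.ALL'))
    print(_get_all_settings('rhodecode_settings', 'repo.v1.ALL'))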
|
244 | 244 | |
|
245 | 245 | def get_all_settings(self, cache=False, from_request=True): |
|
246 | 246 | # determines whether we use GLOBAL or PER_REPO settings
|
247 | 247 | repo = self._get_repo(self.repo) if self.repo else None |
|
248 | 248 | |
|
249 | 249 | # initially try the request context; this is the fastest |
|
250 | 250 | # we only fetch the global config here, NOT repo-specific settings
|
251 | 251 | if from_request and not repo: |
|
252 | 252 | request = get_current_request() |
|
253 | 253 | |
|
254 | 254 | if request and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'): |
|
255 | 255 | rc_config = request.call_context.rc_config |
|
256 | 256 | if rc_config: |
|
257 | 257 | return rc_config |
|
258 | 258 | |
|
259 | 259 | _region, cache_key = self.get_cache_region() |
|
260 | 260 | _get_all_settings = self.get_cache_call_method(cache=cache) |
|
261 | 261 | |
|
262 | 262 | start = time.time() |
|
263 | 263 | result = _get_all_settings('rhodecode_settings', cache_key) |
|
264 | 264 | compute_time = time.time() - start |
|
265 | 265 | log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time) |
|
266 | 266 | |
|
267 | 267 | statsd = StatsdClient.statsd |
|
268 | 268 | if statsd: |
|
269 | 269 | elapsed_time_ms = round(1000.0 * compute_time) # use ms only |
|
270 | 270 | statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms, |
|
271 | 271 | use_decimals=False) |
|
272 | 272 | |
|
273 | 273 | log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache) |
|
274 | 274 | |
|
275 | 275 | return result |
|
276 | 276 | |
|
277 | 277 | def get_auth_settings(self): |
|
278 | 278 | q = self._get_settings_query() |
|
279 | 279 | q = q.filter( |
|
280 | 280 | self.SettingsDbModel.app_settings_name.startswith('auth_')) |
|
281 | 281 | rows = q.all() |
|
282 | 282 | auth_settings = { |
|
283 | 283 | row.app_settings_name: row.app_settings_value for row in rows} |
|
284 | 284 | return auth_settings |
|
285 | 285 | |
|
286 | 286 | def get_auth_plugins(self): |
|
287 | 287 | auth_plugins = self.get_setting_by_name("auth_plugins") |
|
288 | 288 | return auth_plugins.app_settings_value |
|
289 | 289 | |
|
290 | 290 | def get_default_repo_settings(self, strip_prefix=False): |
|
291 | 291 | q = self._get_settings_query() |
|
292 | 292 | q = q.filter( |
|
293 | 293 | self.SettingsDbModel.app_settings_name.startswith('default_')) |
|
294 | 294 | rows = q.all() |
|
295 | 295 | |
|
296 | 296 | result = {} |
|
297 | 297 | for row in rows: |
|
298 | 298 | key = row.app_settings_name |
|
299 | 299 | if strip_prefix: |
|
300 | 300 | key = remove_prefix(key, prefix='default_') |
|
301 | 301 | result.update({key: row.app_settings_value}) |
|
302 | 302 | return result |
|
303 | 303 | |
|
304 | 304 | def get_repo(self): |
|
305 | 305 | repo = self._get_repo(self.repo) |
|
306 | 306 | if not repo: |
|
307 | 307 | raise Exception( |
|
308 | 308 | f'Repository `{self.repo}` cannot be found inside the database') |
|
309 | 309 | return repo |
|
310 | 310 | |
|
311 | 311 | def _filter_by_repo(self, model, query): |
|
312 | 312 | if self.repo: |
|
313 | 313 | repo = self.get_repo() |
|
314 | 314 | query = query.filter(model.repository_id == repo.repo_id) |
|
315 | 315 | return query |
|
316 | 316 | |
|
317 | 317 | def _get_hooks(self, query): |
|
318 | 318 | query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION) |
|
319 | 319 | query = self._filter_by_repo(RepoRhodeCodeUi, query) |
|
320 | 320 | return query.all() |
|
321 | 321 | |
|
322 | 322 | def _get_settings_query(self): |
|
323 | 323 | q = self.SettingsDbModel.query() |
|
324 | 324 | return self._filter_by_repo(RepoRhodeCodeSetting, q) |
|
325 | 325 | |
|
326 | 326 | def list_enabled_social_plugins(self, settings): |
|
327 | 327 | enabled = [] |
|
328 | 328 | for plug in SOCIAL_PLUGINS_LIST: |
|
329 | 329 | if str2bool(settings.get(f'rhodecode_auth_{plug}_enabled')): |
|
330 | 330 | enabled.append(plug) |
|
331 | 331 | return enabled |
|
332 | 332 | |
|
333 | 333 | |
|
334 | 334 | def assert_repo_settings(func): |
|
335 | 335 | @functools.wraps(func) |
|
336 | 336 | def _wrapper(self, *args, **kwargs): |
|
337 | 337 | if not self.repo_settings: |
|
338 | 338 | raise Exception('Repository is not specified') |
|
339 | 339 | return func(self, *args, **kwargs) |
|
340 | 340 | return _wrapper |
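
assert_repo_settings is a plain guard decorator: it fails fast when a repo-scoped method is called on a model constructed without a repository. A compact usage sketch (DemoModel is hypothetical):

    import functools

    def assert_repo_settings(func):
        @functools.wraps(func)
        def _wrapper(self, *args, **kwargs):
            if not self.repo_settings:
                raise Exception('Repository is not specified')
            return func(self, *args, **kwargs)
        return _wrapper

    class DemoModel:
        def __init__(self, repo_settings=None):
            self.repo_settings = repo_settings

        @assert_repo_settings
        def get_repo_settings(self):
            return {'inherit_vcs_settings': False}

    print(DemoModel(repo_settings=object()).get_repo_settings())  # works
    # DemoModel().get_repo_settings() would raise 'Repository is not specified'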
|
341 | 341 | |
|
342 | 342 | |
|
343 | 343 | class IssueTrackerSettingsModel(object): |
|
344 | 344 | INHERIT_SETTINGS = 'inherit_issue_tracker_settings' |
|
345 | 345 | SETTINGS_PREFIX = 'issuetracker_' |
|
346 | 346 | |
|
347 | 347 | def __init__(self, sa=None, repo=None): |
|
348 | 348 | self.global_settings = SettingsModel(sa=sa) |
|
349 | 349 | self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None |
|
350 | 350 | |
|
351 | 351 | @property |
|
352 | 352 | def inherit_global_settings(self): |
|
353 | 353 | if not self.repo_settings: |
|
354 | 354 | return True |
|
355 | 355 | setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS) |
|
356 | 356 | return setting.app_settings_value if setting else True |
|
357 | 357 | |
|
358 | 358 | @inherit_global_settings.setter |
|
359 | 359 | def inherit_global_settings(self, value): |
|
360 | 360 | if self.repo_settings: |
|
361 | 361 | settings = self.repo_settings.create_or_update_setting( |
|
362 | 362 | self.INHERIT_SETTINGS, value, type_='bool') |
|
363 | 363 | Session().add(settings) |
|
364 | 364 | |
|
365 | 365 | def _get_keyname(self, key, uid, prefix='rhodecode_'): |
|
366 | 366 | return f'{prefix}{self.SETTINGS_PREFIX}{key}_{uid}' |
|
367 | 367 | |
|
368 | 368 | def _make_dict_for_settings(self, qs): |
|
369 | 369 | prefix_match = self._get_keyname('pat', '')
|
370 | 370 | |
|
371 | 371 | issuetracker_entries = {} |
|
372 | 372 | # create keys |
|
373 | 373 | for k, v in qs.items(): |
|
374 | 374 | if k.startswith(prefix_match): |
|
375 | 375 | uid = k[len(prefix_match):] |
|
376 | 376 | issuetracker_entries[uid] = None |
|
377 | 377 | |
|
378 | 378 | def url_cleaner(input_str): |
|
379 | 379 | input_str = input_str.replace('"', '').replace("'", '') |
|
380 | 380 | input_str = sanitize_html(input_str, strip=True) |
|
381 | 381 | return input_str |
|
382 | 382 | |
|
383 | 383 | # populate |
|
384 | 384 | for uid in issuetracker_entries: |
|
385 | 385 | url_data = qs.get(self._get_keyname('url', uid)) |
|
386 | 386 | |
|
387 | 387 | pat = qs.get(self._get_keyname('pat', uid)) |
|
388 | 388 | try: |
|
389 | 389 | pat_compiled = re.compile(r'%s' % pat) |
|
390 | 390 | except re.error: |
|
391 | 391 | pat_compiled = None |
|
392 | 392 | |
|
393 | 393 | issuetracker_entries[uid] = AttributeDict({ |
|
394 | 394 | 'pat': pat, |
|
395 | 395 | 'pat_compiled': pat_compiled, |
|
396 | 396 | 'url': url_cleaner( |
|
397 | 397 | qs.get(self._get_keyname('url', uid)) or ''), |
|
398 | 398 | 'pref': sanitize_html( |
|
399 | 399 | qs.get(self._get_keyname('pref', uid)) or ''), |
|
400 | 400 | 'desc': qs.get( |
|
401 | 401 | self._get_keyname('desc', uid)), |
|
402 | 402 | }) |
|
403 | 403 | |
|
404 | 404 | return issuetracker_entries |
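
Issue tracker entries are stored flat, one setting per field per uid, and _make_dict_for_settings() reverse-engineers the uids from the pattern keys. A sketch of that key scheme (the uid and the values are examples):

    SETTINGS_PREFIX = 'issuetracker_'

    def _get_keyname(key, uid, prefix='rhodecode_'):
        return f'{prefix}{SETTINGS_PREFIX}{key}_{uid}'

    qs = {
        _get_keyname('pat', 'abc123'): r'#(?P<issue_id>\d+)',
        _get_keyname('url', 'abc123'): 'https://tracker.example.com/${issue_id}',
    }
    # every key that starts with the 'pat' prefix yields one tracker uid
    prefix_match = _get_keyname('pat', '')
    uids = [k[len(prefix_match):] for k in qs if k.startswith(prefix_match)]
    print(uids)  # ['abc123']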
|
405 | 405 | |
|
406 | 406 | def get_global_settings(self, cache=False): |
|
407 | 407 | """ |
|
408 | 408 | Returns a dict of global issue tracker settings
|
409 | 409 | """ |
|
410 | 410 | defaults = self.global_settings.get_all_settings(cache=cache) |
|
411 | 411 | settings = self._make_dict_for_settings(defaults) |
|
412 | 412 | return settings |
|
413 | 413 | |
|
414 | 414 | def get_repo_settings(self, cache=False): |
|
415 | 415 | """ |
|
416 | 416 | Returns a dict of per-repository issue tracker settings
|
417 | 417 | """ |
|
418 | 418 | if not self.repo_settings: |
|
419 | 419 | raise Exception('Repository is not specified') |
|
420 | 420 | all_settings = self.repo_settings.get_all_settings(cache=cache) |
|
421 | 421 | settings = self._make_dict_for_settings(all_settings) |
|
422 | 422 | return settings |
|
423 | 423 | |
|
424 | 424 | def get_settings(self, cache=False): |
|
425 | 425 | if self.inherit_global_settings: |
|
426 | 426 | return self.get_global_settings(cache=cache) |
|
427 | 427 | else: |
|
428 | 428 | return self.get_repo_settings(cache=cache) |
|
429 | 429 | |
|
430 | 430 | def delete_entries(self, uid): |
|
431 | 431 | if self.repo_settings: |
|
432 | 432 | all_patterns = self.get_repo_settings() |
|
433 | 433 | settings_model = self.repo_settings |
|
434 | 434 | else: |
|
435 | 435 | all_patterns = self.get_global_settings() |
|
436 | 436 | settings_model = self.global_settings |
|
437 | 437 | entries = all_patterns.get(uid, []) |
|
438 | 438 | |
|
439 | 439 | for del_key in entries: |
|
440 | 440 | setting_name = self._get_keyname(del_key, uid, prefix='') |
|
441 | 441 | entry = settings_model.get_setting_by_name(setting_name) |
|
442 | 442 | if entry: |
|
443 | 443 | Session().delete(entry) |
|
444 | 444 | |
|
445 | 445 | Session().commit() |
|
446 | 446 | |
|
447 | 447 | def create_or_update_setting( |
|
448 | 448 | self, name, val=Optional(''), type_=Optional('unicode')): |
|
449 | 449 | if self.repo_settings: |
|
450 | 450 | setting = self.repo_settings.create_or_update_setting( |
|
451 | 451 | name, val, type_) |
|
452 | 452 | else: |
|
453 | 453 | setting = self.global_settings.create_or_update_setting( |
|
454 | 454 | name, val, type_) |
|
455 | 455 | return setting |
|
456 | 456 | |
|
457 | 457 | |
|
458 | 458 | class VcsSettingsModel(object): |
|
459 | 459 | |
|
460 | 460 | INHERIT_SETTINGS = 'inherit_vcs_settings' |
|
461 | 461 | GENERAL_SETTINGS = ( |
|
462 | 462 | 'use_outdated_comments', |
|
463 | 463 | 'pr_merge_enabled', |
|
464 | 464 | 'hg_use_rebase_for_merging', |
|
465 | 465 | 'hg_close_branch_before_merging', |
|
466 | 466 | 'git_use_rebase_for_merging', |
|
467 | 467 | 'git_close_branch_before_merging', |
|
468 | 468 | 'diff_cache', |
|
469 | 469 | ) |
|
470 | 470 | |
|
471 | 471 | HOOKS_SETTINGS = ( |
|
472 | 472 | ('hooks', 'changegroup.repo_size'), |
|
473 | 473 | ('hooks', 'changegroup.push_logger'), |
|
474 | 474 | ('hooks', 'outgoing.pull_logger'), |
|
475 | 475 | ) |
|
476 | 476 | HG_SETTINGS = ( |
|
477 | 477 | ('extensions', 'largefiles'), |
|
478 | 478 | ('phases', 'publish'), |
|
479 | 479 | ('extensions', 'evolve'), |
|
480 | 480 | ('extensions', 'topic'), |
|
481 | 481 | ('experimental', 'evolution'), |
|
482 | 482 | ('experimental', 'evolution.exchange'), |
|
483 | 483 | ) |
|
484 | 484 | GIT_SETTINGS = ( |
|
485 | 485 | ('vcs_git_lfs', 'enabled'), |
|
486 | 486 | ) |
|
487 | 487 | GLOBAL_HG_SETTINGS = ( |
|
488 | 488 | ('extensions', 'largefiles'), |
|
489 | 489 | ('largefiles', 'usercache'), |
|
490 | 490 | ('phases', 'publish'), |
|
491 | 491 | ('extensions', 'evolve'), |
|
492 | 492 | ('extensions', 'topic'), |
|
493 | 493 | ('experimental', 'evolution'), |
|
494 | 494 | ('experimental', 'evolution.exchange'), |
|
495 | 495 | ) |
|
496 | 496 | |
|
497 | 497 | GLOBAL_GIT_SETTINGS = ( |
|
498 | 498 | ('vcs_git_lfs', 'enabled'), |
|
499 | 499 | ('vcs_git_lfs', 'store_location') |
|
500 | 500 | ) |
|
501 | 501 | |
|
502 | GLOBAL_SVN_SETTINGS = ( | |
|
503 | ('vcs_svn_proxy', 'http_requests_enabled'), | |
|
504 | ('vcs_svn_proxy', 'http_server_url') | |
|
505 | ) | |
|
506 | ||
|
507 | 502 | SVN_BRANCH_SECTION = 'vcs_svn_branch' |
|
508 | 503 | SVN_TAG_SECTION = 'vcs_svn_tag' |
|
509 | 504 | SSL_SETTING = ('web', 'push_ssl') |
|
510 | 505 | PATH_SETTING = ('paths', '/') |
|
511 | 506 | |
|
512 | 507 | def __init__(self, sa=None, repo=None): |
|
513 | 508 | self.global_settings = SettingsModel(sa=sa) |
|
514 | 509 | self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None |
|
515 | 510 | self._ui_settings = ( |
|
516 | 511 | self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS) |
|
517 | 512 | self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION) |
|
518 | 513 | |
|
519 | 514 | @property |
|
520 | 515 | @assert_repo_settings |
|
521 | 516 | def inherit_global_settings(self): |
|
522 | 517 | setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS) |
|
523 | 518 | return setting.app_settings_value if setting else True |
|
524 | 519 | |
|
525 | 520 | @inherit_global_settings.setter |
|
526 | 521 | @assert_repo_settings |
|
527 | 522 | def inherit_global_settings(self, value): |
|
528 | 523 | self.repo_settings.create_or_update_setting( |
|
529 | 524 | self.INHERIT_SETTINGS, value, type_='bool') |
|
530 | 525 | |
|
531 | 526 | def get_keyname(self, key_name, prefix='rhodecode_'): |
|
532 | 527 | return f'{prefix}{key_name}' |
|
533 | 528 | |
|
534 | 529 | def get_global_svn_branch_patterns(self): |
|
535 | 530 | return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION) |
|
536 | 531 | |
|
537 | 532 | @assert_repo_settings |
|
538 | 533 | def get_repo_svn_branch_patterns(self): |
|
539 | 534 | return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION) |
|
540 | 535 | |
|
541 | 536 | def get_global_svn_tag_patterns(self): |
|
542 | 537 | return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION) |
|
543 | 538 | |
|
544 | 539 | @assert_repo_settings |
|
545 | 540 | def get_repo_svn_tag_patterns(self): |
|
546 | 541 | return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION) |
|
547 | 542 | |
|
548 | 543 | def get_global_settings(self): |
|
549 | 544 | return self._collect_all_settings(global_=True) |
|
550 | 545 | |
|
551 | 546 | @assert_repo_settings |
|
552 | 547 | def get_repo_settings(self): |
|
553 | 548 | return self._collect_all_settings(global_=False) |
|
554 | 549 | |
|
555 | 550 | @assert_repo_settings |
|
556 | 551 | def get_repo_settings_inherited(self): |
|
557 | 552 | global_settings = self.get_global_settings() |
|
558 | 553 | global_settings.update(self.get_repo_settings()) |
|
559 | 554 | return global_settings |
|
560 | 555 | |
|
561 | 556 | @assert_repo_settings |
|
562 | 557 | def create_or_update_repo_settings( |
|
563 | 558 | self, data, inherit_global_settings=False): |
|
564 | 559 | from rhodecode.model.scm import ScmModel |
|
565 | 560 | |
|
566 | 561 | self.inherit_global_settings = inherit_global_settings |
|
567 | 562 | |
|
568 | 563 | repo = self.repo_settings.get_repo() |
|
569 | 564 | if not inherit_global_settings: |
|
570 | 565 | if repo.repo_type == 'svn': |
|
571 | 566 | self.create_repo_svn_settings(data) |
|
572 | 567 | else: |
|
573 | 568 | self.create_or_update_repo_hook_settings(data) |
|
574 | 569 | self.create_or_update_repo_pr_settings(data) |
|
575 | 570 | |
|
576 | 571 | if repo.repo_type == 'hg': |
|
577 | 572 | self.create_or_update_repo_hg_settings(data) |
|
578 | 573 | |
|
579 | 574 | if repo.repo_type == 'git': |
|
580 | 575 | self.create_or_update_repo_git_settings(data) |
|
581 | 576 | |
|
582 | 577 | ScmModel().mark_for_invalidation(repo.repo_name, delete=True) |
|
583 | 578 | |
|
584 | 579 | @assert_repo_settings |
|
585 | 580 | def create_or_update_repo_hook_settings(self, data): |
|
586 | 581 | for section, key in self.HOOKS_SETTINGS: |
|
587 | 582 | data_key = self._get_form_ui_key(section, key) |
|
588 | 583 | if data_key not in data: |
|
589 | 584 | raise ValueError( |
|
590 | 585 | f'The given data does not contain {data_key} key') |
|
591 | 586 | |
|
592 | 587 | active = data.get(data_key) |
|
593 | 588 | repo_setting = self.repo_settings.get_ui_by_section_and_key( |
|
594 | 589 | section, key) |
|
595 | 590 | if not repo_setting: |
|
596 | 591 | global_setting = self.global_settings.\ |
|
597 | 592 | get_ui_by_section_and_key(section, key) |
|
598 | 593 | self.repo_settings.create_ui_section_value( |
|
599 | 594 | section, global_setting.ui_value, key=key, active=active) |
|
600 | 595 | else: |
|
601 | 596 | repo_setting.ui_active = active |
|
602 | 597 | Session().add(repo_setting) |
|
603 | 598 | |
|
604 | 599 | def update_global_hook_settings(self, data): |
|
605 | 600 | for section, key in self.HOOKS_SETTINGS: |
|
606 | 601 | data_key = self._get_form_ui_key(section, key) |
|
607 | 602 | if data_key not in data: |
|
608 | 603 | raise ValueError( |
|
609 | 604 | f'The given data does not contain {data_key} key') |
|
610 | 605 | active = data.get(data_key) |
|
611 | 606 | repo_setting = self.global_settings.get_ui_by_section_and_key( |
|
612 | 607 | section, key) |
|
613 | 608 | repo_setting.ui_active = active |
|
614 | 609 | Session().add(repo_setting) |
|
615 | 610 | |
|
616 | 611 | @assert_repo_settings |
|
617 | 612 | def create_or_update_repo_pr_settings(self, data): |
|
618 | 613 | return self._create_or_update_general_settings( |
|
619 | 614 | self.repo_settings, data) |
|
620 | 615 | |
|
621 | 616 | def create_or_update_global_pr_settings(self, data): |
|
622 | 617 | return self._create_or_update_general_settings( |
|
623 | 618 | self.global_settings, data) |
|
624 | 619 | |
|
625 | 620 | @assert_repo_settings |
|
626 | 621 | def create_repo_svn_settings(self, data): |
|
627 | 622 | return self._create_svn_settings(self.repo_settings, data) |
|
628 | 623 | |
|
629 | 624 | def _set_evolution(self, settings, is_enabled): |
|
630 | 625 | if is_enabled: |
|
631 | 626 | # if evolve is active, set evolution=all
|
632 | 627 | |
|
633 | 628 | self._create_or_update_ui( |
|
634 | 629 | settings, *('experimental', 'evolution'), value='all', |
|
635 | 630 | active=True) |
|
636 | 631 | self._create_or_update_ui( |
|
637 | 632 | settings, *('experimental', 'evolution.exchange'), value='yes', |
|
638 | 633 | active=True) |
|
639 | 634 | # if evolve is active, enable topic server support
|
640 | 635 | self._create_or_update_ui( |
|
641 | 636 | settings, *('extensions', 'topic'), value='', |
|
642 | 637 | active=True) |
|
643 | 638 | |
|
644 | 639 | else: |
|
645 | 640 | self._create_or_update_ui( |
|
646 | 641 | settings, *('experimental', 'evolution'), value='', |
|
647 | 642 | active=False) |
|
648 | 643 | self._create_or_update_ui( |
|
649 | 644 | settings, *('experimental', 'evolution.exchange'), value='no', |
|
650 | 645 | active=False) |
|
651 | 646 | self._create_or_update_ui( |
|
652 | 647 | settings, *('extensions', 'topic'), value='', |
|
653 | 648 | active=False) |
|
654 | 649 | |
|
655 | 650 | @assert_repo_settings |
|
656 | 651 | def create_or_update_repo_hg_settings(self, data): |
|
657 | 652 | largefiles, phases, evolve = \ |
|
658 | 653 | self.HG_SETTINGS[:3] |
|
659 | 654 | largefiles_key, phases_key, evolve_key = \ |
|
660 | 655 | self._get_settings_keys(self.HG_SETTINGS[:3], data) |
|
661 | 656 | |
|
662 | 657 | self._create_or_update_ui( |
|
663 | 658 | self.repo_settings, *largefiles, value='', |
|
664 | 659 | active=data[largefiles_key]) |
|
665 | 660 | self._create_or_update_ui( |
|
666 | 661 | self.repo_settings, *evolve, value='', |
|
667 | 662 | active=data[evolve_key]) |
|
668 | 663 | self._set_evolution(self.repo_settings, is_enabled=data[evolve_key]) |
|
669 | 664 | |
|
670 | 665 | self._create_or_update_ui( |
|
671 | 666 | self.repo_settings, *phases, value=safe_str(data[phases_key])) |
|
672 | 667 | |
|
673 | 668 | def create_or_update_global_hg_settings(self, data): |
|
674 | 669 | opts_len = 4 |
|
675 | 670 | largefiles, largefiles_store, phases, evolve \ |
|
676 | 671 | = self.GLOBAL_HG_SETTINGS[:opts_len] |
|
677 | 672 | largefiles_key, largefiles_store_key, phases_key, evolve_key \ |
|
678 | 673 | = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:opts_len], data) |
|
679 | 674 | |
|
680 | 675 | self._create_or_update_ui( |
|
681 | 676 | self.global_settings, *largefiles, value='', |
|
682 | 677 | active=data[largefiles_key]) |
|
683 | 678 | self._create_or_update_ui( |
|
684 | 679 | self.global_settings, *largefiles_store, value=data[largefiles_store_key]) |
|
685 | 680 | self._create_or_update_ui( |
|
686 | 681 | self.global_settings, *phases, value=safe_str(data[phases_key])) |
|
687 | 682 | self._create_or_update_ui( |
|
688 | 683 | self.global_settings, *evolve, value='', |
|
689 | 684 | active=data[evolve_key]) |
|
690 | 685 | self._set_evolution(self.global_settings, is_enabled=data[evolve_key]) |
|
691 | 686 | |
|
692 | 687 | def create_or_update_repo_git_settings(self, data): |
|
693 | 688 | # NOTE(marcink): the trailing comma makes tuple unpacking work properly
|
694 | 689 | lfs_enabled, \ |
|
695 | 690 | = self.GIT_SETTINGS |
|
696 | 691 | |
|
697 | 692 | lfs_enabled_key, \ |
|
698 | 693 | = self._get_settings_keys(self.GIT_SETTINGS, data) |
|
699 | 694 | |
|
700 | 695 | self._create_or_update_ui( |
|
701 | 696 | self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key], |
|
702 | 697 | active=data[lfs_enabled_key]) |
|
703 | 698 | |
|
704 | 699 | def create_or_update_global_git_settings(self, data): |
|
705 | 700 | lfs_enabled, lfs_store_location \ |
|
706 | 701 | = self.GLOBAL_GIT_SETTINGS |
|
707 | 702 | lfs_enabled_key, lfs_store_location_key \ |
|
708 | 703 | = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data) |
|
709 | 704 | |
|
710 | 705 | self._create_or_update_ui( |
|
711 | 706 | self.global_settings, *lfs_enabled, value=data[lfs_enabled_key], |
|
712 | 707 | active=data[lfs_enabled_key]) |
|
713 | 708 | self._create_or_update_ui( |
|
714 | 709 | self.global_settings, *lfs_store_location, |
|
715 | 710 | value=data[lfs_store_location_key]) |
|
716 | 711 | |
|
717 | 712 | def create_or_update_global_svn_settings(self, data): |
|
718 | 713 | # branch/tags patterns |
|
719 | 714 | self._create_svn_settings(self.global_settings, data) |
|
720 | 715 | |
|
721 | http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS | |
|
722 | http_requests_enabled_key, http_server_url_key = self._get_settings_keys( | |
|
723 | self.GLOBAL_SVN_SETTINGS, data) | |
|
724 | ||
|
725 | self._create_or_update_ui( | |
|
726 | self.global_settings, *http_requests_enabled, | |
|
727 | value=safe_str(data[http_requests_enabled_key])) | |
|
728 | self._create_or_update_ui( | |
|
729 | self.global_settings, *http_server_url, | |
|
730 | value=data[http_server_url_key]) | |
|
731 | ||
|
732 | 716 | def update_global_ssl_setting(self, value): |
|
733 | 717 | self._create_or_update_ui( |
|
734 | 718 | self.global_settings, *self.SSL_SETTING, value=value) |
|
735 | 719 | |
|
736 | 720 | def update_global_path_setting(self, value): |
|
737 | 721 | self._create_or_update_ui( |
|
738 | 722 | self.global_settings, *self.PATH_SETTING, value=value) |
|
739 | 723 | |
|
740 | 724 | @assert_repo_settings |
|
741 | 725 | def delete_repo_svn_pattern(self, id_): |
|
742 | 726 | ui = self.repo_settings.UiDbModel.get(id_) |
|
743 | 727 | if ui and ui.repository.repo_name == self.repo_settings.repo: |
|
744 | 728 | # only delete if it's the same repo as initialized settings |
|
745 | 729 | self.repo_settings.delete_ui(id_) |
|
746 | 730 | else: |
|
747 | 731 | # raise an error as if this option could not be found
|
748 | 732 | self.repo_settings.delete_ui(-1) |
|
749 | 733 | |
|
750 | 734 | def delete_global_svn_pattern(self, id_): |
|
751 | 735 | self.global_settings.delete_ui(id_) |
|
752 | 736 | |
|
753 | 737 | @assert_repo_settings |
|
754 | 738 | def get_repo_ui_settings(self, section=None, key=None): |
|
755 | 739 | global_uis = self.global_settings.get_ui(section, key) |
|
756 | 740 | repo_uis = self.repo_settings.get_ui(section, key) |
|
757 | 741 | |
|
758 | 742 | filtered_repo_uis = self._filter_ui_settings(repo_uis) |
|
759 | 743 | filtered_repo_uis_keys = [ |
|
760 | 744 | (s.section, s.key) for s in filtered_repo_uis] |
|
761 | 745 | |
|
762 | 746 | def _is_global_ui_filtered(ui): |
|
763 | 747 | return ( |
|
764 | 748 | (ui.section, ui.key) in filtered_repo_uis_keys |
|
765 | 749 | or ui.section in self._svn_sections) |
|
766 | 750 | |
|
767 | 751 | filtered_global_uis = [ |
|
768 | 752 | ui for ui in global_uis if not _is_global_ui_filtered(ui)] |
|
769 | 753 | |
|
770 | 754 | return filtered_global_uis + filtered_repo_uis |
|
771 | 755 | |
|
772 | 756 | def get_global_ui_settings(self, section=None, key=None): |
|
773 | 757 | return self.global_settings.get_ui(section, key) |
|
774 | 758 | |
|
775 | 759 | def get_ui_settings_as_config_obj(self, section=None, key=None): |
|
776 | 760 | config = base.Config() |
|
777 | 761 | |
|
778 | 762 | ui_settings = self.get_ui_settings(section=section, key=key) |
|
779 | 763 | |
|
780 | 764 | for entry in ui_settings: |
|
781 | 765 | config.set(entry.section, entry.key, entry.value) |
|
782 | 766 | |
|
783 | 767 | return config |
|
784 | 768 | |
|
785 | 769 | def get_ui_settings(self, section=None, key=None): |
|
786 | 770 | if not self.repo_settings or self.inherit_global_settings: |
|
787 | 771 | return self.get_global_ui_settings(section, key) |
|
788 | 772 | else: |
|
789 | 773 | return self.get_repo_ui_settings(section, key) |
|
790 | 774 | |
|
791 | 775 | def get_svn_patterns(self, section=None): |
|
792 | 776 | if not self.repo_settings: |
|
793 | 777 | return self.get_global_ui_settings(section) |
|
794 | 778 | else: |
|
795 | 779 | return self.get_repo_ui_settings(section) |
|
796 | 780 | |
|
797 | 781 | @assert_repo_settings |
|
798 | 782 | def get_repo_general_settings(self): |
|
799 | 783 | global_settings = self.global_settings.get_all_settings() |
|
800 | 784 | repo_settings = self.repo_settings.get_all_settings() |
|
801 | 785 | filtered_repo_settings = self._filter_general_settings(repo_settings) |
|
802 | 786 | global_settings.update(filtered_repo_settings) |
|
803 | 787 | return global_settings |
|
804 | 788 | |
|
805 | 789 | def get_global_general_settings(self): |
|
806 | 790 | return self.global_settings.get_all_settings() |
|
807 | 791 | |
|
808 | 792 | def get_general_settings(self): |
|
809 | 793 | if not self.repo_settings or self.inherit_global_settings: |
|
810 | 794 | return self.get_global_general_settings() |
|
811 | 795 | else: |
|
812 | 796 | return self.get_repo_general_settings() |
|
813 | 797 | |
|
814 | 798 | def get_repos_location(self): |
|
815 | 799 | return self.global_settings.get_ui_by_key('/').ui_value |
|
816 | 800 | |
|
817 | 801 | def _filter_ui_settings(self, settings): |
|
818 | 802 | filtered_settings = [ |
|
819 | 803 | s for s in settings if self._should_keep_setting(s)] |
|
820 | 804 | return filtered_settings |
|
821 | 805 | |
|
822 | 806 | def _should_keep_setting(self, setting): |
|
823 | 807 | keep = ( |
|
824 | 808 | (setting.section, setting.key) in self._ui_settings or |
|
825 | 809 | setting.section in self._svn_sections) |
|
826 | 810 | return keep |
|
827 | 811 | |
|
828 | 812 | def _filter_general_settings(self, settings): |
|
829 | 813 | keys = [self.get_keyname(key) for key in self.GENERAL_SETTINGS] |
|
830 | 814 | return { |
|
831 | 815 | k: settings[k] |
|
832 | 816 | for k in settings if k in keys} |
|
833 | 817 | |
|
834 | 818 | def _collect_all_settings(self, global_=False): |
|
835 | 819 | settings = self.global_settings if global_ else self.repo_settings |
|
836 | 820 | result = {} |
|
837 | 821 | |
|
838 | 822 | for section, key in self._ui_settings: |
|
839 | 823 | ui = settings.get_ui_by_section_and_key(section, key) |
|
840 | 824 | result_key = self._get_form_ui_key(section, key) |
|
841 | 825 | |
|
842 | 826 | if ui: |
|
843 | 827 | if section in ('hooks', 'extensions'): |
|
844 | 828 | result[result_key] = ui.ui_active |
|
845 | 829 | elif result_key in ['vcs_git_lfs_enabled']: |
|
846 | 830 | result[result_key] = ui.ui_active |
|
847 | 831 | else: |
|
848 | 832 | result[result_key] = ui.ui_value |
|
849 | 833 | |
|
850 | 834 | for name in self.GENERAL_SETTINGS: |
|
851 | 835 | setting = settings.get_setting_by_name(name) |
|
852 | 836 | if setting: |
|
853 | 837 | result_key = self.get_keyname(name) |
|
854 | 838 | result[result_key] = setting.app_settings_value |
|
855 | 839 | |
|
856 | 840 | return result |
|
857 | 841 | |
|
858 | 842 | def _get_form_ui_key(self, section, key): |
|
859 | 843 | return '{section}_{key}'.format( |
|
860 | 844 | section=section, key=key.replace('.', '_')) |
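
_get_form_ui_key() defines the contract between the ui tables and the formencode schemas in the forms module shown earlier; a quick sketch of the mapping:

    def _get_form_ui_key(section, key):
        return '{section}_{key}'.format(section=section, key=key.replace('.', '_'))

    print(_get_form_ui_key('hooks', 'changegroup.repo_size'))
    # -> 'hooks_changegroup_repo_size', matching the field in _BaseVcsSettingsForm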
|
861 | 845 | |
|
862 | 846 | def _create_or_update_ui( |
|
863 | 847 | self, settings, section, key, value=None, active=None): |
|
864 | 848 | ui = settings.get_ui_by_section_and_key(section, key) |
|
865 | 849 | if not ui: |
|
866 | 850 | active = True if active is None else active |
|
867 | 851 | settings.create_ui_section_value( |
|
868 | 852 | section, value, key=key, active=active) |
|
869 | 853 | else: |
|
870 | 854 | if active is not None: |
|
871 | 855 | ui.ui_active = active |
|
872 | 856 | if value is not None: |
|
873 | 857 | ui.ui_value = value |
|
874 | 858 | Session().add(ui) |
|
875 | 859 | |
|
876 | 860 | def _create_svn_settings(self, settings, data): |
|
877 | 861 | svn_settings = { |
|
878 | 862 | 'new_svn_branch': self.SVN_BRANCH_SECTION, |
|
879 | 863 | 'new_svn_tag': self.SVN_TAG_SECTION |
|
880 | 864 | } |
|
881 | 865 | for key in svn_settings: |
|
882 | 866 | if data.get(key): |
|
883 | 867 | settings.create_ui_section_value(svn_settings[key], data[key]) |
|
884 | 868 | |
|
885 | 869 | def _create_or_update_general_settings(self, settings, data): |
|
886 | 870 | for name in self.GENERAL_SETTINGS: |
|
887 | 871 | data_key = self.get_keyname(name) |
|
888 | 872 | if data_key not in data: |
|
889 | 873 | raise ValueError( |
|
890 | 874 | f'The given data does not contain {data_key} key') |
|
891 | 875 | setting = settings.create_or_update_setting( |
|
892 | 876 | name, data[data_key], 'bool') |
|
893 | 877 | Session().add(setting) |
|
894 | 878 | |
|
895 | 879 | def _get_settings_keys(self, settings, data): |
|
896 | 880 | data_keys = [self._get_form_ui_key(*s) for s in settings] |
|
897 | 881 | for data_key in data_keys: |
|
898 | 882 | if data_key not in data: |
|
899 | 883 | raise ValueError( |
|
900 | 884 | f'The given data does not contain {data_key} key') |
|
901 | 885 | return data_keys |
|
902 | 886 | |
|
903 | 887 | def create_largeobjects_dirs_if_needed(self, repo_store_path): |
|
904 | 888 | """ |
|
905 | 889 | This is subscribed to the `pyramid.events.ApplicationCreated` event. It |
|
906 | 890 | does a repository scan if enabled in the settings. |
|
907 | 891 | """ |
|
908 | 892 | |
|
909 | 893 | from rhodecode.lib.vcs.backends.hg import largefiles_store |
|
910 | 894 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
911 | 895 | |
|
912 | 896 | paths = [ |
|
913 | 897 | largefiles_store(repo_store_path), |
|
914 | 898 | lfs_store(repo_store_path)] |
|
915 | 899 | |
|
916 | 900 | for path in paths: |
|
917 | 901 | if os.path.isdir(path): |
|
918 | 902 | continue |
|
919 | 903 | if os.path.isfile(path): |
|
920 | 904 | continue |
|
921 | 905 | # neither a file nor a dir; try to create it
|
922 | 906 | try: |
|
923 | 907 | os.makedirs(path) |
|
924 | 908 | except Exception: |
|
925 | 909 | log.warning('Failed to create largefiles dir: %s', path)
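
As the docstring notes, this method runs on Pyramid's ApplicationCreated event. A hedged sketch of how such a subscriber could be wired up; the settings key and default path are illustrative assumptions, and config.scan() must pick up the decorated function:

    from pyramid.events import ApplicationCreated, subscriber

    @subscriber(ApplicationCreated)
    def create_largeobjects_dirs(event):
        registry = event.app.registry  # the application registry at startup
        # 'repo_store.path' and its default are illustrative, not the real key
        repo_store_path = registry.settings.get(
            'repo_store.path', '/var/opt/rhodecode_repo_store')
        VcsSettingsModel().create_largeobjects_dirs_if_needed(repo_store_path)
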
@@ -1,375 +1,339 b'' | |||
|
1 | 1 | ## snippet for displaying vcs settings |
|
2 | 2 | ## usage: |
|
3 | 3 | ## <%namespace name="vcss" file="/base/vcssettings.mako"/> |
|
4 | 4 | ## ${vcss.vcs_settings_fields()} |
|
5 | 5 | |
|
6 | 6 | <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, allow_repo_location_change=False, **kwargs)"> |
|
7 | 7 | % if display_globals: |
|
8 | 8 | <div class="panel panel-default"> |
|
9 | 9 | <div class="panel-heading" id="general"> |
|
10 | 10 | <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ΒΆ</a></h3> |
|
11 | 11 | </div> |
|
12 | 12 | <div class="panel-body"> |
|
13 | 13 | <div class="field"> |
|
14 | 14 | <div class="checkbox"> |
|
15 | 15 | ${h.checkbox('web_push_ssl' + suffix, 'True')} |
|
16 | 16 | <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label> |
|
17 | 17 | </div> |
|
18 | 18 | <div class="label"> |
|
19 | 19 | <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If SSL certificate is missing it will return a HTTP Error 406: Not Acceptable.')}</span> |
|
20 | 20 | </div> |
|
21 | 21 | </div> |
|
22 | 22 | </div> |
|
23 | 23 | </div> |
|
24 | 24 | % endif |
|
25 | 25 | |
|
26 | 26 | % if display_globals: |
|
27 | 27 | <div class="panel panel-default"> |
|
28 | 28 | <div class="panel-heading" id="vcs-storage-options"> |
|
29 | 29 | <h3 class="panel-title">${_('Main Storage Location')}<a class="permalink" href="#vcs-storage-options"> ΒΆ</a></h3> |
|
30 | 30 | </div> |
|
31 | 31 | <div class="panel-body"> |
|
32 | 32 | <div class="field"> |
|
33 | 33 | <div class="inputx locked_input"> |
|
34 | 34 | %if allow_repo_location_change: |
|
35 | 35 | ${h.text('paths_root_path',size=59,readonly="readonly", class_="disabled")} |
|
36 | 36 | <span id="path_unlock" class="tooltip" |
|
37 | 37 | title="${h.tooltip(_('Click to unlock. You must restart RhodeCode in order to make this setting take effect.'))}"> |
|
38 | 38 | <div class="btn btn-default lock_input_button"><i id="path_unlock_icon" class="icon-lock"></i></div> |
|
39 | 39 | </span> |
|
40 | 40 | %else: |
|
41 | 41 | ${_('Repository location change is disabled. You can enable this by changing the `allow_repo_location_change` inside .ini file.')} |
|
42 | 42 | ## form still requires this but we cannot internally change it anyway |
|
43 | 43 | ${h.hidden('paths_root_path',size=30,readonly="readonly", class_="disabled")} |
|
44 | 44 | %endif |
|
45 | 45 | </div> |
|
46 | 46 | </div> |
|
47 | 47 | <div class="label"> |
|
48 | 48 | <span class="help-block">${_('Filesystem location where repositories should be stored. After changing this value a restart and rescan of the repository folder are required.')}</span> |
|
49 | 49 | </div> |
|
50 | 50 | </div> |
|
51 | 51 | </div> |
|
52 | 52 | % endif |
|
53 | 53 | |
|
54 | 54 | % if display_globals or repo_type in ['git', 'hg']: |
|
55 | 55 | <div class="panel panel-default"> |
|
56 | 56 | <div class="panel-heading" id="vcs-hooks-options"> |
|
57 | 57 | <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ΒΆ</a></h3> |
|
58 | 58 | </div> |
|
59 | 59 | <div class="panel-body"> |
|
60 | 60 | <div class="field"> |
|
61 | 61 | <div class="checkbox"> |
|
62 | 62 | ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)} |
|
63 | 63 | <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label> |
|
64 | 64 | </div> |
|
65 | 65 | |
|
66 | 66 | <div class="label"> |
|
67 | 67 | <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span> |
|
68 | 68 | </div> |
|
69 | 69 | <div class="checkbox"> |
|
70 | 70 | ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)} |
|
71 | 71 | <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label> |
|
72 | 72 | </div> |
|
73 | 73 | <div class="label"> |
|
74 | 74 | <span class="help-block">${_('Execute Built in pre/post push hooks. This also executes rcextensions hooks.')}</span> |
|
75 | 75 | </div> |
|
76 | 76 | <div class="checkbox"> |
|
77 | 77 | ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)} |
|
78 | 78 | <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label> |
|
79 | 79 | </div> |
|
80 | 80 | <div class="label"> |
|
81 | 81 | <span class="help-block">${_('Execute Built in pre/post pull hooks. This also executes rcextensions hooks.')}</span> |
|
82 | 82 | </div> |
|
83 | 83 | </div> |
|
84 | 84 | </div> |
|
85 | 85 | </div> |
|
86 | 86 | % endif |
|
87 | 87 | |
|
88 | 88 | % if display_globals or repo_type in ['hg']: |
|
89 | 89 | <div class="panel panel-default"> |
|
90 | 90 | <div class="panel-heading" id="vcs-hg-options"> |
|
91 | 91 | <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ΒΆ</a></h3> |
|
92 | 92 | </div> |
|
93 | 93 | <div class="panel-body"> |
|
94 | 94 | <div class="checkbox"> |
|
95 | 95 | ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)} |
|
96 | 96 | <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label> |
|
97 | 97 | </div> |
|
98 | 98 | <div class="label"> |
|
99 | 99 | % if display_globals: |
|
100 | 100 | <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span> |
|
101 | 101 | % else: |
|
102 | 102 | <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span> |
|
103 | 103 | % endif |
|
104 | 104 | </div> |
|
105 | 105 | |
|
106 | 106 | % if display_globals: |
|
107 | 107 | <div class="field"> |
|
108 | 108 | <div class="input"> |
|
109 | 109 | ${h.text('largefiles_usercache' + suffix, size=59)} |
|
110 | 110 | </div> |
|
111 | 111 | </div> |
|
112 | 112 | <div class="label"> |
|
113 | 113 | <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span> |
|
114 | 114 | </div> |
|
115 | 115 | % endif |
|
116 | 116 | |
|
117 | 117 | <div class="checkbox"> |
|
118 | 118 | ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)} |
|
119 | 119 | <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label> |
|
120 | 120 | </div> |
|
121 | 121 | <div class="label"> |
|
122 | 122 | <span class="help-block">${_('When this is enabled, all commits in the repository are seen as public commits by clients.')}</span>
|
123 | 123 | </div> |
|
124 | 124 | |
|
125 | 125 | <div class="checkbox"> |
|
126 | 126 | ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)} |
|
127 | 127 | <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extensions')}</label>
|
128 | 128 | </div> |
|
129 | 129 | <div class="label"> |
|
130 | 130 | % if display_globals: |
|
131 | 131 | <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span> |
|
132 | 132 | % else: |
|
133 | 133 | <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span> |
|
134 | 134 | % endif |
|
135 | 135 | </div> |
|
136 | 136 | |
|
137 | 137 | </div> |
|
138 | 138 | </div> |
|
139 | 139 | % endif |
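
On the Mercurial side these toggles correspond to ordinary hgrc configuration. A sketch of the equivalent hgrc, assuming the evolve and topic extensions are installed from the hg-evolve package:

    [extensions]
    largefiles =
    evolve =
    topic =

    [phases]
    ; a publishing server marks pushed changesets as public
    publish = true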
|
140 | 140 | |
|
141 | 141 | % if display_globals or repo_type in ['git']: |
|
142 | 142 | <div class="panel panel-default"> |
|
143 | 143 | <div class="panel-heading" id="vcs-git-options"> |
|
144 | 144 | <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ¶</a></h3>
|
145 | 145 | </div> |
|
146 | 146 | <div class="panel-body"> |
|
147 | 147 | <div class="checkbox"> |
|
148 | 148 | ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)} |
|
149 | 149 | <label for="vcs_git_lfs_enabled${suffix}">${_('Enable LFS extension')}</label>
|
150 | 150 | </div> |
|
151 | 151 | <div class="label"> |
|
152 | 152 | % if display_globals: |
|
153 | 153 | <span class="help-block">${_('Enable the LFS extension for all repositories.')}</span>
|
154 | 154 | % else: |
|
155 | 155 | <span class="help-block">${_('Enable the LFS extension for this repository.')}</span>
|
156 | 156 | % endif |
|
157 | 157 | </div> |
|
158 | 158 | |
|
159 | 159 | % if display_globals: |
|
160 | 160 | <div class="field"> |
|
161 | 161 | <div class="input"> |
|
162 | 162 | ${h.text('vcs_git_lfs_store_location' + suffix, size=59)} |
|
163 | 163 | </div> |
|
164 | 164 | </div> |
|
165 | 165 | <div class="label"> |
|
166 | 166 | <span class="help-block">${_('Filesystem location where Git LFS objects should be stored.')}</span>
|
167 | 167 | </div> |
|
168 | 168 | % endif |
|
169 | 169 | </div> |
|
170 | 170 | </div> |
|
171 | 171 | % endif |
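
Enabling LFS server-side does not change clients by itself; each repository still opts in with the standard git-lfs client. A sketch using stock git-lfs commands (the tracked pattern is illustrative):

    git lfs install
    git lfs track "*.bin"
    git add .gitattributes
    git commit -m "Track binary assets via LFS"
    git push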
|
172 | 172 | |
|
173 | ||
|
174 | % if display_globals: | |
|
175 | <div class="panel panel-default"> | |
|
176 | <div class="panel-heading" id="vcs-global-svn-options"> | |
|
177 | <h3 class="panel-title">${_('Global Subversion Settings')}<a class="permalink" href="#vcs-global-svn-options"> ¶</a></h3> | 
|
178 | </div> | |
|
179 | <div class="panel-body"> | |
|
180 | <div class="field"> | |
|
181 | <div class="checkbox"> | |
|
182 | ${h.checkbox('vcs_svn_proxy_http_requests_enabled' + suffix, 'True', **kwargs)} | |
|
183 | <label for="vcs_svn_proxy_http_requests_enabled${suffix}">${_('Proxy subversion HTTP requests')}</label> | |
|
184 | </div> | |
|
185 | <div class="label"> | |
|
186 | <span class="help-block"> | |
|
187 | ${_('Subversion HTTP Support. Enables communication with SVN over the HTTP protocol.')} | 
|
188 | <a href="${h.route_url('enterprise_svn_setup')}" target="_blank">${_('SVN Protocol setup Documentation')}</a>. | |
|
189 | </span> | |
|
190 | </div> | |
|
191 | </div> | |
|
192 | <div class="field"> | |
|
193 | <div class="label"> | |
|
194 | <label for="vcs_svn_proxy_http_server_url">${_('Subversion HTTP Server URL')}</label><br/> | |
|
195 | </div> | |
|
196 | <div class="input"> | |
|
197 | ${h.text('vcs_svn_proxy_http_server_url',size=59)} | |
|
198 | % if c.svn_proxy_generate_config: | |
|
199 | <span class="buttons"> | |
|
200 | <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Generate Apache Config')}</button> | |
|
201 | </span> | |
|
202 | % endif | |
|
203 | </div> | |
|
204 | </div> | |
|
205 | </div> | |
|
206 | </div> | |
|
207 | % endif | |
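
The generated Apache configuration is RhodeCode-specific, but the mechanism underneath is mod_dav_svn. A generic stanza for comparison, purely illustrative (the location and parent path are hypothetical, not what the generator emits):

    <Location /svn>
        DAV svn
        SVNParentPath /var/opt/rhodecode_repo_store
        SVNListParentPath On
        Require all granted
    </Location>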
|
208 | ||
|
209 | 173 | % if display_globals or repo_type in ['svn']: |
|
210 | 174 | <div class="panel panel-default"> |
|
211 | 175 | <div class="panel-heading" id="vcs-svn-options"> |
|
212 | 176 | <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ¶</a></h3>
|
213 | 177 | </div> |
|
214 | 178 | <div class="panel-body"> |
|
215 | 179 | <div class="field"> |
|
216 | 180 | <div class="content" > |
|
217 | 181 | <label>${_('Repository patterns')}</label><br/> |
|
218 | 182 | </div> |
|
219 | 183 | </div> |
|
220 | 184 | <div class="label"> |
|
221 | 185 | <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". E.g.: "/branches/*"')}</span>
|
222 | 186 | </div> |
|
223 | 187 | |
|
224 | 188 | <div class="field branch_patterns"> |
|
225 | 189 | <div class="input" > |
|
226 | 190 | <label>${_('Branches')}:</label><br/> |
|
227 | 191 | </div> |
|
228 | 192 | % if svn_branch_patterns: |
|
229 | 193 | % for branch in svn_branch_patterns: |
|
230 | 194 | <div class="input adjacent" id="${'id%s' % branch.ui_id}"> |
|
231 | 195 | ${h.hidden('branch_ui_key' + suffix, branch.ui_key)} |
|
232 | 196 | ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')} |
|
233 | 197 | % if kwargs.get('disabled') != 'disabled': |
|
234 | 198 | <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')"> |
|
235 | 199 | ${_('Delete')} |
|
236 | 200 | </span> |
|
237 | 201 | % endif |
|
238 | 202 | </div> |
|
239 | 203 | % endfor |
|
240 | 204 | %endif |
|
241 | 205 | </div> |
|
242 | 206 | % if kwargs.get('disabled') != 'disabled': |
|
243 | 207 | <div class="field branch_patterns"> |
|
244 | 208 | <div class="input" > |
|
245 | 209 | ${h.text('new_svn_branch' + suffix, size=59, placeholder='New branch pattern')}
|
246 | 210 | </div> |
|
247 | 211 | </div> |
|
248 | 212 | % endif |
|
249 | 213 | <div class="field tag_patterns"> |
|
250 | 214 | <div class="input" > |
|
251 | 215 | <label>${_('Tags')}:</label><br/> |
|
252 | 216 | </div> |
|
253 | 217 | % if svn_tag_patterns: |
|
254 | 218 | % for tag in svn_tag_patterns: |
|
255 | 219 | <div class="input" id="${'id%s' % tag.ui_id + suffix}"> |
|
256 | 220 | ${h.hidden('tag_ui_key' + suffix, tag.ui_key)} |
|
257 | 221 | ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')} |
|
258 | 222 | % if kwargs.get('disabled') != 'disabled': |
|
259 | 223 | <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')"> |
|
260 | 224 | ${_('Delete')} |
|
261 | 225 | </span> |
|
262 | 226 | %endif |
|
263 | 227 | </div> |
|
264 | 228 | % endfor |
|
265 | 229 | % endif |
|
266 | 230 | </div> |
|
267 | 231 | % if kwargs.get('disabled') != 'disabled': |
|
268 | 232 | <div class="field tag_patterns"> |
|
269 | 233 | <div class="input" > |
|
270 | 234 | ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')} |
|
271 | 235 | </div> |
|
272 | 236 | </div> |
|
273 | 237 | %endif |
|
274 | 238 | </div> |
|
275 | 239 | </div> |
|
276 | 240 | % else: |
|
277 | 241 | ${h.hidden('new_svn_branch' + suffix, '')} |
|
278 | 242 | ${h.hidden('new_svn_tag' + suffix, '')} |
|
279 | 243 | % endif |
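
Typical pattern values, extending the "/branches/*" example from the help text above (the project layout is illustrative):

    /branches/*
    /tags/*
    /project-a/branches/*
    /project-a/tags/*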
|
280 | 244 | |
|
281 | 245 | |
|
282 | 246 | % if display_globals or repo_type in ['hg', 'git']: |
|
283 | 247 | <div class="panel panel-default"> |
|
284 | 248 | <div class="panel-heading" id="vcs-pull-requests-options"> |
|
285 | 249 | <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
|
286 | 250 | </div> |
|
287 | 251 | <div class="panel-body"> |
|
288 | 252 | <div class="checkbox"> |
|
289 | 253 | ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)} |
|
290 | 254 | <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label> |
|
291 | 255 | </div> |
|
292 | 256 | <div class="label"> |
|
293 | 257 | <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextensions package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
|
294 | 258 | </div> |
|
295 | 259 | <div class="checkbox"> |
|
296 | 260 | ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)} |
|
297 | 261 | <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label> |
|
298 | 262 | </div> |
|
299 | 263 | <div class="label"> |
|
300 | 264 | <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span> |
|
301 | 265 | </div> |
|
302 | 266 | </div> |
|
303 | 267 | </div> |
|
304 | 268 | % endif |
|
305 | 269 | |
|
306 | 270 | % if display_globals or repo_type in ['hg', 'git', 'svn']: |
|
307 | 271 | <div class="panel panel-default"> |
|
308 | 272 | <div class="panel-heading" id="vcs-diff-cache-options">
|
309 | 273 | <h3 class="panel-title">${_('Diff cache')}<a class="permalink" href="#vcs-diff-cache-options"> ¶</a></h3>
|
310 | 274 | </div> |
|
311 | 275 | <div class="panel-body"> |
|
312 | 276 | <div class="checkbox"> |
|
313 | 277 | ${h.checkbox('rhodecode_diff_cache' + suffix, 'True', **kwargs)} |
|
314 | 278 | <label for="rhodecode_diff_cache${suffix}">${_('Enable caching of diffs for pull requests and commits')}</label>
|
315 | 279 | </div> |
|
316 | 280 | </div> |
|
317 | 281 | </div> |
|
318 | 282 | % endif |
|
319 | 283 | |
|
320 | 284 | % if display_globals or repo_type in ['hg',]: |
|
321 | 285 | <div class="panel panel-default"> |
|
322 | 286 | <div class="panel-heading" id="vcs-hg-pull-requests-options">
|
323 | 287 | <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ¶</a></h3>
|
324 | 288 | </div> |
|
325 | 289 | <div class="panel-body"> |
|
326 | 290 | ## Specific HG settings |
|
327 | 291 | <div class="checkbox"> |
|
328 | 292 | ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)} |
|
329 | 293 | <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label> |
|
330 | 294 | </div> |
|
331 | 295 | <div class="label"> |
|
332 | 296 | <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via the web interface.')}</span>
|
333 | 297 | </div> |
|
334 | 298 | |
|
335 | 299 | <div class="checkbox"> |
|
336 | 300 | ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)} |
|
337 | 301 | <label for="rhodecode_hg_close_branch_before_merging${suffix}">${_('Close branch before merging it')}</label>
|
338 | 302 | </div> |
|
339 | 303 | <div class="label"> |
|
340 | 304 | <span class="help-block">${_('Close the branch before merging it into the destination branch. This has no effect when the rebase strategy is used.')}</span>
|
341 | 305 | </div> |
|
342 | 306 | |
|
343 | 307 | |
|
344 | 308 | </div> |
|
345 | 309 | </div> |
|
346 | 310 | % endif |
|
347 | 311 | |
|
348 | 312 | ## DISABLED FOR GIT FOR NOW as the rebase/close is not supported yet |
|
349 | 313 | ## % if display_globals or repo_type in ['git']: |
|
350 | 314 | ## <div class="panel panel-default"> |
|
351 | 315 | ## <div class="panel-heading" id="vcs-git-pull-requests-options">
|
352 | 316 | ## <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ¶</a></h3>
|
353 | 317 | ## </div> |
|
354 | 318 | ## <div class="panel-body"> |
|
355 | 319 | ## <div class="checkbox"> |
|
356 | 320 | ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)} |
|
357 | 321 | ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label> |
|
358 | 322 | ## </div> |
|
359 | 323 | ## <div class="label"> |
|
360 | 324 | ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span> |
|
361 | 325 | ## </div> |
|
362 | 326 | ## |
|
363 | 327 | ## <div class="checkbox"> |
|
364 | 328 | ## ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)} |
|
365 | 329 | ## <label for="rhodecode_git_close_branch_before_merging${suffix}">${_('Delete branch after merging it')}</label>
|
366 | 330 | ## </div> |
|
367 | 331 | ## <div class="label"> |
|
368 | 332 | ## <span class="help-block">${_('Delete branch after merging it into destination branch. No effect when rebase strategy is used.')}</span>
|
369 | 333 | ## </div> |
|
370 | 334 | ## </div> |
|
371 | 335 | ## </div> |
|
372 | 336 | ## % endif |
|
373 | 337 | |
|
374 | 338 | |
|
375 | 339 | </%def> |
@@ -1,223 +1,224 b'' | |||
|
1 | 1 | |
|
2 | 2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
3 | 3 | # |
|
4 | 4 | # This program is free software: you can redistribute it and/or modify |
|
5 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | 6 | # (only), as published by the Free Software Foundation. |
|
7 | 7 | # |
|
8 | 8 | # This program is distributed in the hope that it will be useful, |
|
9 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | 11 | # GNU General Public License for more details. |
|
12 | 12 | # |
|
13 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
14 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | 15 | # |
|
16 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | 19 | |
|
20 | 20 | import pytest |
|
21 | 21 | |
|
22 | 22 | from rhodecode.lib.pyramid_utils import get_app_config |
|
23 | 23 | from rhodecode.tests.fixture import TestINI |
|
24 | 24 | from rhodecode.tests.server_utils import RcVCSServer |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | @pytest.fixture(scope='session') |
|
28 | 28 | def vcsserver(request, vcsserver_port, vcsserver_factory): |
|
29 | 29 | """ |
|
30 | 30 | Session-scoped VCSServer.
|
31 | 31 | |
|
32 | 32 | Tests which need the VCSServer have to rely on this fixture in order |
|
33 | 33 | to ensure it will be running. |
|
34 | 34 | |
|
35 | 35 | For specific needs, the fixture vcsserver_factory can be used. It allows

36 | 36 | adjusting the configuration file for the test run.
|
37 | 37 | |
|
38 | 38 | Command line args: |
|
39 | 39 | |
|
40 | 40 | --without-vcsserver: Allows switching this fixture off. You have to
|
41 | 41 | manually start the server. |
|
42 | 42 | |
|
43 | 43 | --vcsserver-port: Will expect the VCSServer to listen on this port. |
|
44 | 44 | """ |
|
45 | 45 | |
|
46 | 46 | if not request.config.getoption('with_vcsserver'): |
|
47 | 47 | return None |
|
48 | 48 | |
|
49 | 49 | return vcsserver_factory( |
|
50 | 50 | request, vcsserver_port=vcsserver_port) |
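
Both command line options documented in this docstring are ordinary pytest flags; a usage sketch (the port value and test path are illustrative):

    pytest --without-vcsserver rhodecode/tests/
    pytest --vcsserver-port=9900 rhodecode/tests/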
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | @pytest.fixture(scope='session') |
|
54 | 54 | def vcsserver_factory(tmpdir_factory): |
|
55 | 55 | """ |
|
56 | 56 | Use this if you need a running vcsserver with a special configuration. |
|
57 | 57 | """ |
|
58 | 58 | |
|
59 | 59 | def factory(request, overrides=(), vcsserver_port=None, |
|
60 | 60 | log_file=None, workers='2'): |
|
61 | 61 | |
|
62 | 62 | if vcsserver_port is None: |
|
63 | 63 | vcsserver_port = get_available_port() |
|
64 | 64 | |
|
65 | 65 | overrides = list(overrides) |
|
66 | 66 | overrides.append({'server:main': {'port': vcsserver_port}}) |
|
67 | 67 | |
|
68 | 68 | option_name = 'vcsserver_config_http' |
|
69 | 69 | override_option_name = 'vcsserver_config_override' |
|
70 | 70 | config_file = get_config( |
|
71 | 71 | request.config, option_name=option_name, |
|
72 | 72 | override_option_name=override_option_name, overrides=overrides, |
|
73 | 73 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
74 | 74 | prefix='test_vcs_') |
|
75 | 75 | |
|
76 | 76 | server = RcVCSServer(config_file, log_file, workers) |
|
77 | 77 | server.start() |
|
78 | 78 | |
|
79 | 79 | @request.addfinalizer |
|
80 | 80 | def cleanup(): |
|
81 | 81 | server.shutdown() |
|
82 | 82 | |
|
83 | 83 | server.wait_until_ready() |
|
84 | 84 | return server |
|
85 | 85 | |
|
86 | 86 | return factory |
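
A hypothetical use of the factory from a test module, passing an extra override in the same list-of-section-dicts shape used above (the section values are illustrative, not required settings):

    def test_with_tuned_vcsserver(request, vcsserver_factory):
        # Boots a dedicated VCSServer for this test; the factory registers
        # shutdown via request.addfinalizer, so no manual cleanup is needed.
        server = vcsserver_factory(
            request,
            overrides=[{'server:main': {'threads': '1'}}],
            workers='1',
        )
        # The factory returns only after server.wait_until_ready().
        assert server is not None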
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | def _use_log_level(config): |
|
90 | 90 | level = config.getoption('test_loglevel') or 'critical' |
|
91 | 91 | return level.upper() |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | @pytest.fixture(scope='session') |
|
95 | 95 | def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port): |
|
96 | 96 | option_name = 'pyramid_config' |
|
97 | 97 | log_level = _use_log_level(request.config) |
|
98 | 98 | |
|
99 | 99 | overrides = [ |
|
100 | 100 | {'server:main': {'port': rcserver_port}}, |
|
101 | 101 | {'app:main': { |
|
102 | 102 | 'cache_dir': '%(here)s/rc_data', |
|
103 | 103 | 'vcs.server': f'localhost:{vcsserver_port}', |
|
104 | 104 | # johbo: We will always start the VCSServer on our own based on the |
|
105 | 105 | # fixtures of the test cases. For the test run it must always be |
|
106 | 106 | # off in the INI file. |
|
107 | 107 | 'vcs.start_server': 'false', |
|
108 | 108 | |
|
109 | 109 | 'vcs.server.protocol': 'http', |
|
110 | 110 | 'vcs.scm_app_implementation': 'http', |
|
111 | 'vcs.svn.proxy.enabled': 'true', | |
|
111 | 112 | 'vcs.hooks.protocol': 'http', |
|
112 | 113 | 'vcs.hooks.host': '*', |
|
113 | 114 | 'app.service_api.token': 'service_secret_token', |
|
114 | 115 | }}, |
|
115 | 116 | |
|
116 | 117 | {'handler_console': { |
|
117 | 118 | 'class': 'StreamHandler', |
|
118 | 119 | 'args': '(sys.stderr,)', |
|
119 | 120 | 'level': log_level, |
|
120 | 121 | }}, |
|
121 | 122 | |
|
122 | 123 | ] |
|
123 | 124 | |
|
124 | 125 | filename = get_config( |
|
125 | 126 | request.config, option_name=option_name, |
|
126 | 127 | override_option_name='{}_override'.format(option_name), |
|
127 | 128 | overrides=overrides, |
|
128 | 129 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
129 | 130 | prefix='test_rce_') |
|
130 | 131 | return filename |
|
131 | 132 | |
|
132 | 133 | |
|
133 | 134 | @pytest.fixture(scope='session') |
|
134 | 135 | def ini_settings(ini_config): |
|
135 | 136 | ini_path = ini_config |
|
136 | 137 | return get_app_config(ini_path) |
|
137 | 138 | |
|
138 | 139 | |
|
139 | 140 | def get_available_port(min_port=40000, max_port=55555): |
|
140 | 141 | from rhodecode.lib.utils2 import get_available_port as _get_port |
|
141 | 142 | return _get_port(min_port, max_port) |
|
142 | 143 | |
|
143 | 144 | |
|
144 | 145 | @pytest.fixture(scope='session') |
|
145 | 146 | def rcserver_port(request): |
|
146 | 147 | port = get_available_port() |
|
147 | 148 | print(f'Using rhodecode port {port}') |
|
148 | 149 | return port |
|
149 | 150 | |
|
150 | 151 | |
|
151 | 152 | @pytest.fixture(scope='session') |
|
152 | 153 | def vcsserver_port(request): |
|
153 | 154 | port = request.config.getoption('--vcsserver-port') |
|
154 | 155 | if port is None: |
|
155 | 156 | port = get_available_port() |
|
156 | 157 | print(f'Using vcsserver port {port}') |
|
157 | 158 | return port |
|
158 | 159 | |
|
159 | 160 | |
|
160 | 161 | @pytest.fixture(scope='session') |
|
161 | 162 | def available_port_factory(): |
|
162 | 163 | """ |
|
163 | 164 | Returns a callable which returns free port numbers. |
|
164 | 165 | """ |
|
165 | 166 | return get_available_port |
|
166 | 167 | |
|
167 | 168 | |
|
168 | 169 | @pytest.fixture() |
|
169 | 170 | def available_port(available_port_factory): |
|
170 | 171 | """ |
|
171 | 172 | Gives you one free port for the current test. |
|
172 | 173 | |
|
173 | 174 | Uses "available_port_factory" to retrieve the port. |
|
174 | 175 | """ |
|
175 | 176 | return available_port_factory() |
|
176 | 177 | |
|
177 | 178 | |
|
178 | 179 | @pytest.fixture(scope='session') |
|
179 | 180 | def testini_factory(tmpdir_factory, ini_config): |
|
180 | 181 | """ |
|
181 | 182 | Factory to create an INI file based on TestINI. |
|
182 | 183 | |
|
183 | 184 | It will make sure to place the INI file in the correct directory. |
|
184 | 185 | """ |
|
185 | 186 | basetemp = tmpdir_factory.getbasetemp().strpath |
|
186 | 187 | return TestIniFactory(basetemp, ini_config) |
|
187 | 188 | |
|
188 | 189 | |
|
189 | 190 | class TestIniFactory(object): |
|
190 | 191 | |
|
191 | 192 | def __init__(self, basetemp, template_ini): |
|
192 | 193 | self._basetemp = basetemp |
|
193 | 194 | self._template_ini = template_ini |
|
194 | 195 | |
|
195 | 196 | def __call__(self, ini_params, new_file_prefix='test'): |
|
196 | 197 | ini_file = TestINI( |
|
197 | 198 | self._template_ini, ini_params=ini_params, |
|
198 | 199 | new_file_prefix=new_file_prefix, dir=self._basetemp) |
|
199 | 200 | result = ini_file.create() |
|
200 | 201 | return result |
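
A hypothetical test using `testini_factory`; `ini_params` takes the same list-of-section-dicts shape as the `overrides` used elsewhere in this module:

    def test_with_custom_ini(testini_factory):
        # Derives a new INI from the session ini_config template, with one
        # option overridden; the file lands in the pytest basetemp directory.
        ini_path = testini_factory([{'app:main': {'debug': 'false'}}])
        assert ini_path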
|
201 | 202 | |
|
202 | 203 | |
|
203 | 204 | def get_config( |
|
204 | 205 | config, option_name, override_option_name, overrides=None, |
|
205 | 206 | basetemp=None, prefix='test'): |
|
206 | 207 | """ |
|
207 | 208 | Find a configuration file and apply overrides for the given `prefix`. |
|
208 | 209 | """ |
|
209 | 210 | config_file = ( |
|
210 | 211 | config.getoption(option_name) or config.getini(option_name)) |
|
211 | 212 | if not config_file: |
|
212 | 213 | pytest.exit( |
|
213 | 214 | "Configuration error, could not extract {}.".format(option_name)) |
|
214 | 215 | |
|
215 | 216 | overrides = overrides or [] |
|
216 | 217 | config_override = config.getoption(override_option_name) |
|
217 | 218 | if config_override: |
|
218 | 219 | overrides.append(config_override) |
|
219 | 220 | temp_ini_file = TestINI( |
|
220 | 221 | config_file, ini_params=overrides, new_file_prefix=prefix, |
|
221 | 222 | dir=basetemp) |
|
222 | 223 | |
|
223 | 224 | return temp_ini_file.create() |