caches: use dogpile for sql_cache_short region.
marcink
r2883:f2837b35 default

The requested changes are too big and the content was truncated.
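The change adds a dogpile.cache-backed `rc_cache.sql_cache_short` region (backend `dogpile.cache.rc.memory_lru`, 30 second expiration) and drops `sql_cache_short` from the Beaker regions, as shown in the diffs below. For orientation, a minimal sketch of how such a region behaves, written against the plain `dogpile.cache` memory backend rather than the RhodeCode-specific `dogpile.cache.rc.*` one; the `get_setting` function is a hypothetical stand-in for an expensive SQL lookup:

from dogpile.cache import make_region

# short-lived in-memory region, mirroring expiration_time = 30 from the ini
sql_cache_short = make_region().configure(
    'dogpile.cache.memory',
    expiration_time=30,
)

@sql_cache_short.cache_on_arguments()
def get_setting(name):
    # hypothetical stand-in for an expensive SQL query; the decorated result
    # is cached per argument set and recomputed after 30 seconds
    print('querying database for %s' % name)
    return {'title': 'RhodeCode'}.get(name)

get_setting('title')  # computed and stored in the region (prints)
get_setting('title')  # served from the cache, no query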

@@ -1,738 +1,738 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all email subjects with the given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send an error email from this address
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 127.0.0.1
47 47 port = 5000
48 48
49 49 ##################################
50 50 ## WAITRESS WSGI SERVER ##
51 51 ## Recommended for Development ##
52 52 ##################################
53 53
54 54 use = egg:waitress#main
55 55 ## number of worker threads
56 56 threads = 5
57 57 ## MAX BODY SIZE 100GB
58 58 max_request_body_size = 107374182400
59 59 ## Use poll instead of select, fixes file descriptors limits problems.
60 60 ## May not work on old windows systems.
61 61 asyncore_use_poll = true
62 62
63 63
64 64 ##########################
65 65 ## GUNICORN WSGI SERVER ##
66 66 ##########################
67 67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68 68
69 69 #use = egg:gunicorn#main
70 70 ## Sets the number of process workers. You must set `instance_id = *`
71 71 ## when this option is set to more than one worker, recommended
72 72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 73 ## The `instance_id = *` must be set in the [app:main] section below
74 74 #workers = 2
75 75 ## number of threads for each of the worker, must be set to 1 for gevent
76 76 ## generally recommended to be at 1
77 77 #threads = 1
78 78 ## process name
79 79 #proc_name = rhodecode
80 80 ## type of worker class, one of sync, gevent
81 81 ## for bigger setups it is recommended to use a worker class other than sync
82 82 #worker_class = gevent
83 83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 84 #worker_connections = 10
85 85 ## max number of requests that worker will handle before being gracefully
86 86 ## restarted, could prevent memory leaks
87 87 #max_requests = 1000
88 88 #max_requests_jitter = 30
89 89 ## amount of time a worker can spend with handling a request before it
90 90 ## gets killed and restarted. Set to 6hrs
91 91 #timeout = 21600
92 92
93 93
94 94 ## prefix middleware for RhodeCode.
95 95 ## recommended when using proxy setup.
96 96 ## allows to set RhodeCode under a prefix in server.
97 97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 98 ## And set your prefix like: `prefix = /custom_prefix`
99 99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 100 ## to make your cookies only work on prefix url
101 101 [filter:proxy-prefix]
102 102 use = egg:PasteDeploy#prefix
103 103 prefix = /
104 104
105 105 [app:main]
106 106 use = egg:rhodecode-enterprise-ce
107 107
108 108 ## enable proxy prefix middleware, defined above
109 109 #filter-with = proxy-prefix
110 110
111 111 # During development we want to have the debug toolbar enabled
112 112 pyramid.includes =
113 113 pyramid_debugtoolbar
114 114 rhodecode.lib.middleware.request_wrapper
115 115
116 116 pyramid.reload_templates = true
117 117
118 118 debugtoolbar.hosts = 0.0.0.0/0
119 119 debugtoolbar.exclude_prefixes =
120 120 /css
121 121 /fonts
122 122 /images
123 123 /js
124 124
125 125 ## RHODECODE PLUGINS ##
126 126 rhodecode.includes =
127 127 rhodecode.api
128 128
129 129
130 130 # api prefix url
131 131 rhodecode.api.url = /_admin/api
132 132
133 133
134 134 ## END RHODECODE PLUGINS ##
135 135
136 136 ## encryption key used to encrypt social plugin tokens,
137 137 ## remote_urls with credentials etc, if not set it defaults to
138 138 ## `beaker.session.secret`
139 139 #rhodecode.encrypted_values.secret =
140 140
141 141 ## decryption strict mode (enabled by default). It controls if decryption raises
142 142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 143 #rhodecode.encrypted_values.strict = false
144 144
145 145 ## return gzipped responses from Rhodecode (static files/application)
146 146 gzip_responses = false
147 147
148 148 ## autogenerate javascript routes file on startup
149 149 generate_js_files = false
150 150
151 151 ## Optional Languages
152 152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
153 153 lang = en
154 154
155 155 ## perform a full repository scan on each server start, this should be
156 156 ## set to false after first startup, to allow faster server restarts.
157 157 startup.import_repos = false
158 158
159 159 ## Uncomment and set this path to use archive download cache.
160 160 ## Once enabled, generated archives will be cached at this location
161 161 ## and served from the cache during subsequent requests for the same archive of
162 162 ## the repository.
163 163 #archive_cache_dir = /tmp/tarballcache
164 164
165 165 ## URL at which the application is running. This is used for bootstrapping
166 166 ## requests in context when no web request is available. Used in ishell, or
167 167 ## SSH calls. Set this for events to receive proper url for SSH calls.
168 168 app.base_url = http://rhodecode.local
169 169
170 170 ## change this to unique ID for security
171 171 app_instance_uuid = rc-production
172 172
173 173 ## cut off limit for large diffs (size in bytes). If overall diff size on
174 174 ## commit, or pull request exceeds this limit this diff will be displayed
175 175 ## partially. E.g 512000 == 512Kb
176 176 cut_off_limit_diff = 512000
177 177
178 178 ## cut off limit for large files inside diffs (size in bytes). Each individual
179 179 ## file inside diff which exceeds this limit will be displayed partially.
180 180 ## E.g 128000 == 128Kb
181 181 cut_off_limit_file = 128000
182 182
183 183 ## use cache version of scm repo everywhere
184 184 vcs_full_cache = true
185 185
186 186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
187 187 ## Normally this is controlled by proper http flags sent from http server
188 188 force_https = false
189 189
190 190 ## use Strict-Transport-Security headers
191 191 use_htsts = false
192 192
193 193 ## git rev filter option, --all is the default filter, if you need to
194 194 ## hide all refs in changelog switch this to --branches --tags
195 195 git_rev_filter = --branches --tags
196 196
197 197 # Set to true if your repos are exposed using the dumb protocol
198 198 git_update_server_info = false
199 199
200 200 ## RSS/ATOM feed options
201 201 rss_cut_off_limit = 256000
202 202 rss_items_per_page = 10
203 203 rss_include_diff = false
204 204
205 205 ## gist URL alias, used to create nicer urls for gist. This should be an
206 206 ## url that does rewrites to _admin/gists/{gistid}.
207 207 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
208 208 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
209 209 gist_alias_url =
210 210
211 211 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
212 212 ## used for access.
213 213 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
214 214 ## came from the logged-in user who owns this authentication token.
215 215 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
216 216 ## authentication token. Such a view is only accessible when used together
217 217 ## with this authentication token
218 218 ##
219 219 ## list of all views can be found under `/_admin/permissions/auth_token_access`
220 220 ## The list should be "," separated and on a single line.
221 221 ##
222 222 ## Most common views to enable:
223 223 # RepoCommitsView:repo_commit_download
224 224 # RepoCommitsView:repo_commit_patch
225 225 # RepoCommitsView:repo_commit_raw
226 226 # RepoCommitsView:repo_commit_raw@TOKEN
227 227 # RepoFilesView:repo_files_diff
228 228 # RepoFilesView:repo_archivefile
229 229 # RepoFilesView:repo_file_raw
230 230 # GistView:*
231 231 api_access_controllers_whitelist =
232 232
233 233 ## default encoding used to convert from and to unicode
234 234 ## can be also a comma separated list of encoding in case of mixed encodings
235 235 default_encoding = UTF-8
236 236
237 237 ## instance-id prefix
238 238 ## a prefix key for this instance used for cache invalidation when running
239 239 ## multiple instances of rhodecode, make sure it's globally unique for
240 240 ## all running rhodecode instances. Leave empty if you don't use it
241 241 instance_id =
242 242
243 243 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
244 244 ## of an authentication plugin even if it is disabled by its settings.
245 245 ## This could be useful if you are unable to log in to the system due to broken
246 246 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
247 247 ## module to log in again and fix the settings.
248 248 ##
249 249 ## Available builtin plugin IDs (hash is part of the ID):
250 250 ## egg:rhodecode-enterprise-ce#rhodecode
251 251 ## egg:rhodecode-enterprise-ce#pam
252 252 ## egg:rhodecode-enterprise-ce#ldap
253 253 ## egg:rhodecode-enterprise-ce#jasig_cas
254 254 ## egg:rhodecode-enterprise-ce#headers
255 255 ## egg:rhodecode-enterprise-ce#crowd
256 256 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
257 257
258 258 ## alternative return HTTP header for failed authentication. Default HTTP
259 259 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
260 260 ## handling that, causing a series of failed authentication calls.
261 261 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
262 262 ## This will be served instead of the default 401 on bad authentication.
263 263 auth_ret_code =
264 264
265 265 ## use special detection method when serving auth_ret_code, instead of serving
266 266 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
267 267 ## and then serve auth_ret_code to clients
268 268 auth_ret_code_detection = false
269 269
270 270 ## locking return code. When repository is locked return this HTTP code. 2XX
271 271 ## codes don't break the transactions while 4XX codes do
272 272 lock_ret_code = 423
273 273
274 274 ## allows to change the repository location in settings page
275 275 allow_repo_location_change = true
276 276
277 277 ## allows to setup custom hooks in settings page
278 278 allow_custom_hooks_settings = true
279 279
280 280 ## generated license token, goto license page in RhodeCode settings to obtain
281 281 ## new token
282 282 license_token =
283 283
284 284 ## supervisor connection uri, for managing supervisor and logs.
285 285 supervisor.uri =
286 286 ## supervisord group name/id we only want this RC instance to handle
287 287 supervisor.group_id = dev
288 288
289 289 ## Display extended labs settings
290 290 labs_settings_active = true
291 291
292 292 ####################################
293 293 ### CELERY CONFIG ####
294 294 ####################################
295 295 ## run: /path/to/celery worker \
296 296 ## -E --beat --app rhodecode.lib.celerylib.loader \
297 297 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
298 298 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
299 299
300 300 use_celery = false
301 301
302 302 ## connection url to the message broker (default rabbitmq)
303 303 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
304 304
305 305 ## maximum tasks to execute before worker restart
306 306 celery.max_tasks_per_child = 100
307 307
308 308 ## tasks will never be sent to the queue, but executed locally instead.
309 309 celery.task_always_eager = false
310 310
311 311 #####################################
312 312 ### DOGPILE CACHE ####
313 313 #####################################
314 314 ## Default cache dir for caches. Putting this into a ramdisk
315 315 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
316 316 ## of space
317 317 cache_dir = /tmp/rcdev/data
318 318
319 319 ## cache settings for permission tree, auth TTL.
320 320 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
321 321 rc_cache.cache_perms.expiration_time = 300
322 322 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
323 323
324 324 ## redis backend with distributed locks
325 325 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
326 326 #rc_cache.cache_perms.expiration_time = 300
327 327 #rc_cache.cache_perms.arguments.host = localhost
328 328 #rc_cache.cache_perms.arguments.port = 6379
329 329 #rc_cache.cache_perms.arguments.db = 0
330 330 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
331 331 #rc_cache.cache_perms.arguments.distributed_lock = true
332 332
333 333
334 334 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
335 335 rc_cache.cache_repo.expiration_time = 2592000
336 336 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
337 337
338 338 ## redis backend with distributed locks
339 339 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
340 340 #rc_cache.cache_repo.expiration_time = 2592000
341 341 ## this needs to be greater than expiration_time
342 342 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
343 343 #rc_cache.cache_repo.arguments.host = localhost
344 344 #rc_cache.cache_repo.arguments.port = 6379
345 345 #rc_cache.cache_repo.arguments.db = 1
346 346 #rc_cache.cache_repo.arguments.distributed_lock = true
347 347
348 ## cache settings for SQL queries
349 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
350 rc_cache.sql_cache_short.expiration_time = 30
351
348 352
349 353 ####################################
350 354 ### BEAKER CACHE ####
351 355 ####################################
352 356
353 357 ## locking and default file storage for Beaker. Putting this into a ramdisk
354 358 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
355 359 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
356 360 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
357 361
358 beaker.cache.regions = long_term, sql_cache_short
362 beaker.cache.regions = long_term
359 363
360 364 beaker.cache.long_term.type = memorylru_base
361 365 beaker.cache.long_term.expire = 172800
362 366 beaker.cache.long_term.key_length = 256
363 367
364 beaker.cache.sql_cache_short.type = memorylru_base
365 beaker.cache.sql_cache_short.expire = 10
366 beaker.cache.sql_cache_short.key_length = 256
367
368 368
369 369 ####################################
370 370 ### BEAKER SESSION ####
371 371 ####################################
372 372
373 373 ## .session.type is type of storage options for the session, current allowed
374 374 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
375 375 beaker.session.type = file
376 376 beaker.session.data_dir = %(here)s/data/sessions
377 377
378 378 ## db based session, fast, and allows easy management over logged in users
379 379 #beaker.session.type = ext:database
380 380 #beaker.session.table_name = db_session
381 381 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
382 382 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
383 383 #beaker.session.sa.pool_recycle = 3600
384 384 #beaker.session.sa.echo = false
385 385
386 386 beaker.session.key = rhodecode
387 387 beaker.session.secret = develop-rc-uytcxaz
388 388 beaker.session.lock_dir = %(here)s/data/sessions/lock
389 389
390 390 ## Secure encrypted cookie. Requires AES and AES python libraries
391 391 ## you must disable beaker.session.secret to use this
392 392 #beaker.session.encrypt_key = key_for_encryption
393 393 #beaker.session.validate_key = validation_key
394 394
395 395 ## sets session as invalid (also logging out the user) if it has not been
396 396 ## accessed for the given amount of time in seconds
397 397 beaker.session.timeout = 2592000
398 398 beaker.session.httponly = true
399 399 ## Path to use for the cookie. Set to prefix if you use prefix middleware
400 400 #beaker.session.cookie_path = /custom_prefix
401 401
402 402 ## uncomment for https secure cookie
403 403 beaker.session.secure = false
404 404
405 405 ## auto save the session to avoid having to call .save()
406 406 beaker.session.auto = false
407 407
408 408 ## default cookie expiration time in seconds, set to `true` to set expire
409 409 ## at browser close
410 410 #beaker.session.cookie_expires = 3600
411 411
412 412 ###################################
413 413 ## SEARCH INDEXING CONFIGURATION ##
414 414 ###################################
415 415 ## Full text search indexer is available in rhodecode-tools under
416 416 ## `rhodecode-tools index` command
417 417
418 418 ## WHOOSH Backend, doesn't require additional services to run
419 419 ## it works well with a few dozen repos
420 420 search.module = rhodecode.lib.index.whoosh
421 421 search.location = %(here)s/data/index
422 422
423 423 ########################################
424 424 ### CHANNELSTREAM CONFIG ####
425 425 ########################################
426 426 ## channelstream enables persistent connections and live notification
427 427 ## in the system. It's also used by the chat system
428 428 channelstream.enabled = false
429 429
430 430 ## server address for channelstream server on the backend
431 431 channelstream.server = 127.0.0.1:9800
432 432
433 433 ## location of the channelstream server from outside world
434 434 ## use ws:// for http or wss:// for https. This address needs to be handled
435 435 ## by external HTTP server such as Nginx or Apache
436 436 ## see nginx/apache configuration examples in our docs
437 437 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
438 438 channelstream.secret = secret
439 439 channelstream.history.location = %(here)s/channelstream_history
440 440
441 441 ## Internal application path that Javascript uses to connect into.
442 442 ## If you use proxy-prefix the prefix should be added before /_channelstream
443 443 channelstream.proxy_path = /_channelstream
444 444
445 445
446 446 ###################################
447 447 ## APPENLIGHT CONFIG ##
448 448 ###################################
449 449
450 450 ## Appenlight is tailored to work with RhodeCode, see
451 451 ## http://appenlight.com for details how to obtain an account
452 452
453 453 ## appenlight integration enabled
454 454 appenlight = false
455 455
456 456 appenlight.server_url = https://api.appenlight.com
457 457 appenlight.api_key = YOUR_API_KEY
458 458 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
459 459
460 460 # used for JS client
461 461 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
462 462
463 463 ## TWEAK AMOUNT OF INFO SENT HERE
464 464
465 465 ## enables 404 error logging (default False)
466 466 appenlight.report_404 = false
467 467
468 468 ## time in seconds after request is considered being slow (default 1)
469 469 appenlight.slow_request_time = 1
470 470
471 471 ## record slow requests in application
472 472 ## (needs to be enabled for slow datastore recording and time tracking)
473 473 appenlight.slow_requests = true
474 474
475 475 ## enable hooking to application loggers
476 476 appenlight.logging = true
477 477
478 478 ## minimum log level for log capture
479 479 appenlight.logging.level = WARNING
480 480
481 481 ## send logs only from erroneous/slow requests
482 482 ## (saves API quota for intensive logging)
483 483 appenlight.logging_on_error = false
484 484
485 485 ## list of additional keywords that should be grabbed from environ object
486 486 ## can be string with comma separated list of words in lowercase
487 487 ## (by default client will always send following info:
488 488 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
489 489 ## start with HTTP*). This list can be extended with additional keywords here
490 490 appenlight.environ_keys_whitelist =
491 491
492 492 ## list of keywords that should be blanked from request object
493 493 ## can be string with comma separated list of words in lowercase
494 494 ## (by default client will always blank keys that contain following words
495 495 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
496 496 ## this list can be extended with additional keywords set here
497 497 appenlight.request_keys_blacklist =
498 498
499 499 ## list of namespaces that should be ignored when gathering log entries
500 500 ## can be string with comma separated list of namespaces
501 501 ## (by default the client ignores own entries: appenlight_client.client)
502 502 appenlight.log_namespace_blacklist =
503 503
504 504
505 505 ################################################################################
506 506 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
507 507 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
508 508 ## execute malicious code after an exception is raised. ##
509 509 ################################################################################
510 510 #set debug = false
511 511
512 512
513 513 ##############
514 514 ## STYLING ##
515 515 ##############
516 516 debug_style = true
517 517
518 518 ###########################################
519 519 ### MAIN RHODECODE DATABASE CONFIG ###
520 520 ###########################################
521 521 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
522 522 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
523 523 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
524 524 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
525 525
526 526 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
527 527
528 528 # see sqlalchemy docs for other advanced settings
529 529
530 530 ## print the sql statements to output
531 531 sqlalchemy.db1.echo = false
532 532 ## recycle the connections after this amount of seconds
533 533 sqlalchemy.db1.pool_recycle = 3600
534 534 sqlalchemy.db1.convert_unicode = true
535 535
536 536 ## the number of connections to keep open inside the connection pool.
537 537 ## 0 indicates no limit
538 538 #sqlalchemy.db1.pool_size = 5
539 539
540 540 ## the number of connections to allow in connection pool "overflow", that is
541 541 ## connections that can be opened above and beyond the pool_size setting,
542 542 ## which defaults to five.
543 543 #sqlalchemy.db1.max_overflow = 10
544 544
545 545 ## Connection check ping, used to detect broken database connections
546 546 ## could be enabled to better handle cases if MySQL has gone away errors
547 547 #sqlalchemy.db1.ping_connection = true
548 548
549 549 ##################
550 550 ### VCS CONFIG ###
551 551 ##################
552 552 vcs.server.enable = true
553 553 vcs.server = localhost:9900
554 554
555 555 ## Web server connectivity protocol, responsible for web based VCS operations
556 556 ## Available protocols are:
557 557 ## `http` - use http-rpc backend (default)
558 558 vcs.server.protocol = http
559 559
560 560 ## Push/Pull operations protocol, available options are:
561 561 ## `http` - use http-rpc backend (default)
562 562 ##
563 563 vcs.scm_app_implementation = http
564 564
565 565 ## Push/Pull operations hooks protocol, available options are:
566 566 ## `http` - use http-rpc backend (default)
567 567 vcs.hooks.protocol = http
568 568
569 569 ## Host on which this instance is listening for hooks. If vcsserver is in other location
570 570 ## this should be adjusted.
571 571 vcs.hooks.host = 127.0.0.1
572 572
573 573 vcs.server.log_level = debug
574 574 ## Start VCSServer with this instance as a subprocess, useful for development
575 575 vcs.start_server = false
576 576
577 577 ## List of enabled VCS backends, available options are:
578 578 ## `hg` - mercurial
579 579 ## `git` - git
580 580 ## `svn` - subversion
581 581 vcs.backends = hg, git, svn
582 582
583 583 vcs.connection_timeout = 3600
584 584 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
585 585 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
586 586 #vcs.svn.compatible_version = pre-1.8-compatible
587 587
588 588
589 589 ############################################################
590 590 ### Subversion proxy support (mod_dav_svn) ###
591 591 ### Maps RhodeCode repo groups into SVN paths for Apache ###
592 592 ############################################################
593 593 ## Enable or disable the config file generation.
594 594 svn.proxy.generate_config = false
595 595 ## Generate config file with `SVNListParentPath` set to `On`.
596 596 svn.proxy.list_parent_path = true
597 597 ## Set location and file name of generated config file.
598 598 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
599 599 ## alternative mod_dav config template. This needs to be a mako template
600 600 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
601 601 ## Used as a prefix to the `Location` block in the generated config file.
602 602 ## In most cases it should be set to `/`.
603 603 svn.proxy.location_root = /
604 604 ## Command to reload the mod dav svn configuration on change.
605 605 ## Example: `/etc/init.d/apache2 reload`
606 606 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
607 607 ## If the timeout expires before the reload command finishes, the command will
608 608 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
609 609 #svn.proxy.reload_timeout = 10
610 610
611 611 ############################################################
612 612 ### SSH Support Settings ###
613 613 ############################################################
614 614
615 615 ## Defines if a custom authorized_keys file should be created and written on
616 616 ## any change of user ssh keys. Setting this to false also disables the possibility
617 617 ## of adding SSH keys by users from web interface. Super admins can still
618 618 ## manage SSH Keys.
619 619 ssh.generate_authorized_keyfile = false
620 620
621 621 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
622 622 # ssh.authorized_keys_ssh_opts =
623 623
624 624 ## Path to the authorized_keys file where the generated entries are placed.
625 625 ## It is possible to have multiple key files specified in `sshd_config` e.g.
626 626 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
627 627 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
628 628
629 629 ## Command to execute the SSH wrapper. The binary is available in the
630 630 ## rhodecode installation directory.
631 631 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
632 632 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
633 633
634 634 ## Allow shell when executing the ssh-wrapper command
635 635 ssh.wrapper_cmd_allow_shell = false
636 636
637 637 ## Enables logging, and detailed output sent back to the client during SSH
638 638 ## operations. Useful for debugging, shouldn't be used in production.
639 639 ssh.enable_debug_logging = true
640 640
641 641 ## Paths to binary executable, by default they are the names, but we can
642 642 ## override them if we want to use a custom one
643 643 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
644 644 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
645 645 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
646 646
647 647
648 648 ## Dummy marker to add new entries after.
649 649 ## Add any custom entries below. Please don't remove.
650 650 custom.conf = 1
651 651
652 652
653 653 ################################
654 654 ### LOGGING CONFIGURATION ####
655 655 ################################
656 656 [loggers]
657 657 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
658 658
659 659 [handlers]
660 660 keys = console, console_sql
661 661
662 662 [formatters]
663 663 keys = generic, color_formatter, color_formatter_sql
664 664
665 665 #############
666 666 ## LOGGERS ##
667 667 #############
668 668 [logger_root]
669 669 level = NOTSET
670 670 handlers = console
671 671
672 672 [logger_sqlalchemy]
673 673 level = INFO
674 674 handlers = console_sql
675 675 qualname = sqlalchemy.engine
676 676 propagate = 0
677 677
678 678 [logger_beaker]
679 679 level = DEBUG
680 680 handlers =
681 681 qualname = beaker.container
682 682 propagate = 1
683 683
684 684 [logger_rhodecode]
685 685 level = DEBUG
686 686 handlers =
687 687 qualname = rhodecode
688 688 propagate = 1
689 689
690 690 [logger_ssh_wrapper]
691 691 level = DEBUG
692 692 handlers =
693 693 qualname = ssh_wrapper
694 694 propagate = 1
695 695
696 696 [logger_celery]
697 697 level = DEBUG
698 698 handlers =
699 699 qualname = celery
700 700
701 701
702 702 ##############
703 703 ## HANDLERS ##
704 704 ##############
705 705
706 706 [handler_console]
707 707 class = StreamHandler
708 708 args = (sys.stderr, )
709 709 level = DEBUG
710 710 formatter = color_formatter
711 711
712 712 [handler_console_sql]
713 713 # "level = DEBUG" logs SQL queries and results.
714 714 # "level = INFO" logs SQL queries.
715 715 # "level = WARN" logs neither. (Recommended for production systems.)
716 716 class = StreamHandler
717 717 args = (sys.stderr, )
718 718 level = WARN
719 719 formatter = color_formatter_sql
720 720
721 721 ################
722 722 ## FORMATTERS ##
723 723 ################
724 724
725 725 [formatter_generic]
726 726 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
727 727 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
728 728 datefmt = %Y-%m-%d %H:%M:%S
729 729
730 730 [formatter_color_formatter]
731 731 class = rhodecode.lib.logging_formatter.ColorFormatter
732 732 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
733 733 datefmt = %Y-%m-%d %H:%M:%S
734 734
735 735 [formatter_color_formatter_sql]
736 736 class = rhodecode.lib.logging_formatter.ColorFormatterSql
737 737 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
738 738 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,707 +1,707 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all email subjects with the given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send an error email from this address
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 127.0.0.1
47 47 port = 5000
48 48
49 49 ##################################
50 50 ## WAITRESS WSGI SERVER ##
51 51 ## Recommended for Development ##
52 52 ##################################
53 53
54 54 #use = egg:waitress#main
55 55 ## number of worker threads
56 56 #threads = 5
57 57 ## MAX BODY SIZE 100GB
58 58 #max_request_body_size = 107374182400
59 59 ## Use poll instead of select, fixes file descriptors limits problems.
60 60 ## May not work on old windows systems.
61 61 #asyncore_use_poll = true
62 62
63 63
64 64 ##########################
65 65 ## GUNICORN WSGI SERVER ##
66 66 ##########################
67 67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68 68
69 69 use = egg:gunicorn#main
70 70 ## Sets the number of process workers. You must set `instance_id = *`
71 71 ## when this option is set to more than one worker, recommended
72 72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 73 ## The `instance_id = *` must be set in the [app:main] section below
74 74 workers = 2
75 75 ## number of threads for each of the worker, must be set to 1 for gevent
76 76 ## generally recommended to be at 1
77 77 #threads = 1
78 78 ## process name
79 79 proc_name = rhodecode
80 80 ## type of worker class, one of sync, gevent
81 81 ## for bigger setups it is recommended to use a worker class other than sync
82 82 worker_class = gevent
83 83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 84 #worker_connections = 10
85 85 ## max number of requests that worker will handle before being gracefully
86 86 ## restarted, could prevent memory leaks
87 87 max_requests = 1000
88 88 max_requests_jitter = 30
89 89 ## amount of time a worker can spend with handling a request before it
90 90 ## gets killed and restarted. Set to 6hrs
91 91 timeout = 21600
92 92
93 93
94 94 ## prefix middleware for RhodeCode.
95 95 ## recommended when using proxy setup.
96 96 ## allows to set RhodeCode under a prefix in server.
97 97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 98 ## And set your prefix like: `prefix = /custom_prefix`
99 99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 100 ## to make your cookies only work on prefix url
101 101 [filter:proxy-prefix]
102 102 use = egg:PasteDeploy#prefix
103 103 prefix = /
104 104
105 105 [app:main]
106 106 use = egg:rhodecode-enterprise-ce
107 107
108 108 ## enable proxy prefix middleware, defined above
109 109 #filter-with = proxy-prefix
110 110
111 111 ## encryption key used to encrypt social plugin tokens,
112 112 ## remote_urls with credentials etc, if not set it defaults to
113 113 ## `beaker.session.secret`
114 114 #rhodecode.encrypted_values.secret =
115 115
116 116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 118 #rhodecode.encrypted_values.strict = false
119 119
120 120 ## return gzipped responses from Rhodecode (static files/application)
121 121 gzip_responses = false
122 122
123 123 ## autogenerate javascript routes file on startup
124 124 generate_js_files = false
125 125
126 126 ## Optional Languages
127 127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 128 lang = en
129 129
130 130 ## perform a full repository scan on each server start, this should be
131 131 ## set to false after first startup, to allow faster server restarts.
132 132 startup.import_repos = false
133 133
134 134 ## Uncomment and set this path to use archive download cache.
135 135 ## Once enabled, generated archives will be cached at this location
136 136 ## and served from the cache during subsequent requests for the same archive of
137 137 ## the repository.
138 138 #archive_cache_dir = /tmp/tarballcache
139 139
140 140 ## URL at which the application is running. This is used for bootstrapping
141 141 ## requests in context when no web request is available. Used in ishell, or
142 142 ## SSH calls. Set this for events to receive proper url for SSH calls.
143 143 app.base_url = http://rhodecode.local
144 144
145 145 ## change this to unique ID for security
146 146 app_instance_uuid = rc-production
147 147
148 148 ## cut off limit for large diffs (size in bytes). If overall diff size on
149 149 ## commit, or pull request exceeds this limit this diff will be displayed
150 150 ## partially. E.g 512000 == 512Kb
151 151 cut_off_limit_diff = 512000
152 152
153 153 ## cut off limit for large files inside diffs (size in bytes). Each individual
154 154 ## file inside diff which exceeds this limit will be displayed partially.
155 155 ## E.g 128000 == 128Kb
156 156 cut_off_limit_file = 128000
157 157
158 158 ## use cache version of scm repo everywhere
159 159 vcs_full_cache = true
160 160
161 161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
162 162 ## Normally this is controlled by proper http flags sent from http server
163 163 force_https = false
164 164
165 165 ## use Strict-Transport-Security headers
166 166 use_htsts = false
167 167
168 168 ## git rev filter option, --all is the default filter, if you need to
169 169 ## hide all refs in changelog switch this to --branches --tags
170 170 git_rev_filter = --branches --tags
171 171
172 172 # Set to true if your repos are exposed using the dumb protocol
173 173 git_update_server_info = false
174 174
175 175 ## RSS/ATOM feed options
176 176 rss_cut_off_limit = 256000
177 177 rss_items_per_page = 10
178 178 rss_include_diff = false
179 179
180 180 ## gist URL alias, used to create nicer urls for gist. This should be an
181 181 ## url that does rewrites to _admin/gists/{gistid}.
182 182 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
183 183 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
184 184 gist_alias_url =
185 185
186 186 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
187 187 ## used for access.
188 188 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
189 189 ## came from the logged-in user who owns this authentication token.
190 190 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
191 191 ## authentication token. Such a view is only accessible when used together
192 192 ## with this authentication token
193 193 ##
194 194 ## list of all views can be found under `/_admin/permissions/auth_token_access`
195 195 ## The list should be "," separated and on a single line.
196 196 ##
197 197 ## Most common views to enable:
198 198 # RepoCommitsView:repo_commit_download
199 199 # RepoCommitsView:repo_commit_patch
200 200 # RepoCommitsView:repo_commit_raw
201 201 # RepoCommitsView:repo_commit_raw@TOKEN
202 202 # RepoFilesView:repo_files_diff
203 203 # RepoFilesView:repo_archivefile
204 204 # RepoFilesView:repo_file_raw
205 205 # GistView:*
206 206 api_access_controllers_whitelist =
207 207
208 208 ## default encoding used to convert from and to unicode
209 209 ## can be also a comma separated list of encoding in case of mixed encodings
210 210 default_encoding = UTF-8
211 211
212 212 ## instance-id prefix
213 213 ## a prefix key for this instance used for cache invalidation when running
214 214 ## multiple instances of rhodecode, make sure it's globally unique for
215 215 ## all running rhodecode instances. Leave empty if you don't use it
216 216 instance_id =
217 217
218 218 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
219 219 ## of an authentication plugin even if it is disabled by its settings.
220 220 ## This could be useful if you are unable to log in to the system due to broken
221 221 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
222 222 ## module to log in again and fix the settings.
223 223 ##
224 224 ## Available builtin plugin IDs (hash is part of the ID):
225 225 ## egg:rhodecode-enterprise-ce#rhodecode
226 226 ## egg:rhodecode-enterprise-ce#pam
227 227 ## egg:rhodecode-enterprise-ce#ldap
228 228 ## egg:rhodecode-enterprise-ce#jasig_cas
229 229 ## egg:rhodecode-enterprise-ce#headers
230 230 ## egg:rhodecode-enterprise-ce#crowd
231 231 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
232 232
233 233 ## alternative return HTTP header for failed authentication. Default HTTP
234 234 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
235 235 ## handling that, causing a series of failed authentication calls.
236 236 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
237 237 ## This will be served instead of the default 401 on bad authentication.
238 238 auth_ret_code =
239 239
240 240 ## use special detection method when serving auth_ret_code, instead of serving
241 241 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
242 242 ## and then serve auth_ret_code to clients
243 243 auth_ret_code_detection = false
244 244
245 245 ## locking return code. When repository is locked return this HTTP code. 2XX
246 246 ## codes don't break the transactions while 4XX codes do
247 247 lock_ret_code = 423
248 248
249 249 ## allows to change the repository location in settings page
250 250 allow_repo_location_change = true
251 251
252 252 ## allows to setup custom hooks in settings page
253 253 allow_custom_hooks_settings = true
254 254
255 255 ## generated license token, goto license page in RhodeCode settings to obtain
256 256 ## new token
257 257 license_token =
258 258
259 259 ## supervisor connection uri, for managing supervisor and logs.
260 260 supervisor.uri =
261 261 ## supervisord group name/id we only want this RC instance to handle
262 262 supervisor.group_id = prod
263 263
264 264 ## Display extended labs settings
265 265 labs_settings_active = true
266 266
267 267 ####################################
268 268 ### CELERY CONFIG ####
269 269 ####################################
270 270 ## run: /path/to/celery worker \
271 271 ## -E --beat --app rhodecode.lib.celerylib.loader \
272 272 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
273 273 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
274 274
275 275 use_celery = false
276 276
277 277 ## connection url to the message broker (default rabbitmq)
278 278 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
279 279
280 280 ## maximum tasks to execute before worker restart
281 281 celery.max_tasks_per_child = 100
282 282
283 283 ## tasks will never be sent to the queue, but executed locally instead.
284 284 celery.task_always_eager = false
285 285
286 286 #####################################
287 287 ### DOGPILE CACHE ####
288 288 #####################################
289 289 ## Default cache dir for caches. Putting this into a ramdisk
290 290 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
291 291 ## of space
292 292 cache_dir = /tmp/rcdev/data
293 293
294 294 ## cache settings for permission tree, auth TTL.
295 295 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
296 296 rc_cache.cache_perms.expiration_time = 300
297 297 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
298 298
299 299 ## redis backend with distributed locks
300 300 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
301 301 #rc_cache.cache_perms.expiration_time = 300
302 302 #rc_cache.cache_perms.arguments.host = localhost
303 303 #rc_cache.cache_perms.arguments.port = 6379
304 304 #rc_cache.cache_perms.arguments.db = 0
305 305 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
306 306 #rc_cache.cache_perms.arguments.distributed_lock = true
307 307
308 308
309 309 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
310 310 rc_cache.cache_repo.expiration_time = 2592000
311 311 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
312 312
313 313 ## redis backend with distributed locks
314 314 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
315 315 #rc_cache.cache_repo.expiration_time = 2592000
316 316 ## this needs to be greater than expiration_time
317 317 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
318 318 #rc_cache.cache_repo.arguments.host = localhost
319 319 #rc_cache.cache_repo.arguments.port = 6379
320 320 #rc_cache.cache_repo.arguments.db = 1
321 321 #rc_cache.cache_repo.arguments.distributed_lock = true
322 322
323 ## cache settings for SQL queries
324 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
325 rc_cache.sql_cache_short.expiration_time = 30
326
323 327
324 328 ####################################
325 329 ### BEAKER CACHE ####
326 330 ####################################
327 331
328 332 ## locking and default file storage for Beaker. Putting this into a ramdisk
329 333 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
330 334 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
331 335 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
332 336
333 beaker.cache.regions = long_term, sql_cache_short
337 beaker.cache.regions = long_term
334 338
335 339 beaker.cache.long_term.type = memory
336 340 beaker.cache.long_term.expire = 172800
337 341 beaker.cache.long_term.key_length = 256
338 342
339 beaker.cache.sql_cache_short.type = memory
340 beaker.cache.sql_cache_short.expire = 10
341 beaker.cache.sql_cache_short.key_length = 256
342
343 343
344 344 ####################################
345 345 ### BEAKER SESSION ####
346 346 ####################################
347 347
348 348 ## .session.type is type of storage options for the session, current allowed
349 349 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
350 350 beaker.session.type = file
351 351 beaker.session.data_dir = %(here)s/data/sessions
352 352
353 353 ## db based session, fast, and allows easy management over logged in users
354 354 #beaker.session.type = ext:database
355 355 #beaker.session.table_name = db_session
356 356 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
357 357 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
358 358 #beaker.session.sa.pool_recycle = 3600
359 359 #beaker.session.sa.echo = false
360 360
361 361 beaker.session.key = rhodecode
362 362 beaker.session.secret = production-rc-uytcxaz
363 363 beaker.session.lock_dir = %(here)s/data/sessions/lock
364 364
365 365 ## Secure encrypted cookie. Requires AES and AES python libraries
366 366 ## you must disable beaker.session.secret to use this
367 367 #beaker.session.encrypt_key = key_for_encryption
368 368 #beaker.session.validate_key = validation_key
369 369
370 370 ## sets session as invalid (also logging out the user) if it has not been
371 371 ## accessed for the given amount of time in seconds
372 372 beaker.session.timeout = 2592000
373 373 beaker.session.httponly = true
374 374 ## Path to use for the cookie. Set to prefix if you use prefix middleware
375 375 #beaker.session.cookie_path = /custom_prefix
376 376
377 377 ## uncomment for https secure cookie
378 378 beaker.session.secure = false
379 379
380 380 ## auto save the session to avoid having to call .save()
381 381 beaker.session.auto = false
382 382
383 383 ## default cookie expiration time in seconds, set to `true` to set expire
384 384 ## at browser close
385 385 #beaker.session.cookie_expires = 3600
386 386
387 387 ###################################
388 388 ## SEARCH INDEXING CONFIGURATION ##
389 389 ###################################
390 390 ## Full text search indexer is available in rhodecode-tools under
391 391 ## `rhodecode-tools index` command
392 392
393 393 ## WHOOSH Backend, doesn't require additional services to run
394 394 ## it works well with a few dozen repos
395 395 search.module = rhodecode.lib.index.whoosh
396 396 search.location = %(here)s/data/index
397 397
398 398 ########################################
399 399 ### CHANNELSTREAM CONFIG ####
400 400 ########################################
401 401 ## channelstream enables persistent connections and live notification
402 402 ## in the system. It's also used by the chat system
403 403 channelstream.enabled = false
404 404
405 405 ## server address for channelstream server on the backend
406 406 channelstream.server = 127.0.0.1:9800
407 407
408 408 ## location of the channelstream server from outside world
409 409 ## use ws:// for http or wss:// for https. This address needs to be handled
410 410 ## by external HTTP server such as Nginx or Apache
411 411 ## see nginx/apache configuration examples in our docs
412 412 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
413 413 channelstream.secret = secret
414 414 channelstream.history.location = %(here)s/channelstream_history
415 415
416 416 ## Internal application path that Javascript uses to connect into.
417 417 ## If you use proxy-prefix the prefix should be added before /_channelstream
418 418 channelstream.proxy_path = /_channelstream
419 419
420 420
421 421 ###################################
422 422 ## APPENLIGHT CONFIG ##
423 423 ###################################
424 424
425 425 ## Appenlight is tailored to work with RhodeCode, see
426 426 ## http://appenlight.com for details how to obtain an account
427 427
428 428 ## appenlight integration enabled
429 429 appenlight = false
430 430
431 431 appenlight.server_url = https://api.appenlight.com
432 432 appenlight.api_key = YOUR_API_KEY
433 433 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
434 434
435 435 # used for JS client
436 436 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
437 437
438 438 ## TWEAK AMOUNT OF INFO SENT HERE
439 439
440 440 ## enables 404 error logging (default False)
441 441 appenlight.report_404 = false
442 442
443 443 ## time in seconds after request is considered being slow (default 1)
444 444 appenlight.slow_request_time = 1
445 445
446 446 ## record slow requests in application
447 447 ## (needs to be enabled for slow datastore recording and time tracking)
448 448 appenlight.slow_requests = true
449 449
450 450 ## enable hooking to application loggers
451 451 appenlight.logging = true
452 452
453 453 ## minimum log level for log capture
454 454 appenlight.logging.level = WARNING
455 455
456 456 ## send logs only from erroneous/slow requests
457 457 ## (saves API quota for intensive logging)
458 458 appenlight.logging_on_error = false
459 459
460 460 ## list of additional keywords that should be grabbed from environ object
461 461 ## can be string with comma separated list of words in lowercase
462 462 ## (by default client will always send following info:
463 463 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
464 464 ## start with HTTP*). This list can be extended with additional keywords here
465 465 appenlight.environ_keys_whitelist =
466 466
467 467 ## list of keywords that should be blanked from request object
468 468 ## can be string with comma separated list of words in lowercase
469 469 ## (by default client will always blank keys that contain following words
470 470 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
471 471 ## this list can be extended with additional keywords set here
472 472 appenlight.request_keys_blacklist =
473 473
474 474 ## list of namespaces that should be ignored when gathering log entries
475 475 ## can be string with comma separated list of namespaces
476 476 ## (by default the client ignores own entries: appenlight_client.client)
477 477 appenlight.log_namespace_blacklist =
478 478
479 479
480 480 ################################################################################
481 481 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
482 482 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
483 483 ## execute malicious code after an exception is raised. ##
484 484 ################################################################################
485 485 set debug = false
486 486
487 487
488 488 ###########################################
489 489 ### MAIN RHODECODE DATABASE CONFIG ###
490 490 ###########################################
491 491 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
492 492 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
493 493 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
494 494 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
495 495
496 496 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
497 497
498 498 # see sqlalchemy docs for other advanced settings
499 499
500 500 ## print the sql statements to output
501 501 sqlalchemy.db1.echo = false
502 502 ## recycle the connections after this amount of seconds
503 503 sqlalchemy.db1.pool_recycle = 3600
504 504 sqlalchemy.db1.convert_unicode = true
505 505
506 506 ## the number of connections to keep open inside the connection pool.
507 507 ## 0 indicates no limit
508 508 #sqlalchemy.db1.pool_size = 5
509 509
510 510 ## the number of connections to allow in connection pool "overflow", that is
511 511 ## connections that can be opened above and beyond the pool_size setting,
512 512 ## which defaults to five.
513 513 #sqlalchemy.db1.max_overflow = 10
514 514
515 515 ## Connection check ping, used to detect broken database connections
516 516 ## could be enabled to better handle cases if MySQL has gone away errors
517 517 #sqlalchemy.db1.ping_connection = true
518 518
519 519 ##################
520 520 ### VCS CONFIG ###
521 521 ##################
522 522 vcs.server.enable = true
523 523 vcs.server = localhost:9900
524 524
525 525 ## Web server connectivity protocol, responsible for web based VCS operations
526 526 ## Available protocols are:
527 527 ## `http` - use http-rpc backend (default)
528 528 vcs.server.protocol = http
529 529
530 530 ## Push/Pull operations protocol, available options are:
531 531 ## `http` - use http-rpc backend (default)
532 532 ##
533 533 vcs.scm_app_implementation = http
534 534
535 535 ## Push/Pull operations hooks protocol, available options are:
536 536 ## `http` - use http-rpc backend (default)
537 537 vcs.hooks.protocol = http
538 538 ## Host on which this instance is listening for hooks. If vcsserver is in other location
539 539 ## this should be adjusted.
540 540 vcs.hooks.host = 127.0.0.1
541 541
542 542 vcs.server.log_level = info
543 543 ## Start VCSServer with this instance as a subprocess, useful for development
544 544 vcs.start_server = false
545 545
546 546 ## List of enabled VCS backends, available options are:
547 547 ## `hg` - mercurial
548 548 ## `git` - git
549 549 ## `svn` - subversion
550 550 vcs.backends = hg, git, svn
551 551
552 552 vcs.connection_timeout = 3600
553 553 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
554 554 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
555 555 #vcs.svn.compatible_version = pre-1.8-compatible
556 556
557 557
558 558 ############################################################
559 559 ### Subversion proxy support (mod_dav_svn) ###
560 560 ### Maps RhodeCode repo groups into SVN paths for Apache ###
561 561 ############################################################
562 562 ## Enable or disable the config file generation.
563 563 svn.proxy.generate_config = false
564 564 ## Generate config file with `SVNListParentPath` set to `On`.
565 565 svn.proxy.list_parent_path = true
566 566 ## Set location and file name of generated config file.
567 567 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
568 568 ## alternative mod_dav config template. This needs to be a mako template
569 569 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
570 570 ## Used as a prefix to the `Location` block in the generated config file.
571 571 ## In most cases it should be set to `/`.
572 572 svn.proxy.location_root = /
573 573 ## Command to reload the mod dav svn configuration on change.
574 574 ## Example: `/etc/init.d/apache2 reload`
575 575 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
576 576 ## If the timeout expires before the reload command finishes, the command will
577 577 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
578 578 #svn.proxy.reload_timeout = 10
579 579
580 580 ############################################################
581 581 ### SSH Support Settings ###
582 582 ############################################################
583 583
584 584 ## Defines if a custom authorized_keys file should be created and written on
585 585 ## any change of user SSH keys. Setting this to false also disables the possibility
586 586 ## of adding SSH keys by users from the web interface. Super admins can still
587 587 ## manage SSH keys.
588 588 ssh.generate_authorized_keyfile = false
589 589
590 590 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
591 591 # ssh.authorized_keys_ssh_opts =
592 592
593 593 ## Path to the authorized_keys file where the generated entries are placed.
594 594 ## It is possible to have multiple key files specified in `sshd_config` e.g.
595 595 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
596 596 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
597 597
598 598 ## Command to execute the SSH wrapper. The binary is available in the
599 599 ## rhodecode installation directory.
600 600 ## e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
601 601 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
602 602
603 603 ## Allow shell when executing the ssh-wrapper command
604 604 ssh.wrapper_cmd_allow_shell = false
605 605
606 606 ## Enables logging and detailed output sent back to the client during SSH
607 607 ## operations. Useful for debugging; shouldn't be used in production.
608 608 ssh.enable_debug_logging = false
609 609
610 610 ## Paths to binary executables; by default they are just the binary names, but we can
611 611 ## override them if we want to use custom ones
612 612 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
613 613 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
614 614 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
615 615
616 616
617 617 ## Dummy marker to add new entries after.
618 618 ## Add any custom entries below. Please don't remove.
619 619 custom.conf = 1
620 620
621 621
622 622 ################################
623 623 ### LOGGING CONFIGURATION ####
624 624 ################################
625 625 [loggers]
626 626 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
627 627
628 628 [handlers]
629 629 keys = console, console_sql
630 630
631 631 [formatters]
632 632 keys = generic, color_formatter, color_formatter_sql
633 633
634 634 #############
635 635 ## LOGGERS ##
636 636 #############
637 637 [logger_root]
638 638 level = NOTSET
639 639 handlers = console
640 640
641 641 [logger_sqlalchemy]
642 642 level = INFO
643 643 handlers = console_sql
644 644 qualname = sqlalchemy.engine
645 645 propagate = 0
646 646
647 647 [logger_beaker]
648 648 level = DEBUG
649 649 handlers =
650 650 qualname = beaker.container
651 651 propagate = 1
652 652
653 653 [logger_rhodecode]
654 654 level = DEBUG
655 655 handlers =
656 656 qualname = rhodecode
657 657 propagate = 1
658 658
659 659 [logger_ssh_wrapper]
660 660 level = DEBUG
661 661 handlers =
662 662 qualname = ssh_wrapper
663 663 propagate = 1
664 664
665 665 [logger_celery]
666 666 level = DEBUG
667 667 handlers =
668 668 qualname = celery
669 669
670 670
671 671 ##############
672 672 ## HANDLERS ##
673 673 ##############
674 674
675 675 [handler_console]
676 676 class = StreamHandler
677 677 args = (sys.stderr, )
678 678 level = INFO
679 679 formatter = generic
680 680
681 681 [handler_console_sql]
682 682 # "level = DEBUG" logs SQL queries and results.
683 683 # "level = INFO" logs SQL queries.
684 684 # "level = WARN" logs neither. (Recommended for production systems.)
685 685 class = StreamHandler
686 686 args = (sys.stderr, )
687 687 level = WARN
688 688 formatter = generic
689 689
690 690 ################
691 691 ## FORMATTERS ##
692 692 ################
693 693
694 694 [formatter_generic]
695 695 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
696 696 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
697 697 datefmt = %Y-%m-%d %H:%M:%S
698 698
699 699 [formatter_color_formatter]
700 700 class = rhodecode.lib.logging_formatter.ColorFormatter
701 701 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
702 702 datefmt = %Y-%m-%d %H:%M:%S
703 703
704 704 [formatter_color_formatter_sql]
705 705 class = rhodecode.lib.logging_formatter.ColorFormatterSql
706 706 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
707 707 datefmt = %Y-%m-%d %H:%M:%S
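The [loggers], [handlers] and [formatters] sections above follow the stdlib logging fileConfig layout. As a hedged sketch of how such a section can be loaded outside the normal server startup (assuming the file is saved as rhodecode.ini in the working directory and the rhodecode package, which provides the custom formatter classes, is importable):

    import logging
    import logging.config

    # fileConfig only consumes the logger/handler/formatter sections; the other
    # ini sections are ignored. disable_existing_loggers=False keeps any loggers
    # that were created before this call.
    logging.config.fileConfig('rhodecode.ini', disable_existing_loggers=False)

    log = logging.getLogger('rhodecode')
    log.debug('logging configured from the ini sections above')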
@@ -1,300 +1,299 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 from rhodecode.model.db import User, UserIpMap
24 24 from rhodecode.model.meta import Session
25 25 from rhodecode.model.permission import PermissionModel
26 26 from rhodecode.model.ssh_key import SshKeyModel
27 27 from rhodecode.tests import (
28 TestController, clear_all_caches, assert_session_flash)
28 TestController, clear_cache_regions, assert_session_flash)
29 29
30 30
31 31 def route_path(name, params=None, **kwargs):
32 32 import urllib
33 33 from rhodecode.apps._base import ADMIN_PREFIX
34 34
35 35 base_url = {
36 36 'edit_user_ips':
37 37 ADMIN_PREFIX + '/users/{user_id}/edit/ips',
38 38 'edit_user_ips_add':
39 39 ADMIN_PREFIX + '/users/{user_id}/edit/ips/new',
40 40 'edit_user_ips_delete':
41 41 ADMIN_PREFIX + '/users/{user_id}/edit/ips/delete',
42 42
43 43 'admin_permissions_application':
44 44 ADMIN_PREFIX + '/permissions/application',
45 45 'admin_permissions_application_update':
46 46 ADMIN_PREFIX + '/permissions/application/update',
47 47
48 48 'admin_permissions_global':
49 49 ADMIN_PREFIX + '/permissions/global',
50 50 'admin_permissions_global_update':
51 51 ADMIN_PREFIX + '/permissions/global/update',
52 52
53 53 'admin_permissions_object':
54 54 ADMIN_PREFIX + '/permissions/object',
55 55 'admin_permissions_object_update':
56 56 ADMIN_PREFIX + '/permissions/object/update',
57 57
58 58 'admin_permissions_ips':
59 59 ADMIN_PREFIX + '/permissions/ips',
60 60 'admin_permissions_overview':
61 61 ADMIN_PREFIX + '/permissions/overview',
62 62
63 63 'admin_permissions_ssh_keys':
64 64 ADMIN_PREFIX + '/permissions/ssh_keys',
65 65 'admin_permissions_ssh_keys_data':
66 66 ADMIN_PREFIX + '/permissions/ssh_keys/data',
67 67 'admin_permissions_ssh_keys_update':
68 68 ADMIN_PREFIX + '/permissions/ssh_keys/update'
69 69
70 70 }[name].format(**kwargs)
71 71
72 72 if params:
73 73 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
74 74 return base_url
75 75
76 76
77 77 class TestAdminPermissionsController(TestController):
78 78
79 79 @pytest.fixture(scope='class', autouse=True)
80 80 def prepare(self, request):
81 81 # cleanup and reset to default permissions after
82 82 @request.addfinalizer
83 83 def cleanup():
84 84 PermissionModel().create_default_user_permissions(
85 85 User.get_default_user(), force=True)
86 86
87 87 def test_index_application(self):
88 88 self.log_user()
89 89 self.app.get(route_path('admin_permissions_application'))
90 90
91 91 @pytest.mark.parametrize(
92 92 'anonymous, default_register, default_register_message, default_password_reset,'
93 93 'default_extern_activate, expect_error, expect_form_error', [
94 94 (True, 'hg.register.none', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
95 95 False, False),
96 96 (True, 'hg.register.manual_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.auto',
97 97 False, False),
98 98 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
99 99 False, False),
100 100 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
101 101 False, False),
102 102 (True, 'hg.register.XXX', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
103 103 False, True),
104 104 (True, '', '', 'hg.password_reset.enabled', '', True, False),
105 105 ])
106 106 def test_update_application_permissions(
107 107 self, anonymous, default_register, default_register_message, default_password_reset,
108 108 default_extern_activate, expect_error, expect_form_error):
109 109
110 110 self.log_user()
111 111
112 112 # TODO: setting anonymous access to False here breaks some other tests
113 113 params = {
114 114 'csrf_token': self.csrf_token,
115 115 'anonymous': anonymous,
116 116 'default_register': default_register,
117 117 'default_register_message': default_register_message,
118 118 'default_password_reset': default_password_reset,
119 119 'default_extern_activate': default_extern_activate,
120 120 }
121 121 response = self.app.post(route_path('admin_permissions_application_update'),
122 122 params=params)
123 123 if expect_form_error:
124 124 assert response.status_int == 200
125 125 response.mustcontain('Value must be one of')
126 126 else:
127 127 if expect_error:
128 128 msg = 'Error occurred during update of permissions'
129 129 else:
130 130 msg = 'Application permissions updated successfully'
131 131 assert_session_flash(response, msg)
132 132
133 133 def test_index_object(self):
134 134 self.log_user()
135 135 self.app.get(route_path('admin_permissions_object'))
136 136
137 137 @pytest.mark.parametrize(
138 138 'repo, repo_group, user_group, expect_error, expect_form_error', [
139 139 ('repository.none', 'group.none', 'usergroup.none', False, False),
140 140 ('repository.read', 'group.read', 'usergroup.read', False, False),
141 141 ('repository.write', 'group.write', 'usergroup.write',
142 142 False, False),
143 143 ('repository.admin', 'group.admin', 'usergroup.admin',
144 144 False, False),
145 145 ('repository.XXX', 'group.admin', 'usergroup.admin', False, True),
146 146 ('', '', '', True, False),
147 147 ])
148 148 def test_update_object_permissions(self, repo, repo_group, user_group,
149 149 expect_error, expect_form_error):
150 150 self.log_user()
151 151
152 152 params = {
153 153 'csrf_token': self.csrf_token,
154 154 'default_repo_perm': repo,
155 155 'overwrite_default_repo': False,
156 156 'default_group_perm': repo_group,
157 157 'overwrite_default_group': False,
158 158 'default_user_group_perm': user_group,
159 159 'overwrite_default_user_group': False,
160 160 }
161 161 response = self.app.post(route_path('admin_permissions_object_update'),
162 162 params=params)
163 163 if expect_form_error:
164 164 assert response.status_int == 200
165 165 response.mustcontain('Value must be one of')
166 166 else:
167 167 if expect_error:
168 168 msg = 'Error occurred during update of permissions'
169 169 else:
170 170 msg = 'Object permissions updated successfully'
171 171 assert_session_flash(response, msg)
172 172
173 173 def test_index_global(self):
174 174 self.log_user()
175 175 self.app.get(route_path('admin_permissions_global'))
176 176
177 177 @pytest.mark.parametrize(
178 178 'repo_create, repo_create_write, user_group_create, repo_group_create,'
179 179 'fork_create, inherit_default_permissions, expect_error,'
180 180 'expect_form_error', [
181 181 ('hg.create.none', 'hg.create.write_on_repogroup.false',
182 182 'hg.usergroup.create.false', 'hg.repogroup.create.false',
183 183 'hg.fork.none', 'hg.inherit_default_perms.false', False, False),
184 184 ('hg.create.repository', 'hg.create.write_on_repogroup.true',
185 185 'hg.usergroup.create.true', 'hg.repogroup.create.true',
186 186 'hg.fork.repository', 'hg.inherit_default_perms.false',
187 187 False, False),
188 188 ('hg.create.XXX', 'hg.create.write_on_repogroup.true',
189 189 'hg.usergroup.create.true', 'hg.repogroup.create.true',
190 190 'hg.fork.repository', 'hg.inherit_default_perms.false',
191 191 False, True),
192 192 ('', '', '', '', '', '', True, False),
193 193 ])
194 194 def test_update_global_permissions(
195 195 self, repo_create, repo_create_write, user_group_create,
196 196 repo_group_create, fork_create, inherit_default_permissions,
197 197 expect_error, expect_form_error):
198 198 self.log_user()
199 199
200 200 params = {
201 201 'csrf_token': self.csrf_token,
202 202 'default_repo_create': repo_create,
203 203 'default_repo_create_on_write': repo_create_write,
204 204 'default_user_group_create': user_group_create,
205 205 'default_repo_group_create': repo_group_create,
206 206 'default_fork_create': fork_create,
207 207 'default_inherit_default_permissions': inherit_default_permissions
208 208 }
209 209 response = self.app.post(route_path('admin_permissions_global_update'),
210 210 params=params)
211 211 if expect_form_error:
212 212 assert response.status_int == 200
213 213 response.mustcontain('Value must be one of')
214 214 else:
215 215 if expect_error:
216 216 msg = 'Error occurred during update of permissions'
217 217 else:
218 218 msg = 'Global permissions updated successfully'
219 219 assert_session_flash(response, msg)
220 220
221 221 def test_index_ips(self):
222 222 self.log_user()
223 223 response = self.app.get(route_path('admin_permissions_ips'))
224 # TODO: Test response...
225 224 response.mustcontain('All IP addresses are allowed')
226 225
227 226 def test_add_delete_ips(self):
227 clear_cache_regions(['sql_cache_short'])
228 228 self.log_user()
229 clear_all_caches()
230 229
231 230 # ADD
232 231 default_user_id = User.get_default_user().user_id
233 232 self.app.post(
234 233 route_path('edit_user_ips_add', user_id=default_user_id),
235 params={'new_ip': '127.0.0.0/24', 'csrf_token': self.csrf_token})
234 params={'new_ip': '0.0.0.0/24', 'csrf_token': self.csrf_token})
236 235
237 236 response = self.app.get(route_path('admin_permissions_ips'))
238 response.mustcontain('127.0.0.0/24')
239 response.mustcontain('127.0.0.0 - 127.0.0.255')
237 response.mustcontain('0.0.0.0/24')
238 response.mustcontain('0.0.0.0 - 0.0.0.255')
240 239
241 240 # DELETE
242 241 default_user_id = User.get_default_user().user_id
243 242 del_ip_id = UserIpMap.query().filter(UserIpMap.user_id ==
244 243 default_user_id).first().ip_id
245 244
246 245 response = self.app.post(
247 246 route_path('edit_user_ips_delete', user_id=default_user_id),
248 247 params={'del_ip_id': del_ip_id, 'csrf_token': self.csrf_token})
249 248
250 249 assert_session_flash(response, 'Removed ip address from user whitelist')
251 250
252 clear_all_caches()
251 clear_cache_regions(['sql_cache_short'])
253 252 response = self.app.get(route_path('admin_permissions_ips'))
254 253 response.mustcontain('All IP addresses are allowed')
255 response.mustcontain(no=['127.0.0.0/24'])
256 response.mustcontain(no=['127.0.0.0 - 127.0.0.255'])
254 response.mustcontain(no=['0.0.0.0/24'])
255 response.mustcontain(no=['0.0.0.0 - 0.0.0.255'])
257 256
258 257 def test_index_overview(self):
259 258 self.log_user()
260 259 self.app.get(route_path('admin_permissions_overview'))
261 260
262 261 def test_ssh_keys(self):
263 262 self.log_user()
264 263 self.app.get(route_path('admin_permissions_ssh_keys'), status=200)
265 264
266 265 def test_ssh_keys_data(self, user_util, xhr_header):
267 266 self.log_user()
268 267 response = self.app.get(route_path('admin_permissions_ssh_keys_data'),
269 268 extra_environ=xhr_header)
270 269 assert response.json == {u'data': [], u'draw': None,
271 270 u'recordsFiltered': 0, u'recordsTotal': 0}
272 271
273 272 dummy_user = user_util.create_user()
274 273 SshKeyModel().create(dummy_user, 'ab:cd:ef', 'KEYKEY', 'test_key')
275 274 Session().commit()
276 275 response = self.app.get(route_path('admin_permissions_ssh_keys_data'),
277 276 extra_environ=xhr_header)
278 277 assert response.json['data'][0]['fingerprint'] == 'ab:cd:ef'
279 278
280 279 def test_ssh_keys_update(self):
281 280 self.log_user()
282 281 response = self.app.post(
283 282 route_path('admin_permissions_ssh_keys_update'),
284 283 dict(csrf_token=self.csrf_token), status=302)
285 284
286 285 assert_session_flash(
287 286 response, 'Updated SSH keys file')
288 287
289 288 def test_ssh_keys_update_disabled(self):
290 289 self.log_user()
291 290
292 291 from rhodecode.apps.admin.views.permissions import AdminPermissionsView
293 292 with mock.patch.object(AdminPermissionsView, 'ssh_enabled',
294 293 return_value=False):
295 294 response = self.app.post(
296 295 route_path('admin_permissions_ssh_keys_update'),
297 296 dict(csrf_token=self.csrf_token), status=302)
298 297
299 298 assert_session_flash(
300 299 response, 'SSH key support is disabled in .ini file') No newline at end of file
@@ -1,121 +1,121 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib import helpers as h
24 24 from rhodecode.tests import (
25 TestController, clear_all_caches,
25 TestController, clear_cache_regions,
26 26 TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
27 27 from rhodecode.tests.fixture import Fixture
28 28 from rhodecode.tests.utils import AssertResponse
29 29
30 30 fixture = Fixture()
31 31
32 32
33 33 def route_path(name, params=None, **kwargs):
34 34 import urllib
35 35 from rhodecode.apps._base import ADMIN_PREFIX
36 36
37 37 base_url = {
38 38 'login': ADMIN_PREFIX + '/login',
39 39 'logout': ADMIN_PREFIX + '/logout',
40 40 'register': ADMIN_PREFIX + '/register',
41 41 'reset_password':
42 42 ADMIN_PREFIX + '/password_reset',
43 43 'reset_password_confirmation':
44 44 ADMIN_PREFIX + '/password_reset_confirmation',
45 45
46 46 'admin_permissions_application':
47 47 ADMIN_PREFIX + '/permissions/application',
48 48 'admin_permissions_application_update':
49 49 ADMIN_PREFIX + '/permissions/application/update',
50 50 }[name].format(**kwargs)
51 51
52 52 if params:
53 53 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
54 54 return base_url
55 55
56 56
57 57 class TestPasswordReset(TestController):
58 58
59 59 @pytest.mark.parametrize(
60 60 'pwd_reset_setting, show_link, show_reset', [
61 61 ('hg.password_reset.enabled', True, True),
62 62 ('hg.password_reset.hidden', False, True),
63 63 ('hg.password_reset.disabled', False, False),
64 64 ])
65 65 def test_password_reset_settings(
66 66 self, pwd_reset_setting, show_link, show_reset):
67 clear_all_caches()
67 clear_cache_regions()
68 68 self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
69 69 params = {
70 70 'csrf_token': self.csrf_token,
71 71 'anonymous': 'True',
72 72 'default_register': 'hg.register.auto_activate',
73 73 'default_register_message': '',
74 74 'default_password_reset': pwd_reset_setting,
75 75 'default_extern_activate': 'hg.extern_activate.auto',
76 76 }
77 77 resp = self.app.post(route_path('admin_permissions_application_update'), params=params)
78 78 self.logout_user()
79 79
80 80 login_page = self.app.get(route_path('login'))
81 81 asr_login = AssertResponse(login_page)
82 82 index_page = self.app.get(h.route_path('home'))
83 83 asr_index = AssertResponse(index_page)
84 84
85 85 if show_link:
86 86 asr_login.one_element_exists('a.pwd_reset')
87 87 asr_index.one_element_exists('a.pwd_reset')
88 88 else:
89 89 asr_login.no_element_exists('a.pwd_reset')
90 90 asr_index.no_element_exists('a.pwd_reset')
91 91
92 92 response = self.app.get(route_path('reset_password'))
93 93
94 94 assert_response = AssertResponse(response)
95 95 if show_reset:
96 96 response.mustcontain('Send password reset email')
97 97 assert_response.one_element_exists('#email')
98 98 assert_response.one_element_exists('#send')
99 99 else:
100 100 response.mustcontain('Password reset is disabled.')
101 101 assert_response.no_element_exists('#email')
102 102 assert_response.no_element_exists('#send')
103 103
104 104 def test_password_form_disabled(self):
105 105 self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
106 106 params = {
107 107 'csrf_token': self.csrf_token,
108 108 'anonymous': 'True',
109 109 'default_register': 'hg.register.auto_activate',
110 110 'default_register_message': '',
111 111 'default_password_reset': 'hg.password_reset.disabled',
112 112 'default_extern_activate': 'hg.extern_activate.auto',
113 113 }
114 114 self.app.post(route_path('admin_permissions_application_update'), params=params)
115 115 self.logout_user()
116 116
117 117 response = self.app.post(
118 118 route_path('reset_password'), {'email': 'lisa@rhodecode.com',}
119 119 )
120 120 response = response.follow()
121 121 response.mustcontain('Password reset is disabled.')
@@ -1,180 +1,179 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import colander
22 22 import formencode.htmlfill
23 23 import logging
24 24
25 25 from pyramid.httpexceptions import HTTPFound
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import BaseAppView
30 30 from rhodecode.authentication.base import get_authn_registry
31 31 from rhodecode.lib import helpers as h
32 32 from rhodecode.lib.auth import (
33 33 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
34 from rhodecode.lib.caches import clear_cache_manager
35 34 from rhodecode.model.forms import AuthSettingsForm
36 35 from rhodecode.model.meta import Session
37 36 from rhodecode.model.settings import SettingsModel
38 37
39 38 log = logging.getLogger(__name__)
40 39
41 40
42 41 class AuthnPluginViewBase(BaseAppView):
43 42
44 43 def load_default_context(self):
45 44 c = self._get_local_tmpl_context()
46 45 self.plugin = self.context.plugin
47 46 return c
48 47
49 48 @LoginRequired()
50 49 @HasPermissionAllDecorator('hg.admin')
51 50 def settings_get(self, defaults=None, errors=None):
52 51 """
53 52 View that displays the plugin settings as a form.
54 53 """
55 54 c = self.load_default_context()
56 55 defaults = defaults or {}
57 56 errors = errors or {}
58 57 schema = self.plugin.get_settings_schema()
59 58
60 59 # Compute default values for the form. Priority is:
61 60 # 1. Passed to this method 2. DB value 3. Schema default
62 61 for node in schema:
63 62 if node.name not in defaults:
64 63 defaults[node.name] = self.plugin.get_setting_by_name(
65 64 node.name, node.default)
66 65
67 66 template_context = {
68 67 'defaults': defaults,
69 68 'errors': errors,
70 69 'plugin': self.context.plugin,
71 70 'resource': self.context,
72 71 }
73 72
74 73 return self._get_template_context(c, **template_context)
75 74
76 75 @LoginRequired()
77 76 @HasPermissionAllDecorator('hg.admin')
78 77 @CSRFRequired()
79 78 def settings_post(self):
80 79 """
81 80 View that validates and stores the plugin settings.
82 81 """
83 82 _ = self.request.translate
84 83 self.load_default_context()
85 84 schema = self.plugin.get_settings_schema()
86 85 data = self.request.params
87 86
88 87 try:
89 88 valid_data = schema.deserialize(data)
90 89 except colander.Invalid as e:
91 90 # Display error message and display form again.
92 91 h.flash(
93 92 _('Errors exist when saving plugin settings. '
94 93 'Please check the form inputs.'),
95 94 category='error')
96 95 defaults = {key: data[key] for key in data if key in schema}
97 96 return self.settings_get(errors=e.asdict(), defaults=defaults)
98 97
99 98 # Store validated data.
100 99 for name, value in valid_data.items():
101 100 self.plugin.create_or_update_setting(name, value)
102 101 Session().commit()
103 102
104 103 # Display success message and redirect.
105 104 h.flash(_('Auth settings updated successfully.'), category='success')
106 105 redirect_to = self.request.resource_path(
107 106 self.context, route_name='auth_home')
108 107 return HTTPFound(redirect_to)
109 108
110 109
111 110 class AuthSettingsView(BaseAppView):
112 111 def load_default_context(self):
113 112 c = self._get_local_tmpl_context()
114 113 return c
115 114
116 115 @LoginRequired()
117 116 @HasPermissionAllDecorator('hg.admin')
118 117 def index(self, defaults=None, errors=None, prefix_error=False):
119 118 c = self.load_default_context()
120 119
121 120 defaults = defaults or {}
122 121 authn_registry = get_authn_registry(self.request.registry)
123 122 enabled_plugins = SettingsModel().get_auth_plugins()
124 123
125 124 # Create template context and render it.
126 125 template_context = {
127 126 'resource': self.context,
128 127 'available_plugins': authn_registry.get_plugins(),
129 128 'enabled_plugins': enabled_plugins,
130 129 }
131 130 html = render('rhodecode:templates/admin/auth/auth_settings.mako',
132 131 self._get_template_context(c, **template_context),
133 132 self.request)
134 133
135 134 # Create form default values and fill the form.
136 135 form_defaults = {
137 136 'auth_plugins': ',\n'.join(enabled_plugins)
138 137 }
139 138 form_defaults.update(defaults)
140 139 html = formencode.htmlfill.render(
141 140 html,
142 141 defaults=form_defaults,
143 142 errors=errors,
144 143 prefix_error=prefix_error,
145 144 encoding="UTF-8",
146 145 force_defaults=False)
147 146
148 147 return Response(html)
149 148
150 149 @LoginRequired()
151 150 @HasPermissionAllDecorator('hg.admin')
152 151 @CSRFRequired()
153 152 def auth_settings(self):
154 153 _ = self.request.translate
155 154 try:
156 155 form = AuthSettingsForm(self.request.translate)()
157 156 form_result = form.to_python(self.request.POST)
158 157 plugins = ','.join(form_result['auth_plugins'])
159 158 setting = SettingsModel().create_or_update_setting(
160 159 'auth_plugins', plugins)
161 160 Session().add(setting)
162 161 Session().commit()
163 162
164 163 h.flash(_('Auth settings updated successfully.'), category='success')
165 164 except formencode.Invalid as errors:
166 165 e = errors.error_dict or {}
167 166 h.flash(_('Errors exist when saving plugin setting. '
168 167 'Please check the form inputs.'), category='error')
169 168 return self.index(
170 169 defaults=errors.value,
171 170 errors=e,
172 171 prefix_error=False)
173 172 except Exception:
174 173 log.exception('Exception in auth_settings')
175 174 h.flash(_('Error occurred during update of auth settings.'),
176 175 category='error')
177 176
178 177 redirect_to = self.request.resource_path(
179 178 self.context, route_name='auth_home')
180 179 return HTTPFound(redirect_to)
@@ -1,483 +1,510 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import logging
23 23 import traceback
24 24 import collections
25 25 import tempfile
26 26
27 27 from paste.gzipper import make_gzip_middleware
28 28 from pyramid.wsgi import wsgiapp
29 29 from pyramid.authorization import ACLAuthorizationPolicy
30 30 from pyramid.config import Configurator
31 31 from pyramid.settings import asbool, aslist
32 32 from pyramid.httpexceptions import (
33 33 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
34 34 from pyramid.events import ApplicationCreated
35 35 from pyramid.renderers import render_to_response
36 36
37 37 from rhodecode.model import meta
38 38 from rhodecode.config import patches
39 39 from rhodecode.config import utils as config_utils
40 40 from rhodecode.config.environment import load_pyramid_environment
41 41
42 42 from rhodecode.lib.middleware.vcs import VCSMiddleware
43 43 from rhodecode.lib.request import Request
44 44 from rhodecode.lib.vcs import VCSCommunicationError
45 45 from rhodecode.lib.exceptions import VCSServerUnavailable
46 46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
47 47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 48 from rhodecode.lib.celerylib.loader import configure_celery
49 49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50 50 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
51 51 from rhodecode.subscribers import (
52 52 scan_repositories_if_enabled, write_js_routes_if_enabled,
53 53 write_metadata_if_needed, inject_app_settings)
54 54
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 def is_http_error(response):
60 60 # error which should have traceback
61 61 return response.status_code > 499
62 62
63 63
64 64 def make_pyramid_app(global_config, **settings):
65 65 """
66 66 Constructs the WSGI application based on Pyramid.
67 67
68 68 Specials:
69 69
70 70 * The application can also be integrated like a plugin via the call to
72 72 `includeme`. This is accompanied by the other utility functions which
72 72 are called. Changing this should be done with great care to not break
73 73 cases when these fragments are assembled from another place.
74 74
75 75 """
76 76
77 77 # Allows using format-style "{ENV_NAME}" placeholders in the configuration. Each one
78 78 # will be replaced by the value of the environment variable "NAME" in this case.
79 79 environ = {
80 80 'ENV_{}'.format(key): value for key, value in os.environ.items()}
81 81
82 82 global_config = _substitute_values(global_config, environ)
83 83 settings = _substitute_values(settings, environ)
84 84
85 85 sanitize_settings_and_apply_defaults(settings)
86 86
87 87 config = Configurator(settings=settings)
88 88
89 89 # Apply compatibility patches
90 90 patches.inspect_getargspec()
91 91
92 92 load_pyramid_environment(global_config, settings)
93 93
94 94 # Static file view comes first
95 95 includeme_first(config)
96 96
97 97 includeme(config)
98 98
99 99 pyramid_app = config.make_wsgi_app()
100 100 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
101 101 pyramid_app.config = config
102 102
103 103 config.configure_celery(global_config['__file__'])
104 104 # creating the app uses a connection - return it after we are done
105 105 meta.Session.remove()
106 106
107 107 log.info('Pyramid app %s created and configured.', pyramid_app)
108 108 return pyramid_app
109 109
110 110
111 111 def not_found_view(request):
112 112 """
113 113 This creates the view which should be registered as not-found-view to
114 114 pyramid.
115 115 """
116 116
117 117 if not getattr(request, 'vcs_call', None):
118 118 # handle like regular case with our error_handler
119 119 return error_handler(HTTPNotFound(), request)
120 120
121 121 # handle not found view as a vcs call
122 122 settings = request.registry.settings
123 123 ae_client = getattr(request, 'ae_client', None)
124 124 vcs_app = VCSMiddleware(
125 125 HTTPNotFound(), request.registry, settings,
126 126 appenlight_client=ae_client)
127 127
128 128 return wsgiapp(vcs_app)(None, request)
129 129
130 130
131 131 def error_handler(exception, request):
132 132 import rhodecode
133 133 from rhodecode.lib import helpers
134 134
135 135 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
136 136
137 137 base_response = HTTPInternalServerError()
138 138 # prefer original exception for the response since it may have headers set
139 139 if isinstance(exception, HTTPException):
140 140 base_response = exception
141 141 elif isinstance(exception, VCSCommunicationError):
142 142 base_response = VCSServerUnavailable()
143 143
144 144 if is_http_error(base_response):
145 145 log.exception(
146 146 'error occurred handling this request for path: %s', request.path)
147 147
148 148 error_explanation = base_response.explanation or str(base_response)
149 149 if base_response.status_code == 404:
150 150 error_explanation += " Or you don't have permission to access it."
151 151 c = AttributeDict()
152 152 c.error_message = base_response.status
153 153 c.error_explanation = error_explanation
154 154 c.visual = AttributeDict()
155 155
156 156 c.visual.rhodecode_support_url = (
157 157 request.registry.settings.get('rhodecode_support_url') or
158 158 request.route_url('rhodecode_support')
159 159 )
160 160 c.redirect_time = 0
161 161 c.rhodecode_name = rhodecode_title
162 162 if not c.rhodecode_name:
163 163 c.rhodecode_name = 'Rhodecode'
164 164
165 165 c.causes = []
166 166 if is_http_error(base_response):
167 167 c.causes.append('Server is overloaded.')
168 168 c.causes.append('Server database connection is lost.')
169 169 c.causes.append('Server experienced an unhandled error.')
170 170
171 171 if hasattr(base_response, 'causes'):
172 172 c.causes = base_response.causes
173 173
174 174 c.messages = helpers.flash.pop_messages(request=request)
175 175 c.traceback = traceback.format_exc()
176 176 response = render_to_response(
177 177 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
178 178 response=base_response)
179 179
180 180 return response
181 181
182 182
183 183 def includeme_first(config):
184 184 # redirect automatic browser favicon.ico requests to correct place
185 185 def favicon_redirect(context, request):
186 186 return HTTPFound(
187 187 request.static_path('rhodecode:public/images/favicon.ico'))
188 188
189 189 config.add_view(favicon_redirect, route_name='favicon')
190 190 config.add_route('favicon', '/favicon.ico')
191 191
192 192 def robots_redirect(context, request):
193 193 return HTTPFound(
194 194 request.static_path('rhodecode:public/robots.txt'))
195 195
196 196 config.add_view(robots_redirect, route_name='robots')
197 197 config.add_route('robots', '/robots.txt')
198 198
199 199 config.add_static_view(
200 200 '_static/deform', 'deform:static')
201 201 config.add_static_view(
202 202 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
203 203
204 204
205 205 def includeme(config):
206 206 settings = config.registry.settings
207 207 config.set_request_factory(Request)
208 208
209 209 # plugin information
210 210 config.registry.rhodecode_plugins = collections.OrderedDict()
211 211
212 212 config.add_directive(
213 213 'register_rhodecode_plugin', register_rhodecode_plugin)
214 214
215 215 config.add_directive('configure_celery', configure_celery)
216 216
217 217 if asbool(settings.get('appenlight', 'false')):
218 218 config.include('appenlight_client.ext.pyramid_tween')
219 219
220 220 # Includes which are required. The application would fail without them.
221 221 config.include('pyramid_mako')
222 222 config.include('pyramid_beaker')
223 223 config.include('rhodecode.lib.caches')
224 224 config.include('rhodecode.lib.rc_cache')
225 225
226 226 config.include('rhodecode.authentication')
227 227 config.include('rhodecode.integrations')
228 228
229 229 # apps
230 230 config.include('rhodecode.apps._base')
231 231 config.include('rhodecode.apps.ops')
232 232
233 233 config.include('rhodecode.apps.admin')
234 234 config.include('rhodecode.apps.channelstream')
235 235 config.include('rhodecode.apps.login')
236 236 config.include('rhodecode.apps.home')
237 237 config.include('rhodecode.apps.journal')
238 238 config.include('rhodecode.apps.repository')
239 239 config.include('rhodecode.apps.repo_group')
240 240 config.include('rhodecode.apps.user_group')
241 241 config.include('rhodecode.apps.search')
242 242 config.include('rhodecode.apps.user_profile')
243 243 config.include('rhodecode.apps.user_group_profile')
244 244 config.include('rhodecode.apps.my_account')
245 245 config.include('rhodecode.apps.svn_support')
246 246 config.include('rhodecode.apps.ssh_support')
247 247 config.include('rhodecode.apps.gist')
248 248
249 249 config.include('rhodecode.apps.debug_style')
250 250 config.include('rhodecode.tweens')
251 251 config.include('rhodecode.api')
252 252
253 253 config.add_route(
254 254 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
255 255
256 256 config.add_translation_dirs('rhodecode:i18n/')
257 257 settings['default_locale_name'] = settings.get('lang', 'en')
258 258
259 259 # Add subscribers.
260 260 config.add_subscriber(inject_app_settings, ApplicationCreated)
261 261 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
262 262 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
263 263 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
264 264
265 265 # events
266 266 # TODO(marcink): this should be done when pyramid migration is finished
267 267 # config.add_subscriber(
268 268 # 'rhodecode.integrations.integrations_event_handler',
269 269 # 'rhodecode.events.RhodecodeEvent')
270 270
271 271 # request custom methods
272 272 config.add_request_method(
273 273 'rhodecode.lib.partial_renderer.get_partial_renderer',
274 274 'get_partial_renderer')
275 275
276 276 # Set the authorization policy.
277 277 authz_policy = ACLAuthorizationPolicy()
278 278 config.set_authorization_policy(authz_policy)
279 279
280 280 # Set the default renderer for HTML templates to mako.
281 281 config.add_mako_renderer('.html')
282 282
283 283 config.add_renderer(
284 284 name='json_ext',
285 285 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
286 286
287 287 # include RhodeCode plugins
288 288 includes = aslist(settings.get('rhodecode.includes', []))
289 289 for inc in includes:
290 290 config.include(inc)
291 291
292 292 # custom not found view; if our pyramid app doesn't know how to handle
293 293 # the request, pass it to the potential VCS handling app
294 294 config.add_notfound_view(not_found_view)
295 295 if not settings.get('debugtoolbar.enabled', False):
296 296 # with debugtoolbar disabled, handle all exceptions via the error_handlers
297 297 config.add_view(error_handler, context=Exception)
298 298
299 299 # all errors including 403/404/50X
300 300 config.add_view(error_handler, context=HTTPError)
301 301
302 302
303 303 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
304 304 """
305 305 Apply outer WSGI middlewares around the application.
306 306 """
307 307 settings = config.registry.settings
308 308
309 309 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
310 310 pyramid_app = HttpsFixup(pyramid_app, settings)
311 311
312 312 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
313 313 pyramid_app, settings)
314 314 config.registry.ae_client = _ae_client
315 315
316 316 if settings['gzip_responses']:
317 317 pyramid_app = make_gzip_middleware(
318 318 pyramid_app, settings, compress_level=1)
319 319
320 320 # this should be the outermost middleware in the wsgi stack since
321 321 # middleware like Routes make database calls
322 322 def pyramid_app_with_cleanup(environ, start_response):
323 323 try:
324 324 return pyramid_app(environ, start_response)
325 325 finally:
326 326 # Dispose current database session and rollback uncommitted
327 327 # transactions.
328 328 meta.Session.remove()
329 329
330 330 # In a single threaded mode server, on non sqlite db we should have
331 331 # '0 Current Checked out connections' at the end of a request,
332 332 # if not, then something, somewhere is leaving a connection open
333 333 pool = meta.Base.metadata.bind.engine.pool
334 334 log.debug('sa pool status: %s', pool.status())
335 335
336 336 return pyramid_app_with_cleanup
337 337
338 338
339 339 def sanitize_settings_and_apply_defaults(settings):
340 340 """
341 341 Applies settings defaults and does all type conversion.
342 342
343 343 We would move all settings parsing and preparation into this place, so that
344 344 we have only one place left which deals with this part. The remaining parts
345 345 of the application would start to rely fully on well prepared settings.
346 346
347 347 This piece would later be split up per topic to avoid a big fat monster
348 348 function.
349 349 """
350 350
351 351 settings.setdefault('rhodecode.edition', 'Community Edition')
352 352
353 353 if 'mako.default_filters' not in settings:
354 354 # set custom default filters if we don't have it defined
355 355 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
356 356 settings['mako.default_filters'] = 'h_filter'
357 357
358 358 if 'mako.directories' not in settings:
359 359 mako_directories = settings.setdefault('mako.directories', [
360 360 # Base templates of the original application
361 361 'rhodecode:templates',
362 362 ])
363 363 log.debug(
364 364 "Using the following Mako template directories: %s",
365 365 mako_directories)
366 366
367 367 # Default includes, possible to change as a user
368 368 pyramid_includes = settings.setdefault('pyramid.includes', [
369 369 'rhodecode.lib.middleware.request_wrapper',
370 370 ])
371 371 log.debug(
372 372 "Using the following pyramid.includes: %s",
373 373 pyramid_includes)
374 374
375 375 # TODO: johbo: Re-think this, usually the call to config.include
376 376 # should allow to pass in a prefix.
377 377 settings.setdefault('rhodecode.api.url', '/_admin/api')
378 378
379 379 # Sanitize generic settings.
380 380 _list_setting(settings, 'default_encoding', 'UTF-8')
381 381 _bool_setting(settings, 'is_test', 'false')
382 382 _bool_setting(settings, 'gzip_responses', 'false')
383 383
384 384 # Call split out functions that sanitize settings for each topic.
385 385 _sanitize_appenlight_settings(settings)
386 386 _sanitize_vcs_settings(settings)
387 387 _sanitize_cache_settings(settings)
388 388
389 389 # configure instance id
390 390 config_utils.set_instance_id(settings)
391 391
392 392 return settings
393 393
394 394
395 395 def _sanitize_appenlight_settings(settings):
396 396 _bool_setting(settings, 'appenlight', 'false')
397 397
398 398
399 399 def _sanitize_vcs_settings(settings):
400 400 """
401 401 Applies settings defaults and does type conversion for all VCS related
402 402 settings.
403 403 """
404 404 _string_setting(settings, 'vcs.svn.compatible_version', '')
405 405 _string_setting(settings, 'git_rev_filter', '--all')
406 406 _string_setting(settings, 'vcs.hooks.protocol', 'http')
407 407 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
408 408 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
409 409 _string_setting(settings, 'vcs.server', '')
410 410 _string_setting(settings, 'vcs.server.log_level', 'debug')
411 411 _string_setting(settings, 'vcs.server.protocol', 'http')
412 412 _bool_setting(settings, 'startup.import_repos', 'false')
413 413 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
414 414 _bool_setting(settings, 'vcs.server.enable', 'true')
415 415 _bool_setting(settings, 'vcs.start_server', 'false')
416 416 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
417 417 _int_setting(settings, 'vcs.connection_timeout', 3600)
418 418
419 419 # Support legacy values of vcs.scm_app_implementation. Legacy
420 420 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
421 421 # which is now mapped to 'http'.
422 422 scm_app_impl = settings['vcs.scm_app_implementation']
423 423 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
424 424 settings['vcs.scm_app_implementation'] = 'http'
425 425
426 426
427 427 def _sanitize_cache_settings(settings):
428 428 _string_setting(settings, 'cache_dir',
429 429 os.path.join(tempfile.gettempdir(), 'rc_cache'))
430 # cache_perms
431 _string_setting(
432 settings,
433 'rc_cache.cache_perms.backend',
434 'dogpile.cache.rc.file_namespace')
435 _int_setting(
436 settings,
437 'rc_cache.cache_perms.expiration_time',
438 60)
439 _string_setting(
440 settings,
441 'rc_cache.cache_perms.arguments.filename',
442 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
430 443
431 _string_setting(settings, 'rc_cache.cache_perms.backend',
432 'dogpile.cache.rc.file_namespace')
433 _int_setting(settings, 'rc_cache.cache_perms.expiration_time',
434 60)
435 _string_setting(settings, 'rc_cache.cache_perms.arguments.filename',
436 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
444 # cache_repo
445 _string_setting(
446 settings,
447 'rc_cache.cache_repo.backend',
448 'dogpile.cache.rc.file_namespace')
449 _int_setting(
450 settings,
451 'rc_cache.cache_repo.expiration_time',
452 60)
453 _string_setting(
454 settings,
455 'rc_cache.cache_repo.arguments.filename',
456 os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
437 457
438 _string_setting(settings, 'rc_cache.cache_repo.backend',
439 'dogpile.cache.rc.file_namespace')
440 _int_setting(settings, 'rc_cache.cache_repo.expiration_time',
441 60)
442 _string_setting(settings, 'rc_cache.cache_repo.arguments.filename',
443 os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
458 # sql_cache_short
459 _string_setting(
460 settings,
461 'rc_cache.sql_cache_short.backend',
462 'dogpile.cache.rc.memory_lru')
463 _int_setting(
464 settings,
465 'rc_cache.sql_cache_short.expiration_time',
466 30)
467 _int_setting(
468 settings,
469 'rc_cache.sql_cache_short.max_size',
470 10000)
444 471
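# Note: the rc_cache.* settings above only prime the configuration dict; the
# actual dogpile.cache regions are built elsewhere (rhodecode.lib.rc_cache).
# A hedged, standalone sketch of what an in-memory region with a 30 second
# TTL amounts to in plain dogpile.cache terms -- using the stock
# 'dogpile.cache.memory' backend as a stand-in for the custom
# 'dogpile.cache.rc.memory_lru' one, and an invented load_setting() creator:

from dogpile.cache import make_region

sql_cache_short = make_region().configure(
    'dogpile.cache.memory',   # stand-in backend for this sketch
    expiration_time=30,       # mirrors rc_cache.sql_cache_short.expiration_time
)

def load_setting(name):
    # placeholder for an expensive SQL lookup
    return name.upper()

# computed once, then served from the region until the TTL expires or the
# key is explicitly invalidated/deleted
value = sql_cache_short.get_or_create('app_title', lambda: load_setting('app_title'))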
445 472
446 473 def _int_setting(settings, name, default):
447 474 settings[name] = int(settings.get(name, default))
448 475
449 476
450 477 def _bool_setting(settings, name, default):
451 478 input_val = settings.get(name, default)
452 479 if isinstance(input_val, unicode):
453 480 input_val = input_val.encode('utf8')
454 481 settings[name] = asbool(input_val)
455 482
456 483
457 484 def _list_setting(settings, name, default):
458 485 raw_value = settings.get(name, default)
459 486
460 487 old_separator = ','
461 488 if old_separator in raw_value:
462 489 # If we get a comma separated list, pass it to our own function.
463 490 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
464 491 else:
465 492 # Otherwise we assume it uses pyramids space/newline separation.
466 493 settings[name] = aslist(raw_value)
467 494
468 495
469 496 def _string_setting(settings, name, default, lower=True):
470 497 value = settings.get(name, default)
471 498 if lower:
472 499 value = value.lower()
473 500 settings[name] = value
474 501
475 502
476 503 def _substitute_values(mapping, substitutions):
477 504 result = {
478 505 # Note: Cannot use regular replacements, since they would clash
479 506 # with the implementation of ConfigParser. Using "format" instead.
480 507 key: value.format(**substitutions)
481 508 for key, value in mapping.items()
482 509 }
483 510 return result
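For reference, a minimal standalone sketch of the `{ENV_NAME}` placeholder substitution that `_substitute_values` performs above (the names below are illustrative only; the real helper operates on the parsed ini mappings):

    import os

    def substitute_env_placeholders(mapping):
        # build the same ENV_<NAME> view of the environment the app factory uses,
        # then expand "{ENV_NAME}" style placeholders via str.format
        environ = {'ENV_{}'.format(k): v for k, v in os.environ.items()}
        return {key: value.format(**environ) for key, value in mapping.items()}

    os.environ.setdefault('DB_URL', 'postgresql://user:secret@localhost/rhodecode')
    print(substitute_env_placeholders({'sqlalchemy.db1.url': '{ENV_DB_URL}'}))
    # -> {'sqlalchemy.db1.url': 'postgresql://user:secret@localhost/rhodecode'}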
@@ -1,226 +1,188 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import functools
21 21
22 22 import beaker
23 23 import logging
24 24 import threading
25 25
26 from beaker.cache import _cache_decorate, cache_regions, region_invalidate
26 from beaker.cache import _cache_decorate, region_invalidate
27 27 from sqlalchemy.exc import IntegrityError
28 28
29 29 from rhodecode.lib.utils import safe_str, sha1
30 30 from rhodecode.model.db import Session, CacheKey
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 DEFAULT_CACHE_MANAGER_CONFIG = {
36 36 'type': 'memorylru_base',
37 37 'max_items': 10240,
38 38 'key_length': 256,
39 39 'enabled': True
40 40 }
41 41
42 42
43 43 def get_default_cache_settings(settings):
44 44 cache_settings = {}
45 45 for key in settings.keys():
46 46 for prefix in ['beaker.cache.', 'cache.']:
47 47 if key.startswith(prefix):
48 48 name = key.split(prefix)[1].strip()
49 49 cache_settings[name] = settings[key].strip()
50 50 return cache_settings
51 51
52 52
53 53 # set cache regions for beaker so celery can utilise it
54 54 def configure_caches(settings, default_region_settings=None):
55 55 cache_settings = {'regions': None}
56 56 # main cache settings used as default ...
57 57 cache_settings.update(get_default_cache_settings(settings))
58 58 default_region_settings = default_region_settings or \
59 59 {'type': DEFAULT_CACHE_MANAGER_CONFIG['type']}
60 60 if cache_settings['regions']:
61 61 for region in cache_settings['regions'].split(','):
62 62 region = region.strip()
63 63 region_settings = default_region_settings.copy()
64 64 for key, value in cache_settings.items():
65 65 if key.startswith(region):
66 66 region_settings[key.split(region + '.')[-1]] = value
67 67 log.debug('Configuring cache region `%s` with settings %s',
68 68 region, region_settings)
69 69 configure_cache_region(
70 70 region, region_settings, cache_settings)
71 71
72 72
73 73 def configure_cache_region(
74 74 region_name, region_settings, default_cache_kw, default_expire=60):
75 75 default_type = default_cache_kw.get('type', 'memory')
76 76 default_lock_dir = default_cache_kw.get('lock_dir')
77 77 default_data_dir = default_cache_kw.get('data_dir')
78 78
79 79 region_settings['lock_dir'] = region_settings.get('lock_dir', default_lock_dir)
80 80 region_settings['data_dir'] = region_settings.get('data_dir', default_data_dir)
81 81 region_settings['type'] = region_settings.get('type', default_type)
82 82 region_settings['expire'] = int(region_settings.get('expire', default_expire))
83 83
84 84 beaker.cache.cache_regions[region_name] = region_settings
85 85
86 86
87 def get_cache_manager(region_name, cache_name, custom_ttl=None):
88 """
89 Creates a Beaker cache manager. Such instance can be used like that::
90
91 _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
92 cache_manager = caches.get_cache_manager('some_namespace_name', _namespace)
93 _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
94 def heavy_compute():
95 ...
96 result = cache_manager.get(_cache_key, createfunc=heavy_compute)
97
98 :param region_name: region from ini file
99 :param cache_name: custom cache name, usually prefix+repo_name. eg
100 file_switcher_repo1
101 :param custom_ttl: override .ini file timeout on this cache
102 :return: instance of cache manager
103 """
104
105 cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
106 if custom_ttl:
107 log.debug('Updating region %s with custom ttl: %s',
108 region_name, custom_ttl)
109 cache_config.update({'expire': custom_ttl})
110
111 return beaker.cache.Cache._get_cache(cache_name, cache_config)
112
113
114 def clear_cache_manager(cache_manager):
115 """
116 namespace = 'foobar'
117 cache_manager = get_cache_manager('some_namespace_name', namespace)
118 clear_cache_manager(cache_manager)
119 """
120
121 log.debug('Clearing all values for cache manager %s', cache_manager)
122 cache_manager.clear()
123
124
125 87 def compute_key_from_params(*args):
126 88 """
127 89 Helper to compute a cache key from the given params, to be used in the cache manager
128 90 """
129 91 return sha1("_".join(map(safe_str, args)))
130 92
131 93
132 94 def get_repo_namespace_key(prefix, repo_name):
133 95 return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))
134 96
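# A hedged, standalone illustration of the two helpers above: the cache key is
# just the sha1 hex digest of the underscore-joined parameters, optionally
# combined with a namespace prefix (hashlib is used here in place of the
# project's safe_str/sha1 wrappers):

import hashlib

def _demo_key(*args):
    # mirrors compute_key_from_params(): sha1("_".join(map(safe_str, args)))
    return hashlib.sha1('_'.join(str(a) for a in args).encode('utf8')).hexdigest()

print(_demo_key('repo1', 'readme'))                       # parameter-based key
print('{0}_{1}'.format('file_tree', _demo_key('repo1')))  # get_repo_namespace_key() shape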
135 97
136 98 class ActiveRegionCache(object):
137 99 def __init__(self, context):
138 100 self.context = context
139 101
140 102 def invalidate(self, *args, **kwargs):
141 103 return False
142 104
143 105 def compute(self):
144 106 log.debug('Context cache: getting obj %s from cache', self.context)
145 107 return self.context.compute_func(self.context.cache_key)
146 108
147 109
148 110 class FreshRegionCache(ActiveRegionCache):
149 111 def invalidate(self):
150 112 log.debug('Context cache: invalidating cache for %s', self.context)
151 113 region_invalidate(
152 114 self.context.compute_func, None, self.context.cache_key)
153 115 return True
154 116
155 117
156 118 class InvalidationContext(object):
157 119 def __repr__(self):
158 120 return '<InvalidationContext:{}[{}]>'.format(
159 121 safe_str(self.repo_name), safe_str(self.cache_type))
160 122
161 123 def __init__(self, compute_func, repo_name, cache_type,
162 124 raise_exception=False, thread_scoped=False):
163 125 self.compute_func = compute_func
164 126 self.repo_name = repo_name
165 127 self.cache_type = cache_type
166 128 self.cache_key = compute_key_from_params(
167 129 repo_name, cache_type)
168 130 self.raise_exception = raise_exception
169 131
170 132 # Append the thread id to the cache key if this invalidation context
171 133 # should be scoped to the current thread.
172 134 if thread_scoped:
173 135 thread_id = threading.current_thread().ident
174 136 self.cache_key = '{cache_key}_{thread_id}'.format(
175 137 cache_key=self.cache_key, thread_id=thread_id)
176 138
177 139 def get_cache_obj(self):
178 140 cache_key = CacheKey.get_cache_key(
179 141 self.repo_name, self.cache_type)
180 142 cache_obj = CacheKey.get_active_cache(cache_key)
181 143 if not cache_obj:
182 144 cache_obj = CacheKey(cache_key, self.repo_name)
183 145 return cache_obj
184 146
185 147 def __enter__(self):
186 148 """
187 149 Test if the current object is valid, and return a CacheRegion helper
188 150 that handles invalidation and calculation
189 151 """
190 152
191 153 self.cache_obj = self.get_cache_obj()
192 154 if self.cache_obj.cache_active:
193 155 # means our cache obj exists and is marked as active, i.e. its
194 156 # cache is not outdated; we return ActiveRegionCache
195 157 self.skip_cache_active_change = True
196 158 return ActiveRegionCache(self)
197 159
198 160 # the key either does not exist or is set to False, so we return
199 161 # the real invalidator which re-computes the value. We additionally set
200 162 # the flag to actually update the Database objects
201 163 self.skip_cache_active_change = False
202 164 return FreshRegionCache(self)
203 165
204 166 def __exit__(self, exc_type, exc_val, exc_tb):
205 167
206 168 if self.skip_cache_active_change:
207 169 return
208 170
209 171 try:
210 172 self.cache_obj.cache_active = True
211 173 Session().add(self.cache_obj)
212 174 Session().commit()
213 175 except IntegrityError:
214 176 # if we catch an integrity error, it means this object was already inserted;
215 177 # the assumption is that this is really an edge race-condition case and
216 178 # it is safe to skip it
217 179 Session().rollback()
218 180 except Exception:
219 181 log.exception('Failed to commit on cache key update')
220 182 Session().rollback()
221 183 if self.raise_exception:
222 184 raise
223 185
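A hedged sketch of how this context manager can be driven (the compute function, repo name and cache type are illustrative; the compute function is assumed to be backed by a beaker region so that compute() actually caches):

    def compute_readme(cache_key):
        ...  # expensive rendering, cached under cache_key

    context = InvalidationContext(
        compute_func=compute_readme, repo_name='repo1', cache_type='readme')
    with context as invalidator:
        # no-op for ActiveRegionCache, real invalidation for FreshRegionCache
        invalidator.invalidate()
        result = invalidator.compute()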
224 186
225 187 def includeme(config):
226 188 configure_caches(config.registry.settings)
@@ -1,325 +1,298 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 """
22 caching_query.py
21 """caching_query.py
23 22
24 Represent persistence structures which allow the usage of
25 Beaker caching with SQLAlchemy.
23 Represents functions and classes
24 which allow the usage of Dogpile caching with SQLAlchemy.
25 Introduces a query option called FromCache.
26 26
27 27 The three new concepts introduced here are:
28 28
29 29 * CachingQuery - a Query subclass that caches and
30 retrieves results in/from Beaker.
30 retrieves results in/from dogpile.cache.
31 31 * FromCache - a query option that establishes caching
32 32 parameters on a Query
33 33 * RelationshipCache - a variant of FromCache which is specific
34 34 to a query invoked during a lazy load.
35 35 * _params_from_query - extracts value parameters from
36 36 a Query.
37 37
38 38 The rest of what's here are standard SQLAlchemy and
39 Beaker constructs.
39 dogpile.cache constructs.
40 40
41 41 """
42 import beaker
43 from beaker.exceptions import BeakerException
44
45 42 from sqlalchemy.orm.interfaces import MapperOption
46 43 from sqlalchemy.orm.query import Query
47 44 from sqlalchemy.sql import visitors
45 from dogpile.cache.api import NO_VALUE
48 46
49 47 from rhodecode.lib.utils2 import safe_str
50 48
51 49
52 50 class CachingQuery(Query):
53 """A Query subclass which optionally loads full results from a Beaker
51 """A Query subclass which optionally loads full results from a dogpile
54 52 cache region.
55 53
56 The CachingQuery stores additional state that allows it to consult
57 a Beaker cache before accessing the database:
58
59 * A "region", which is a cache region argument passed to a
60 Beaker CacheManager, specifies a particular cache configuration
61 (including backend implementation, expiration times, etc.)
62 * A "namespace", which is a qualifying name that identifies a
63 group of keys within the cache. A query that filters on a name
64 might use the name "by_name", a query that filters on a date range
65 to a joined table might use the name "related_date_range".
66
67 When the above state is present, a Beaker cache is retrieved.
68
69 The "namespace" name is first concatenated with
70 a string composed of the individual entities and columns the Query
71 requests, i.e. such as ``Query(User.id, User.name)``.
72
73 The Beaker cache is then loaded from the cache manager based
74 on the region and composed namespace. The key within the cache
75 itself is then constructed against the bind parameters specified
76 by this query, which are usually literals defined in the
77 WHERE clause.
54 The CachingQuery optionally stores additional state that allows it to consult
55 a dogpile.cache cache before accessing the database, in the form
56 of a FromCache or RelationshipCache object. Each of these objects
57 refers to the name of a :class:`dogpile.cache.Region` that's been configured
58 and stored in a lookup dictionary. When such an object has associated
59 itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region`
60 is used to locate a cached result. If none is present, then the
61 Query is invoked normally, the results being cached.
78 62
79 63 The FromCache and RelationshipCache mapper options below represent
80 64 the "public" method of configuring this state upon the CachingQuery.
81 65
82 66 """
67 def _get_region(self):
68 from rhodecode.lib.rc_cache import region_meta
69 return region_meta.dogpile_cache_regions
83 70
84 def __init__(self, manager, *args, **kw):
85 self.cache_manager = manager
71 def __init__(self, regions, *args, **kw):
72 self.cache_regions = regions or self._get_region()
86 73 Query.__init__(self, *args, **kw)
87 74
88 75 def __iter__(self):
89 """override __iter__ to pull results from Beaker
76 """override __iter__ to pull results from dogpile
90 77 if particular attributes have been configured.
91 78
92 79 Note that this approach does *not* detach the loaded objects from
93 80 the current session. If the cache backend is an in-process cache
94 81 (like "memory") and lives beyond the scope of the current session's
95 82 transaction, those objects may be expired. The method here can be
96 83 modified to first expunge() each loaded item from the current
97 84 session before returning the list of items, so that the items
98 85 in the cache are not the same ones in the current Session.
99 86
100 87 """
101 if hasattr(self, '_cache_parameters'):
88 super_ = super(CachingQuery, self)
89
90 if hasattr(self, '_cache_region'):
91 return self.get_value(createfunc=lambda: list(super_.__iter__()))
92 else:
93 return super_.__iter__()
94
95 def _execute_and_instances(self, context):
96 """override _execute_and_instances to pull results from dogpile
97 if the query is invoked directly from an external context.
98
99 This method is necessary in order to maintain compatibility
100 with the "baked query" system now used by default in some
101 relationship loader scenarios. Note also the
102 RelationshipCache._generate_cache_key method which enables
103 the baked query to be used within lazy loads.
102 104
103 def caching_query():
104 return list(Query.__iter__(self))
105 .. versionadded:: 1.2.7
106 """
107 super_ = super(CachingQuery, self)
105 108
106 return self.get_value(createfunc=caching_query)
109 if context.query is not self and hasattr(self, '_cache_region'):
110 # special logic called when the Query._execute_and_instances()
111 # method is called directly from the baked query
112 return self.get_value(
113 createfunc=lambda: list(
114 super_._execute_and_instances(context)
115 )
116 )
107 117 else:
108 return Query.__iter__(self)
118 return super_._execute_and_instances(context)
119
120 def _get_cache_plus_key(self):
121 """Return a cache region plus key."""
122 dogpile_region = self.cache_regions[self._cache_region.region]
123 if self._cache_region.cache_key:
124 key = self._cache_region.cache_key
125 else:
126 key = _key_from_query(self)
127 return dogpile_region, key
109 128
110 129 def invalidate(self):
111 """Invalidate the value represented by this Query."""
130 """Invalidate the cache value represented by this Query."""
112 131
113 cache, cache_key = _get_cache_parameters(self)
114 cache.remove(cache_key)
132 dogpile_region, cache_key = self._get_cache_plus_key()
133 dogpile_region.delete(cache_key)
115 134
116 def get_value(self, merge=True, createfunc=None):
135 def get_value(self, merge=True, createfunc=None,
136 expiration_time=None, ignore_expiration=False):
117 137 """Return the value from the cache for this query.
118 138
119 139 Raise KeyError if no value present and no
120 140 createfunc specified.
121 141
122 142 """
123 cache, cache_key = _get_cache_parameters(self)
124 ret = cache.get_value(cache_key, createfunc=createfunc)
143 dogpile_region, cache_key = self._get_cache_plus_key()
144
145 # ignore_expiration means, if the value is in the cache
146 # but is expired, return it anyway. This doesn't make sense
147 # with createfunc, which says, if the value is expired, generate
148 # a new value.
149 assert not ignore_expiration or not createfunc, \
150 "Can't ignore expiration and also provide createfunc"
151
152 if ignore_expiration or not createfunc:
153 cached_value = dogpile_region.get(cache_key,
154 expiration_time=expiration_time,
155 ignore_expiration=ignore_expiration)
156 else:
157 cached_value = dogpile_region.get_or_create(
158 cache_key,
159 createfunc,
160 expiration_time=expiration_time
161 )
162 if cached_value is NO_VALUE:
163 raise KeyError(cache_key)
125 164 if merge:
126 ret = self.merge_result(ret, load=False)
127 return ret
165 cached_value = self.merge_result(cached_value, load=False)
166 return cached_value
128 167
129 168 def set_value(self, value):
130 169 """Set the value in the cache for this query."""
131 170
132 cache, cache_key = _get_cache_parameters(self)
133 cache.put(cache_key, value)
171 dogpile_region, cache_key = self._get_cache_plus_key()
172 dogpile_region.set(cache_key, value)
134 173
135 174
136 def query_callable(manager, query_cls=CachingQuery):
175 def query_callable(regions=None, query_cls=CachingQuery):
137 176 def query(*arg, **kw):
138 return query_cls(manager, *arg, **kw)
177 return query_cls(regions, *arg, **kw)
139 178 return query
140 179
141 180
142 def get_cache_region(name, region):
143 if region not in beaker.cache.cache_regions:
144 raise BeakerException('Cache region `%s` not configured '
145 'Check if proper cache settings are in the .ini files' % region)
146 kw = beaker.cache.cache_regions[region]
147 return beaker.cache.Cache._get_cache(name, kw)
181 def _key_from_query(query, qualifier=None):
182 """Given a Query, create a cache key.
148 183
149
150 def _get_cache_parameters(query):
151 """For a query with cache_region and cache_namespace configured,
152 return the correspoinding Cache instance and cache key, based
153 on this query's current criterion and parameter values.
184 There are many approaches to this; here we use the simplest,
185 which is to create an md5 hash of the text of the SQL statement,
186 combined with stringified versions of all the bound parameters
187 within it. There's a bit of a performance hit with
188 compiling out "query.statement" here; other approaches include
189 setting up an explicit cache key with a particular Query,
190 then combining that with the bound parameter values.
154 191
155 192 """
156 if not hasattr(query, '_cache_parameters'):
157 raise ValueError("This Query does not have caching "
158 "parameters configured.")
159 193
160 region, namespace, cache_key = query._cache_parameters
161
162 namespace = _namespace_from_query(namespace, query)
163
164 if cache_key is None:
165 # cache key - the value arguments from this query's parameters.
166 args = [safe_str(x) for x in _params_from_query(query)]
167 args.extend(filter(lambda k: k not in ['None', None, u'None'],
168 [str(query._limit), str(query._offset)]))
169
170 cache_key = " ".join(args)
171
172 if cache_key is None:
173 raise Exception('Cache key cannot be None')
194 stmt = query.with_labels().statement
195 compiled = stmt.compile()
196 params = compiled.params
174 197
175 # get cache
176 #cache = query.cache_manager.get_cache_region(namespace, region)
177 cache = get_cache_region(namespace, region)
178 # optional - hash the cache_key too for consistent length
179 # import uuid
180 # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
181
182 return cache, cache_key
183
184
185 def _namespace_from_query(namespace, query):
186 # cache namespace - the token handed in by the
187 # option + class we're querying against
188 namespace = " ".join([namespace] + [str(x) for x in query._entities])
189
190 # memcached wants this
191 namespace = namespace.replace(' ', '_')
192
193 return namespace
194
195
196 def _set_cache_parameters(query, region, namespace, cache_key):
197
198 if hasattr(query, '_cache_parameters'):
199 region, namespace, cache_key = query._cache_parameters
200 raise ValueError("This query is already configured "
201 "for region %r namespace %r" %
202 (region, namespace))
203 query._cache_parameters = region, namespace, cache_key
198 # here we return the key as a long string. our "key mangler"
199 # set up with the region will boil it down to an md5.
200 return " ".join(
201 [safe_str(compiled)] +
202 [safe_str(params[k]) for k in sorted(params)])
204 203
205 204
206 205 class FromCache(MapperOption):
207 206 """Specifies that a Query should load results from a cache."""
208 207
209 208 propagate_to_loaders = False
210 209
211 def __init__(self, region, namespace, cache_key=None):
210 def __init__(self, region="sql_cache_short", cache_key=None):
212 211 """Construct a new FromCache.
213 212
214 213 :param region: the cache region. Should be a
215 region configured in the Beaker CacheManager.
216
217 :param namespace: the cache namespace. Should
218 be a name uniquely describing the target Query's
219 lexical structure.
214 region configured in the dictionary of dogpile
215 regions.
220 216
221 217 :param cache_key: optional. A string cache key
222 218 that will serve as the key to the query. Use this
223 219 if your query has a huge amount of parameters (such
224 220 as when using in_()) which correspond more simply to
225 221 some other identifier.
226 222
227 223 """
228 224 self.region = region
229 self.namespace = namespace
230 225 self.cache_key = cache_key
231 226
232 227 def process_query(self, query):
233 228 """Process a Query during normal loading operation."""
234
235 _set_cache_parameters(query, self.region, self.namespace,
236 self.cache_key)
229 query._cache_region = self
237 230
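When a query carries a large number of bound parameters (e.g. a big in_()), an explicit cache key keeps the stored key short. A sketch, with the Repository model and the 'all_repos' key used purely for illustration:

    query = Session().query(Repository).options(
        FromCache('sql_cache_short', cache_key='all_repos'))
    repositories = query.all()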
238 231
239 232 class RelationshipCache(MapperOption):
240 233 """Specifies that a Query as called within a "lazy load"
241 234 should load results from a cache."""
242 235
243 236 propagate_to_loaders = True
244 237
245 def __init__(self, region, namespace, attribute):
238 def __init__(self, attribute, region="sql_cache_short", cache_key=None):
246 239 """Construct a new RelationshipCache.
247 240
248 :param region: the cache region. Should be a
249 region configured in the Beaker CacheManager.
250
251 :param namespace: the cache namespace. Should
252 be a name uniquely describing the target Query's
253 lexical structure.
254
255 241 :param attribute: A Class.attribute which
256 242 indicates a particular class relationship() whose
257 243 lazy loader should be pulled from the cache.
258 244
245 :param region: name of the cache region.
246
247 :param cache_key: optional. A string cache key
248 that will serve as the key to the query, bypassing
249 the usual means of forming a key from the Query itself.
250
259 251 """
260 252 self.region = region
261 self.namespace = namespace
253 self.cache_key = cache_key
262 254 self._relationship_options = {
263 255 (attribute.property.parent.class_, attribute.property.key): self
264 256 }
265 257
258 def _generate_cache_key(self, path):
259 """Indicate to the lazy-loader strategy that a "baked" query
260 may be used by returning ``None``.
261
262 If this method is omitted, the default implementation of
263 :class:`.MapperOption._generate_cache_key` takes place, which
264 returns ``False`` to disable the "baked" query from being used.
265
266 .. versionadded:: 1.2.7
267
268 """
269 return None
270
266 271 def process_query_conditionally(self, query):
267 272 """Process a Query that is used within a lazy loader.
268 273
269 274 (the process_query_conditionally() method is a SQLAlchemy
270 275 hook invoked only within lazyload.)
271 276
272 277 """
273 278 if query._current_path:
274 mapper, key = query._current_path[-2:]
279 mapper, prop = query._current_path[-2:]
280 key = prop.key
275 281
276 282 for cls in mapper.class_.__mro__:
277 283 if (cls, key) in self._relationship_options:
278 relationship_option = \
279 self._relationship_options[(cls, key)]
280 _set_cache_parameters(
281 query,
282 relationship_option.region,
283 relationship_option.namespace,
284 None)
284 relationship_option = self._relationship_options[(cls, key)]
285 query._cache_region = relationship_option
286 break
285 287
286 288 def and_(self, option):
287 289 """Chain another RelationshipCache option to this one.
288 290
289 291 While many RelationshipCache objects can be specified on a single
290 292 Query separately, chaining them together allows for a more efficient
291 293 lookup during load.
292 294
293 295 """
294 296 self._relationship_options.update(option._relationship_options)
295 297 return self
296 298
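A sketch of caching lazy loads and chaining several options together (the Repository.fork and Repository.user attributes are illustrative):

    cache_fork = RelationshipCache(Repository.fork)
    cache_owner = RelationshipCache(Repository.user)
    query = Session().query(Repository).options(cache_fork.and_(cache_owner))
    repositories = query.all()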
297
298 def _params_from_query(query):
299 """Pull the bind parameter values from a query.
300
301 This takes into account any scalar attribute bindparam set up.
302
303 E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
304 would return [5, 7].
305
306 """
307 v = []
308 def visit_bindparam(bind):
309
310 if bind.key in query._params:
311 value = query._params[bind.key]
312 elif bind.callable:
313 # lazyloader may dig a callable in here, intended
314 # to late-evaluate params after autoflush is called.
315 # convert to a scalar value.
316 value = bind.callable()
317 else:
318 value = bind.value
319
320 v.append(value)
321 if query._criterion is not None:
322 visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam})
323 for f in query._from_obj:
324 visitors.traverse(f, {}, {'bindparam':visit_bindparam})
325 return v
1 NO CONTENT: modified file
@@ -1,50 +1,45 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SQLAlchemy Metadata and Session object
23 23 """
24 24
25 25 from sqlalchemy.ext.declarative import declarative_base
26 26 from sqlalchemy.orm import scoped_session, sessionmaker
27 from beaker import cache
28 27
29 28 from rhodecode.lib import caching_query
30 29
31
32 # Beaker CacheManager. A home base for cache configurations.
33 cache_manager = cache.CacheManager()
30 __all__ = ['Base', 'Session']
34 31
35 __all__ = ['Base', 'Session']
36 #
37 # SQLAlchemy session manager. Updated by model.init_model()
38 #
32 # scoped_session. Apply our custom CachingQuery class to it,
33 # using a callable that will associate the dictionary
34 # of regions with the Query.
35 # to use the cache, add this option to a query:
36 # .options(FromCache("sql_cache_short", "cachekey"))
39 37 Session = scoped_session(
40 38 sessionmaker(
41 query_cls=caching_query.query_callable(cache_manager),
39 query_cls=caching_query.query_callable(),
42 40 expire_on_commit=True,
43 41 )
44 42 )
45 43
46 44 # The declarative Base
47 45 Base = declarative_base()
48
49 #to use cache use this in query
50 #.options(FromCache("sqlalchemy_cache_type", "cachekey"))
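If the Query class ever needs to be bound to an explicit region mapping (for example in tests), query_callable() also accepts the dictionary directly; a hedged sketch:

    from rhodecode.lib.rc_cache import region_meta

    Session = scoped_session(
        sessionmaker(
            query_cls=caching_query.query_callable(
                regions=region_meta.dogpile_cache_regions),
            expire_on_commit=True,
        )
    )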
@@ -1,830 +1,829 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import hashlib
23 23 import logging
24 import time
24 25 from collections import namedtuple
25 26 from functools import wraps
26 27 import bleach
27 28
28 from rhodecode.lib import caches
29 from rhodecode.lib import caches, rc_cache
29 30 from rhodecode.lib.utils2 import (
30 31 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
31 32 from rhodecode.lib.vcs.backends import base
32 33 from rhodecode.model import BaseModel
33 34 from rhodecode.model.db import (
34 35 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
35 36 from rhodecode.model.meta import Session
36 37
37 38
38 39 log = logging.getLogger(__name__)
39 40
40 41
41 42 UiSetting = namedtuple(
42 43 'UiSetting', ['section', 'key', 'value', 'active'])
43 44
44 45 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
45 46
46 47
47 48 class SettingNotFound(Exception):
48 49 def __init__(self, setting_id):
49 50 msg = 'Setting `{}` is not found'.format(setting_id)
50 51 super(SettingNotFound, self).__init__(msg)
51 52
52 53
53 54 class SettingsModel(BaseModel):
54 55 BUILTIN_HOOKS = (
55 56 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
56 57 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
57 58 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
58 59 RhodeCodeUi.HOOK_PUSH_KEY,)
59 60 HOOKS_SECTION = 'hooks'
60 61
61 62 def __init__(self, sa=None, repo=None):
62 63 self.repo = repo
63 64 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
64 65 self.SettingsDbModel = (
65 66 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
66 67 super(SettingsModel, self).__init__(sa)
67 68
68 69 def get_ui_by_key(self, key):
69 70 q = self.UiDbModel.query()
70 71 q = q.filter(self.UiDbModel.ui_key == key)
71 72 q = self._filter_by_repo(RepoRhodeCodeUi, q)
72 73 return q.scalar()
73 74
74 75 def get_ui_by_section(self, section):
75 76 q = self.UiDbModel.query()
76 77 q = q.filter(self.UiDbModel.ui_section == section)
77 78 q = self._filter_by_repo(RepoRhodeCodeUi, q)
78 79 return q.all()
79 80
80 81 def get_ui_by_section_and_key(self, section, key):
81 82 q = self.UiDbModel.query()
82 83 q = q.filter(self.UiDbModel.ui_section == section)
83 84 q = q.filter(self.UiDbModel.ui_key == key)
84 85 q = self._filter_by_repo(RepoRhodeCodeUi, q)
85 86 return q.scalar()
86 87
87 88 def get_ui(self, section=None, key=None):
88 89 q = self.UiDbModel.query()
89 90 q = self._filter_by_repo(RepoRhodeCodeUi, q)
90 91
91 92 if section:
92 93 q = q.filter(self.UiDbModel.ui_section == section)
93 94 if key:
94 95 q = q.filter(self.UiDbModel.ui_key == key)
95 96
96 97 # TODO: mikhail: add caching
97 98 result = [
98 99 UiSetting(
99 100 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
100 101 value=safe_str(r.ui_value), active=r.ui_active
101 102 )
102 103 for r in q.all()
103 104 ]
104 105 return result
105 106
106 107 def get_builtin_hooks(self):
107 108 q = self.UiDbModel.query()
108 109 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
109 110 return self._get_hooks(q)
110 111
111 112 def get_custom_hooks(self):
112 113 q = self.UiDbModel.query()
113 114 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
114 115 return self._get_hooks(q)
115 116
116 117 def create_ui_section_value(self, section, val, key=None, active=True):
117 118 new_ui = self.UiDbModel()
118 119 new_ui.ui_section = section
119 120 new_ui.ui_value = val
120 121 new_ui.ui_active = active
121 122
122 123 if self.repo:
123 124 repo = self._get_repo(self.repo)
124 125 repository_id = repo.repo_id
125 126 new_ui.repository_id = repository_id
126 127
127 128 if not key:
128 129 # keys are unique so they need appended info
129 130 if self.repo:
130 131 key = hashlib.sha1(
131 132 '{}{}{}'.format(section, val, repository_id)).hexdigest()
132 133 else:
133 134 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
134 135
135 136 new_ui.ui_key = key
136 137
137 138 Session().add(new_ui)
138 139 return new_ui
139 140
140 141 def create_or_update_hook(self, key, value):
141 142 ui = (
142 143 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
143 144 self.UiDbModel())
144 145 ui.ui_section = self.HOOKS_SECTION
145 146 ui.ui_active = True
146 147 ui.ui_key = key
147 148 ui.ui_value = value
148 149
149 150 if self.repo:
150 151 repo = self._get_repo(self.repo)
151 152 repository_id = repo.repo_id
152 153 ui.repository_id = repository_id
153 154
154 155 Session().add(ui)
155 156 return ui
156 157
157 158 def delete_ui(self, id_):
158 159 ui = self.UiDbModel.get(id_)
159 160 if not ui:
160 161 raise SettingNotFound(id_)
161 162 Session().delete(ui)
162 163
163 164 def get_setting_by_name(self, name):
164 165 q = self._get_settings_query()
165 166 q = q.filter(self.SettingsDbModel.app_settings_name == name)
166 167 return q.scalar()
167 168
168 169 def create_or_update_setting(
169 170 self, name, val=Optional(''), type_=Optional('unicode')):
170 171 """
171 172 Creates or updates a RhodeCode setting. If an update is triggered it will
172 173 only update parameters that are explicitly set; Optional instances will
173 174 be skipped
174 175
175 176 :param name:
176 177 :param val:
177 178 :param type_:
178 179 :return:
179 180 """
180 181
181 182 res = self.get_setting_by_name(name)
182 183 repo = self._get_repo(self.repo) if self.repo else None
183 184
184 185 if not res:
185 186 val = Optional.extract(val)
186 187 type_ = Optional.extract(type_)
187 188
188 189 args = (
189 190 (repo.repo_id, name, val, type_)
190 191 if repo else (name, val, type_))
191 192 res = self.SettingsDbModel(*args)
192 193
193 194 else:
194 195 if self.repo:
195 196 res.repository_id = repo.repo_id
196 197
197 198 res.app_settings_name = name
198 199 if not isinstance(type_, Optional):
199 200 # update if set
200 201 res.app_settings_type = type_
201 202 if not isinstance(val, Optional):
202 203 # update if set
203 204 res.app_settings_value = val
204 205
205 206 Session().add(res)
206 207 return res
207 208
208 209 def invalidate_settings_cache(self):
209 namespace = 'rhodecode_settings'
210 cache_manager = caches.get_cache_manager('sql_cache_short', namespace)
211 caches.clear_cache_manager(cache_manager)
210 # NOTE:(marcink) we flush the whole sql_cache_short region, because it
211 # caches reads of many different settings. It's a little too much, but those
212 # caches are very short lived anyway and this is the safest way.
213 region = rc_cache.get_or_create_region('sql_cache_short')
214 region.invalidate()
212 215
213 216 def get_all_settings(self, cache=False):
217 region = rc_cache.get_or_create_region('sql_cache_short')
214 218
215 def _compute():
219 @region.cache_on_arguments(should_cache_fn=lambda v: cache)
220 def _get_all_settings(name, key):
216 221 q = self._get_settings_query()
217 222 if not q:
218 223 raise Exception('Could not get application settings !')
219 224
220 225 settings = {
221 226 'rhodecode_' + result.app_settings_name: result.app_settings_value
222 227 for result in q
223 228 }
224 229 return settings
225 230
226 if cache:
227 log.debug('Fetching app settings using cache')
228 repo = self._get_repo(self.repo) if self.repo else None
229 namespace = 'rhodecode_settings'
230 cache_manager = caches.get_cache_manager(
231 'sql_cache_short', namespace)
232 _cache_key = (
233 "get_repo_{}_settings".format(repo.repo_id)
234 if repo else "get_app_settings")
231 repo = self._get_repo(self.repo) if self.repo else None
232 key = "settings_repo.{}".format(repo.repo_id) if repo else "settings_app"
233 start = time.time()
234 result = _get_all_settings('rhodecode_settings', key)
235 total = time.time() - start
236 log.debug('Fetching app settings for key: %s took: %.3fs', key, total)
235 237
236 return cache_manager.get(_cache_key, createfunc=_compute)
237
238 else:
239 return _compute()
238 return result
240 239
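A condensed sketch of the dogpile pattern used above (the namespace, key and computation are illustrative; do_expensive_query is an assumed helper):

    region = rc_cache.get_or_create_region('sql_cache_short')

    @region.cache_on_arguments(should_cache_fn=lambda value: True)
    def _expensive_lookup(namespace, key):
        return do_expensive_query(key)  # assumed helper

    value = _expensive_lookup('rhodecode_settings', 'settings_app')
    region.invalidate()  # wipe everything cached in this region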
241 240 def get_auth_settings(self):
242 241 q = self._get_settings_query()
243 242 q = q.filter(
244 243 self.SettingsDbModel.app_settings_name.startswith('auth_'))
245 244 rows = q.all()
246 245 auth_settings = {
247 246 row.app_settings_name: row.app_settings_value for row in rows}
248 247 return auth_settings
249 248
250 249 def get_auth_plugins(self):
251 250 auth_plugins = self.get_setting_by_name("auth_plugins")
252 251 return auth_plugins.app_settings_value
253 252
254 253 def get_default_repo_settings(self, strip_prefix=False):
255 254 q = self._get_settings_query()
256 255 q = q.filter(
257 256 self.SettingsDbModel.app_settings_name.startswith('default_'))
258 257 rows = q.all()
259 258
260 259 result = {}
261 260 for row in rows:
262 261 key = row.app_settings_name
263 262 if strip_prefix:
264 263 key = remove_prefix(key, prefix='default_')
265 264 result.update({key: row.app_settings_value})
266 265 return result
267 266
268 267 def get_repo(self):
269 268 repo = self._get_repo(self.repo)
270 269 if not repo:
271 270 raise Exception(
272 271 'Repository `{}` cannot be found inside the database'.format(
273 272 self.repo))
274 273 return repo
275 274
276 275 def _filter_by_repo(self, model, query):
277 276 if self.repo:
278 277 repo = self.get_repo()
279 278 query = query.filter(model.repository_id == repo.repo_id)
280 279 return query
281 280
282 281 def _get_hooks(self, query):
283 282 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
284 283 query = self._filter_by_repo(RepoRhodeCodeUi, query)
285 284 return query.all()
286 285
287 286 def _get_settings_query(self):
288 287 q = self.SettingsDbModel.query()
289 288 return self._filter_by_repo(RepoRhodeCodeSetting, q)
290 289
291 290 def list_enabled_social_plugins(self, settings):
292 291 enabled = []
293 292 for plug in SOCIAL_PLUGINS_LIST:
294 293 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
295 294 )):
296 295 enabled.append(plug)
297 296 return enabled
298 297
299 298
300 299 def assert_repo_settings(func):
301 300 @wraps(func)
302 301 def _wrapper(self, *args, **kwargs):
303 302 if not self.repo_settings:
304 303 raise Exception('Repository is not specified')
305 304 return func(self, *args, **kwargs)
306 305 return _wrapper
307 306
308 307
309 308 class IssueTrackerSettingsModel(object):
310 309 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
311 310 SETTINGS_PREFIX = 'issuetracker_'
312 311
313 312 def __init__(self, sa=None, repo=None):
314 313 self.global_settings = SettingsModel(sa=sa)
315 314 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
316 315
317 316 @property
318 317 def inherit_global_settings(self):
319 318 if not self.repo_settings:
320 319 return True
321 320 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
322 321 return setting.app_settings_value if setting else True
323 322
324 323 @inherit_global_settings.setter
325 324 def inherit_global_settings(self, value):
326 325 if self.repo_settings:
327 326 settings = self.repo_settings.create_or_update_setting(
328 327 self.INHERIT_SETTINGS, value, type_='bool')
329 328 Session().add(settings)
330 329
331 330 def _get_keyname(self, key, uid, prefix=''):
332 331 return '{0}{1}{2}_{3}'.format(
333 332 prefix, self.SETTINGS_PREFIX, key, uid)
334 333
335 334 def _make_dict_for_settings(self, qs):
336 335 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
337 336
338 337 issuetracker_entries = {}
339 338 # create keys
340 339 for k, v in qs.items():
341 340 if k.startswith(prefix_match):
342 341 uid = k[len(prefix_match):]
343 342 issuetracker_entries[uid] = None
344 343
345 344 # populate
346 345 for uid in issuetracker_entries:
347 346 issuetracker_entries[uid] = AttributeDict({
348 347 'pat': qs.get(
349 348 self._get_keyname('pat', uid, 'rhodecode_')),
350 349 'url': bleach.clean(
351 350 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
352 351 'pref': bleach.clean(
353 352 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
354 353 'desc': qs.get(
355 354 self._get_keyname('desc', uid, 'rhodecode_')),
356 355 })
357 356 return issuetracker_entries
358 357
359 358 def get_global_settings(self, cache=False):
360 359 """
361 360 Returns list of global issue tracker settings
362 361 """
363 362 defaults = self.global_settings.get_all_settings(cache=cache)
364 363 settings = self._make_dict_for_settings(defaults)
365 364 return settings
366 365
367 366 def get_repo_settings(self, cache=False):
368 367 """
369 368 Returns list of issue tracker settings per repository
370 369 """
371 370 if not self.repo_settings:
372 371 raise Exception('Repository is not specified')
373 372 all_settings = self.repo_settings.get_all_settings(cache=cache)
374 373 settings = self._make_dict_for_settings(all_settings)
375 374 return settings
376 375
377 376 def get_settings(self, cache=False):
378 377 if self.inherit_global_settings:
379 378 return self.get_global_settings(cache=cache)
380 379 else:
381 380 return self.get_repo_settings(cache=cache)
382 381
383 382 def delete_entries(self, uid):
384 383 if self.repo_settings:
385 384 all_patterns = self.get_repo_settings()
386 385 settings_model = self.repo_settings
387 386 else:
388 387 all_patterns = self.get_global_settings()
389 388 settings_model = self.global_settings
390 389 entries = all_patterns.get(uid, [])
391 390
392 391 for del_key in entries:
393 392 setting_name = self._get_keyname(del_key, uid)
394 393 entry = settings_model.get_setting_by_name(setting_name)
395 394 if entry:
396 395 Session().delete(entry)
397 396
398 397 Session().commit()
399 398
400 399 def create_or_update_setting(
401 400 self, name, val=Optional(''), type_=Optional('unicode')):
402 401 if self.repo_settings:
403 402 setting = self.repo_settings.create_or_update_setting(
404 403 name, val, type_)
405 404 else:
406 405 setting = self.global_settings.create_or_update_setting(
407 406 name, val, type_)
408 407 return setting
409 408
410 409
411 410 class VcsSettingsModel(object):
412 411
413 412 INHERIT_SETTINGS = 'inherit_vcs_settings'
414 413 GENERAL_SETTINGS = (
415 414 'use_outdated_comments',
416 415 'pr_merge_enabled',
417 416 'hg_use_rebase_for_merging',
418 417 'hg_close_branch_before_merging',
419 418 'git_use_rebase_for_merging',
420 419 'git_close_branch_before_merging',
421 420 'diff_cache',
422 421 )
423 422
424 423 HOOKS_SETTINGS = (
425 424 ('hooks', 'changegroup.repo_size'),
426 425 ('hooks', 'changegroup.push_logger'),
427 426 ('hooks', 'outgoing.pull_logger'),)
428 427 HG_SETTINGS = (
429 428 ('extensions', 'largefiles'),
430 429 ('phases', 'publish'),
431 430 ('extensions', 'evolve'),)
432 431 GIT_SETTINGS = (
433 432 ('vcs_git_lfs', 'enabled'),)
434 433 GLOBAL_HG_SETTINGS = (
435 434 ('extensions', 'largefiles'),
436 435 ('largefiles', 'usercache'),
437 436 ('phases', 'publish'),
438 437 ('extensions', 'hgsubversion'),
439 438 ('extensions', 'evolve'),)
440 439 GLOBAL_GIT_SETTINGS = (
441 440 ('vcs_git_lfs', 'enabled'),
442 441 ('vcs_git_lfs', 'store_location'))
443 442
444 443 GLOBAL_SVN_SETTINGS = (
445 444 ('vcs_svn_proxy', 'http_requests_enabled'),
446 445 ('vcs_svn_proxy', 'http_server_url'))
447 446
448 447 SVN_BRANCH_SECTION = 'vcs_svn_branch'
449 448 SVN_TAG_SECTION = 'vcs_svn_tag'
450 449 SSL_SETTING = ('web', 'push_ssl')
451 450 PATH_SETTING = ('paths', '/')
452 451
453 452 def __init__(self, sa=None, repo=None):
454 453 self.global_settings = SettingsModel(sa=sa)
455 454 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
456 455 self._ui_settings = (
457 456 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
458 457 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
459 458
460 459 @property
461 460 @assert_repo_settings
462 461 def inherit_global_settings(self):
463 462 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
464 463 return setting.app_settings_value if setting else True
465 464
466 465 @inherit_global_settings.setter
467 466 @assert_repo_settings
468 467 def inherit_global_settings(self, value):
469 468 self.repo_settings.create_or_update_setting(
470 469 self.INHERIT_SETTINGS, value, type_='bool')
471 470
472 471 def get_global_svn_branch_patterns(self):
473 472 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
474 473
475 474 @assert_repo_settings
476 475 def get_repo_svn_branch_patterns(self):
477 476 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
478 477
479 478 def get_global_svn_tag_patterns(self):
480 479 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
481 480
482 481 @assert_repo_settings
483 482 def get_repo_svn_tag_patterns(self):
484 483 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
485 484
486 485 def get_global_settings(self):
487 486 return self._collect_all_settings(global_=True)
488 487
489 488 @assert_repo_settings
490 489 def get_repo_settings(self):
491 490 return self._collect_all_settings(global_=False)
492 491
493 492 @assert_repo_settings
494 493 def create_or_update_repo_settings(
495 494 self, data, inherit_global_settings=False):
496 495 from rhodecode.model.scm import ScmModel
497 496
498 497 self.inherit_global_settings = inherit_global_settings
499 498
500 499 repo = self.repo_settings.get_repo()
501 500 if not inherit_global_settings:
502 501 if repo.repo_type == 'svn':
503 502 self.create_repo_svn_settings(data)
504 503 else:
505 504 self.create_or_update_repo_hook_settings(data)
506 505 self.create_or_update_repo_pr_settings(data)
507 506
508 507 if repo.repo_type == 'hg':
509 508 self.create_or_update_repo_hg_settings(data)
510 509
511 510 if repo.repo_type == 'git':
512 511 self.create_or_update_repo_git_settings(data)
513 512
514 513 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
515 514
516 515 @assert_repo_settings
517 516 def create_or_update_repo_hook_settings(self, data):
518 517 for section, key in self.HOOKS_SETTINGS:
519 518 data_key = self._get_form_ui_key(section, key)
520 519 if data_key not in data:
521 520 raise ValueError(
522 521 'The given data does not contain {} key'.format(data_key))
523 522
524 523 active = data.get(data_key)
525 524 repo_setting = self.repo_settings.get_ui_by_section_and_key(
526 525 section, key)
527 526 if not repo_setting:
528 527 global_setting = self.global_settings.\
529 528 get_ui_by_section_and_key(section, key)
530 529 self.repo_settings.create_ui_section_value(
531 530 section, global_setting.ui_value, key=key, active=active)
532 531 else:
533 532 repo_setting.ui_active = active
534 533 Session().add(repo_setting)
535 534
536 535 def update_global_hook_settings(self, data):
537 536 for section, key in self.HOOKS_SETTINGS:
538 537 data_key = self._get_form_ui_key(section, key)
539 538 if data_key not in data:
540 539 raise ValueError(
541 540 'The given data does not contain {} key'.format(data_key))
542 541 active = data.get(data_key)
543 542 repo_setting = self.global_settings.get_ui_by_section_and_key(
544 543 section, key)
545 544 repo_setting.ui_active = active
546 545 Session().add(repo_setting)
547 546
548 547 @assert_repo_settings
549 548 def create_or_update_repo_pr_settings(self, data):
550 549 return self._create_or_update_general_settings(
551 550 self.repo_settings, data)
552 551
553 552 def create_or_update_global_pr_settings(self, data):
554 553 return self._create_or_update_general_settings(
555 554 self.global_settings, data)
556 555
557 556 @assert_repo_settings
558 557 def create_repo_svn_settings(self, data):
559 558 return self._create_svn_settings(self.repo_settings, data)
560 559
561 560 @assert_repo_settings
562 561 def create_or_update_repo_hg_settings(self, data):
563 562 largefiles, phases, evolve = \
564 563 self.HG_SETTINGS
565 564 largefiles_key, phases_key, evolve_key = \
566 565 self._get_settings_keys(self.HG_SETTINGS, data)
567 566
568 567 self._create_or_update_ui(
569 568 self.repo_settings, *largefiles, value='',
570 569 active=data[largefiles_key])
571 570 self._create_or_update_ui(
572 571 self.repo_settings, *evolve, value='',
573 572 active=data[evolve_key])
574 573 self._create_or_update_ui(
575 574 self.repo_settings, *phases, value=safe_str(data[phases_key]))
576 575
577 576
578 577 def create_or_update_global_hg_settings(self, data):
579 578 largefiles, largefiles_store, phases, hgsubversion, evolve \
580 579 = self.GLOBAL_HG_SETTINGS
581 580 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
582 581 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS, data)
583 582
584 583 self._create_or_update_ui(
585 584 self.global_settings, *largefiles, value='',
586 585 active=data[largefiles_key])
587 586 self._create_or_update_ui(
588 587 self.global_settings, *largefiles_store,
589 588 value=data[largefiles_store_key])
590 589 self._create_or_update_ui(
591 590 self.global_settings, *phases, value=safe_str(data[phases_key]))
592 591 self._create_or_update_ui(
593 592 self.global_settings, *hgsubversion, active=data[subversion_key])
594 593 self._create_or_update_ui(
595 594 self.global_settings, *evolve, value='',
596 595 active=data[evolve_key])
597 596
598 597 def create_or_update_repo_git_settings(self, data):
599 598 # NOTE(marcink): # comma make unpack work properly
600 599 lfs_enabled, \
601 600 = self.GIT_SETTINGS
602 601
603 602 lfs_enabled_key, \
604 603 = self._get_settings_keys(self.GIT_SETTINGS, data)
605 604
606 605 self._create_or_update_ui(
607 606 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
608 607 active=data[lfs_enabled_key])
609 608
610 609 def create_or_update_global_git_settings(self, data):
611 610 lfs_enabled, lfs_store_location \
612 611 = self.GLOBAL_GIT_SETTINGS
613 612 lfs_enabled_key, lfs_store_location_key \
614 613 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
615 614
616 615 self._create_or_update_ui(
617 616 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
618 617 active=data[lfs_enabled_key])
619 618 self._create_or_update_ui(
620 619 self.global_settings, *lfs_store_location,
621 620 value=data[lfs_store_location_key])
622 621
623 622 def create_or_update_global_svn_settings(self, data):
624 623 # branch/tags patterns
625 624 self._create_svn_settings(self.global_settings, data)
626 625
627 626 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
628 627 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
629 628 self.GLOBAL_SVN_SETTINGS, data)
630 629
631 630 self._create_or_update_ui(
632 631 self.global_settings, *http_requests_enabled,
633 632 value=safe_str(data[http_requests_enabled_key]))
634 633 self._create_or_update_ui(
635 634 self.global_settings, *http_server_url,
636 635 value=data[http_server_url_key])
637 636
638 637 def update_global_ssl_setting(self, value):
639 638 self._create_or_update_ui(
640 639 self.global_settings, *self.SSL_SETTING, value=value)
641 640
642 641 def update_global_path_setting(self, value):
643 642 self._create_or_update_ui(
644 643 self.global_settings, *self.PATH_SETTING, value=value)
645 644
646 645 @assert_repo_settings
647 646 def delete_repo_svn_pattern(self, id_):
648 647 ui = self.repo_settings.UiDbModel.get(id_)
649 648 if ui and ui.repository.repo_name == self.repo_settings.repo:
650 649 # only delete if it's the same repo as initialized settings
651 650 self.repo_settings.delete_ui(id_)
652 651 else:
653 652 # raise error as if we wouldn't find this option
654 653 self.repo_settings.delete_ui(-1)
655 654
656 655 def delete_global_svn_pattern(self, id_):
657 656 self.global_settings.delete_ui(id_)
658 657
659 658 @assert_repo_settings
660 659 def get_repo_ui_settings(self, section=None, key=None):
661 660 global_uis = self.global_settings.get_ui(section, key)
662 661 repo_uis = self.repo_settings.get_ui(section, key)
663 662 filtered_repo_uis = self._filter_ui_settings(repo_uis)
664 663 filtered_repo_uis_keys = [
665 664 (s.section, s.key) for s in filtered_repo_uis]
666 665
667 666 def _is_global_ui_filtered(ui):
668 667 return (
669 668 (ui.section, ui.key) in filtered_repo_uis_keys
670 669 or ui.section in self._svn_sections)
671 670
672 671 filtered_global_uis = [
673 672 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
674 673
675 674 return filtered_global_uis + filtered_repo_uis
676 675
677 676 def get_global_ui_settings(self, section=None, key=None):
678 677 return self.global_settings.get_ui(section, key)
679 678
680 679 def get_ui_settings_as_config_obj(self, section=None, key=None):
681 680 config = base.Config()
682 681
683 682 ui_settings = self.get_ui_settings(section=section, key=key)
684 683
685 684 for entry in ui_settings:
686 685 config.set(entry.section, entry.key, entry.value)
687 686
688 687 return config
689 688
690 689 def get_ui_settings(self, section=None, key=None):
691 690 if not self.repo_settings or self.inherit_global_settings:
692 691 return self.get_global_ui_settings(section, key)
693 692 else:
694 693 return self.get_repo_ui_settings(section, key)
695 694
696 695 def get_svn_patterns(self, section=None):
697 696 if not self.repo_settings:
698 697 return self.get_global_ui_settings(section)
699 698 else:
700 699 return self.get_repo_ui_settings(section)
701 700
702 701 @assert_repo_settings
703 702 def get_repo_general_settings(self):
704 703 global_settings = self.global_settings.get_all_settings()
705 704 repo_settings = self.repo_settings.get_all_settings()
706 705 filtered_repo_settings = self._filter_general_settings(repo_settings)
707 706 global_settings.update(filtered_repo_settings)
708 707 return global_settings
709 708
710 709 def get_global_general_settings(self):
711 710 return self.global_settings.get_all_settings()
712 711
713 712 def get_general_settings(self):
714 713 if not self.repo_settings or self.inherit_global_settings:
715 714 return self.get_global_general_settings()
716 715 else:
717 716 return self.get_repo_general_settings()
718 717
719 718 def get_repos_location(self):
720 719 return self.global_settings.get_ui_by_key('/').ui_value
721 720
722 721 def _filter_ui_settings(self, settings):
723 722 filtered_settings = [
724 723 s for s in settings if self._should_keep_setting(s)]
725 724 return filtered_settings
726 725
727 726 def _should_keep_setting(self, setting):
728 727 keep = (
729 728 (setting.section, setting.key) in self._ui_settings or
730 729 setting.section in self._svn_sections)
731 730 return keep
732 731
733 732 def _filter_general_settings(self, settings):
734 733 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
735 734 return {
736 735 k: settings[k]
737 736 for k in settings if k in keys}
738 737
739 738 def _collect_all_settings(self, global_=False):
740 739 settings = self.global_settings if global_ else self.repo_settings
741 740 result = {}
742 741
743 742 for section, key in self._ui_settings:
744 743 ui = settings.get_ui_by_section_and_key(section, key)
745 744 result_key = self._get_form_ui_key(section, key)
746 745
747 746 if ui:
748 747 if section in ('hooks', 'extensions'):
749 748 result[result_key] = ui.ui_active
750 749 elif result_key in ['vcs_git_lfs_enabled']:
751 750 result[result_key] = ui.ui_active
752 751 else:
753 752 result[result_key] = ui.ui_value
754 753
755 754 for name in self.GENERAL_SETTINGS:
756 755 setting = settings.get_setting_by_name(name)
757 756 if setting:
758 757 result_key = 'rhodecode_{}'.format(name)
759 758 result[result_key] = setting.app_settings_value
760 759
761 760 return result
762 761
763 762 def _get_form_ui_key(self, section, key):
764 763 return '{section}_{key}'.format(
765 764 section=section, key=key.replace('.', '_'))
766 765
767 766 def _create_or_update_ui(
768 767 self, settings, section, key, value=None, active=None):
769 768 ui = settings.get_ui_by_section_and_key(section, key)
770 769 if not ui:
771 770 active = True if active is None else active
772 771 settings.create_ui_section_value(
773 772 section, value, key=key, active=active)
774 773 else:
775 774 if active is not None:
776 775 ui.ui_active = active
777 776 if value is not None:
778 777 ui.ui_value = value
779 778 Session().add(ui)
780 779
781 780 def _create_svn_settings(self, settings, data):
782 781 svn_settings = {
783 782 'new_svn_branch': self.SVN_BRANCH_SECTION,
784 783 'new_svn_tag': self.SVN_TAG_SECTION
785 784 }
786 785 for key in svn_settings:
787 786 if data.get(key):
788 787 settings.create_ui_section_value(svn_settings[key], data[key])
789 788
790 789 def _create_or_update_general_settings(self, settings, data):
791 790 for name in self.GENERAL_SETTINGS:
792 791 data_key = 'rhodecode_{}'.format(name)
793 792 if data_key not in data:
794 793 raise ValueError(
795 794 'The given data does not contain {} key'.format(data_key))
796 795 setting = settings.create_or_update_setting(
797 796 name, data[data_key], 'bool')
798 797 Session().add(setting)
799 798
800 799 def _get_settings_keys(self, settings, data):
801 800 data_keys = [self._get_form_ui_key(*s) for s in settings]
802 801 for data_key in data_keys:
803 802 if data_key not in data:
804 803 raise ValueError(
805 804 'The given data does not contain {} key'.format(data_key))
806 805 return data_keys
807 806
808 807 def create_largeobjects_dirs_if_needed(self, repo_store_path):
809 808 """
810 809 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
811 810 does a repository scan if enabled in the settings.
812 811 """
813 812
814 813 from rhodecode.lib.vcs.backends.hg import largefiles_store
815 814 from rhodecode.lib.vcs.backends.git import lfs_store
816 815
817 816 paths = [
818 817 largefiles_store(repo_store_path),
819 818 lfs_store(repo_store_path)]
820 819
821 820 for path in paths:
822 821 if os.path.isdir(path):
823 822 continue
824 823 if os.path.isfile(path):
825 824 continue
826 825 # not a file nor dir, we try to create it
827 826 try:
828 827 os.makedirs(path)
829 828 except Exception:
830 829 log.warning('Failed to create largefiles dir:%s', path)
@@ -1,243 +1,245 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import time
23 23 import logging
24 24 import datetime
25 25 import hashlib
26 26 import tempfile
27 27 from os.path import join as jn
28 28
29 29 from tempfile import _RandomNameSequence
30 30
31 31 import pytest
32 32
33 33 from rhodecode.model.db import User
34 34 from rhodecode.lib import auth
35 35 from rhodecode.lib import helpers as h
36 36 from rhodecode.lib.helpers import flash, link_to
37 37 from rhodecode.lib.utils2 import safe_str
38 38
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42 __all__ = [
43 43 'get_new_dir', 'TestController',
44 'link_to', 'clear_all_caches',
44 'link_to', 'clear_cache_regions',
45 45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
46 46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
47 47 'NEW_HG_REPO', 'NEW_GIT_REPO',
48 48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
49 49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
50 50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
51 51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
52 52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
53 53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
54 54 ]
55 55
56 56
57 57 # SOME GLOBALS FOR TESTS
58 58 TEST_DIR = tempfile.gettempdir()
59 59
60 60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_%s' % _RandomNameSequence().next())
61 61 TEST_USER_ADMIN_LOGIN = 'test_admin'
62 62 TEST_USER_ADMIN_PASS = 'test12'
63 63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
64 64
65 65 TEST_USER_REGULAR_LOGIN = 'test_regular'
66 66 TEST_USER_REGULAR_PASS = 'test12'
67 67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
68 68
69 69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
70 70 TEST_USER_REGULAR2_PASS = 'test12'
71 71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
72 72
73 73 HG_REPO = 'vcs_test_hg'
74 74 GIT_REPO = 'vcs_test_git'
75 75 SVN_REPO = 'vcs_test_svn'
76 76
77 77 NEW_HG_REPO = 'vcs_test_hg_new'
78 78 NEW_GIT_REPO = 'vcs_test_git_new'
79 79
80 80 HG_FORK = 'vcs_test_hg_fork'
81 81 GIT_FORK = 'vcs_test_git_fork'
82 82
83 83 ## VCS
84 84 SCM_TESTS = ['hg', 'git']
85 85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
86 86
87 87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
88 88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
89 89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
90 90
91 91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
92 92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
93 93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
94 94
95 95 TEST_REPO_PREFIX = 'vcs-test'
96 96
97 97
98 def clear_all_caches():
99 from beaker.cache import cache_managers
100 for _cache in cache_managers.values():
101 _cache.clear()
98 def clear_cache_regions(regions=None):
99 # dogpile
100 from rhodecode.lib.rc_cache import region_meta
101 for region_name, region in region_meta.dogpile_cache_regions.items():
102 if not regions or region_name in regions:
103 region.invalidate()
102 104
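A brief usage sketch for tests (region names follow the .ini configuration):

    # wipe only the short-lived SQL cache between assertions
    clear_cache_regions(['sql_cache_short'])
    # or clear every configured dogpile region
    clear_cache_regions()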
103 105
104 106 def get_new_dir(title):
105 107 """
106 108 Always returns a new directory path.
107 109 """
108 110 from rhodecode.tests.vcs.utils import get_normalized_path
109 111 name_parts = [TEST_REPO_PREFIX]
110 112 if title:
111 113 name_parts.append(title)
112 114 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
113 115 name_parts.append(hex_str)
114 116 name = '-'.join(name_parts)
115 117 path = os.path.join(TEST_DIR, name)
116 118 return get_normalized_path(path)
117 119
118 120
119 121 def repo_id_generator(name):
120 122 numeric_hash = 0
121 123 for char in name:
122 124 numeric_hash += (ord(char))
123 125 return numeric_hash
124 126
125 127
126 128 @pytest.mark.usefixtures('app', 'index_location')
127 129 class TestController(object):
128 130
129 131 maxDiff = None
130 132
131 133 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
132 134 password=TEST_USER_ADMIN_PASS):
133 135 self._logged_username = username
134 136 self._session = login_user_session(self.app, username, password)
135 137 self.csrf_token = auth.get_csrf_token(self._session)
136 138
137 139 return self._session['rhodecode_user']
138 140
139 141 def logout_user(self):
140 142 logout_user_session(self.app, auth.get_csrf_token(self._session))
141 143 self.csrf_token = None
142 144 self._logged_username = None
143 145 self._session = None
144 146
145 147 def _get_logged_user(self):
146 148 return User.get_by_username(self._logged_username)
147 149
148 150
149 151 def login_user_session(
150 152 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
151 153
152 154 response = app.post(
153 155 h.route_path('login'),
154 156 {'username': username, 'password': password})
155 157 if 'invalid user name' in response.body:
156 158 pytest.fail('could not login using %s %s' % (username, password))
157 159
158 160 assert response.status == '302 Found'
159 161 response = response.follow()
160 162 assert response.status == '200 OK'
161 163
162 164 session = response.get_session_from_response()
163 165 assert 'rhodecode_user' in session
164 166 rc_user = session['rhodecode_user']
165 167 assert rc_user.get('username') == username
166 168 assert rc_user.get('is_authenticated')
167 169
168 170 return session
169 171
170 172
171 173 def logout_user_session(app, csrf_token):
172 174 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
173 175
174 176
175 177 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
176 178 password=TEST_USER_ADMIN_PASS):
177 179 return login_user_session(app, username, password)['rhodecode_user']
178 180
179 181
180 182 def assert_session_flash(response, msg=None, category=None, no_=None):
181 183 """
182 184 Assert on a flash message in the current session.
183 185
184 186 :param response: Response from a given call; it will contain flash
185 187 messages or a session bound with them.
186 188 :param msg: The expected message. Will be evaluated if a
187 189 :class:`LazyString` is passed in.
188 190 :param category: Optional. If passed, the message category will be
189 191 checked as well.
190 192 :param no_: Optional. If passed, the message will be checked to NOT
191 193 be in the flash session
192 194 """
193 195 if msg is None and no_ is None:
194 196 raise ValueError("Parameter msg or no_ is required.")
195 197
196 198 if msg and no_:
197 199 raise ValueError("Please specify either msg or no_, but not both")
198 200
199 201 session = response.get_session_from_response()
200 202 messages = flash.pop_messages(session=session)
201 203 msg = _eval_if_lazy(msg)
202 204
203 205 if no_:
204 206 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
205 207 else:
206 208 error_msg = 'unable to find message `%s` in empty flash list' % msg
207 209 assert messages, error_msg
208 210 message = messages[0]
209 211
210 212 message_text = _eval_if_lazy(message.message) or ''
211 213
212 214 if no_:
213 215 if no_ in message_text:
214 216 msg = u'msg `%s` found in session flash.' % (no_,)
215 217 pytest.fail(safe_str(msg))
216 218 else:
217 219 if msg not in message_text:
218 220 fail_msg = u'msg `%s` not found in session ' \
219 221 u'flash: got `%s` (type:%s) instead' % (
220 222 msg, message_text, type(message_text))
221 223
222 224 pytest.fail(safe_str(fail_msg))
223 225 if category:
224 226 assert category == message.category
225 227
226 228
227 229 def _eval_if_lazy(value):
228 230 return value.eval() if hasattr(value, 'eval') else value
229 231
230 232
231 233 def no_newline_id_generator(test_name):
232 234 """
233 235 Generates a test name without space or newline characters. Used for
234 236 nicer output of test progress
235 237 """
236 238 org_name = test_name
237 239 test_name = test_name\
238 240 .replace('\n', '_N') \
239 241 .replace('\r', '_N') \
240 242 .replace('\t', '_T') \
241 243 .replace(' ', '_S')
242 244
243 245 return test_name or 'test-with-empty-name'
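For context, a minimal sketch of how the new dogpile-based helper above could be used from a test. The helper itself (clear_cache_regions) and the baseapp fixture appear in the sources of this change; the module path and the test body are assumptions made purely for illustration:

    # module path assumed; the helper is defined in the test helpers diffed above
    from rhodecode.tests import clear_cache_regions

    def test_settings_change_is_visible_after_invalidation(baseapp):
        # ... hypothetical test body that changes a cached DB setting ...
        # invalidate only the dogpile region that backs SQL query caching
        clear_cache_regions(['sql_cache_short'])
        # calling it with no argument invalidates every registered region
        clear_cache_regions()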
@@ -1,446 +1,445 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import multiprocessing
23 23 import os
24 24
25 25 import mock
26 26 import py
27 27 import pytest
28 28
29 29 from rhodecode.lib import caching_query
30 30 from rhodecode.lib import utils
31 31 from rhodecode.lib.utils2 import md5
32 32 from rhodecode.model import settings
33 33 from rhodecode.model import db
34 34 from rhodecode.model import meta
35 35 from rhodecode.model.repo import RepoModel
36 36 from rhodecode.model.repo_group import RepoGroupModel
37 37 from rhodecode.model.scm import ScmModel
38 38 from rhodecode.model.settings import UiSetting, SettingsModel
39 39 from rhodecode.tests.fixture import Fixture
40 40
41 41
42 42 fixture = Fixture()
43 43
44 44
45 45 def extract_hooks(config):
46 46 """Return a dictionary with the hook entries of the given config."""
47 47 hooks = {}
48 48 config_items = config.serialize()
49 49 for section, name, value in config_items:
50 50 if section != 'hooks':
51 51 continue
52 52 hooks[name] = value
53 53
54 54 return hooks
55 55
56 56
57 57 def disable_hooks(request, hooks):
58 58 """Disables the given hooks from the UI settings."""
59 59 session = meta.Session()
60 60
61 61 model = SettingsModel()
62 62 for hook_key in hooks:
63 63 sett = model.get_ui_by_key(hook_key)
64 64 sett.ui_active = False
65 65 session.add(sett)
66 66
67 67 # Invalidate cache
68 68 ui_settings = session.query(db.RhodeCodeUi).options(
69 69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
70 70 ui_settings.invalidate()
71 71
72 72 ui_settings = session.query(db.RhodeCodeUi).options(
73 caching_query.FromCache(
74 'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
73 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
75 74 ui_settings.invalidate()
76 75
77 76 @request.addfinalizer
78 77 def rollback():
79 78 session.rollback()
80 79
81 80
82 81 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
83 82 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
84 83 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
85 84 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
86 85 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
87 86 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
88 87 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
89 88
90 89 HG_HOOKS = frozenset(
91 90 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
92 91 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
93 92
94 93
95 94 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
96 95 ([], HG_HOOKS),
97 96 (HG_HOOKS, []),
98 97
99 98 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
100 99
101 100 # When a pull/push hook is disabled, its pre-pull/push counterpart should
102 101 # be disabled too.
103 102 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
104 103 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
105 104 HOOK_PUSH_KEY]),
106 105 ])
107 106 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
108 107 expected_hooks):
109 108 disable_hooks(request, disabled_hooks)
110 109
111 110 config = utils.make_db_config()
112 111 hooks = extract_hooks(config)
113 112
114 113 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
115 114
116 115
117 116 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
118 117 ([], ['pull', 'push']),
119 118 ([HOOK_PUSH], ['pull']),
120 119 ([HOOK_PULL], ['push']),
121 120 ([HOOK_PULL, HOOK_PUSH], []),
122 121 ])
123 122 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
124 123 hook_keys = (HOOK_PUSH, HOOK_PULL)
125 124 ui_settings = [
126 125 ('hooks', key, 'some value', key not in disabled_hooks)
127 126 for key in hook_keys]
128 127
129 128 result = utils.get_enabled_hook_classes(ui_settings)
130 129 assert sorted(result) == expected_hooks
131 130
132 131
133 132 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
134 133 _stub_git_repo(tmpdir.ensure('repo', dir=True))
135 134 repos = list(utils.get_filesystem_repos(str(tmpdir)))
136 135 assert repos == [('repo', ('git', tmpdir.join('repo')))]
137 136
138 137
139 138 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
140 139 tmpdir.ensure('not-a-repo', dir=True)
141 140 repos = list(utils.get_filesystem_repos(str(tmpdir)))
142 141 assert repos == []
143 142
144 143
145 144 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
146 145 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
147 146 repos = list(utils.get_filesystem_repos(str(tmpdir)))
148 147 assert repos == []
149 148
150 149
151 150 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
152 151 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
153 152 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
154 153 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
155 154
156 155
157 156 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
158 157 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
159 158 repos = list(utils.get_filesystem_repos(str(tmpdir)))
160 159 assert repos == []
161 160
162 161
163 162 def test_get_filesystem_repos_skips_files(tmpdir):
164 163 tmpdir.ensure('test-file')
165 164 repos = list(utils.get_filesystem_repos(str(tmpdir)))
166 165 assert repos == []
167 166
168 167
169 168 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
170 169 removed_repo_name = 'rm__00000000_000000_000000__.stub'
171 170 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
172 171 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
173 172 repos = list(utils.get_filesystem_repos(str(tmpdir)))
174 173 assert repos == []
175 174
176 175
177 176 def _stub_git_repo(repo_path):
178 177 """
179 178 Make `repo_path` look like a Git repository.
180 179 """
181 180 repo_path.ensure('.git', dir=True)
182 181
183 182
184 183 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
185 184 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
186 185 tmpdir.ensure('test-file')
187 186 dirpaths = utils._get_dirpaths(str_class(tmpdir))
188 187 assert dirpaths == ['test-file']
189 188
190 189
191 190 def test_get_dirpaths_returns_all_paths_bytes(
192 191 tmpdir, platform_encodes_filenames):
193 192 if platform_encodes_filenames:
194 193 pytest.skip("This platform seems to encode filenames.")
195 194 tmpdir.ensure('repo-a-umlaut-\xe4')
196 195 dirpaths = utils._get_dirpaths(str(tmpdir))
197 196 assert dirpaths == ['repo-a-umlaut-\xe4']
198 197
199 198
200 199 def test_get_dirpaths_skips_paths_it_cannot_decode(
201 200 tmpdir, platform_encodes_filenames):
202 201 if platform_encodes_filenames:
203 202 pytest.skip("This platform seems to encode filenames.")
204 203 path_with_latin1 = 'repo-a-umlaut-\xe4'
205 204 tmpdir.ensure(path_with_latin1)
206 205 dirpaths = utils._get_dirpaths(unicode(tmpdir))
207 206 assert dirpaths == []
208 207
209 208
210 209 @pytest.fixture(scope='session')
211 210 def platform_encodes_filenames():
212 211 """
213 212 Boolean indicator if the current platform changes filename encodings.
214 213 """
215 214 path_with_latin1 = 'repo-a-umlaut-\xe4'
216 215 tmpdir = py.path.local.mkdtemp()
217 216 tmpdir.ensure(path_with_latin1)
218 217 read_path = tmpdir.listdir()[0].basename
219 218 tmpdir.remove()
220 219 return path_with_latin1 != read_path
221 220
222 221
223 222
224 223
225 224 def test_repo2db_mapper_groups(repo_groups):
226 225 session = meta.Session()
227 226 zombie_group, parent_group, child_group = repo_groups
228 227 zombie_path = os.path.join(
229 228 RepoGroupModel().repos_path, zombie_group.full_path)
230 229 os.rmdir(zombie_path)
231 230
232 231 # Avoid removing test repos when calling repo2db_mapper
233 232 repo_list = {
234 233 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
235 234 }
236 235 utils.repo2db_mapper(repo_list, remove_obsolete=True)
237 236
238 237 groups_in_db = session.query(db.RepoGroup).all()
239 238 assert child_group in groups_in_db
240 239 assert parent_group in groups_in_db
241 240 assert zombie_path not in groups_in_db
242 241
243 242
244 243 def test_repo2db_mapper_enables_largefiles(backend):
245 244 repo = backend.create_repo()
246 245 repo_list = {repo.repo_name: 'test'}
247 246 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
248 247 utils.repo2db_mapper(repo_list, remove_obsolete=False)
249 248 _, kwargs = scm_mock.call_args
250 249 assert kwargs['config'].get('extensions', 'largefiles') == ''
251 250
252 251
253 252 @pytest.mark.backends("git", "svn")
254 253 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
255 254 repo = backend.create_repo()
256 255 repo_list = {repo.repo_name: 'test'}
257 256 utils.repo2db_mapper(repo_list, remove_obsolete=False)
258 257
259 258
260 259 @pytest.mark.backends("git", "svn")
261 260 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
262 261 repo = backend.create_repo()
263 262 RepoModel().delete(repo, fs_remove=False)
264 263 meta.Session().commit()
265 264 repo_list = {repo.repo_name: repo.scm_instance()}
266 265 utils.repo2db_mapper(repo_list, remove_obsolete=False)
267 266
268 267
269 268 class TestPasswordChanged(object):
270 269 def setup(self):
271 270 self.session = {
272 271 'rhodecode_user': {
273 272 'password': '0cc175b9c0f1b6a831c399e269772661'
274 273 }
275 274 }
276 275 self.auth_user = mock.Mock()
277 276 self.auth_user.username = 'test'
278 277 self.auth_user.password = 'abc123'
279 278
280 279 def test_returns_false_for_default_user(self):
281 280 self.auth_user.username = db.User.DEFAULT_USER
282 281 result = utils.password_changed(self.auth_user, self.session)
283 282 assert result is False
284 283
285 284 def test_returns_false_if_password_was_not_changed(self):
286 285 self.session['rhodecode_user']['password'] = md5(
287 286 self.auth_user.password)
288 287 result = utils.password_changed(self.auth_user, self.session)
289 288 assert result is False
290 289
291 290 def test_returns_true_if_password_was_changed(self):
292 291 result = utils.password_changed(self.auth_user, self.session)
293 292 assert result is True
294 293
295 294 def test_returns_true_if_auth_user_password_is_empty(self):
296 295 self.auth_user.password = None
297 296 result = utils.password_changed(self.auth_user, self.session)
298 297 assert result is True
299 298
300 299 def test_returns_true_if_session_password_is_empty(self):
301 300 self.session['rhodecode_user'].pop('password')
302 301 result = utils.password_changed(self.auth_user, self.session)
303 302 assert result is True
304 303
305 304
306 305 class TestReadOpensourceLicenses(object):
307 306 def test_success(self):
308 307 utils._license_cache = None
309 308 json_data = '''
310 309 {
311 310 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
312 311 "python2.7-Markdown-2.6.2": {
313 312 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
314 313 }
315 314 }
316 315 '''
317 316 resource_string_patch = mock.patch.object(
318 317 utils.pkg_resources, 'resource_string', return_value=json_data)
319 318 with resource_string_patch:
320 319 result = utils.read_opensource_licenses()
321 320 assert result == json.loads(json_data)
322 321
323 322 def test_caching(self):
324 323 utils._license_cache = {
325 324 "python2.7-pytest-2.7.1": {
326 325 "UNKNOWN": None
327 326 },
328 327 "python2.7-Markdown-2.6.2": {
329 328 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
330 329 }
331 330 }
332 331 resource_patch = mock.patch.object(
333 332 utils.pkg_resources, 'resource_string', side_effect=Exception)
334 333 json_patch = mock.patch.object(
335 334 utils.json, 'loads', side_effect=Exception)
336 335
337 336 with resource_patch as resource_mock, json_patch as json_mock:
338 337 result = utils.read_opensource_licenses()
339 338
340 339 assert resource_mock.call_count == 0
341 340 assert json_mock.call_count == 0
342 341 assert result == utils._license_cache
343 342
344 343 def test_licenses_file_contains_no_unknown_licenses(self):
345 344 utils._license_cache = None
346 345 result = utils.read_opensource_licenses()
347 346 license_names = []
348 347 for licenses in result.values():
349 348 license_names.extend(licenses.keys())
350 349 assert 'UNKNOWN' not in license_names
351 350
352 351
353 352 class TestMakeDbConfig(object):
354 353 def test_data_from_config_data_from_db_returned(self):
355 354 test_data = [
356 355 ('section1', 'option1', 'value1'),
357 356 ('section2', 'option2', 'value2'),
358 357 ('section3', 'option3', 'value3'),
359 358 ]
360 359 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
361 360 config_mock.return_value = test_data
362 361 kwargs = {'clear_session': False, 'repo': 'test_repo'}
363 362 result = utils.make_db_config(**kwargs)
364 363 config_mock.assert_called_once_with(**kwargs)
365 364 for section, option, expected_value in test_data:
366 365 value = result.get(section, option)
367 366 assert value == expected_value
368 367
369 368
370 369 class TestConfigDataFromDb(object):
371 370 def test_config_data_from_db_returns_active_settings(self):
372 371 test_data = [
373 372 UiSetting('section1', 'option1', 'value1', True),
374 373 UiSetting('section2', 'option2', 'value2', True),
375 374 UiSetting('section3', 'option3', 'value3', False),
376 375 ]
377 376 repo_name = 'test_repo'
378 377
379 378 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
380 379 hooks_patch = mock.patch.object(
381 380 utils, 'get_enabled_hook_classes',
382 381 return_value=['pull', 'push', 'repo_size'])
383 382 with model_patch as model_mock, hooks_patch:
384 383 instance_mock = mock.Mock()
385 384 model_mock.return_value = instance_mock
386 385 instance_mock.get_ui_settings.return_value = test_data
387 386 result = utils.config_data_from_db(
388 387 clear_session=False, repo=repo_name)
389 388
390 389 self._assert_repo_name_passed(model_mock, repo_name)
391 390
392 391 expected_result = [
393 392 ('section1', 'option1', 'value1'),
394 393 ('section2', 'option2', 'value2'),
395 394 ]
396 395 assert result == expected_result
397 396
398 397 def _assert_repo_name_passed(self, model_mock, repo_name):
399 398 assert model_mock.call_count == 1
400 399 call_args, call_kwargs = model_mock.call_args
401 400 assert call_kwargs['repo'] == repo_name
402 401
403 402
404 403 class TestIsDirWritable(object):
405 404 def test_returns_false_when_not_writable(self):
406 405 with mock.patch('__builtin__.open', side_effect=OSError):
407 406 assert not utils._is_dir_writable('/stub-path')
408 407
409 408 def test_returns_true_when_writable(self, tmpdir):
410 409 assert utils._is_dir_writable(str(tmpdir))
411 410
412 411 def test_is_safe_against_race_conditions(self, tmpdir):
413 412 workers = multiprocessing.Pool()
414 413 directories = [str(tmpdir)] * 10
415 414 workers.map(utils._is_dir_writable, directories)
416 415
417 416
418 417 class TestGetEnabledHooks(object):
419 418 def test_only_active_hooks_are_enabled(self):
420 419 ui_settings = [
421 420 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
422 421 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
423 422 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
424 423 ]
425 424 result = utils.get_enabled_hook_classes(ui_settings)
426 425 assert result == ['push', 'repo_size']
427 426
428 427 def test_all_hooks_are_enabled(self):
429 428 ui_settings = [
430 429 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
431 430 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
432 431 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
433 432 ]
434 433 result = utils.get_enabled_hook_classes(ui_settings)
435 434 assert result == ['push', 'repo_size', 'pull']
436 435
437 436 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
438 437 ui_settings = []
439 438 result = utils.get_enabled_hook_classes(ui_settings)
440 439 assert result == []
441 440
442 441
443 442 def test_obfuscate_url_pw():
444 443 from rhodecode.lib.utils2 import obfuscate_url_pw
445 444 engine = u'/home/repos/malmΓΆ'
446 445 assert obfuscate_url_pw(engine) No newline at end of file
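The disable_hooks() helper above depends on sql_cache_short now being a dogpile region rather than a Beaker one. Below is a minimal sketch of that invalidation path, isolated from the test; the query, key name, and invalidate() calls are taken from disable_hooks() and the helpers in the first file, while the direct dictionary lookup assumes the region has been registered from the rc_cache.sql_cache_short.* settings further down:

    from rhodecode.lib import caching_query
    from rhodecode.lib.rc_cache import region_meta
    from rhodecode.model import db, meta

    session = meta.Session()

    # query bound to the 'sql_cache_short' dogpile region under a fixed cache key
    ui_settings = session.query(db.RhodeCodeUi).options(
        caching_query.FromCache('sql_cache_short', 'get_hook_settings'))

    # drop only this cached query result ...
    ui_settings.invalidate()

    # ... or invalidate the whole region, as clear_cache_regions() does
    region_meta.dogpile_cache_regions['sql_cache_short'].invalidate()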
@@ -1,671 +1,674 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send an error email from
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 0.0.0.0
47 47 port = 5000
48 48
49 49 ##########################
50 50 ## GUNICORN WSGI SERVER ##
51 51 ##########################
52 52 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
53 53
54 54 use = egg:gunicorn#main
55 55 ## Sets the number of process workers. You must set `instance_id = *`
56 56 ## when this option is set to more than one worker, recommended
57 57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
58 58 ## The `instance_id = *` must be set in the [app:main] section below
59 59 #workers = 2
60 60 ## number of threads for each worker, must be set to 1 for gevent
61 61 ## generally recommended to be at 1
62 62 #threads = 1
63 63 ## process name
64 64 #proc_name = rhodecode
65 65 ## type of worker class, one of sync, gevent
66 66 ## for bigger setups a worker class other than sync is recommended
67 67 #worker_class = sync
68 68 ## The maximum number of simultaneous clients. Valid only for Gevent
69 69 #worker_connections = 10
70 70 ## max number of requests that worker will handle before being gracefully
71 71 ## restarted, could prevent memory leaks
72 72 #max_requests = 1000
73 73 #max_requests_jitter = 30
74 74 ## amount of time a worker can spend handling a request before it
75 75 ## gets killed and restarted. Set to 6hrs
76 76 #timeout = 21600
77 77
78 78 ## prefix middleware for RhodeCode.
79 79 ## recommended when using proxy setup.
80 80 ## allows to set RhodeCode under a prefix in server.
81 81 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
82 82 ## And set your prefix like: `prefix = /custom_prefix`
83 83 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
84 84 ## to make your cookies only work on prefix url
85 85 [filter:proxy-prefix]
86 86 use = egg:PasteDeploy#prefix
87 87 prefix = /
88 88
89 89 [app:main]
90 90 is_test = True
91 91 use = egg:rhodecode-enterprise-ce
92 92
93 93 ## enable proxy prefix middleware, defined above
94 94 #filter-with = proxy-prefix
95 95
96 96
97 97 ## RHODECODE PLUGINS ##
98 98 rhodecode.includes = rhodecode.api
99 99
100 100 # api prefix url
101 101 rhodecode.api.url = /_admin/api
102 102
103 103
104 104 ## END RHODECODE PLUGINS ##
105 105
106 106 ## encryption key used to encrypt social plugin tokens,
107 107 ## remote_urls with credentials etc, if not set it defaults to
108 108 ## `beaker.session.secret`
109 109 #rhodecode.encrypted_values.secret =
110 110
111 111 ## decryption strict mode (enabled by default). It controls if decryption raises
112 112 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 113 #rhodecode.encrypted_values.strict = false
114 114
115 115 ## return gzipped responses from Rhodecode (static files/application)
116 116 gzip_responses = false
117 117
118 118 ## autogenerate javascript routes file on startup
119 119 generate_js_files = false
120 120
121 121 ## Optional Languages
122 122 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
123 123 lang = en
124 124
125 125 ## perform a full repository scan on each server start, this should be
126 126 ## set to false after first startup, to allow faster server restarts.
127 127 startup.import_repos = true
128 128
129 129 ## Uncomment and set this path to use archive download cache.
130 130 ## Once enabled, generated archives will be cached at this location
131 131 ## and served from the cache during subsequent requests for the same archive of
132 132 ## the repository.
133 133 #archive_cache_dir = /tmp/tarballcache
134 134
135 135 ## URL at which the application is running. This is used for bootstrapping
136 136 ## requests in context when no web request is available. Used in ishell, or
137 137 ## SSH calls. Set this for events to receive proper url for SSH calls.
138 138 app.base_url = http://rhodecode.local
139 139
140 140 ## change this to unique ID for security
141 141 app_instance_uuid = rc-production
142 142
143 143 ## cut off limit for large diffs (size in bytes)
144 144 cut_off_limit_diff = 1024000
145 145 cut_off_limit_file = 256000
146 146
147 147 ## use cache version of scm repo everywhere
148 148 vcs_full_cache = false
149 149
150 150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 151 ## Normally this is controlled by proper http flags sent from http server
152 152 force_https = false
153 153
154 154 ## use Strict-Transport-Security headers
155 155 use_htsts = false
156 156
157 157 ## git rev filter option, --all is the default filter, if you need to
158 158 ## hide all refs in changelog switch this to --branches --tags
159 159 git_rev_filter = --all
160 160
161 161 # Set to true if your repos are exposed using the dumb protocol
162 162 git_update_server_info = false
163 163
164 164 ## RSS/ATOM feed options
165 165 rss_cut_off_limit = 256000
166 166 rss_items_per_page = 10
167 167 rss_include_diff = false
168 168
169 169 ## gist URL alias, used to create nicer urls for gist. This should be an
170 170 ## url that does rewrites to _admin/gists/{gistid}.
171 171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
172 172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
173 173 gist_alias_url =
174 174
175 175 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
176 176 ## used for access.
177 177 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
178 178 ## came from the logged-in user who owns this authentication token.
179 179 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
180 180 ## authentication token. Such a view would only be accessible when used together
181 181 ## with this authentication token
182 182 ##
183 183 ## list of all views can be found under `/_admin/permissions/auth_token_access`
184 184 ## The list should be "," separated and on a single line.
185 185 ##
186 186 ## Most common views to enable:
187 187 # RepoCommitsView:repo_commit_download
188 188 # RepoCommitsView:repo_commit_patch
189 189 # RepoCommitsView:repo_commit_raw
190 190 # RepoCommitsView:repo_commit_raw@TOKEN
191 191 # RepoFilesView:repo_files_diff
192 192 # RepoFilesView:repo_archivefile
193 193 # RepoFilesView:repo_file_raw
194 194 # GistView:*
195 195 api_access_controllers_whitelist =
196 196
197 197 ## default encoding used to convert from and to unicode
198 198 ## can be also a comma separated list of encoding in case of mixed encodings
199 199 default_encoding = UTF-8
200 200
201 201 ## instance-id prefix
202 202 ## a prefix key for this instance used for cache invalidation when running
203 203 ## multiple instances of rhodecode, make sure it's globally unique for
204 204 ## all running rhodecode instances. Leave empty if you don't use it
205 205 instance_id =
206 206
207 207 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
208 208 ## of an authentication plugin even if it is disabled by its settings.
209 209 ## This could be useful if you are unable to log in to the system due to broken
210 210 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
211 211 ## module to log in again and fix the settings.
212 212 ##
213 213 ## Available builtin plugin IDs (hash is part of the ID):
214 214 ## egg:rhodecode-enterprise-ce#rhodecode
215 215 ## egg:rhodecode-enterprise-ce#pam
216 216 ## egg:rhodecode-enterprise-ce#ldap
217 217 ## egg:rhodecode-enterprise-ce#jasig_cas
218 218 ## egg:rhodecode-enterprise-ce#headers
219 219 ## egg:rhodecode-enterprise-ce#crowd
220 220 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
221 221
222 222 ## alternative return HTTP header for failed authentication. Default HTTP
223 223 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
224 224 ## handling that, causing a series of failed authentication calls.
225 225 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
226 226 ## This will be served instead of the default 401 on bad authentication
227 227 auth_ret_code =
228 228
229 229 ## use special detection method when serving auth_ret_code, instead of serving
230 230 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
231 231 ## and then serve auth_ret_code to clients
232 232 auth_ret_code_detection = false
233 233
234 234 ## locking return code. When repository is locked return this HTTP code. 2XX
235 235 ## codes don't break the transactions while 4XX codes do
236 236 lock_ret_code = 423
237 237
238 238 ## allows to change the repository location in settings page
239 239 allow_repo_location_change = true
240 240
241 241 ## allows to setup custom hooks in settings page
242 242 allow_custom_hooks_settings = true
243 243
244 244 ## generated license token, goto license page in RhodeCode settings to obtain
245 245 ## new token
246 246 license_token = abra-cada-bra1-rce3
247 247
248 248 ## supervisor connection uri, for managing supervisor and logs.
249 249 supervisor.uri =
250 250 ## supervisord group name/id we only want this RC instance to handle
251 251 supervisor.group_id = dev
252 252
253 253 ## Display extended labs settings
254 254 labs_settings_active = true
255 255
256 256 ####################################
257 257 ### CELERY CONFIG ####
258 258 ####################################
259 259 use_celery = false
260 260 broker.host = localhost
261 261 broker.vhost = rabbitmqhost
262 262 broker.port = 5672
263 263 broker.user = rabbitmq
264 264 broker.password = qweqwe
265 265
266 266 celery.imports = rhodecode.lib.celerylib.tasks
267 267
268 268 celery.result.backend = amqp
269 269 celery.result.dburi = amqp://
270 270 celery.result.serialier = json
271 271
272 272 #celery.send.task.error.emails = true
273 273 #celery.amqp.task.result.expires = 18000
274 274
275 275 celeryd.concurrency = 2
276 276 #celeryd.log.file = celeryd.log
277 277 celeryd.log.level = debug
278 278 celeryd.max.tasks.per.child = 1
279 279
280 280 ## tasks will never be sent to the queue, but executed locally instead.
281 281 celery.always.eager = false
282 282
283 283 ####################################
284 284 ### BEAKER CACHE ####
285 285 ####################################
286 286 # default cache dir for templates. Putting this into a ramdisk
287 287 ## can boost performance, eg. %(here)s/data_ramdisk
288 288 cache_dir = %(here)s/data
289 289
290 290 ## locking and default file storage for Beaker. Putting this into a ramdisk
291 291 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
292 292 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
293 293 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
294 294
295 beaker.cache.regions = long_term, sql_cache_short
295 beaker.cache.regions = long_term
296 296
297 297 beaker.cache.long_term.type = memory
298 298 beaker.cache.long_term.expire = 36000
299 299 beaker.cache.long_term.key_length = 256
300 300
301 beaker.cache.sql_cache_short.type = memory
302 beaker.cache.sql_cache_short.expire = 1
303 beaker.cache.sql_cache_short.key_length = 256
304 301
305 302 #####################################
306 303 ### DOGPILE CACHE ####
307 304 #####################################
308 305
309 306 ## permission tree cache settings
310 307 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
311 308 rc_cache.cache_perms.expiration_time = 0
312 309 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
313 310
311
312 ## cache settings for SQL queries
313 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
314 rc_cache.sql_cache_short.expiration_time = 0
315
316
314 317 ####################################
315 318 ### BEAKER SESSION ####
316 319 ####################################
317 320
318 321 ## .session.type is type of storage options for the session, current allowed
319 322 ## types are file, ext:memcached, ext:database, and memory (default).
320 323 beaker.session.type = file
321 324 beaker.session.data_dir = %(here)s/rc/data/sessions/data
322 325
323 326 ## db based session, fast, and allows easy management over logged in users
324 327 #beaker.session.type = ext:database
325 328 #beaker.session.table_name = db_session
326 329 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
327 330 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
328 331 #beaker.session.sa.pool_recycle = 3600
329 332 #beaker.session.sa.echo = false
330 333
331 334 beaker.session.key = rhodecode
332 335 beaker.session.secret = test-rc-uytcxaz
333 336 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
334 337
335 338 ## Secure encrypted cookie. Requires AES and AES python libraries
336 339 ## you must disable beaker.session.secret to use this
337 340 #beaker.session.encrypt_key = key_for_encryption
338 341 #beaker.session.validate_key = validation_key
339 342
340 343 ## sets session as invalid (also logging out the user) if it has not been
341 344 ## accessed for a given amount of time in seconds
342 345 beaker.session.timeout = 2592000
343 346 beaker.session.httponly = true
344 347 ## Path to use for the cookie. Set to prefix if you use prefix middleware
345 348 #beaker.session.cookie_path = /custom_prefix
346 349
347 350 ## uncomment for https secure cookie
348 351 beaker.session.secure = false
349 352
350 353 ## auto save the session so that .save() does not need to be called
351 354 beaker.session.auto = false
352 355
353 356 ## default cookie expiration time in seconds, set to `true` to set expire
354 357 ## at browser close
355 358 #beaker.session.cookie_expires = 3600
356 359
357 360 ###################################
358 361 ## SEARCH INDEXING CONFIGURATION ##
359 362 ###################################
360 363 ## Full text search indexer is available in rhodecode-tools under
361 364 ## `rhodecode-tools index` command
362 365
363 366 ## WHOOSH Backend, doesn't require additional services to run
364 367 ## it works well with a few dozen repos
365 368 search.module = rhodecode.lib.index.whoosh
366 369 search.location = %(here)s/data/index
367 370
368 371 ########################################
369 372 ### CHANNELSTREAM CONFIG ####
370 373 ########################################
371 374 ## channelstream enables persistent connections and live notification
372 375 ## in the system. It's also used by the chat system
373 376
374 377 channelstream.enabled = false
375 378
376 379 ## server address for channelstream server on the backend
377 380 channelstream.server = 127.0.0.1:9800
378 381 ## location of the channelstream server from outside world
379 382 ## use ws:// for http or wss:// for https. This address needs to be handled
380 383 ## by external HTTP server such as Nginx or Apache
381 384 ## see nginx/apache configuration examples in our docs
382 385 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
383 386 channelstream.secret = secret
384 387 channelstream.history.location = %(here)s/channelstream_history
385 388
386 389 ## Internal application path that Javascript uses to connect into.
387 390 ## If you use proxy-prefix the prefix should be added before /_channelstream
388 391 channelstream.proxy_path = /_channelstream
389 392
390 393
391 394 ###################################
392 395 ## APPENLIGHT CONFIG ##
393 396 ###################################
394 397
395 398 ## Appenlight is tailored to work with RhodeCode, see
396 399 ## http://appenlight.com for details how to obtain an account
397 400
398 401 ## appenlight integration enabled
399 402 appenlight = false
400 403
401 404 appenlight.server_url = https://api.appenlight.com
402 405 appenlight.api_key = YOUR_API_KEY
403 406 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
404 407
405 408 # used for JS client
406 409 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
407 410
408 411 ## TWEAK AMOUNT OF INFO SENT HERE
409 412
410 413 ## enables 404 error logging (default False)
411 414 appenlight.report_404 = false
412 415
413 416 ## time in seconds after request is considered being slow (default 1)
414 417 appenlight.slow_request_time = 1
415 418
416 419 ## record slow requests in application
417 420 ## (needs to be enabled for slow datastore recording and time tracking)
418 421 appenlight.slow_requests = true
419 422
420 423 ## enable hooking to application loggers
421 424 appenlight.logging = true
422 425
423 426 ## minimum log level for log capture
424 427 appenlight.logging.level = WARNING
425 428
426 429 ## send logs only from erroneous/slow requests
427 430 ## (saves API quota for intensive logging)
428 431 appenlight.logging_on_error = false
429 432
430 433 ## list of additional keywords that should be grabbed from the environ object
431 434 ## can be string with comma separated list of words in lowercase
432 435 ## (by default client will always send following info:
433 436 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
434 437 ## start with HTTP*); this list can be extended with additional keywords here
435 438 appenlight.environ_keys_whitelist =
436 439
437 440 ## list of keywords that should be blanked from request object
438 441 ## can be string with comma separated list of words in lowercase
439 442 ## (by default client will always blank keys that contain following words
440 443 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
441 444 ## this list can be extended with additional keywords set here
442 445 appenlight.request_keys_blacklist =
443 446
444 447 ## list of namespaces that should be ignored when gathering log entries
445 448 ## can be string with comma separated list of namespaces
446 449 ## (by default the client ignores own entries: appenlight_client.client)
447 450 appenlight.log_namespace_blacklist =
448 451
449 452
450 453 ################################################################################
451 454 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
452 455 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
453 456 ## execute malicious code after an exception is raised. ##
454 457 ################################################################################
455 458 set debug = false
456 459
457 460
458 461 ##############
459 462 ## STYLING ##
460 463 ##############
461 464 debug_style = false
462 465
463 466 ###########################################
464 467 ### MAIN RHODECODE DATABASE CONFIG ###
465 468 ###########################################
466 469 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
467 470 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
468 471 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
469 472 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
470 473
471 474 # see sqlalchemy docs for other advanced settings
472 475
473 476 ## print the sql statements to output
474 477 sqlalchemy.db1.echo = false
475 478 ## recycle the connections after this amount of seconds
476 479 sqlalchemy.db1.pool_recycle = 3600
477 480 sqlalchemy.db1.convert_unicode = true
478 481
479 482 ## the number of connections to keep open inside the connection pool.
480 483 ## 0 indicates no limit
481 484 #sqlalchemy.db1.pool_size = 5
482 485
483 486 ## the number of connections to allow in connection pool "overflow", that is
484 487 ## connections that can be opened above and beyond the pool_size setting,
485 488 ## which defaults to five.
486 489 #sqlalchemy.db1.max_overflow = 10
487 490
488 491
489 492 ##################
490 493 ### VCS CONFIG ###
491 494 ##################
492 495 vcs.server.enable = true
493 496 vcs.server = localhost:9901
494 497
495 498 ## Web server connectivity protocol, responsible for web based VCS operations
496 499 ## Available protocols are:
497 500 ## `http` - use http-rpc backend (default)
498 501 vcs.server.protocol = http
499 502
500 503 ## Push/Pull operations protocol, available options are:
501 504 ## `http` - use http-rpc backend (default)
502 505 ## `vcsserver.scm_app` - internal app (EE only)
503 506 vcs.scm_app_implementation = http
504 507
505 508 ## Push/Pull operations hooks protocol, available options are:
506 509 ## `http` - use http-rpc backend (default)
507 510 vcs.hooks.protocol = http
508 511 vcs.hooks.host = 127.0.0.1
509 512
510 513 vcs.server.log_level = debug
511 514 ## Start VCSServer with this instance as a subprocess, useful for development
512 515 vcs.start_server = false
513 516
514 517 ## List of enabled VCS backends, available options are:
515 518 ## `hg` - mercurial
516 519 ## `git` - git
517 520 ## `svn` - subversion
518 521 vcs.backends = hg, git, svn
519 522
520 523 vcs.connection_timeout = 3600
521 524 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
522 525 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
523 526 #vcs.svn.compatible_version = pre-1.8-compatible
524 527
525 528
526 529 ############################################################
527 530 ### Subversion proxy support (mod_dav_svn) ###
528 531 ### Maps RhodeCode repo groups into SVN paths for Apache ###
529 532 ############################################################
530 533 ## Enable or disable the config file generation.
531 534 svn.proxy.generate_config = false
532 535 ## Generate config file with `SVNListParentPath` set to `On`.
533 536 svn.proxy.list_parent_path = true
534 537 ## Set location and file name of generated config file.
535 538 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
536 539 ## Used as a prefix to the `Location` block in the generated config file.
537 540 ## In most cases it should be set to `/`.
538 541 svn.proxy.location_root = /
539 542 ## Command to reload the mod dav svn configuration on change.
540 543 ## Example: `/etc/init.d/apache2 reload`
541 544 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
542 545 ## If the timeout expires before the reload command finishes, the command will
543 546 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
544 547 #svn.proxy.reload_timeout = 10
545 548
546 549 ############################################################
547 550 ### SSH Support Settings ###
548 551 ############################################################
549 552
550 553 ## Defines if the authorized_keys file should be written on any change of
551 554 ## user ssh keys, setting this to false also disables the possibility of adding
552 555 ## ssh keys for users from web interface.
553 556 ssh.generate_authorized_keyfile = true
554 557
555 558 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
556 559 # ssh.authorized_keys_ssh_opts =
557 560
558 561 ## File to generate the authorized keys together with options
559 562 ## It is possible to have multiple key files specified in `sshd_config` e.g.
560 563 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
561 564 ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode
562 565
563 566 ## Command to execute the SSH wrapper. The binary is available in the
564 567 ## rhodecode installation directory.
565 568 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
566 569 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
567 570
568 571 ## Allow shell when executing the ssh-wrapper command
569 572 ssh.wrapper_cmd_allow_shell = false
570 573
571 574 ## Enables logging and detailed output sent back to the client. Useful for
572 575 ## debugging, shouldn't be used in production.
573 576 ssh.enable_debug_logging = false
574 577
575 578 ## Paths to binary executables; by default they are just the names, but we can
576 579 ## override them if we want to use custom ones
577 580 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
578 581 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
579 582 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
580 583
581 584
582 585 ## Dummy marker to add new entries after.
583 586 ## Add any custom entries below. Please don't remove.
584 587 custom.conf = 1
585 588
586 589
587 590 ################################
588 591 ### LOGGING CONFIGURATION ####
589 592 ################################
590 593 [loggers]
591 594 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper
592 595
593 596 [handlers]
594 597 keys = console, console_sql
595 598
596 599 [formatters]
597 600 keys = generic, color_formatter, color_formatter_sql
598 601
599 602 #############
600 603 ## LOGGERS ##
601 604 #############
602 605 [logger_root]
603 606 level = NOTSET
604 607 handlers = console
605 608
606 609 [logger_routes]
607 610 level = DEBUG
608 611 handlers =
609 612 qualname = routes.middleware
610 613 ## "level = DEBUG" logs the route matched and routing variables.
611 614 propagate = 1
612 615
613 616 [logger_beaker]
614 617 level = DEBUG
615 618 handlers =
616 619 qualname = beaker.container
617 620 propagate = 1
618 621
619 622 [logger_rhodecode]
620 623 level = DEBUG
621 624 handlers =
622 625 qualname = rhodecode
623 626 propagate = 1
624 627
625 628 [logger_sqlalchemy]
626 629 level = ERROR
627 630 handlers = console_sql
628 631 qualname = sqlalchemy.engine
629 632 propagate = 0
630 633
631 634 [logger_ssh_wrapper]
632 635 level = DEBUG
633 636 handlers =
634 637 qualname = ssh_wrapper
635 638 propagate = 1
636 639
637 640
638 641 ##############
639 642 ## HANDLERS ##
640 643 ##############
641 644
642 645 [handler_console]
643 646 class = StreamHandler
644 647 args = (sys.stderr,)
645 648 level = DEBUG
646 649 formatter = generic
647 650
648 651 [handler_console_sql]
649 652 class = StreamHandler
650 653 args = (sys.stderr,)
651 654 level = WARN
652 655 formatter = generic
653 656
654 657 ################
655 658 ## FORMATTERS ##
656 659 ################
657 660
658 661 [formatter_generic]
659 662 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
660 663 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
661 664 datefmt = %Y-%m-%d %H:%M:%S
662 665
663 666 [formatter_color_formatter]
664 667 class = rhodecode.lib.logging_formatter.ColorFormatter
665 668 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
666 669 datefmt = %Y-%m-%d %H:%M:%S
667 670
668 671 [formatter_color_formatter_sql]
669 672 class = rhodecode.lib.logging_formatter.ColorFormatterSql
670 673 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
671 674 datefmt = %Y-%m-%d %H:%M:%S
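For reference, a self-contained dogpile.cache sketch of what the rc_cache.sql_cache_short.* keys above express. The dogpile.cache.rc.memory_lru backend name is RhodeCode-specific and not reproduced here; the stock memory backend, the 30 second expiry, and the stub lookup function are stand-ins used purely for illustration (the ini sets expiration_time = 0, which the application presumably treats as no expiry):

    from dogpile.cache import make_region

    # illustrative equivalent of an rc_cache.* region definition
    region = make_region().configure(
        'dogpile.cache.memory',       # stand-in for dogpile.cache.rc.memory_lru
        expiration_time=30,           # stand-in for expiration_time = 0
    )

    @region.cache_on_arguments()
    def get_ui_settings(section):
        # stand-in for the SQL query normally cached under sql_cache_short
        return [(section, 'changegroup.push_logger', True)]

    get_ui_settings('hooks')   # computed and stored in the region
    get_ui_settings('hooks')   # served from the cache
    region.invalidate()        # what clear_cache_regions() calls per region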