##// END OF EJS Templates
feat(configs): deprecated old hooks protocol and ssh wrapper....
super-admin -
r5496:cab50adf default
parent child Browse files
Show More
@@ -1,857 +1,858 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = true
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --config gunicorn_conf.py --paste rhodecode.ini
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Prefix middleware for RhodeCode.
45 45 ; recommended when using proxy setup.
46 46 ; allows to set RhodeCode under a prefix in server.
47 47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 48 ; And set your prefix like: `prefix = /custom_prefix`
49 49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 50 ; to make your cookies only work on prefix url
51 51 [filter:proxy-prefix]
52 52 use = egg:PasteDeploy#prefix
53 53 prefix = /
54 54
55 55 [app:main]
56 56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 57 ; of this file
58 58 ; Each option in the app:main can be overridden by an environment variable
59 59 ;
60 60 ;To override an option:
61 61 ;
62 62 ;RC_<KeyName>
63 63 ;Everything should be uppercase, . and - should be replaced by _.
64 64 ;For example, if you have these configuration settings:
65 65 ;rc_cache.repo_object.backend = foo
66 66 ;can be overridden by
67 67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68 68
69 69 use = egg:rhodecode-enterprise-ce
70 70
71 71 ; enable proxy prefix middleware, defined above
72 72 #filter-with = proxy-prefix
73 73
74 74 ; #############
75 75 ; DEBUG OPTIONS
76 76 ; #############
77 77
78 78 pyramid.reload_templates = true
79 79
80 80 # During development we want to have the debug toolbar enabled
81 81 pyramid.includes =
82 82 pyramid_debugtoolbar
83 83
84 84 debugtoolbar.hosts = 0.0.0.0/0
85 85 debugtoolbar.exclude_prefixes =
86 86 /css
87 87 /fonts
88 88 /images
89 89 /js
90 90
91 91 ## RHODECODE PLUGINS ##
92 92 rhodecode.includes =
93 93 rhodecode.api
94 94
95 95
96 96 # api prefix url
97 97 rhodecode.api.url = /_admin/api
98 98
99 99 ; enable debug style page
100 100 debug_style = true
101 101
102 102 ; #################
103 103 ; END DEBUG OPTIONS
104 104 ; #################
105 105
106 106 ; encryption key used to encrypt social plugin tokens,
107 107 ; remote_urls with credentials etc, if not set it defaults to
108 108 ; `beaker.session.secret`
109 109 #rhodecode.encrypted_values.secret =
110 110
111 111 ; decryption strict mode (enabled by default). It controls if decryption raises
112 112 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 113 #rhodecode.encrypted_values.strict = false
114 114
115 115 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
116 116 ; fernet is safer, and we strongly recommend switching to it.
117 117 ; Due to backward compatibility aes is used as default.
118 118 #rhodecode.encrypted_values.algorithm = fernet
119 119
120 120 ; Return gzipped responses from RhodeCode (static files/application)
121 121 gzip_responses = false
122 122
123 123 ; Auto-generate javascript routes file on startup
124 124 generate_js_files = false
125 125
126 126 ; System global default language.
127 127 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 128 lang = en
129 129
130 130 ; Perform a full repository scan and import on each server start.
131 131 ; Setting this to true could lead to very long startup time.
132 132 startup.import_repos = false
133 133
134 134 ; URL at which the application is running. This is used for Bootstrapping
135 135 ; requests in context when no web request is available. Used in ishell, or
136 136 ; SSH calls. Set this for events to receive proper url for SSH calls.
137 137 app.base_url = http://rhodecode.local
138 138
139 139 ; Host at which the Service API is running.
140 140 app.service_api.host = http://rhodecode.local:10020
141 141
142 142 ; Secret for Service API authentication.
143 143 app.service_api.token =
144 144
145 145 ; Unique application ID. Should be a random unique string for security.
146 146 app_instance_uuid = rc-production
147 147
148 148 ; Cut off limit for large diffs (size in bytes). If overall diff size on
149 149 ; commit, or pull request exceeds this limit this diff will be displayed
150 150 ; partially. E.g 512000 == 512Kb
151 151 cut_off_limit_diff = 512000
152 152
153 153 ; Cut off limit for large files inside diffs (size in bytes). Each individual
154 154 ; file inside diff which exceeds this limit will be displayed partially.
155 155 ; E.g 128000 == 128Kb
156 156 cut_off_limit_file = 128000
157 157
158 158 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
159 159 vcs_full_cache = true
160 160
161 161 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
162 162 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
163 163 force_https = false
164 164
165 165 ; use Strict-Transport-Security headers
166 166 use_htsts = false
167 167
168 168 ; Set to true if your repos are exposed using the dumb protocol
169 169 git_update_server_info = false
170 170
171 171 ; RSS/ATOM feed options
172 172 rss_cut_off_limit = 256000
173 173 rss_items_per_page = 10
174 174 rss_include_diff = false
175 175
176 176 ; gist URL alias, used to create nicer urls for gist. This should be an
177 177 ; url that does rewrites to _admin/gists/{gistid}.
178 178 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
179 179 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
180 180 gist_alias_url =
181 181
182 182 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
183 183 ; used for access.
184 184 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
185 185 ; came from the logged in user who owns this authentication token.
186 186 ; Additionally @TOKEN syntax can be used to bound the view to specific
187 187 ; authentication token. Such view would be only accessible when used together
188 188 ; with this authentication token
189 189 ; list of all views can be found under `/_admin/permissions/auth_token_access`
190 190 ; The list should be "," separated and on a single line.
191 191 ; Most common views to enable:
192 192
193 193 # RepoCommitsView:repo_commit_download
194 194 # RepoCommitsView:repo_commit_patch
195 195 # RepoCommitsView:repo_commit_raw
196 196 # RepoCommitsView:repo_commit_raw@TOKEN
197 197 # RepoFilesView:repo_files_diff
198 198 # RepoFilesView:repo_archivefile
199 199 # RepoFilesView:repo_file_raw
200 200 # GistView:*
201 201 api_access_controllers_whitelist =
202 202
203 203 ; Default encoding used to convert from and to unicode
204 204 ; can be also a comma separated list of encoding in case of mixed encodings
205 205 default_encoding = UTF-8
206 206
207 207 ; instance-id prefix
208 208 ; a prefix key for this instance used for cache invalidation when running
209 209 ; multiple instances of RhodeCode, make sure it's globally unique for
210 210 ; all running RhodeCode instances. Leave empty if you don't use it
211 211 instance_id =
212 212
213 213 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
214 214 ; of an authentication plugin also if it is disabled by its settings.
215 215 ; This could be useful if you are unable to log in to the system due to broken
216 216 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
217 217 ; module to log in again and fix the settings.
218 218 ; Available builtin plugin IDs (hash is part of the ID):
219 219 ; egg:rhodecode-enterprise-ce#rhodecode
220 220 ; egg:rhodecode-enterprise-ce#pam
221 221 ; egg:rhodecode-enterprise-ce#ldap
222 222 ; egg:rhodecode-enterprise-ce#jasig_cas
223 223 ; egg:rhodecode-enterprise-ce#headers
224 224 ; egg:rhodecode-enterprise-ce#crowd
225 225
226 226 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
227 227
228 228 ; Flag to control loading of legacy plugins in py:/path format
229 229 auth_plugin.import_legacy_plugins = true
230 230
231 231 ; alternative return HTTP header for failed authentication. Default HTTP
232 232 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
233 233 ; handling that causing a series of failed authentication calls.
234 234 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
235 235 ; This will be served instead of default 401 on bad authentication
236 236 auth_ret_code =
237 237
238 238 ; use special detection method when serving auth_ret_code, instead of serving
239 239 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
240 240 ; and then serve auth_ret_code to clients
241 241 auth_ret_code_detection = false
242 242
243 243 ; locking return code. When repository is locked return this HTTP code. 2XX
244 244 ; codes don't break the transactions while 4XX codes do
245 245 lock_ret_code = 423
246 246
247 247 ; Filesystem location where repositories should be stored
248 248 repo_store.path = /var/opt/rhodecode_repo_store
249 249
250 250 ; allows to setup custom hooks in settings page
251 251 allow_custom_hooks_settings = true
252 252
253 253 ; Generated license token required for EE edition license.
254 254 ; New generated token value can be found in Admin > settings > license page.
255 255 license_token =
256 256
257 257 ; This flag hides sensitive information on the license page such as token, and license data
258 258 license.hide_license_info = false
259 259
260 260 ; supervisor connection uri, for managing supervisor and logs.
261 261 supervisor.uri =
262 262
263 263 ; supervisord group name/id we only want this RC instance to handle
264 264 supervisor.group_id = dev
265 265
266 266 ; Display extended labs settings
267 267 labs_settings_active = true
268 268
269 269 ; Custom exception store path, defaults to TMPDIR
270 270 ; This is used to store exception from RhodeCode in shared directory
271 271 #exception_tracker.store_path =
272 272
273 273 ; Send email with exception details when it happens
274 274 #exception_tracker.send_email = false
275 275
276 276 ; Comma separated list of recipients for exception emails,
277 277 ; e.g admin@rhodecode.com,devops@rhodecode.com
278 278 ; Can be left empty, then emails will be sent to ALL super-admins
279 279 #exception_tracker.send_email_recipients =
280 280
281 281 ; optional prefix to Add to email Subject
282 282 #exception_tracker.email_prefix = [RHODECODE ERROR]
283 283
284 284 ; File store configuration. This is used to store and serve uploaded files
285 285 file_store.enabled = true
286 286
287 287 ; Storage backend, available options are: local
288 288 file_store.backend = local
289 289
290 290 ; path to store the uploaded binaries and artifacts
291 291 file_store.storage_path = /var/opt/rhodecode_data/file_store
292 292
293 293
294 294 ; Redis url to acquire/check generation of archives locks
295 295 archive_cache.locking.url = redis://redis:6379/1
296 296
297 297 ; Storage backend, only 'filesystem' and 'objectstore' are available now
298 298 archive_cache.backend.type = filesystem
299 299
300 300 ; url for s3 compatible storage that allows to upload artifacts
301 301 ; e.g http://minio:9000
302 302 archive_cache.objectstore.url = http://s3-minio:9000
303 303
304 304 ; key for s3 auth
305 305 archive_cache.objectstore.key = key
306 306
307 307 ; secret for s3 auth
308 308 archive_cache.objectstore.secret = secret
309 309
310 310 ; region for s3 storage
311 311 archive_cache.objectstore.region = eu-central-1
312 312
313 313 ; number of sharded buckets to create to distribute archives across
314 314 ; default is 8 shards
315 315 archive_cache.objectstore.bucket_shards = 8
316 316
317 317 ; a top-level bucket to put all other shards in
318 318 ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
319 319 archive_cache.objectstore.bucket = rhodecode-archive-cache
320 320
321 321 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
322 322 archive_cache.objectstore.retry = false
323 323
324 324 ; number of seconds to wait for next try using retry
325 325 archive_cache.objectstore.retry_backoff = 1
326 326
327 327 ; how many tries to do a retry fetch from this backend
328 328 archive_cache.objectstore.retry_attempts = 10
329 329
330 330 ; Default is $cache_dir/archive_cache if not set
331 331 ; Generated repo archives will be cached at this location
332 332 ; and served from the cache during subsequent requests for the same archive of
333 333 ; the repository. This path is important to be shared across filesystems and with
334 334 ; RhodeCode and vcsserver
335 335 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
336 336
337 337 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
338 338 archive_cache.filesystem.cache_size_gb = 1
339 339
340 340 ; Eviction policy used to clear out after cache_size_gb limit is reached
341 341 archive_cache.filesystem.eviction_policy = least-recently-stored
342 342
343 343 ; By default cache uses sharding technique, this specifies how many shards are there
344 344 ; default is 8 shards
345 345 archive_cache.filesystem.cache_shards = 8
346 346
347 347 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
348 348 archive_cache.filesystem.retry = false
349 349
350 350 ; number of seconds to wait for next try using retry
351 351 archive_cache.filesystem.retry_backoff = 1
352 352
353 353 ; how many tries to do a retry fetch from this backend
354 354 archive_cache.filesystem.retry_attempts = 10
355 355
356 356
357 357 ; #############
358 358 ; CELERY CONFIG
359 359 ; #############
360 360
361 361 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
362 362
363 363 use_celery = true
364 364
365 365 ; path to store schedule database
366 366 #celerybeat-schedule.path =
367 367
368 368 ; connection url to the message broker (default redis)
369 369 celery.broker_url = redis://redis:6379/8
370 370
371 371 ; results backend to get results for (default redis)
372 372 celery.result_backend = redis://redis:6379/8
373 373
374 374 ; rabbitmq example
375 375 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
376 376
377 377 ; maximum tasks to execute before worker restart
378 378 celery.max_tasks_per_child = 20
379 379
380 380 ; tasks will never be sent to the queue, but executed locally instead.
381 381 celery.task_always_eager = false
382 382
383 383 ; #############
384 384 ; DOGPILE CACHE
385 385 ; #############
386 386
387 387 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
388 388 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
389 389 cache_dir = /var/opt/rhodecode_data
390 390
391 391 ; *********************************************
392 392 ; `sql_cache_short` cache for heavy SQL queries
393 393 ; Only supported backend is `memory_lru`
394 394 ; *********************************************
395 395 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
396 396 rc_cache.sql_cache_short.expiration_time = 30
397 397
398 398
399 399 ; *****************************************************
400 400 ; `cache_repo_longterm` cache for repo object instances
401 401 ; Only supported backend is `memory_lru`
402 402 ; *****************************************************
403 403 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
404 404 ; by default we use 30 Days, cache is still invalidated on push
405 405 rc_cache.cache_repo_longterm.expiration_time = 2592000
406 406 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
407 407 rc_cache.cache_repo_longterm.max_size = 10000
408 408
409 409
410 410 ; *********************************************
411 411 ; `cache_general` cache for general purpose use
412 412 ; for simplicity use rc.file_namespace backend,
413 413 ; for performance and scale use rc.redis
414 414 ; *********************************************
415 415 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
416 416 rc_cache.cache_general.expiration_time = 43200
417 417 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
418 418 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
419 419
420 420 ; alternative `cache_general` redis backend with distributed lock
421 421 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
422 422 #rc_cache.cache_general.expiration_time = 300
423 423
424 424 ; redis_expiration_time needs to be greater than expiration_time
425 425 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
426 426
427 427 #rc_cache.cache_general.arguments.host = localhost
428 428 #rc_cache.cache_general.arguments.port = 6379
429 429 #rc_cache.cache_general.arguments.db = 0
430 430 #rc_cache.cache_general.arguments.socket_timeout = 30
431 431 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
432 432 #rc_cache.cache_general.arguments.distributed_lock = true
433 433
434 434 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
435 435 #rc_cache.cache_general.arguments.lock_auto_renewal = true
436 436
437 437 ; *************************************************
438 438 ; `cache_perms` cache for permission tree, auth TTL
439 439 ; for simplicity use rc.file_namespace backend,
440 440 ; for performance and scale use rc.redis
441 441 ; *************************************************
442 442 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
443 443 rc_cache.cache_perms.expiration_time = 3600
444 444 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
445 445 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
446 446
447 447 ; alternative `cache_perms` redis backend with distributed lock
448 448 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
449 449 #rc_cache.cache_perms.expiration_time = 300
450 450
451 451 ; redis_expiration_time needs to be greater than expiration_time
452 452 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
453 453
454 454 #rc_cache.cache_perms.arguments.host = localhost
455 455 #rc_cache.cache_perms.arguments.port = 6379
456 456 #rc_cache.cache_perms.arguments.db = 0
457 457 #rc_cache.cache_perms.arguments.socket_timeout = 30
458 458 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
459 459 #rc_cache.cache_perms.arguments.distributed_lock = true
460 460
461 461 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
462 462 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
463 463
464 464 ; ***************************************************
465 465 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
466 466 ; for simplicity use rc.file_namespace backend,
467 467 ; for performance and scale use rc.redis
468 468 ; ***************************************************
469 469 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
470 470 rc_cache.cache_repo.expiration_time = 2592000
471 471 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
472 472 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
473 473
474 474 ; alternative `cache_repo` redis backend with distributed lock
475 475 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
476 476 #rc_cache.cache_repo.expiration_time = 2592000
477 477
478 478 ; redis_expiration_time needs to be greater than expiration_time
479 479 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
480 480
481 481 #rc_cache.cache_repo.arguments.host = localhost
482 482 #rc_cache.cache_repo.arguments.port = 6379
483 483 #rc_cache.cache_repo.arguments.db = 1
484 484 #rc_cache.cache_repo.arguments.socket_timeout = 30
485 485 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
486 486 #rc_cache.cache_repo.arguments.distributed_lock = true
487 487
488 488 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
489 489 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
490 490
491 491 ; ##############
492 492 ; BEAKER SESSION
493 493 ; ##############
494 494
495 495 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
496 496 ; types are file, ext:redis, ext:database, ext:memcached
497 497 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
498 498 #beaker.session.type = file
499 499 #beaker.session.data_dir = %(here)s/data/sessions
500 500
501 501 ; Redis based sessions
502 502 beaker.session.type = ext:redis
503 503 beaker.session.url = redis://redis:6379/2
504 504
505 505 ; DB based session, fast, and allows easy management over logged in users
506 506 #beaker.session.type = ext:database
507 507 #beaker.session.table_name = db_session
508 508 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
509 509 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
510 510 #beaker.session.sa.pool_recycle = 3600
511 511 #beaker.session.sa.echo = false
512 512
513 513 beaker.session.key = rhodecode
514 514 beaker.session.secret = develop-rc-uytcxaz
515 515 beaker.session.lock_dir = /data_ramdisk/lock
516 516
517 517 ; Secure encrypted cookie. Requires AES and AES python libraries
518 518 ; you must disable beaker.session.secret to use this
519 519 #beaker.session.encrypt_key = key_for_encryption
520 520 #beaker.session.validate_key = validation_key
521 521
522 522 ; Sets session as invalid (also logging out user) if it has not been
523 523 ; accessed for given amount of time in seconds
524 524 beaker.session.timeout = 2592000
525 525 beaker.session.httponly = true
526 526
527 527 ; Path to use for the cookie. Set to prefix if you use prefix middleware
528 528 #beaker.session.cookie_path = /custom_prefix
529 529
530 530 ; Set https secure cookie
531 531 beaker.session.secure = false
532 532
533 533 ; default cookie expiration time in seconds, set to `true` to set expire
534 534 ; at browser close
535 535 #beaker.session.cookie_expires = 3600
536 536
537 537 ; #############################
538 538 ; SEARCH INDEXING CONFIGURATION
539 539 ; #############################
540 540
541 541 ; Full text search indexer is available in rhodecode-tools under
542 542 ; `rhodecode-tools index` command
543 543
544 544 ; WHOOSH Backend, doesn't require additional services to run
545 545 ; it works good with few dozen repos
546 546 search.module = rhodecode.lib.index.whoosh
547 547 search.location = %(here)s/data/index
548 548
549 549 ; ####################
550 550 ; CHANNELSTREAM CONFIG
551 551 ; ####################
552 552
553 553 ; channelstream enables persistent connections and live notification
554 554 ; in the system. It's also used by the chat system
555 555
556 556 channelstream.enabled = true
557 557
558 558 ; server address for channelstream server on the backend
559 559 channelstream.server = channelstream:9800
560 560
561 561 ; location of the channelstream server from outside world
562 562 ; use ws:// for http or wss:// for https. This address needs to be handled
563 563 ; by external HTTP server such as Nginx or Apache
564 564 ; see Nginx/Apache configuration examples in our docs
565 565 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
566 566 channelstream.secret = ENV_GENERATED
567 567 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
568 568
569 569 ; Internal application path that Javascript uses to connect into.
570 570 ; If you use proxy-prefix the prefix should be added before /_channelstream
571 571 channelstream.proxy_path = /_channelstream
572 572
573 573
574 574 ; ##############################
575 575 ; MAIN RHODECODE DATABASE CONFIG
576 576 ; ##############################
577 577
578 578 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
579 579 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
580 580 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
581 581 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
582 582 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
583 583
584 584 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
585 585
586 586 ; see sqlalchemy docs for other advanced settings
587 587 ; print the sql statements to output
588 588 sqlalchemy.db1.echo = false
589 589
590 590 ; recycle the connections after this amount of seconds
591 591 sqlalchemy.db1.pool_recycle = 3600
592 592
593 593 ; the number of connections to keep open inside the connection pool.
594 594 ; 0 indicates no limit
595 595 ; the general calculus with gevent is:
596 596 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
597 597 ; then increase pool size + max overflow so that they add up to 500.
598 598 #sqlalchemy.db1.pool_size = 5
599 599
600 600 ; The number of connections to allow in connection pool "overflow", that is
601 601 ; connections that can be opened above and beyond the pool_size setting,
602 602 ; which defaults to five.
603 603 #sqlalchemy.db1.max_overflow = 10
604 604
605 605 ; Connection check ping, used to detect broken database connections
606 606 ; could be enabled to better handle cases if MySQL has gone away errors
607 607 #sqlalchemy.db1.ping_connection = true
608 608
609 609 ; ##########
610 610 ; VCS CONFIG
611 611 ; ##########
612 612 vcs.server.enable = true
613 613 vcs.server = vcsserver:10010
614 614
615 615 ; Web server connectivity protocol, responsible for web based VCS operations
616 616 ; Available protocols are:
617 617 ; `http` - use http-rpc backend (default)
618 618 vcs.server.protocol = http
619 619
620 620 ; Push/Pull operations protocol, available options are:
621 621 ; `http` - use http-rpc backend (default)
622 622 vcs.scm_app_implementation = http
623 623
624 624 ; Push/Pull operations hooks protocol, available options are:
625 625 ; `http` - use http-rpc backend (default)
626 626 ; `celery` - use celery based hooks
627 vcs.hooks.protocol = http
627 #DEPRECATED:vcs.hooks.protocol = http
628 vcs.hooks.protocol.v2 = celery
628 629
629 630 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
630 631 ; accessible via network.
631 632 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
632 633 vcs.hooks.host = *
633 634
634 635 ; Start VCSServer with this instance as a subprocess, useful for development
635 636 vcs.start_server = false
636 637
637 638 ; List of enabled VCS backends, available options are:
638 639 ; `hg` - mercurial
639 640 ; `git` - git
640 641 ; `svn` - subversion
641 642 vcs.backends = hg, git, svn
642 643
643 644 ; Wait this number of seconds before killing connection to the vcsserver
644 645 vcs.connection_timeout = 3600
645 646
646 647 ; Cache flag to cache vcsserver remote calls locally
647 648 ; It uses cache_region `cache_repo`
648 649 vcs.methods.cache = true
649 650
650 651 ; ####################################################
651 652 ; Subversion proxy support (mod_dav_svn)
652 653 ; Maps RhodeCode repo groups into SVN paths for Apache
653 654 ; ####################################################
654 655
655 656 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
656 657 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
657 658 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
658 659 #vcs.svn.compatible_version = 1.8
659 660
660 661 ; Redis connection settings for svn integrations logic
661 662 ; This connection string needs to be the same on ce and vcsserver
662 663 vcs.svn.redis_conn = redis://redis:6379/0
663 664
664 665 ; Enable SVN proxy of requests over HTTP
665 666 vcs.svn.proxy.enabled = true
666 667
667 668 ; host to connect to running SVN subsystem
668 669 vcs.svn.proxy.host = http://svn:8090
669 670
670 671 ; Enable or disable the config file generation.
671 672 svn.proxy.generate_config = true
672 673
673 674 ; Generate config file with `SVNListParentPath` set to `On`.
674 675 svn.proxy.list_parent_path = true
675 676
676 677 ; Set location and file name of generated config file.
677 678 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
678 679
679 680 ; alternative mod_dav config template. This needs to be a valid mako template
680 681 ; Example template can be found in the source code:
681 682 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
682 683 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
683 684
684 685 ; Used as a prefix to the `Location` block in the generated config file.
685 686 ; In most cases it should be set to `/`.
686 687 svn.proxy.location_root = /
687 688
688 689 ; Command to reload the mod dav svn configuration on change.
689 690 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
690 691 ; Make sure user who runs RhodeCode process is allowed to reload Apache
691 692 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
692 693
693 694 ; If the timeout expires before the reload command finishes, the command will
694 695 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
695 696 #svn.proxy.reload_timeout = 10
696 697
697 698 ; ####################
698 699 ; SSH Support Settings
699 700 ; ####################
700 701
701 702 ; Defines if a custom authorized_keys file should be created and written on
702 703 ; any change user ssh keys. Setting this to false also disables possibility
703 704 ; of adding SSH keys by users from web interface. Super admins can still
704 705 ; manage SSH Keys.
705 706 ssh.generate_authorized_keyfile = true
706 707
707 708 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
708 709 # ssh.authorized_keys_ssh_opts =
709 710
710 711 ; Path to the authorized_keys file where the generate entries are placed.
711 712 ; It is possible to have multiple key files specified in `sshd_config` e.g.
712 713 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
713 714 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
714 715
715 716 ; Command to execute the SSH wrapper. The binary is available in the
716 717 ; RhodeCode installation directory.
717 718 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
718 719 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
719 720 #DEPRECATED: ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
720 721 ssh.wrapper_cmd.v2 = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
721 722
722 723 ; Allow shell when executing the ssh-wrapper command
723 724 ssh.wrapper_cmd_allow_shell = false
724 725
725 726 ; Enables logging, and detailed output send back to the client during SSH
726 727 ; operations. Useful for debugging, shouldn't be used in production.
727 728 ssh.enable_debug_logging = true
728 729
729 730 ; Paths to binary executables, by default they are the names, but we can
730 731 ; override them if we want to use custom ones
731 732 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
732 733 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
733 734 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
734 735
735 736 ; Enables SSH key generator web interface. Disabling this still allows users
736 737 ; to add their own keys.
737 738 ssh.enable_ui_key_generator = true
738 739
739 740 ; Statsd client config, this is used to send metrics to statsd
740 741 ; We recommend setting statsd_exported and scraping them using Prometheus
741 742 #statsd.enabled = false
742 743 #statsd.statsd_host = 0.0.0.0
743 744 #statsd.statsd_port = 8125
744 745 #statsd.statsd_prefix =
745 746 #statsd.statsd_ipv6 = false
746 747
747 748 ; configure logging automatically at server startup; set to false
748 749 ; to use the custom logging config below.
749 750 ; RC_LOGGING_FORMATTER
750 751 ; RC_LOGGING_LEVEL
751 752 ; env variables can control the settings for logging in case of autoconfigure
752 753
753 754 #logging.autoconfigure = true
754 755
755 756 ; specify your own custom logging config file to configure logging
756 757 #logging.logging_conf_file = /path/to/custom_logging.ini
757 758
758 759 ; Dummy marker to add new entries after.
759 760 ; Add any custom entries below. Please don't remove this marker.
760 761 custom.conf = 1
761 762
762 763
763 764 ; #####################
764 765 ; LOGGING CONFIGURATION
765 766 ; #####################
766 767
767 768 [loggers]
768 769 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
769 770
770 771 [handlers]
771 772 keys = console, console_sql
772 773
773 774 [formatters]
774 775 keys = generic, json, color_formatter, color_formatter_sql
775 776
776 777 ; #######
777 778 ; LOGGERS
778 779 ; #######
779 780 [logger_root]
780 781 level = NOTSET
781 782 handlers = console
782 783
783 784 [logger_sqlalchemy]
784 785 level = INFO
785 786 handlers = console_sql
786 787 qualname = sqlalchemy.engine
787 788 propagate = 0
788 789
789 790 [logger_beaker]
790 791 level = DEBUG
791 792 handlers =
792 793 qualname = beaker.container
793 794 propagate = 1
794 795
795 796 [logger_rhodecode]
796 797 level = DEBUG
797 798 handlers =
798 799 qualname = rhodecode
799 800 propagate = 1
800 801
801 802 [logger_ssh_wrapper]
802 803 level = DEBUG
803 804 handlers =
804 805 qualname = ssh_wrapper
805 806 propagate = 1
806 807
807 808 [logger_celery]
808 809 level = DEBUG
809 810 handlers =
810 811 qualname = celery
811 812
812 813
813 814 ; ########
814 815 ; HANDLERS
815 816 ; ########
816 817
817 818 [handler_console]
818 819 class = StreamHandler
819 820 args = (sys.stderr, )
820 821 level = DEBUG
821 822 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
822 823 ; This allows sending properly formatted logs to grafana loki or elasticsearch
823 824 formatter = color_formatter
824 825
825 826 [handler_console_sql]
826 827 ; "level = DEBUG" logs SQL queries and results.
827 828 ; "level = INFO" logs SQL queries.
828 829 ; "level = WARN" logs neither. (Recommended for production systems.)
829 830 class = StreamHandler
830 831 args = (sys.stderr, )
831 832 level = WARN
832 833 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
833 834 ; This allows sending properly formatted logs to grafana loki or elasticsearch
834 835 formatter = color_formatter_sql
835 836
836 837 ; ##########
837 838 ; FORMATTERS
838 839 ; ##########
839 840
840 841 [formatter_generic]
841 842 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
842 843 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
843 844 datefmt = %Y-%m-%d %H:%M:%S
844 845
845 846 [formatter_color_formatter]
846 847 class = rhodecode.lib.logging_formatter.ColorFormatter
847 848 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
848 849 datefmt = %Y-%m-%d %H:%M:%S
849 850
850 851 [formatter_color_formatter_sql]
851 852 class = rhodecode.lib.logging_formatter.ColorFormatterSql
852 853 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
853 854 datefmt = %Y-%m-%d %H:%M:%S
854 855
855 856 [formatter_json]
856 857 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
857 858 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,825 +1,826 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = false
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --config gunicorn_conf.py --paste rhodecode.ini
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Prefix middleware for RhodeCode.
45 45 ; recommended when using proxy setup.
46 46 ; allows to set RhodeCode under a prefix in server.
47 47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 48 ; And set your prefix like: `prefix = /custom_prefix`
49 49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 50 ; to make your cookies only work on prefix url
51 51 [filter:proxy-prefix]
52 52 use = egg:PasteDeploy#prefix
53 53 prefix = /
54 54
55 55 [app:main]
56 56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 57 ; of this file
58 58 ; Each option in the app:main can be override by an environmental variable
59 59 ;
60 60 ;To override an option:
61 61 ;
62 62 ;RC_<KeyName>
63 63 ;Everything should be uppercase, . and - should be replaced by _.
64 64 ;For example, if you have these configuration settings:
65 65 ;rc_cache.repo_object.backend = foo
66 66 ;can be overridden by
67 67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68 68
69 69 use = egg:rhodecode-enterprise-ce
70 70
71 71 ; enable proxy prefix middleware, defined above
72 72 #filter-with = proxy-prefix
73 73
74 74 ; encryption key used to encrypt social plugin tokens,
75 75 ; remote_urls with credentials etc, if not set it defaults to
76 76 ; `beaker.session.secret`
77 77 #rhodecode.encrypted_values.secret =
78 78
79 79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 81 #rhodecode.encrypted_values.strict = false
82 82
83 83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 84 ; fernet is safer, and we strongly recommend switching to it.
85 85 ; Due to backward compatibility aes is used as default.
86 86 #rhodecode.encrypted_values.algorithm = fernet
87 87
88 88 ; Return gzipped responses from RhodeCode (static files/application)
89 89 gzip_responses = false
90 90
91 91 ; Auto-generate javascript routes file on startup
92 92 generate_js_files = false
93 93
94 94 ; System global default language.
95 95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 96 lang = en
97 97
98 98 ; Perform a full repository scan and import on each server start.
99 99 ; Setting this to true could lead to very long startup time.
100 100 startup.import_repos = false
101 101
102 102 ; URL at which the application is running. This is used for Bootstrapping
103 103 ; requests in context when no web request is available. Used in ishell, or
104 104 ; SSH calls. Set this for events to receive proper url for SSH calls.
105 105 app.base_url = http://rhodecode.local
106 106
107 107 ; Host at which the Service API is running.
108 108 app.service_api.host = http://rhodecode.local:10020
109 109
110 110 ; Secret for Service API authentication.
111 111 app.service_api.token =
112 112
113 113 ; Unique application ID. Should be a random unique string for security.
114 114 app_instance_uuid = rc-production
115 115
116 116 ; Cut off limit for large diffs (size in bytes). If overall diff size on
117 117 ; commit, or pull request exceeds this limit this diff will be displayed
118 118 ; partially. E.g 512000 == 512Kb
119 119 cut_off_limit_diff = 512000
120 120
121 121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
122 122 ; file inside diff which exceeds this limit will be displayed partially.
123 123 ; E.g 128000 == 128Kb
124 124 cut_off_limit_file = 128000
125 125
126 126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
127 127 vcs_full_cache = true
128 128
129 129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
130 130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
131 131 force_https = false
132 132
133 133 ; use Strict-Transport-Security headers
134 134 use_htsts = false
135 135
136 136 ; Set to true if your repos are exposed using the dumb protocol
137 137 git_update_server_info = false
138 138
139 139 ; RSS/ATOM feed options
140 140 rss_cut_off_limit = 256000
141 141 rss_items_per_page = 10
142 142 rss_include_diff = false
143 143
144 144 ; gist URL alias, used to create nicer urls for gist. This should be an
145 145 ; url that does rewrites to _admin/gists/{gistid}.
146 146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
147 147 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
148 148 gist_alias_url =
149 149
150 150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
151 151 ; used for access.
152 152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
153 153 ; came from the logged in user who owns this authentication token.
154 154 ; Additionally @TOKEN syntax can be used to bound the view to specific
155 155 ; authentication token. Such view would be only accessible when used together
156 156 ; with this authentication token
157 157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
158 158 ; The list should be "," separated and on a single line.
159 159 ; Most common views to enable:
160 160
161 161 # RepoCommitsView:repo_commit_download
162 162 # RepoCommitsView:repo_commit_patch
163 163 # RepoCommitsView:repo_commit_raw
164 164 # RepoCommitsView:repo_commit_raw@TOKEN
165 165 # RepoFilesView:repo_files_diff
166 166 # RepoFilesView:repo_archivefile
167 167 # RepoFilesView:repo_file_raw
168 168 # GistView:*
169 169 api_access_controllers_whitelist =
170 170
171 171 ; Default encoding used to convert from and to unicode
172 172 ; can be also a comma separated list of encoding in case of mixed encodings
173 173 default_encoding = UTF-8
174 174
175 175 ; instance-id prefix
176 176 ; a prefix key for this instance used for cache invalidation when running
177 177 ; multiple instances of RhodeCode, make sure it's globally unique for
178 178 ; all running RhodeCode instances. Leave empty if you don't use it
179 179 instance_id =
180 180
181 181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
182 182 ; of an authentication plugin also if it is disabled by its settings.
183 183 ; This could be useful if you are unable to log in to the system due to broken
184 184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
185 185 ; module to log in again and fix the settings.
186 186 ; Available builtin plugin IDs (hash is part of the ID):
187 187 ; egg:rhodecode-enterprise-ce#rhodecode
188 188 ; egg:rhodecode-enterprise-ce#pam
189 189 ; egg:rhodecode-enterprise-ce#ldap
190 190 ; egg:rhodecode-enterprise-ce#jasig_cas
191 191 ; egg:rhodecode-enterprise-ce#headers
192 192 ; egg:rhodecode-enterprise-ce#crowd
193 193
194 194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
195 195
196 196 ; Flag to control loading of legacy plugins in py:/path format
197 197 auth_plugin.import_legacy_plugins = true
198 198
199 199 ; alternative return HTTP header for failed authentication. Default HTTP
200 200 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
201 201 ; handling that causing a series of failed authentication calls.
202 202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
203 203 ; This will be served instead of default 401 on bad authentication
204 204 auth_ret_code =
205 205
206 206 ; use special detection method when serving auth_ret_code, instead of serving
207 207 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
208 208 ; and then serve auth_ret_code to clients
209 209 auth_ret_code_detection = false
210 210
211 211 ; locking return code. When repository is locked return this HTTP code. 2XX
212 212 ; codes don't break the transactions while 4XX codes do
213 213 lock_ret_code = 423
214 214
215 215 ; Filesystem location where repositories should be stored
216 216 repo_store.path = /var/opt/rhodecode_repo_store
217 217
218 218 ; allows to setup custom hooks in settings page
219 219 allow_custom_hooks_settings = true
220 220
221 221 ; Generated license token required for EE edition license.
222 222 ; New generated token value can be found in Admin > settings > license page.
223 223 license_token =
224 224
225 225 ; This flag hides sensitive information on the license page such as token, and license data
226 226 license.hide_license_info = false
227 227
228 228 ; supervisor connection uri, for managing supervisor and logs.
229 229 supervisor.uri =
230 230
231 231 ; supervisord group name/id we only want this RC instance to handle
232 232 supervisor.group_id = prod
233 233
234 234 ; Display extended labs settings
235 235 labs_settings_active = true
236 236
237 237 ; Custom exception store path, defaults to TMPDIR
238 238 ; This is used to store exception from RhodeCode in shared directory
239 239 #exception_tracker.store_path =
240 240
241 241 ; Send email with exception details when it happens
242 242 #exception_tracker.send_email = false
243 243
244 244 ; Comma separated list of recipients for exception emails,
245 245 ; e.g admin@rhodecode.com,devops@rhodecode.com
246 246 ; Can be left empty, then emails will be sent to ALL super-admins
247 247 #exception_tracker.send_email_recipients =
248 248
249 249 ; optional prefix to Add to email Subject
250 250 #exception_tracker.email_prefix = [RHODECODE ERROR]
251 251
252 252 ; File store configuration. This is used to store and serve uploaded files
253 253 file_store.enabled = true
254 254
255 255 ; Storage backend, available options are: local
256 256 file_store.backend = local
257 257
258 258 ; path to store the uploaded binaries and artifacts
259 259 file_store.storage_path = /var/opt/rhodecode_data/file_store
260 260
261 261
262 262 ; Redis url to acquire/check generation of archives locks
263 263 archive_cache.locking.url = redis://redis:6379/1
264 264
265 265 ; Storage backend, only 'filesystem' and 'objectstore' are available now
266 266 archive_cache.backend.type = filesystem
267 267
268 268 ; url for s3 compatible storage that allows to upload artifacts
269 269 ; e.g http://minio:9000
270 270 archive_cache.objectstore.url = http://s3-minio:9000
271 271
272 272 ; key for s3 auth
273 273 archive_cache.objectstore.key = key
274 274
275 275 ; secret for s3 auth
276 276 archive_cache.objectstore.secret = secret
277 277
278 278 ;region for s3 storage
279 279 archive_cache.objectstore.region = eu-central-1
280 280
281 281 ; number of sharded buckets to create to distribute archives across
282 282 ; default is 8 shards
283 283 archive_cache.objectstore.bucket_shards = 8
284 284
285 285 ; a top-level bucket to put all other shards in
286 286 ; objects will be stored in rhodecode-archive-cache/shard-N based on the bucket_shards number
287 287 archive_cache.objectstore.bucket = rhodecode-archive-cache
288 288
289 289 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
290 290 archive_cache.objectstore.retry = false
291 291
292 292 ; number of seconds to wait for next try using retry
293 293 archive_cache.objectstore.retry_backoff = 1
294 294
295 295 ; how many tries to do a retry fetch from this backend
296 296 archive_cache.objectstore.retry_attempts = 10
297 297
298 298 ; Default is $cache_dir/archive_cache if not set
299 299 ; Generated repo archives will be cached at this location
300 300 ; and served from the cache during subsequent requests for the same archive of
301 301 ; the repository. This path is important to be shared across filesystems and with
302 302 ; RhodeCode and vcsserver
303 303 archive_cache.filesystem.store_dir = /var/opt/rhodecode_data/archive_cache
304 304
305 305 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
306 306 archive_cache.filesystem.cache_size_gb = 40
307 307
308 308 ; Eviction policy used to clear out after cache_size_gb limit is reached
309 309 archive_cache.filesystem.eviction_policy = least-recently-stored
310 310
311 311 ; By default cache uses sharding technique, this specifies how many shards are there
312 312 ; default is 8 shards
313 313 archive_cache.filesystem.cache_shards = 8
314 314
315 315 ; if true, this cache will try to retry with retry_attempts=N times waiting retry_backoff time
316 316 archive_cache.filesystem.retry = false
317 317
318 318 ; number of seconds to wait for next try using retry
319 319 archive_cache.filesystem.retry_backoff = 1
320 320
321 321 ; how many tries do do a retry fetch from this backend
322 322 archive_cache.filesystem.retry_attempts = 10
323 323
324 324
325 325 ; #############
326 326 ; CELERY CONFIG
327 327 ; #############
328 328
329 329 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
330 330
331 331 use_celery = true
332 332
333 333 ; path to store schedule database
334 334 #celerybeat-schedule.path =
335 335
336 336 ; connection url to the message broker (default redis)
337 337 celery.broker_url = redis://redis:6379/8
338 338
339 339 ; results backend to get results for (default redis)
340 340 celery.result_backend = redis://redis:6379/8
341 341
342 342 ; rabbitmq example
343 343 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
344 344
345 345 ; maximum tasks to execute before worker restart
346 346 celery.max_tasks_per_child = 20
347 347
348 348 ; tasks will never be sent to the queue, but executed locally instead.
349 349 celery.task_always_eager = false
350 350
351 351 ; #############
352 352 ; DOGPILE CACHE
353 353 ; #############
354 354
355 355 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
356 356 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
357 357 cache_dir = /var/opt/rhodecode_data
358 358
359 359 ; *********************************************
360 360 ; `sql_cache_short` cache for heavy SQL queries
361 361 ; Only supported backend is `memory_lru`
362 362 ; *********************************************
363 363 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
364 364 rc_cache.sql_cache_short.expiration_time = 30
365 365
366 366
367 367 ; *****************************************************
368 368 ; `cache_repo_longterm` cache for repo object instances
369 369 ; Only supported backend is `memory_lru`
370 370 ; *****************************************************
371 371 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
372 372 ; by default we use 30 Days, cache is still invalidated on push
373 373 rc_cache.cache_repo_longterm.expiration_time = 2592000
374 374 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
375 375 rc_cache.cache_repo_longterm.max_size = 10000
376 376
377 377
378 378 ; *********************************************
379 379 ; `cache_general` cache for general purpose use
380 380 ; for simplicity use rc.file_namespace backend,
381 381 ; for performance and scale use rc.redis
382 382 ; *********************************************
383 383 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
384 384 rc_cache.cache_general.expiration_time = 43200
385 385 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
386 386 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
387 387
388 388 ; alternative `cache_general` redis backend with distributed lock
389 389 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
390 390 #rc_cache.cache_general.expiration_time = 300
391 391
392 392 ; redis_expiration_time needs to be greater than expiration_time
393 393 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
394 394
395 395 #rc_cache.cache_general.arguments.host = localhost
396 396 #rc_cache.cache_general.arguments.port = 6379
397 397 #rc_cache.cache_general.arguments.db = 0
398 398 #rc_cache.cache_general.arguments.socket_timeout = 30
399 399 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
400 400 #rc_cache.cache_general.arguments.distributed_lock = true
401 401
402 402 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
403 403 #rc_cache.cache_general.arguments.lock_auto_renewal = true
404 404
405 405 ; *************************************************
406 406 ; `cache_perms` cache for permission tree, auth TTL
407 407 ; for simplicity use rc.file_namespace backend,
408 408 ; for performance and scale use rc.redis
409 409 ; *************************************************
410 410 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
411 411 rc_cache.cache_perms.expiration_time = 3600
412 412 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
413 413 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
414 414
415 415 ; alternative `cache_perms` redis backend with distributed lock
416 416 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
417 417 #rc_cache.cache_perms.expiration_time = 300
418 418
419 419 ; redis_expiration_time needs to be greater than expiration_time
420 420 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
421 421
422 422 #rc_cache.cache_perms.arguments.host = localhost
423 423 #rc_cache.cache_perms.arguments.port = 6379
424 424 #rc_cache.cache_perms.arguments.db = 0
425 425 #rc_cache.cache_perms.arguments.socket_timeout = 30
426 426 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
427 427 #rc_cache.cache_perms.arguments.distributed_lock = true
428 428
429 429 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
430 430 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
431 431
432 432 ; ***************************************************
433 433 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
434 434 ; for simplicity use rc.file_namespace backend,
435 435 ; for performance and scale use rc.redis
436 436 ; ***************************************************
437 437 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
438 438 rc_cache.cache_repo.expiration_time = 2592000
439 439 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
440 440 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
441 441
442 442 ; alternative `cache_repo` redis backend with distributed lock
443 443 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
444 444 #rc_cache.cache_repo.expiration_time = 2592000
445 445
446 446 ; redis_expiration_time needs to be greater than expiration_time
447 447 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
448 448
449 449 #rc_cache.cache_repo.arguments.host = localhost
450 450 #rc_cache.cache_repo.arguments.port = 6379
451 451 #rc_cache.cache_repo.arguments.db = 1
452 452 #rc_cache.cache_repo.arguments.socket_timeout = 30
453 453 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
454 454 #rc_cache.cache_repo.arguments.distributed_lock = true
455 455
456 456 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
457 457 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
458 458
459 459 ; ##############
460 460 ; BEAKER SESSION
461 461 ; ##############
462 462
463 463 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
464 464 ; types are file, ext:redis, ext:database, ext:memcached
465 465 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
466 466 #beaker.session.type = file
467 467 #beaker.session.data_dir = %(here)s/data/sessions
468 468
469 469 ; Redis based sessions
470 470 beaker.session.type = ext:redis
471 471 beaker.session.url = redis://redis:6379/2
472 472
473 473 ; DB based session, fast, and allows easy management over logged in users
474 474 #beaker.session.type = ext:database
475 475 #beaker.session.table_name = db_session
476 476 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
477 477 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
478 478 #beaker.session.sa.pool_recycle = 3600
479 479 #beaker.session.sa.echo = false
480 480
481 481 beaker.session.key = rhodecode
482 482 beaker.session.secret = production-rc-uytcxaz
483 483 beaker.session.lock_dir = /data_ramdisk/lock
484 484
485 485 ; Secure encrypted cookie. Requires AES and AES python libraries
486 486 ; you must disable beaker.session.secret to use this
487 487 #beaker.session.encrypt_key = key_for_encryption
488 488 #beaker.session.validate_key = validation_key
489 489
490 490 ; Sets session as invalid (also logging out user) if it has not been
491 491 ; accessed for given amount of time in seconds
492 492 beaker.session.timeout = 2592000
493 493 beaker.session.httponly = true
494 494
495 495 ; Path to use for the cookie. Set to prefix if you use prefix middleware
496 496 #beaker.session.cookie_path = /custom_prefix
497 497
498 498 ; Set https secure cookie
499 499 beaker.session.secure = false
500 500
501 501 ; default cookie expiration time in seconds, set to `true` to set expire
502 502 ; at browser close
503 503 #beaker.session.cookie_expires = 3600
504 504
505 505 ; #############################
506 506 ; SEARCH INDEXING CONFIGURATION
507 507 ; #############################
508 508
509 509 ; Full text search indexer is available in rhodecode-tools under
510 510 ; `rhodecode-tools index` command
511 511
512 512 ; WHOOSH Backend, doesn't require additional services to run
513 513 ; it works good with few dozen repos
514 514 search.module = rhodecode.lib.index.whoosh
515 515 search.location = %(here)s/data/index
516 516
517 517 ; ####################
518 518 ; CHANNELSTREAM CONFIG
519 519 ; ####################
520 520
521 521 ; channelstream enables persistent connections and live notification
522 522 ; in the system. It's also used by the chat system
523 523
524 524 channelstream.enabled = true
525 525
526 526 ; server address for channelstream server on the backend
527 527 channelstream.server = channelstream:9800
528 528
529 529 ; location of the channelstream server from outside world
530 530 ; use ws:// for http or wss:// for https. This address needs to be handled
531 531 ; by external HTTP server such as Nginx or Apache
532 532 ; see Nginx/Apache configuration examples in our docs
533 533 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
534 534 channelstream.secret = ENV_GENERATED
535 535 channelstream.history.location = /var/opt/rhodecode_data/channelstream_history
536 536
537 537 ; Internal application path that Javascript uses to connect into.
538 538 ; If you use proxy-prefix the prefix should be added before /_channelstream
539 539 channelstream.proxy_path = /_channelstream
540 540
541 541
542 542 ; ##############################
543 543 ; MAIN RHODECODE DATABASE CONFIG
544 544 ; ##############################
545 545
546 546 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
547 547 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
548 548 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
549 549 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
550 550 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
551 551
552 552 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
553 553
554 554 ; see sqlalchemy docs for other advanced settings
555 555 ; print the sql statements to output
556 556 sqlalchemy.db1.echo = false
557 557
558 558 ; recycle the connections after this amount of seconds
559 559 sqlalchemy.db1.pool_recycle = 3600
560 560
561 561 ; the number of connections to keep open inside the connection pool.
562 562 ; 0 indicates no limit
563 563 ; the general calculus with gevent is:
564 564 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
565 565 ; then increase pool size + max overflow so that they add up to 500.
566 566 #sqlalchemy.db1.pool_size = 5
567 567
568 568 ; The number of connections to allow in connection pool "overflow", that is
569 569 ; connections that can be opened above and beyond the pool_size setting,
570 570 ; which defaults to five.
571 571 #sqlalchemy.db1.max_overflow = 10
572 572
573 573 ; Connection check ping, used to detect broken database connections
574 574 ; could be enabled to better handle cases if MySQL has gone away errors
575 575 #sqlalchemy.db1.ping_connection = true
576 576
577 577 ; ##########
578 578 ; VCS CONFIG
579 579 ; ##########
580 580 vcs.server.enable = true
581 581 vcs.server = vcsserver:10010
582 582
583 583 ; Web server connectivity protocol, responsible for web based VCS operations
584 584 ; Available protocols are:
585 585 ; `http` - use http-rpc backend (default)
586 586 vcs.server.protocol = http
587 587
588 588 ; Push/Pull operations protocol, available options are:
589 589 ; `http` - use http-rpc backend (default)
590 590 vcs.scm_app_implementation = http
591 591
592 592 ; Push/Pull operations hooks protocol (vcs.hooks.protocol.v2), available options are:
593 593 ; `http` - use http-rpc backend (default)
594 594 ; `celery` - use celery based hooks
595 vcs.hooks.protocol = http
595 #DEPRECATED:vcs.hooks.protocol = http
596 vcs.hooks.protocol.v2 = celery
596 597
597 598 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
598 599 ; accessible via network.
599 600 ; Use vcs.hooks.host = * to bind to current hostname (for Docker)
600 601 vcs.hooks.host = *
601 602
602 603 ; Start VCSServer with this instance as a subprocess, useful for development
603 604 vcs.start_server = false
604 605
605 606 ; List of enabled VCS backends, available options are:
606 607 ; `hg` - mercurial
607 608 ; `git` - git
608 609 ; `svn` - subversion
609 610 vcs.backends = hg, git, svn
610 611
611 612 ; Wait this number of seconds before killing connection to the vcsserver
612 613 vcs.connection_timeout = 3600
613 614
614 615 ; Cache flag to cache vcsserver remote calls locally
615 616 ; It uses cache_region `cache_repo`
616 617 vcs.methods.cache = true
617 618
618 619 ; ####################################################
619 620 ; Subversion proxy support (mod_dav_svn)
620 621 ; Maps RhodeCode repo groups into SVN paths for Apache
621 622 ; ####################################################
622 623
623 624 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
624 625 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
625 626 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
626 627 #vcs.svn.compatible_version = 1.8
627 628
628 629 ; Redis connection settings for svn integrations logic
629 630 ; This connection string needs to be the same on ce and vcsserver
630 631 vcs.svn.redis_conn = redis://redis:6379/0
631 632
632 633 ; Enable SVN proxy of requests over HTTP
633 634 vcs.svn.proxy.enabled = true
634 635
635 636 ; host to connect to running SVN subsystem
636 637 vcs.svn.proxy.host = http://svn:8090
637 638
638 639 ; Enable or disable the config file generation.
639 640 svn.proxy.generate_config = true
640 641
641 642 ; Generate config file with `SVNListParentPath` set to `On`.
642 643 svn.proxy.list_parent_path = true
643 644
644 645 ; Set location and file name of generated config file.
645 646 svn.proxy.config_file_path = /etc/rhodecode/conf/svn/mod_dav_svn.conf
646 647
647 648 ; alternative mod_dav config template. This needs to be a valid mako template
648 649 ; Example template can be found in the source code:
649 650 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
650 651 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
651 652
652 653 ; Used as a prefix to the `Location` block in the generated config file.
653 654 ; In most cases it should be set to `/`.
654 655 svn.proxy.location_root = /
655 656
656 657 ; Command to reload the mod dav svn configuration on change.
657 658 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
658 659 ; Make sure user who runs RhodeCode process is allowed to reload Apache
659 660 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
660 661
661 662 ; If the timeout expires before the reload command finishes, the command will
662 663 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
663 664 #svn.proxy.reload_timeout = 10
664 665
665 666 ; ####################
666 667 ; SSH Support Settings
667 668 ; ####################
668 669
669 670 ; Defines if a custom authorized_keys file should be created and written on
670 671 ; any change user ssh keys. Setting this to false also disables possibility
671 672 ; of adding SSH keys by users from web interface. Super admins can still
672 673 ; manage SSH Keys.
673 674 ssh.generate_authorized_keyfile = true
674 675
675 676 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
676 677 #ssh.authorized_keys_ssh_opts =
677 678
678 679 ; Path to the authorized_keys file where the generate entries are placed.
679 680 ; It is possible to have multiple key files specified in `sshd_config` e.g.
680 681 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
681 682 ssh.authorized_keys_file_path = /etc/rhodecode/conf/ssh/authorized_keys_rhodecode
682 683
683 684 ; Command to execute the SSH wrapper. The binary is available in the
684 685 ; RhodeCode installation directory.
685 686 ; legacy: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
686 687 ; new rewrite: /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
687 688 #DEPRECATED: ssh.wrapper_cmd = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper
688 689 ssh.wrapper_cmd.v2 = /usr/local/bin/rhodecode_bin/bin/rc-ssh-wrapper-v2
689 690
690 691 ; Allow shell when executing the ssh-wrapper command
691 692 ssh.wrapper_cmd_allow_shell = false
692 693
693 694 ; Enables logging, and detailed output send back to the client during SSH
694 695 ; operations. Useful for debugging, shouldn't be used in production.
695 696 ssh.enable_debug_logging = false
696 697
697 698 ; Paths to binary executable, by default they are the names, but we can
698 699 ; override them if we want to use a custom one
699 700 ssh.executable.hg = /usr/local/bin/rhodecode_bin/vcs_bin/hg
700 701 ssh.executable.git = /usr/local/bin/rhodecode_bin/vcs_bin/git
701 702 ssh.executable.svn = /usr/local/bin/rhodecode_bin/vcs_bin/svnserve
702 703
703 704 ; Enables SSH key generator web interface. Disabling this still allows users
704 705 ; to add their own keys.
705 706 ssh.enable_ui_key_generator = true
706 707
707 708 ; Statsd client config, this is used to send metrics to statsd
708 709 ; We recommend setting statsd_exported and scrape them using Prometheus
709 710 #statsd.enabled = false
710 711 #statsd.statsd_host = 0.0.0.0
711 712 #statsd.statsd_port = 8125
712 713 #statsd.statsd_prefix =
713 714 #statsd.statsd_ipv6 = false
714 715
715 716 ; configure logging automatically at server startup set to false
716 717 ; to use the below custom logging config.
717 718 ; RC_LOGGING_FORMATTER
718 719 ; RC_LOGGING_LEVEL
719 720 ; env variables can control the settings for logging in case of autoconfigure
720 721
721 722 #logging.autoconfigure = true
722 723
723 724 ; specify your own custom logging config file to configure logging
724 725 #logging.logging_conf_file = /path/to/custom_logging.ini
725 726
726 727 ; Dummy marker to add new entries after.
727 728 ; Add any custom entries below. Please don't remove this marker.
728 729 custom.conf = 1
729 730
730 731
731 732 ; #####################
732 733 ; LOGGING CONFIGURATION
733 734 ; #####################
734 735
735 736 [loggers]
736 737 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
737 738
738 739 [handlers]
739 740 keys = console, console_sql
740 741
741 742 [formatters]
742 743 keys = generic, json, color_formatter, color_formatter_sql
743 744
744 745 ; #######
745 746 ; LOGGERS
746 747 ; #######
747 748 [logger_root]
748 749 level = NOTSET
749 750 handlers = console
750 751
751 752 [logger_sqlalchemy]
752 753 level = INFO
753 754 handlers = console_sql
754 755 qualname = sqlalchemy.engine
755 756 propagate = 0
756 757
757 758 [logger_beaker]
758 759 level = DEBUG
759 760 handlers =
760 761 qualname = beaker.container
761 762 propagate = 1
762 763
763 764 [logger_rhodecode]
764 765 level = DEBUG
765 766 handlers =
766 767 qualname = rhodecode
767 768 propagate = 1
768 769
769 770 [logger_ssh_wrapper]
770 771 level = DEBUG
771 772 handlers =
772 773 qualname = ssh_wrapper
773 774 propagate = 1
774 775
775 776 [logger_celery]
776 777 level = DEBUG
777 778 handlers =
778 779 qualname = celery
779 780
780 781
781 782 ; ########
782 783 ; HANDLERS
783 784 ; ########
784 785
785 786 [handler_console]
786 787 class = StreamHandler
787 788 args = (sys.stderr, )
788 789 level = INFO
789 790 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
790 791 ; This allows sending properly formatted logs to grafana loki or elasticsearch
791 792 formatter = generic
792 793
793 794 [handler_console_sql]
794 795 ; "level = DEBUG" logs SQL queries and results.
795 796 ; "level = INFO" logs SQL queries.
796 797 ; "level = WARN" logs neither. (Recommended for production systems.)
797 798 class = StreamHandler
798 799 args = (sys.stderr, )
799 800 level = WARN
800 801 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
801 802 ; This allows sending properly formatted logs to grafana loki or elasticsearch
802 803 formatter = generic
803 804
804 805 ; ##########
805 806 ; FORMATTERS
806 807 ; ##########
807 808
808 809 [formatter_generic]
809 810 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
810 811 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
811 812 datefmt = %Y-%m-%d %H:%M:%S
812 813
813 814 [formatter_color_formatter]
814 815 class = rhodecode.lib.logging_formatter.ColorFormatter
815 816 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
816 817 datefmt = %Y-%m-%d %H:%M:%S
817 818
818 819 [formatter_color_formatter_sql]
819 820 class = rhodecode.lib.logging_formatter.ColorFormatterSql
820 821 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
821 822 datefmt = %Y-%m-%d %H:%M:%S
822 823
823 824 [formatter_json]
824 825 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
825 826 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,175 +1,175 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import sys
21 21 import logging
22 22
23 23 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
24 24 from rhodecode.lib.ext_json import sjson as json
25 25 from rhodecode.lib.vcs.conf import settings as vcs_settings
26 26 from rhodecode.lib.api_utils import call_service_api
27 27
28 28 log = logging.getLogger(__name__)
29 29
30 30
class SshVcsServer(object):
    """
    Base class for SSH-tunneled VCS servers (hg/git/svn subclasses).

    Validates the user's repository permissions, prepares the hook callback
    daemon and the hooks environment, then delegates the actual wire protocol
    to ``self.tunnel`` (provided by child classes).
    """
    repo_user_agent = None  # set in child classes
    _path = None  # set executable path for hg/git/svn binary
    backend = None  # set in child classes
    tunnel = None  # subprocess handling tunnel
    settings = None  # parsed settings module
    # permission levels granting push (write) access
    write_perms = ['repository.admin', 'repository.write']
    # permission levels granting pull (read) access
    read_perms = ['repository.read', 'repository.admin', 'repository.write']

    def __init__(self, user, user_permissions, settings, env):
        self.user = user
        self.user_permissions = user_permissions
        self.settings = settings
        self.env = env
        self.stdin = sys.stdin

        # populated by subclasses / callers before run() is invoked
        self.repo_name = None
        self.repo_mode = None
        self.store = ''
        self.ini_path = ''
        self.hooks_protocol = None

    def _invalidate_cache(self, repo_name):
        """
        Sets cache for this repository for invalidation on next access.

        :param repo_name: full repo name, also a cache key
        """
        # Todo: Leave only "celery" case after transition.
        match self.hooks_protocol:
            case 'http':
                # local import keeps the ssh-wrapper startup path light
                from rhodecode.model.scm import ScmModel
                ScmModel().mark_for_invalidation(repo_name)
            case 'celery':
                call_service_api(self.settings, {
                    "method": "service_mark_for_invalidation",
                    "args": {"repo_name": repo_name}
                })

    def has_write_perm(self):
        """Return True if the current user may write to ``self.repo_name``."""
        permission = self.user_permissions.get(self.repo_name)
        if permission in ['repository.write', 'repository.admin']:
            return True

        return False

    def _check_permissions(self, action):
        """
        Check the user's permission for ``action`` ('pull', anything else is
        treated as push). Returns 0 when allowed, -2 when denied.
        """
        permission = self.user_permissions.get(self.repo_name)
        # NOTE(review): dict-style access here vs attribute access in
        # update_environment — the user object appears to support both; confirm.
        user_info = f'{self.user["user_id"]}:{self.user["username"]}'
        log.debug('permission for %s on %s are: %s',
                  user_info, self.repo_name, permission)

        if not permission:
            log.error('user `%s` permissions to repo:%s are empty. Forbidding access.',
                      user_info, self.repo_name)
            return -2

        if action == 'pull':
            if permission in self.read_perms:
                log.info(
                    'READ Permissions for User "%s" detected to repo "%s"!',
                    user_info, self.repo_name)
                return 0
        else:
            if permission in self.write_perms:
                log.info(
                    'WRITE, or Higher Permissions for User "%s" detected to repo "%s"!',
                    user_info, self.repo_name)
                return 0

        log.error('Cannot properly fetch or verify user `%s` permissions. '
                  'Permissions: %s, vcs action: %s',
                  user_info, permission, action)
        return -2

    def update_environment(self, action, extras=None):
        """
        Build the hook payload, export it as the RC_SCM_DATA env variable
        (JSON) for the VCS hook subprocesses, and return it as a dict.
        """
        scm_data = {
            # first token of SSH_CLIENT is the client's IP as seen by sshd
            'ip': os.environ['SSH_CLIENT'].split()[0],
            'username': self.user.username,
            'user_id': self.user.user_id,
            'action': action,
            'repository': self.repo_name,
            'scm': self.backend,
            'config': self.ini_path,
            'repo_store': self.store,
            'make_lock': None,
            'locked_by': [None, None],
            'server_url': None,
            'user_agent': f'{self.repo_user_agent}/ssh-user-agent',
            'hooks': ['push', 'pull'],
            'hooks_module': 'rhodecode.lib.hook_daemon.hook_module',
            'is_shadow_repo': False,
            'detect_force_push': False,
            'check_branch_perms': False,

            'SSH': True,
            'SSH_PERMISSIONS': self.user_permissions.get(self.repo_name),
        }
        if extras:
            scm_data.update(extras)
        os.putenv("RC_SCM_DATA", json.dumps(scm_data))
        return scm_data

    def get_root_store(self):
        """Return the repository store path with a guaranteed trailing slash."""
        root_store = self.store
        if not root_store.endswith('/'):
            # always append trailing slash
            root_store = root_store + '/'
        return root_store

    def _handle_tunnel(self, extras):
        """
        Pre-check permissions, then execute the tunnel.
        Returns a tuple of (exit_code, is_push_action).
        """
        # pre-auth
        action = 'pull'
        exit_code = self._check_permissions(action)
        if exit_code:
            return exit_code, False

        req = self.env.get('request')
        if req:
            server_url = req.host_url + req.script_name
            extras['server_url'] = server_url

        log.debug('Using %s binaries from path %s', self.backend, self._path)
        exit_code = self.tunnel.run(extras)

        # NOTE(review): action is always 'pull' at this point, so the push
        # flag is always False here — confirm whether that is intentional.
        return exit_code, action == "push"

    def run(self, tunnel_extras=None):
        """
        Entry point: resolve the hooks protocol, start the callback daemon,
        run the tunnel, and always invalidate the repo cache on the way out.
        """
        self.hooks_protocol = self.settings['vcs.hooks.protocol.v2']
        tunnel_extras = tunnel_extras or {}
        extras = {}
        extras.update(tunnel_extras)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=self.hooks_protocol,
            host=vcs_settings.HOOKS_HOST)

        with callback_daemon:
            try:
                return self._handle_tunnel(extras)
            finally:
                log.debug('Running cleanup with cache invalidation')
                if self.repo_name:
                    self._invalidate_cache(self.repo_name)
@@ -1,151 +1,151 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.apps.ssh_support.lib.backends.git import GitServer
25 25 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
26 26 from rhodecode.lib.ext_json import json
27 27
28 28
class GitServerCreator(object):
    """Factory that builds GitServer instances wired with dummy test data."""
    root = '/tmp/repo/path/'
    git_path = '/usr/local/bin/git'
    config_data = {
        'app:main': {
            'ssh.executable.git': git_path,
            'vcs.hooks.protocol.v2': 'celery',
        }
    }
    repo_name = 'test_git'
    repo_mode = 'receive-pack'
    user = plain_dummy_user()

    def __init__(self):
        pass

    def create(self, **kwargs):
        """Build a GitServer; any keyword overrides replace the defaults."""
        defaults = {
            'store': self.root,
            'ini_path': '',
            'user': self.user,
            'repo_name': self.repo_name,
            'repo_mode': self.repo_mode,
            'user_permissions': {self.repo_name: 'repository.admin'},
            'settings': self.config_data['app:main'],
            'env': plain_dummy_env(),
        }
        defaults.update(kwargs)
        return GitServer(**defaults)
61 61
62 62
@pytest.fixture()
def git_server(app):
    """Provide a fresh GitServerCreator factory for each test."""
    creator = GitServerCreator()
    return creator
66 66
67 67
class TestGitServer(object):
    """Tests for the git flavor of the SSH VCS server."""

    def test_command(self, git_server):
        # tunnel command must cd into the store and run git in the requested
        # mode against the absolute repository path
        server = git_server.create()
        expected_command = (
            'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format(
                root=git_server.root, git_path=git_server.git_path,
                repo_mode=git_server.repo_mode, repo_name=git_server.repo_name)
        )
        assert expected_command == server.tunnel.command()

    @pytest.mark.parametrize('permissions, action, code', [
        ({}, 'pull', -2),
        ({'test_git': 'repository.read'}, 'pull', 0),
        ({'test_git': 'repository.read'}, 'push', -2),
        ({'test_git': 'repository.write'}, 'push', 0),
        ({'test_git': 'repository.admin'}, 'push', 0),

    ])
    def test_permission_checks(self, git_server, permissions, action, code):
        # 0 means allowed, -2 means denied (see SshVcsServer._check_permissions)
        server = git_server.create(user_permissions=permissions)
        result = server._check_permissions(action)
        assert result is code

    @pytest.mark.parametrize('permissions, value', [
        ({}, False),
        ({'test_git': 'repository.read'}, False),
        ({'test_git': 'repository.write'}, True),
        ({'test_git': 'repository.admin'}, True),

    ])
    def test_has_write_permissions(self, git_server, permissions, value):
        server = git_server.create(user_permissions=permissions)
        result = server.has_write_perm()
        assert result is value

    def test_run_returns_executes_command(self, git_server):
        # run() returns (exit_code, is_push); the tunnel command is mocked to
        # a harmless `date` so no real git process is spawned
        server = git_server.create()
        from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper

        os.environ['SSH_CLIENT'] = '127.0.0.1'
        with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
            _patch.return_value = 0
            with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
                exit_code = server.run()

        assert exit_code == (0, False)

    @pytest.mark.parametrize(
        'repo_mode, action', [
            ['receive-pack', 'push'],
            ['upload-pack', 'pull']
        ])
    def test_update_environment(self, git_server, repo_mode, action):
        # RC_SCM_DATA must carry the complete JSON hook payload for the action
        server = git_server.create(repo_mode=repo_mode)
        store = server.store

        with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}):
            with mock.patch('os.putenv') as putenv_mock:
                server.update_environment(action)

        expected_data = {
            'username': git_server.user.username,
            'user_id': git_server.user.user_id,
            'scm': 'git',
            'repository': git_server.repo_name,
            'make_lock': None,
            'action': action,
            # first token of SSH_CLIENT
            'ip': '10.10.10.10',
            'locked_by': [None, None],
            'config': '',
            'repo_store': store,
            'server_url': None,
            'hooks': ['push', 'pull'],
            'is_shadow_repo': False,
            'hooks_module': 'rhodecode.lib.hook_daemon.hook_module',
            'check_branch_perms': False,
            'detect_force_push': False,
            'user_agent': u'git/ssh-user-agent',
            'SSH': True,
            'SSH_PERMISSIONS': 'repository.admin',
        }
        # second positional arg of os.putenv is the serialized payload
        args, kwargs = putenv_mock.call_args
        assert json.loads(args[1]) == expected_data
@@ -1,115 +1,115 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import mock
21 21 import pytest
22 22
23 23 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer
24 24 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
25 25
26 26
class MercurialServerCreator(object):
    """Factory that builds MercurialServer instances wired with dummy test data."""
    root = '/tmp/repo/path/'
    hg_path = '/usr/local/bin/hg'

    config_data = {
        'app:main': {
            'ssh.executable.hg': hg_path,
            'vcs.hooks.protocol.v2': 'celery',
        }
    }
    repo_name = 'test_hg'
    user = plain_dummy_user()

    def __init__(self):
        pass

    def create(self, **kwargs):
        """Build a MercurialServer; any keyword overrides replace the defaults."""
        defaults = {
            'store': self.root,
            'ini_path': '',
            'user': self.user,
            'repo_name': self.repo_name,
            'user_permissions': {'test_hg': 'repository.admin'},
            'settings': self.config_data['app:main'],
            'env': plain_dummy_env(),
        }
        defaults.update(kwargs)
        return MercurialServer(**defaults)
58 58
59 59
@pytest.fixture()
def hg_server(app):
    """Provide a fresh MercurialServerCreator factory for each test."""
    creator = MercurialServerCreator()
    return creator
63 63
64 64
class TestMercurialServer(object):
    """Tests for the mercurial flavor of the SSH VCS server."""

    def test_command(self, hg_server, tmpdir):
        # serve command must export HGRCPATH and run `hg serve --stdio`
        # against the absolute repository path
        server = hg_server.create()
        custom_hgrc = os.path.join(str(tmpdir), 'hgrc')
        expected_command = (
            'cd {root}; HGRCPATH={custom_hgrc} {hg_path} -R {root}{repo_name} serve --stdio'.format(
                root=hg_server.root, custom_hgrc=custom_hgrc, hg_path=hg_server.hg_path,
                repo_name=hg_server.repo_name)
        )
        server_command = server.tunnel.command(custom_hgrc)
        assert expected_command == server_command

    @pytest.mark.parametrize('permissions, action, code', [
        ({}, 'pull', -2),
        ({'test_hg': 'repository.read'}, 'pull', 0),
        ({'test_hg': 'repository.read'}, 'push', -2),
        ({'test_hg': 'repository.write'}, 'push', 0),
        ({'test_hg': 'repository.admin'}, 'push', 0),

    ])
    def test_permission_checks(self, hg_server, permissions, action, code):
        # 0 means allowed, -2 means denied (see SshVcsServer._check_permissions)
        server = hg_server.create(user_permissions=permissions)
        result = server._check_permissions(action)
        assert result is code

    @pytest.mark.parametrize('permissions, value', [
        ({}, False),
        ({'test_hg': 'repository.read'}, False),
        ({'test_hg': 'repository.write'}, True),
        ({'test_hg': 'repository.admin'}, True),

    ])
    def test_has_write_permissions(self, hg_server, permissions, value):
        server = hg_server.create(user_permissions=permissions)
        result = server.has_write_perm()
        assert result is value

    def test_run_returns_executes_command(self, hg_server):
        # run() returns (exit_code, is_push); the tunnel command is mocked to
        # a harmless `date` so no real hg process is spawned
        server = hg_server.create()
        from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper
        os.environ['SSH_CLIENT'] = '127.0.0.1'
        with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
            _patch.return_value = 0
            with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
                exit_code = server.run()

        assert exit_code == (0, False)
113 113
114 114
115 115
@@ -1,203 +1,203 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18 import os
19 19 import mock
20 20 import pytest
21 21
22 22 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionServer
23 23 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
24 24
25 25
class SubversionServerCreator(object):
    """Factory that builds SubversionServer instances wired with dummy test data."""
    root = '/tmp/repo/path/'
    svn_path = '/usr/local/bin/svnserve'
    config_data = {
        'app:main': {
            'ssh.executable.svn': svn_path,
            'vcs.hooks.protocol.v2': 'celery',
        }
    }
    repo_name = 'test-svn'
    user = plain_dummy_user()

    def __init__(self):
        pass

    def create(self, **kwargs):
        """Build a SubversionServer; any keyword overrides replace the defaults."""
        defaults = {
            'store': self.root,
            'repo_name': self.repo_name,
            'ini_path': '',
            'user': self.user,
            'user_permissions': {self.repo_name: 'repository.admin'},
            'settings': self.config_data['app:main'],
            'env': plain_dummy_env(),
        }
        defaults.update(kwargs)
        return SubversionServer(**defaults)
57 57
58 58
@pytest.fixture()
def svn_server(app):
    """Provide a fresh SubversionServerCreator factory for each test."""
    creator = SubversionServerCreator()
    return creator
62 62
63 63
class TestSubversionServer(object):
    """Tests for the subversion flavor of the SSH VCS server."""

    def test_command(self, svn_server):
        # svnserve must run in tunnel mode with the generated config file,
        # the authenticated tunnel user, and the store as virtual root
        server = svn_server.create()
        expected_command = [
            svn_server.svn_path, '-t',
            '--config-file', server.tunnel.svn_conf_path,
            '--tunnel-user', svn_server.user.username,
            '-r', svn_server.root
        ]

        assert expected_command == server.tunnel.command()

    @pytest.mark.parametrize('permissions, action, code', [
        ({}, 'pull', -2),
        ({'test-svn': 'repository.read'}, 'pull', 0),
        ({'test-svn': 'repository.read'}, 'push', -2),
        ({'test-svn': 'repository.write'}, 'push', 0),
        ({'test-svn': 'repository.admin'}, 'push', 0),

    ])
    def test_permission_checks(self, svn_server, permissions, action, code):
        # 0 means allowed, -2 means denied (see SshVcsServer._check_permissions)
        server = svn_server.create(user_permissions=permissions)
        result = server._check_permissions(action)
        assert result is code

    @pytest.mark.parametrize('permissions, access_paths, expected_match', [
        # not matched repository name
        ({
            'test-svn': ''
        }, ['test-svn-1', 'test-svn-1/subpath'],
        None),

        # exact match
        ({
            'test-svn': ''
        },
        ['test-svn'],
        'test-svn'),

        # subdir commits
        ({
            'test-svn': ''
        },
        ['test-svn/foo',
         'test-svn/foo/test-svn',
         'test-svn/trunk/development.txt',
         ],
        'test-svn'),

        # subgroups + similar patterns
        ({
            'test-svn': '',
            'test-svn-1': '',
            'test-svn-subgroup/test-svn': '',

        },
        ['test-svn-1',
         'test-svn-1/foo/test-svn',
         'test-svn-1/test-svn',
         ],
        'test-svn-1'),

        # subgroups + similar patterns
        ({
            'test-svn-1': '',
            'test-svn-10': '',
            'test-svn-100': '',
        },
        ['test-svn-10',
         'test-svn-10/foo/test-svn',
         'test-svn-10/test-svn',
         ],
        'test-svn-10'),

        # subgroups + similar patterns
        ({
            'name': '',
            'nameContains': '',
            'nameContainsThis': '',
        },
        ['nameContains',
         'nameContains/This',
         'nameContains/This/test-svn',
         ],
        'nameContains'),

        # subgroups + similar patterns
        ({
            'test-svn': '',
            'test-svn-1': '',
            'test-svn-subgroup/test-svn': '',

        },
        ['test-svn-subgroup/test-svn',
         'test-svn-subgroup/test-svn/foo/test-svn',
         'test-svn-subgroup/test-svn/trunk/example.txt',
         ],
        'test-svn-subgroup/test-svn'),
    ])
    def test_repo_extraction_on_subdir(self, svn_server, permissions, access_paths, expected_match):
        # the tunnel must resolve the longest known repo name for any access
        # path, including commits inside subdirectories and subgroups
        server = svn_server.create(user_permissions=permissions)
        for path in access_paths:
            repo_name = server.tunnel._match_repo_name(path)
            assert repo_name == expected_match

    def test_run_returns_executes_command(self, svn_server):
        # full happy path with all tunnel interactions mocked out
        server = svn_server.create()
        from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
        os.environ['SSH_CLIENT'] = '127.0.0.1'
        with mock.patch.object(
                SubversionTunnelWrapper, 'get_first_client_response',
                return_value={'url': 'http://server/test-svn'}):
            with mock.patch.object(
                    SubversionTunnelWrapper, 'patch_first_client_response',
                    return_value=0):
                with mock.patch.object(
                        SubversionTunnelWrapper, 'sync',
                        return_value=0):
                    with mock.patch.object(
                            SubversionTunnelWrapper, 'command',
                            return_value=['date']):

                        exit_code = server.run()
                        # SVN has this differently configured, and we get in our mock env
                        # None as return code
                        assert exit_code == (None, False)

    def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server):
        # when the client response yields no repo name, run() must fail with
        # exit code 1 and no push flag
        server = svn_server.create()
        from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
        with mock.patch.object(
                SubversionTunnelWrapper, 'command',
                return_value=['date']):
            with mock.patch.object(
                    SubversionTunnelWrapper, 'get_first_client_response',
                    return_value=None):
                exit_code = server.run()

        assert exit_code == (1, False)
@@ -1,228 +1,228 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import tempfile
21 21 import logging
22 22
23 23 from pyramid.settings import asbool
24 24
25 25 from rhodecode.config.settings_maker import SettingsMaker
26 26 from rhodecode.config import utils as config_utils
27 27
28 28 log = logging.getLogger(__name__)
29 29
30 30
31 31 def sanitize_settings_and_apply_defaults(global_config, settings):
32 32 """
33 33 Applies settings defaults and does all type conversion.
34 34
35 35 We would move all settings parsing and preparation into this place, so that
36 36 we have only one place left which deals with this part. The remaining parts
37 37 of the application would start to rely fully on well-prepared settings.
38 38
39 39 This piece would later be split up per topic to avoid a big fat monster
40 40 function.
41 41 """
42 42 jn = os.path.join
43 43
44 44 global_settings_maker = SettingsMaker(global_config)
45 45 global_settings_maker.make_setting('debug', default=False, parser='bool')
46 46 debug_enabled = asbool(global_config.get('debug'))
47 47
48 48 settings_maker = SettingsMaker(settings)
49 49
50 50 settings_maker.make_setting(
51 51 'logging.autoconfigure',
52 52 default=False,
53 53 parser='bool')
54 54
55 55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
56 56 settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
57 57
58 58 # Default includes, possible to change as a user
59 59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
60 60 log.debug(
61 61 "Using the following pyramid.includes: %s",
62 62 pyramid_includes)
63 63
64 64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
65 65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
66 66
67 67 if 'mako.default_filters' not in settings:
68 68 # set custom default filters if we don't have it defined
69 69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
70 70 settings['mako.default_filters'] = 'h_filter'
71 71
72 72 if 'mako.directories' not in settings:
73 73 mako_directories = settings.setdefault('mako.directories', [
74 74 # Base templates of the original application
75 75 'rhodecode:templates',
76 76 ])
77 77 log.debug(
78 78 "Using the following Mako template directories: %s",
79 79 mako_directories)
80 80
81 81 # NOTE(marcink): fix redis requirement for schema of connection since 3.X
82 82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
83 83 raw_url = settings['beaker.session.url']
84 84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
85 85 settings['beaker.session.url'] = 'redis://' + raw_url
86 86
87 87 settings_maker.make_setting('__file__', global_config.get('__file__'))
88 88
89 89 # TODO: johbo: Re-think this, usually the call to config.include
90 90 # should allow to pass in a prefix.
91 91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
92 92
93 93 # Sanitize generic settings.
94 94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
95 95 settings_maker.make_setting('gzip_responses', False, parser='bool')
96 96 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
97 97
98 98 # statsd
99 99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
100 100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
101 101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
102 102 settings_maker.make_setting('statsd.statsd_prefix', '')
103 103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
104 104
105 105 settings_maker.make_setting('vcs.svn.compatible_version', '')
106 106 settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0')
107 107 settings_maker.make_setting('vcs.svn.proxy.enabled', True, parser='bool')
108 108 settings_maker.make_setting('vcs.svn.proxy.host', 'http://svn:8090', parser='string')
109 settings_maker.make_setting('vcs.hooks.protocol', 'http')
109 settings_maker.make_setting('vcs.hooks.protocol.v2', 'celery')
110 110 settings_maker.make_setting('vcs.hooks.host', '*')
111 111 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
112 112 settings_maker.make_setting('vcs.server', '')
113 113 settings_maker.make_setting('vcs.server.protocol', 'http')
114 114 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
115 115 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
116 116 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
117 117 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
118 118 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
119 119
120 120 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
121 121
122 122 # repo_store path
123 123 settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
124 124 # Support legacy values of vcs.scm_app_implementation. Legacy
125 125 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
126 126 # disabled since 4.13 'vcsserver.scm_app' which is now mapped to 'http'.
127 127 scm_app_impl = settings['vcs.scm_app_implementation']
128 128 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
129 129 settings['vcs.scm_app_implementation'] = 'http'
130 130
131 131 settings_maker.make_setting('appenlight', False, parser='bool')
132 132
133 133 temp_store = tempfile.gettempdir()
134 134 tmp_cache_dir = jn(temp_store, 'rc_cache')
135 135
136 136 # save default, cache dir, and use it for all backends later.
137 137 default_cache_dir = settings_maker.make_setting(
138 138 'cache_dir',
139 139 default=tmp_cache_dir, default_when_empty=True,
140 140 parser='dir:ensured')
141 141
142 142 # exception store cache
143 143 settings_maker.make_setting(
144 144 'exception_tracker.store_path',
145 145 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
146 146 parser='dir:ensured'
147 147 )
148 148
149 149 settings_maker.make_setting(
150 150 'celerybeat-schedule.path',
151 151 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
152 152 parser='file:ensured'
153 153 )
154 154
155 155 # celery
156 156 broker_url = settings_maker.make_setting('celery.broker_url', 'redis://redis:6379/8')
157 157 settings_maker.make_setting('celery.result_backend', broker_url)
158 158
159 159 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
160 160 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
161 161
162 162 # sessions, ensure file since no-value is memory
163 163 settings_maker.make_setting('beaker.session.type', 'file')
164 164 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
165 165
166 166 # cache_general
167 167 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
168 168 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
169 169 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
170 170
171 171 # cache_perms
172 172 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
173 173 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
174 174 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
175 175
176 176 # cache_repo
177 177 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
178 178 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
179 179 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
180 180
181 181 # cache_license
182 182 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
183 183 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
184 184 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
185 185
186 186 # cache_repo_longterm memory, 96H
187 187 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
188 188 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
189 189 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
190 190
191 191 # sql_cache_short
192 192 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
193 193 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
194 194 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
195 195
196 196 # archive_cache
197 197 settings_maker.make_setting('archive_cache.locking.url', 'redis://redis:6379/1')
198 198 settings_maker.make_setting('archive_cache.backend.type', 'filesystem')
199 199
200 200 settings_maker.make_setting('archive_cache.filesystem.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
201 201 settings_maker.make_setting('archive_cache.filesystem.cache_shards', 8, parser='int')
202 202 settings_maker.make_setting('archive_cache.filesystem.cache_size_gb', 10, parser='float')
203 203 settings_maker.make_setting('archive_cache.filesystem.eviction_policy', 'least-recently-stored')
204 204
205 205 settings_maker.make_setting('archive_cache.filesystem.retry', False, parser='bool')
206 206 settings_maker.make_setting('archive_cache.filesystem.retry_backoff', 1, parser='int')
207 207 settings_maker.make_setting('archive_cache.filesystem.retry_attempts', 10, parser='int')
208 208
209 209 settings_maker.make_setting('archive_cache.objectstore.url', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
210 210 settings_maker.make_setting('archive_cache.objectstore.key', '')
211 211 settings_maker.make_setting('archive_cache.objectstore.secret', '')
212 212 settings_maker.make_setting('archive_cache.objectstore.region', 'eu-central-1')
213 213 settings_maker.make_setting('archive_cache.objectstore.bucket', 'rhodecode-archive-cache', default_when_empty=True,)
214 214 settings_maker.make_setting('archive_cache.objectstore.bucket_shards', 8, parser='int')
215 215
216 216 settings_maker.make_setting('archive_cache.objectstore.cache_size_gb', 10, parser='float')
217 217 settings_maker.make_setting('archive_cache.objectstore.eviction_policy', 'least-recently-stored')
218 218
219 219 settings_maker.make_setting('archive_cache.objectstore.retry', False, parser='bool')
220 220 settings_maker.make_setting('archive_cache.objectstore.retry_backoff', 1, parser='int')
221 221 settings_maker.make_setting('archive_cache.objectstore.retry_attempts', 10, parser='int')
222 222
223 223 settings_maker.env_expand()
224 224
225 225 # configure instance id
226 226 config_utils.set_instance_id(settings)
227 227
228 228 return settings
@@ -1,110 +1,110 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import platform
21 21
22 22 DEFAULT_USER = 'default'
23 23
24 24
25 25 def configure_vcs(config):
26 26 """
27 27 Patch VCS config with some RhodeCode specific stuff
28 28 """
29 29 from rhodecode.lib.vcs import conf
30 30 import rhodecode.lib.vcs.conf.settings
31 31
32 32 conf.settings.BACKENDS = {
33 33 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
34 34 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
35 35 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
36 36 }
37 37
38 conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol']
38 conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol.v2']
39 39 conf.settings.HOOKS_HOST = config['vcs.hooks.host']
40 40 conf.settings.DEFAULT_ENCODINGS = config['default_encoding']
41 41 conf.settings.ALIASES[:] = config['vcs.backends']
42 42 conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version']
43 43
44 44
45 45 def initialize_database(config):
46 46 from rhodecode.lib.utils2 import engine_from_config, get_encryption_key
47 47 from rhodecode.model import init_model
48 48 engine = engine_from_config(config, 'sqlalchemy.db1.')
49 49 init_model(engine, encryption_key=get_encryption_key(config))
50 50
51 51
52 52 def initialize_test_environment(settings, test_env=None):
53 53 if test_env is None:
54 54 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
55 55
56 56 from rhodecode.lib.utils import (
57 57 create_test_directory, create_test_database, create_test_repositories,
58 58 create_test_index)
59 59 from rhodecode.tests import TESTS_TMP_PATH
60 60 from rhodecode.lib.vcs.backends.hg import largefiles_store
61 61 from rhodecode.lib.vcs.backends.git import lfs_store
62 62
63 63 # test repos
64 64 if test_env:
65 65 create_test_directory(TESTS_TMP_PATH)
66 66 # large object stores
67 67 create_test_directory(largefiles_store(TESTS_TMP_PATH))
68 68 create_test_directory(lfs_store(TESTS_TMP_PATH))
69 69
70 70 create_test_database(TESTS_TMP_PATH, settings)
71 71 create_test_repositories(TESTS_TMP_PATH, settings)
72 72 create_test_index(TESTS_TMP_PATH, settings)
73 73
74 74
75 75 def get_vcs_server_protocol(config):
76 76 return config['vcs.server.protocol']
77 77
78 78
79 79 def set_instance_id(config):
80 80 """
81 81 Sets a dynamic generated config['instance_id'] if missing or '*'
82 82 E.g instance_id = *cluster-1 or instance_id = *
83 83 """
84 84
85 85 config['instance_id'] = config.get('instance_id') or ''
86 86 instance_id = config['instance_id']
87 87 if instance_id.startswith('*') or not instance_id:
88 88 prefix = instance_id.lstrip('*')
89 89 _platform_id = platform.uname()[1] or 'instance'
90 90 config['instance_id'] = '{prefix}uname:{platform}-pid:{pid}'.format(
91 91 prefix=prefix,
92 92 platform=_platform_id,
93 93 pid=os.getpid())
94 94
95 95
96 96 def get_default_user_id():
97 97 from sqlalchemy import text
98 98 from rhodecode.model import meta
99 99
100 100 engine = meta.get_engine()
101 101 with meta.SA_Session(engine) as session:
102 102 result = session.execute(text(
103 103 "SELECT user_id from users where username = :uname"
104 104 ), {'uname': DEFAULT_USER})
105 105 user = result.first()
106 106 if not user:
107 107 raise ValueError('Unable to retrieve default user data from DB')
108 108 user_id = user[0]
109 109
110 110 return user_id
@@ -1,205 +1,205 b''
1 1
2 2
3 3 # Copyright (C) 2016-2023 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23
24 24 from rhodecode.tests import no_newline_id_generator
25 25 from rhodecode.config.middleware import sanitize_settings_and_apply_defaults
26 26 from rhodecode.config.settings_maker import SettingsMaker
27 27
28 28
29 29 class TestHelperFunctions(object):
30 30 @pytest.mark.parametrize('raw, expected', [
31 31 ('true', True), (u'true', True),
32 32 ('yes', True), (u'yes', True),
33 33 ('on', True), (u'on', True),
34 34 ('false', False), (u'false', False),
35 35 ('no', False), (u'no', False),
36 36 ('off', False), (u'off', False),
37 37 ('invalid-bool-value', False),
38 38 ('invalid-∫øø@-√å@¨€', False),
39 39 (u'invalid-∫øø@-√å@¨€', False),
40 40 ])
41 41 def test_bool_func_helper(self, raw, expected):
42 42 val = SettingsMaker._bool_func(raw)
43 43 assert val == expected
44 44
45 45 @pytest.mark.parametrize('raw, expected', [
46 46 ('', ''),
47 47 ('test-string', 'test-string'),
48 48 ('CaSe-TeSt', 'case-test'),
49 49 ('test-string-烩€', 'test-string-烩€'),
50 50 (u'test-string-烩€', u'test-string-烩€'),
51 51 ])
52 52 def test_string_func_helper(self, raw, expected):
53 53 val = SettingsMaker._string_func(raw)
54 54 assert val == expected
55 55
56 56 @pytest.mark.parametrize('raw, expected', [
57 57 ('', []),
58 58 ('test', ['test']),
59 59 ('CaSe-TeSt', ['CaSe-TeSt']),
60 60 ('test-string-烩€', ['test-string-烩€']),
61 61 (u'test-string-烩€', [u'test-string-烩€']),
62 62 ('hg,git,svn', ['hg', 'git', 'svn']),
63 63 ('hg, git, svn', ['hg', 'git', 'svn']),
64 64
65 65 (', hg , git , svn , ', ['', 'hg', 'git', 'svn', '']),
66 66 ('cheese,free node,other', ['cheese', 'free node', 'other']),
67 67 ], ids=no_newline_id_generator)
68 68 def test_list_setting_helper(self, raw, expected):
69 69 val = SettingsMaker._list_func(raw)
70 70 assert val == expected
71 71
72 72 @pytest.mark.parametrize('raw, expected', [
73 73 ('hg git svn', ['hg', 'git', 'svn']),
74 74 ], ids=no_newline_id_generator)
75 75 def test_list_setting_spaces_helper(self, raw, expected):
76 76 val = SettingsMaker._list_func(raw, sep=' ')
77 77 assert val == expected
78 78
79 79 @pytest.mark.parametrize('raw, expected', [
80 80 ('hg\ngit\nsvn', ['hg', 'git', 'svn']),
81 81 (' hg\n git\n svn ', ['hg', 'git', 'svn']),
82 82 ], ids=no_newline_id_generator)
83 83 def test_list_setting_newlines_helper(self, raw, expected):
84 84 val = SettingsMaker._list_func(raw, sep='\n')
85 85 assert val == expected
86 86
87 87 @pytest.mark.parametrize('raw, expected', [
88 88 ('0', 0),
89 89 ('-0', 0),
90 90 ('12345', 12345),
91 91 ('-12345', -12345),
92 92 (u'-12345', -12345),
93 93 ])
94 94 def test_int_setting_helper(self, raw, expected):
95 95 val = SettingsMaker._int_func(raw)
96 96 assert val == expected
97 97
98 98 @pytest.mark.parametrize('raw', [
99 99 ('0xff'),
100 100 (''),
101 101 ('invalid-int'),
102 102 ('invalid-⁄~†'),
103 103 (u'invalid-⁄~†'),
104 104 ])
105 105 def test_int_setting_helper_invalid_input(self, raw):
106 106 with pytest.raises(Exception):
107 107 SettingsMaker._int_func(raw)
108 108
109 109
110 110 class TestSanitizeVcsSettings(object):
111 111 _bool_funcs = [
112 112 ('vcs.hooks.direct_calls', False),
113 113 ('vcs.server.enable', True),
114 114 ('vcs.start_server', False),
115 115 ('startup.import_repos', False),
116 116 ]
117 117
118 118 _string_funcs = [
119 119 ('vcs.svn.compatible_version', ''),
120 ('vcs.hooks.protocol', 'http'),
120 ('vcs.hooks.protocol.v2', 'celery'),
121 121 ('vcs.hooks.host', '*'),
122 122 ('vcs.scm_app_implementation', 'http'),
123 123 ('vcs.server', ''),
124 124 ('vcs.server.protocol', 'http'),
125 125 ]
126 126
127 127 _list_settings = [
128 128 ('vcs.backends', 'hg git'),
129 129 ]
130 130
131 131 # @pytest.mark.parametrize('key, default', _list_settings)
132 132 # def test_list_setting_spacesep_list(self, key, default):
133 133 # test_list = ['test', 'list', 'values', 'for', key]
134 134 # input_value = ' '.join(test_list)
135 135 # settings = {key: input_value}
136 136 # sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
137 137 # assert settings[key] == test_list
138 138 #
139 139 # @pytest.mark.parametrize('key, default', _list_settings)
140 140 # def test_list_setting_newlinesep_list(self, key, default):
141 141 # test_list = ['test', 'list', 'values', 'for', key]
142 142 # input_value = '\n'.join(test_list)
143 143 # settings = {key: input_value}
144 144 # sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
145 145 # assert settings[key] == test_list
146 146
147 147 @pytest.mark.parametrize('key, default', _list_settings)
148 148 def test_list_setting_commasep_list(self, key, default):
149 149 test_list = ['test', 'list', 'values', 'for', key]
150 150 input_value = ','.join(test_list)
151 151 settings = {key: input_value}
152 152 sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
153 153 assert settings[key] == test_list
154 154
155 155 @pytest.mark.parametrize('key, default', _list_settings)
156 156 def test_list_setting_comma_and_space_sep_list(self, key, default):
157 157 test_list = ['test', 'list', 'values', 'for', key]
158 158 input_value = ', '.join(test_list)
159 159 settings = {key: input_value}
160 160 sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
161 161 assert settings[key] == test_list
162 162
163 163 @pytest.mark.parametrize('key, default', _string_funcs)
164 164 def test_string_func_string(self, key, default):
165 165 test_value = 'test-string-for-{}'.format(key)
166 166 settings = {key: test_value}
167 167 sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
168 168 assert settings[key] == test_value
169 169
170 170 @pytest.mark.parametrize('key, default', _string_funcs)
171 171 def test_string_func_default(self, key, default):
172 172 settings = {}
173 173 sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
174 174 assert settings[key] == default
175 175
176 176 # @pytest.mark.parametrize('key, default', _string_funcs)
177 177 # def test_string_func_lowercase(self, key, default):
178 178 # test_value = 'Test-String-For-{}'.format(key)
179 179 # settings = {key: test_value}
180 180 # sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
181 181 # assert settings[key] == test_value.lower()
182 182
183 183 @pytest.mark.parametrize('key, default', _bool_funcs)
184 184 def test_bool_func_true(self, key, default):
185 185 settings = {key: 'true'}
186 186 sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
187 187 assert settings[key] is True
188 188
189 189 @pytest.mark.parametrize('key, default', _bool_funcs)
190 190 def test_bool_func_false(self, key, default):
191 191 settings = {key: 'false'}
192 192 sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
193 193 assert settings[key] is False
194 194
195 195 @pytest.mark.parametrize('key, default', _bool_funcs)
196 196 def test_bool_func_invalid_string(self, key, default):
197 197 settings = {key: 'no-bool-val-string'}
198 198 sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
199 199 assert settings[key] is False
200 200
201 201 @pytest.mark.parametrize('key, default', _bool_funcs)
202 202 def test_bool_func_default(self, key, default):
203 203 settings = {}
204 204 sanitize_settings_and_apply_defaults({'__file__': ''}, settings)
205 205 assert settings[key] is default
@@ -1,226 +1,226 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import pytest
21 21
22 22 from rhodecode.lib.config_utils import get_app_config
23 23 from rhodecode.tests.fixture import TestINI
24 24 from rhodecode.tests import TESTS_TMP_PATH
25 25 from rhodecode.tests.server_utils import RcVCSServer
26 26
27 27
28 28 @pytest.fixture(scope='session')
29 29 def vcsserver(request, vcsserver_port, vcsserver_factory):
30 30 """
31 31 Session scope VCSServer.
32 32
33 33 Tests which need the VCSServer have to rely on this fixture in order
34 34 to ensure it will be running.
35 35
36 36 For specific needs, the fixture vcsserver_factory can be used. It allows to
37 37 adjust the configuration file for the test run.
38 38
39 39 Command line args:
40 40
41 41 --without-vcsserver: Allows to switch this fixture off. You have to
42 42 manually start the server.
43 43
44 44 --vcsserver-port: Will expect the VCSServer to listen on this port.
45 45 """
46 46
47 47 if not request.config.getoption('with_vcsserver'):
48 48 return None
49 49
50 50 return vcsserver_factory(
51 51 request, vcsserver_port=vcsserver_port)
52 52
53 53
54 54 @pytest.fixture(scope='session')
55 55 def vcsserver_factory(tmpdir_factory):
56 56 """
57 57 Use this if you need a running vcsserver with a special configuration.
58 58 """
59 59
60 60 def factory(request, overrides=(), vcsserver_port=None,
61 61 log_file=None, workers='3'):
62 62
63 63 if vcsserver_port is None:
64 64 vcsserver_port = get_available_port()
65 65
66 66 overrides = list(overrides)
67 67 overrides.append({'server:main': {'port': vcsserver_port}})
68 68
69 69 option_name = 'vcsserver_config_http'
70 70 override_option_name = 'vcsserver_config_override'
71 71 config_file = get_config(
72 72 request.config, option_name=option_name,
73 73 override_option_name=override_option_name, overrides=overrides,
74 74 basetemp=tmpdir_factory.getbasetemp().strpath,
75 75 prefix='test_vcs_')
76 76
77 77 server = RcVCSServer(config_file, log_file, workers)
78 78 server.start()
79 79
80 80 @request.addfinalizer
81 81 def cleanup():
82 82 server.shutdown()
83 83
84 84 server.wait_until_ready()
85 85 return server
86 86
87 87 return factory
88 88
89 89
90 90 def _use_log_level(config):
91 91 level = config.getoption('test_loglevel') or 'critical'
92 92 return level.upper()
93 93
94 94
95 95 @pytest.fixture(scope='session')
96 96 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
97 97 option_name = 'pyramid_config'
98 98 log_level = _use_log_level(request.config)
99 99
100 100 overrides = [
101 101 {'server:main': {'port': rcserver_port}},
102 102 {'app:main': {
103 103 'cache_dir': '%(here)s/rc-tests/rc_data',
104 104 'vcs.server': f'localhost:{vcsserver_port}',
105 105 # johbo: We will always start the VCSServer on our own based on the
106 106 # fixtures of the test cases. For the test run it must always be
107 107 # off in the INI file.
108 108 'vcs.start_server': 'false',
109 109
110 110 'vcs.server.protocol': 'http',
111 111 'vcs.scm_app_implementation': 'http',
112 112 'vcs.svn.proxy.enabled': 'true',
113 'vcs.hooks.protocol': 'http',
113 'vcs.hooks.protocol.v2': 'celery',
114 114 'vcs.hooks.host': '*',
115 115 'repo_store.path': TESTS_TMP_PATH,
116 116 'app.service_api.token': 'service_secret_token',
117 117 }},
118 118
119 119 {'handler_console': {
120 120 'class': 'StreamHandler',
121 121 'args': '(sys.stderr,)',
122 122 'level': log_level,
123 123 }},
124 124
125 125 ]
126 126
127 127 filename = get_config(
128 128 request.config, option_name=option_name,
129 129 override_option_name='{}_override'.format(option_name),
130 130 overrides=overrides,
131 131 basetemp=tmpdir_factory.getbasetemp().strpath,
132 132 prefix='test_rce_')
133 133 return filename
134 134
135 135
136 136 @pytest.fixture(scope='session')
137 137 def ini_settings(ini_config):
138 138 ini_path = ini_config
139 139 return get_app_config(ini_path)
140 140
141 141
142 142 def get_available_port(min_port=40000, max_port=55555):
143 143 from rhodecode.lib.utils2 import get_available_port as _get_port
144 144 return _get_port(min_port, max_port)
145 145
146 146
147 147 @pytest.fixture(scope='session')
148 148 def rcserver_port(request):
149 149 port = get_available_port()
150 150 print(f'Using rhodecode port {port}')
151 151 return port
152 152
153 153
154 154 @pytest.fixture(scope='session')
155 155 def vcsserver_port(request):
156 156 port = request.config.getoption('--vcsserver-port')
157 157 if port is None:
158 158 port = get_available_port()
159 159 print(f'Using vcsserver port {port}')
160 160 return port
161 161
162 162
163 163 @pytest.fixture(scope='session')
164 164 def available_port_factory() -> get_available_port:
165 165 """
166 166 Returns a callable which returns free port numbers.
167 167 """
168 168 return get_available_port
169 169
170 170
171 171 @pytest.fixture()
172 172 def available_port(available_port_factory):
173 173 """
174 174 Gives you one free port for the current test.
175 175
176 176 Uses "available_port_factory" to retrieve the port.
177 177 """
178 178 return available_port_factory()
179 179
180 180
181 181 @pytest.fixture(scope='session')
182 182 def testini_factory(tmpdir_factory, ini_config):
183 183 """
184 184 Factory to create an INI file based on TestINI.
185 185
186 186 It will make sure to place the INI file in the correct directory.
187 187 """
188 188 basetemp = tmpdir_factory.getbasetemp().strpath
189 189 return TestIniFactory(basetemp, ini_config)
190 190
191 191
192 192 class TestIniFactory(object):
193 193
194 194 def __init__(self, basetemp, template_ini):
195 195 self._basetemp = basetemp
196 196 self._template_ini = template_ini
197 197
198 198 def __call__(self, ini_params, new_file_prefix='test'):
199 199 ini_file = TestINI(
200 200 self._template_ini, ini_params=ini_params,
201 201 new_file_prefix=new_file_prefix, dir=self._basetemp)
202 202 result = ini_file.create()
203 203 return result
204 204
205 205
206 206 def get_config(
207 207 config, option_name, override_option_name, overrides=None,
208 208 basetemp=None, prefix='test'):
209 209 """
210 210 Find a configuration file and apply overrides for the given `prefix`.
211 211 """
212 212 config_file = (
213 213 config.getoption(option_name) or config.getini(option_name))
214 214 if not config_file:
215 215 pytest.exit(
216 216 "Configuration error, could not extract {}.".format(option_name))
217 217
218 218 overrides = overrides or []
219 219 config_override = config.getoption(override_option_name)
220 220 if config_override:
221 221 overrides.append(config_override)
222 222 temp_ini_file = TestINI(
223 223 config_file, ini_params=overrides, new_file_prefix=prefix,
224 224 dir=basetemp)
225 225
226 226 return temp_ini_file.create()
@@ -1,451 +1,451 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import mock
21 21 import pytest
22 22
23 23 from rhodecode.lib.str_utils import base64_to_str
24 24 from rhodecode.lib.utils2 import AttributeDict
25 25 from rhodecode.tests.utils import CustomTestApp
26 26
27 27 from rhodecode.lib.caching_query import FromCache
28 28 from rhodecode.lib.middleware import simplevcs
29 29 from rhodecode.lib.middleware.https_fixup import HttpsFixup
30 30 from rhodecode.lib.middleware.utils import scm_app_http
31 31 from rhodecode.model.db import User, _hash_key
32 32 from rhodecode.model.meta import Session, cache as db_cache
33 33 from rhodecode.tests import (
34 34 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
35 35 from rhodecode.tests.lib.middleware import mock_scm_app
36 36
37 37
class StubVCSController(simplevcs.SimpleVCS):
    """
    Minimal SimpleVCS subclass for tests: stubs out the hook points of the
    real controller so no actual VCS backend or downstream wsgi app is
    needed.
    """

    SCM = 'hg'
    # Response body returned by the fake wsgi app; tests override this
    # attribute per-case with the payload they expect to receive back.
    stub_response_body = tuple()

    def __init__(self, *args, **kwargs):
        super(StubVCSController, self).__init__(*args, **kwargs)
        # Defaults consumed by the stubbed hook methods below; individual
        # tests mutate these attributes directly to simulate different
        # request shapes (action, shadow-repo state, repo name).
        self._action = 'pull'
        self._is_shadow_repo_dir = True
        self._name = HG_REPO
        self.set_repo_names(None)

    @property
    def is_shadow_repo_dir(self):
        # Controlled via `_is_shadow_repo_dir` instead of a filesystem check.
        return self._is_shadow_repo_dir

    def _get_repository_name(self, environ):
        # Fixed repo name; `environ` is ignored in the stub.
        return self._name

    def _get_action(self, environ):
        # Fixed action ('pull'/'push'); `environ` is ignored in the stub.
        return self._action

    def _create_wsgi_app(self, repo_path, repo_name, config):
        # Return a fake downstream app that always answers 200 with the
        # canned `stub_response_body`.
        def fake_app(environ, start_response):
            headers = [
                ('Http-Accept', 'application/mercurial')
            ]
            start_response('200 OK', headers)
            return self.stub_response_body
        return fake_app

    def _create_config(self, extras, repo_name, scheme='http'):
        # No backend config is required for the stubbed wsgi app.
        return None
71 71
72 72
@pytest.fixture()
def vcscontroller(baseapp, config_stub, request_stub):
    """
    A CustomTestApp wrapping a StubVCSController with auth plugins
    registered; the underlying controller is exposed as ``app.controller``.
    """
    from rhodecode.config.middleware import ce_auth_resources

    # Register the authentication machinery on the stub configurator so
    # permission checks inside SimpleVCS can resolve auth plugins.
    config_stub.testing_securitypolicy()
    config_stub.include('rhodecode.authentication')

    for resource in ce_auth_resources:
        config_stub.include(resource)

    controller = StubVCSController(
        baseapp.config.get_settings(), request_stub.registry)
    app = HttpsFixup(controller, baseapp.config.get_settings())
    app = CustomTestApp(app)

    # Drop the cached default user so tests observe fresh DB state.
    _remove_default_user_from_query_cache()

    # Sanity checks that things are set up correctly
    app.get('/' + HG_REPO, status=200)

    app.controller = controller
    return app
95 95
96 96
def _remove_default_user_from_query_cache():
    """
    Invalidate the cached SQLAlchemy query for the default user and expire
    the instance, so subsequent lookups hit the database again.
    """
    user = User.get_default_user(cache=True)
    query = Session().query(User).filter(User.username == user.username)
    # The cache region/key here must match the ones used when the query was
    # originally cached (see User.get_default_user).
    query = query.options(
        FromCache("sql_cache_short", f"get_user_{_hash_key(user.username)}"))

    db_cache.invalidate(
        query, {},
        FromCache("sql_cache_short", f"get_user_{_hash_key(user.username)}"))

    Session().expire(user)
108 108
109 109
def test_handles_exceptions_during_permissions_checks(
        vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory):
    """
    An unexpected exception raised while resolving the user during
    permission checks must surface as an HTTP 500, not hang or leak.
    """
    test_password = 'qweqwe'
    # User authenticated via the headers plugin (REMOTE_USER).
    test_user = test_user_factory(password=test_password, extern_type='headers', extern_name='headers')
    test_username = test_user.username

    enable_auth_plugins.enable([
        'egg:rhodecode-enterprise-ce#headers',
        'egg:rhodecode-enterprise-ce#token',
        'egg:rhodecode-enterprise-ce#rhodecode'],
        override={
            'egg:rhodecode-enterprise-ce#headers': {'auth_headers_header': 'REMOTE_USER'}
        })

    user_and_pass = f'{test_username}:{test_password}'
    auth_password = base64_to_str(user_and_pass)

    extra_environ = {
        'AUTH_TYPE': 'Basic',
        'HTTP_AUTHORIZATION': f'Basic {auth_password}',
        'REMOTE_USER': test_username,
    }

    # Verify that things are hooked up correctly, we pass user with headers bound auth, and headers filled in
    vcscontroller.get('/', status=200, extra_environ=extra_environ)

    # Simulate trouble during permission checks
    with mock.patch('rhodecode.model.db.User.get_by_username',
                    side_effect=Exception('permission_error_test')) as get_user:
        # Verify that a correct 500 is returned and check that the expected
        # code path was hit.
        vcscontroller.get('/', status=500, extra_environ=extra_environ)
        assert get_user.called
144 144
145 145
class StubFailVCSController(simplevcs.SimpleVCS):
    # Controller whose request handling always raises; used to verify that
    # unexpected exceptions surface as HTTP 500 responses.
    def _handle_request(self, environ, start_response):
        raise Exception("BOOM")
149 149
150 150
@pytest.fixture(scope='module')
def fail_controller(baseapp):
    """A CustomTestApp wrapping a controller whose handler always raises."""
    failing = StubFailVCSController(
        baseapp.config.get_settings(), baseapp.config)
    wrapped = HttpsFixup(failing, baseapp.config.get_settings())
    return CustomTestApp(wrapped)
158 158
159 159
def test_handles_exceptions_as_internal_server_error(fail_controller):
    # Any exception escaping the controller must become an HTTP 500.
    fail_controller.get('/', status=500)
162 162
163 163
def test_provides_traceback_for_appenlight(fail_controller):
    # When an appenlight client is present in the environ, the middleware
    # must attach the traceback for error reporting.
    response = fail_controller.get(
        '/', status=500, extra_environ={'appenlight.client': 'fake'})
    assert 'appenlight.__traceback' in response.request.environ
168 168
169 169
def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
    # Without an explicit override, the http scm_app implementation is used.
    controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
    assert controller.scm_app is scm_app_http
173 173
174 174
def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
    # Point the scm_app implementation setting at the mock module and check
    # the controller picks it up instead of the default http implementation.
    settings = baseapp.config.get_settings().copy()
    settings['vcs.scm_app_implementation'] = (
        'rhodecode.tests.lib.middleware.mock_scm_app')
    stub_controller = StubVCSController(settings, request_stub.registry)
    assert stub_controller.scm_app is mock_scm_app
181 181
182 182
@pytest.mark.parametrize('query_string, expected', [
    ('cmd=stub_command', True),
    ('cmd=listkeys', False),
])
def test_should_check_locking(query_string, expected):
    # 'listkeys' is a read-only hg command and must skip the locking check.
    result = simplevcs._should_check_locking(query_string)
    assert result == expected
190 190
191 191
class TestShadowRepoRegularExpression(object):
    """
    Exercises SimpleVCS.shadow_repo_re against shadow-repository URLs of
    the form ``<repo>/pull-request/<id>/repository``.
    """
    pr_segment = 'pull-request'
    shadow_segment = 'repository'

    @pytest.mark.parametrize('url, expected', [
        # repo with/without groups
        ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
        ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
        ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
        ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),

        # pull request ID
        ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
        ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
        ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
        ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),

        # unicode
        (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
        (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),

        # trailing/leading slash
        ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
        ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
        ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),

        # misc
        ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
        ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
    ])
    def test_shadow_repo_regular_expression(self, url, expected):
        from rhodecode.lib.middleware.simplevcs import SimpleVCS
        url = url.format(
            pr_segment=self.pr_segment,
            shadow_segment=self.shadow_segment)
        match_obj = SimpleVCS.shadow_repo_re.match(url)
        assert (match_obj is not None) == expected
229 229
230 230
@pytest.mark.backends('git', 'hg')
class TestShadowRepoExposure(object):
    """
    Behaviour of SimpleVCS for pull-request shadow repositories: pulls are
    allowed (when the shadow dir exists), pushes are rejected, and repo
    name resolution maps shadow URLs onto the target repository.
    """

    def test_pull_on_shadow_repo_propagates_to_wsgi_app(
            self, baseapp, request_stub):
        """
        Check that a pull action to a shadow repo is propagated to the
        underlying wsgi app.
        """
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._check_ssl = mock.Mock()
        controller.is_shadow_repo = True
        controller._action = 'pull'
        controller._is_shadow_repo_dir = True
        controller.stub_response_body = (b'dummy body value',)
        # Disable response caching for this request.
        controller._get_default_cache_ttl = mock.Mock(
            return_value=(False, 0))

        environ_stub = {
            'HTTP_HOST': 'test.example.com',
            'HTTP_ACCEPT': 'application/mercurial',
            'REQUEST_METHOD': 'GET',
            'wsgi.url_scheme': 'http',
        }

        response = controller(environ_stub, mock.Mock())
        response_body = b''.join(response)

        # Assert that we got the response from the wsgi app.
        assert response_body == b''.join(controller.stub_response_body)

    def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
        """
        Check that a pull action to a shadow repo whose directory does not
        exist on disk results in a 404 response.
        """
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._check_ssl = mock.Mock()
        controller.is_shadow_repo = True
        controller._action = 'pull'
        # Simulate a missing shadow repository directory.
        controller._is_shadow_repo_dir = False
        controller.stub_response_body = (b'dummy body value',)
        environ_stub = {
            'HTTP_HOST': 'test.example.com',
            'HTTP_ACCEPT': 'application/mercurial',
            'REQUEST_METHOD': 'GET',
            'wsgi.url_scheme': 'http',
        }

        response = controller(environ_stub, mock.Mock())
        response_body = b''.join(response)

        # Assert that we got the response from the wsgi app.
        assert b'404 Not Found' in response_body

    def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
        """
        Check that a push action to a shadow repo is aborted.
        """
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._check_ssl = mock.Mock()
        controller.is_shadow_repo = True
        controller._action = 'push'
        controller.stub_response_body = (b'dummy body value',)
        environ_stub = {
            'HTTP_HOST': 'test.example.com',
            'HTTP_ACCEPT': 'application/mercurial',
            'REQUEST_METHOD': 'GET',
            'wsgi.url_scheme': 'http',
        }

        response = controller(environ_stub, mock.Mock())
        response_body = b''.join(response)

        assert response_body != controller.stub_response_body
        # Assert that a 406 error is returned.
        assert b'406 Not Acceptable' in response_body

    def test_set_repo_names_no_shadow(self, baseapp, request_stub):
        """
        Check that the set_repo_names method sets all names to the one returned
        by the _get_repository_name method on a request to a non shadow repo.
        """
        environ_stub = {}
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._name = 'RepoGroup/MyRepo'
        controller.set_repo_names(environ_stub)
        assert not controller.is_shadow_repo
        # url/acl/vcs names all collapse to the plain repository name.
        assert (controller.url_repo_name ==
                controller.acl_repo_name ==
                controller.vcs_repo_name ==
                controller._get_repository_name(environ_stub))

    def test_set_repo_names_with_shadow(
            self, baseapp, pr_util, config_stub, request_stub):
        """
        Check that the set_repo_names method sets correct names on a request
        to a shadow repo.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = pr_util.create_pull_request()
        shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
            target=pull_request.target_repo.repo_name,
            pr_id=pull_request.pull_request_id,
            pr_segment=TestShadowRepoRegularExpression.pr_segment,
            shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._name = shadow_url
        controller.set_repo_names({})

        # Get file system path to shadow repo for assertions.
        workspace_id = PullRequestModel()._workspace_id(pull_request)
        vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)

        # vcs name points at the shadow path, url name keeps the request
        # URL, and ACL checks are done against the target repository.
        assert controller.vcs_repo_name == vcs_repo_name
        assert controller.url_repo_name == shadow_url
        assert controller.acl_repo_name == pull_request.target_repo.repo_name
        assert controller.is_shadow_repo

    def test_set_repo_names_with_shadow_but_missing_pr(
            self, baseapp, pr_util, config_stub, request_stub):
        """
        Checks that the set_repo_names method enforces matching target repos
        and pull request IDs.
        """
        pull_request = pr_util.create_pull_request()
        # Build a shadow URL with a pull request ID that does not exist.
        shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
            target=pull_request.target_repo.repo_name,
            pr_id=999999999,
            pr_segment=TestShadowRepoRegularExpression.pr_segment,
            shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._name = shadow_url
        controller.set_repo_names({})

        # Falls back to treating the URL as a plain (non shadow) repo name.
        assert not controller.is_shadow_repo
        assert (controller.url_repo_name ==
                controller.acl_repo_name ==
                controller.vcs_repo_name)
377 377
378 378
@pytest.mark.usefixtures('baseapp')
class TestGenerateVcsResponse(object):
    """
    Tests for SimpleVCS._generate_vcs_response: the streamed response body,
    cache invalidation after the body is consumed, and error propagation.
    """

    def test_ensures_that_start_response_is_called_early_enough(self):
        self.call_controller_with_response_body(iter(['a', 'b']))
        assert self.start_response.called

    def test_invalidates_cache_after_body_is_consumed(self):
        result = self.call_controller_with_response_body(iter(['a', 'b']))
        # Invalidation must be deferred until the body generator is drained.
        assert not self.was_cache_invalidated()
        # Consume the result
        list(result)
        assert self.was_cache_invalidated()

    def test_raises_unknown_exceptions(self):
        result = self.call_controller_with_response_body(
            self.raise_result_iter(vcs_kind='unknown'))
        with pytest.raises(Exception):
            list(result)

    def call_controller_with_response_body(self, response_body):
        # Helper: drives _generate_vcs_response with a canned body iterator
        # and records the controller plus the start_response mock on self.
        settings = {
            'base_path': 'fake_base_path',
            'vcs.hooks.protocol.v2': 'celery',
            'vcs.hooks.direct_calls': False,
        }
        registry = AttributeDict()
        controller = StubVCSController(settings, registry)
        controller._invalidate_cache = mock.Mock()
        controller.stub_response_body = response_body
        self.start_response = mock.Mock()
        result = controller._generate_vcs_response(
            environ={}, start_response=self.start_response,
            repo_path='fake_repo_path',
            extras={}, action='push')
        self.controller = controller
        return result

    def raise_result_iter(self, vcs_kind='repo_locked'):
        """
        Simulates an exception due to a vcs raised exception if kind vcs_kind
        """
        raise self.vcs_exception(vcs_kind=vcs_kind)
        yield "never_reached"

    def vcs_exception(self, vcs_kind='repo_locked'):
        # Exception carrying the `_vcs_kind` marker the middleware inspects.
        locked_exception = Exception('TEST_MESSAGE')
        locked_exception._vcs_kind = vcs_kind
        return locked_exception

    def was_cache_invalidated(self):
        return self.controller._invalidate_cache.called
431 431
432 432
class TestInitializeGenerator(object):
    """
    Tests for the simplevcs.initialize_generator decorator, which drains a
    leading '__init__' sentinel element from a wrapped generator.
    """

    def test_drains_first_element(self):
        gen = self.factory(['__init__', 1, 2])
        result = list(gen)
        # The '__init__' sentinel must not appear in the consumed output.
        assert result == [1, 2]

    @pytest.mark.parametrize('values', [
        [],
        [1, 2],
    ])
    def test_raises_value_error(self, values):
        # Generators that do not yield the sentinel first are rejected.
        with pytest.raises(ValueError):
            self.factory(values)

    @simplevcs.initialize_generator
    def factory(self, iterable):
        for elem in iterable:
            yield elem
@@ -1,978 +1,978 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import mock
21 21 import pytest
22 22 import textwrap
23 23
24 24 import rhodecode
25 25 from rhodecode.lib.vcs.backends import get_backend
26 26 from rhodecode.lib.vcs.backends.base import (
27 27 MergeResponse, MergeFailureReason, Reference)
28 28 from rhodecode.lib.vcs.exceptions import RepositoryError
29 29 from rhodecode.lib.vcs.nodes import FileNode
30 30 from rhodecode.model.comment import CommentsModel
31 31 from rhodecode.model.db import PullRequest, Session
32 32 from rhodecode.model.pull_request import PullRequestModel
33 33 from rhodecode.model.user import UserModel
34 34 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
35 35 from rhodecode.lib.str_utils import safe_str
36 36
37 37 pytestmark = [
38 38 pytest.mark.backends("git", "hg"),
39 39 ]
40 40
41 41
42 42 @pytest.mark.usefixtures('config_stub')
43 43 class TestPullRequestModel(object):
44 44
    @pytest.fixture()
    def pull_request(self, request, backend, pr_util):
        """
        A pull request combined with multiples patches.

        Patches out backend merge/cleanup, status/notification/hook side
        effects and cache invalidation, exposing the mocks on ``self`` for
        assertions in the individual tests.
        """
        BackendClass = get_backend(backend.alias)
        merge_resp = MergeResponse(
            False, False, None, MergeFailureReason.UNKNOWN,
            metadata={'exception': 'MockError'})
        self.merge_patcher = mock.patch.object(
            BackendClass, 'merge', return_value=merge_resp)
        self.workspace_remove_patcher = mock.patch.object(
            BackendClass, 'cleanup_merge_workspace')

        self.workspace_remove_mock = self.workspace_remove_patcher.start()
        self.merge_mock = self.merge_patcher.start()
        self.comment_patcher = mock.patch(
            'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
        self.comment_patcher.start()
        self.notification_patcher = mock.patch(
            'rhodecode.model.notification.NotificationModel.create')
        self.notification_patcher.start()
        self.helper_patcher = mock.patch(
            'rhodecode.lib.helpers.route_path')
        self.helper_patcher.start()

        self.hook_patcher = mock.patch.object(PullRequestModel,
            'trigger_pull_request_hook')
        self.hook_mock = self.hook_patcher.start()

        self.invalidation_patcher = mock.patch(
            'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
        self.invalidation_mock = self.invalidation_patcher.start()

        # name_suffix includes non-ascii chars to exercise unicode handling.
        self.pull_request = pr_util.create_pull_request(
            mergeable=True, name_suffix=u'ąć')
        self.source_commit = self.pull_request.source_ref_parts.commit_id
        self.target_commit = self.pull_request.target_ref_parts.commit_id
        self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
        self.repo_id = self.pull_request.target_repo.repo_id

        @request.addfinalizer
        def cleanup_pull_request():
            # Every test implicitly verifies the 'create' hook fired.
            calls = [mock.call(
                self.pull_request, self.pull_request.author, 'create')]
            self.hook_mock.assert_has_calls(calls)

            self.workspace_remove_patcher.stop()
            self.merge_patcher.stop()
            self.comment_patcher.stop()
            self.notification_patcher.stop()
            self.helper_patcher.stop()
            self.hook_patcher.stop()
            self.invalidation_patcher.stop()

        return self.pull_request
101 101
    def test_get_all(self, pull_request):
        # The fixture creates exactly one PR for the target repo.
        prs = PullRequestModel().get_all(pull_request.target_repo)
        assert isinstance(prs, list)
        assert len(prs) == 1
106 106
    def test_count_all(self, pull_request):
        # count_all must agree with the single PR created by the fixture.
        pr_count = PullRequestModel().count_all(pull_request.target_repo)
        assert pr_count == 1
110 110
    def test_get_awaiting_review(self, pull_request):
        # A freshly created PR counts as awaiting review.
        prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
        assert isinstance(prs, list)
        assert len(prs) == 1
115 115
    def test_count_awaiting_review(self, pull_request):
        # Count variant of the awaiting-review query.
        pr_count = PullRequestModel().count_awaiting_review(
            pull_request.target_repo)
        assert pr_count == 1
120 120
    def test_get_awaiting_my_review(self, pull_request):
        # Make the PR author a reviewer so the PR shows up in their
        # personal awaiting-review list.
        PullRequestModel().update_reviewers(
            pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
            pull_request.author)
        Session().commit()

        prs = PullRequestModel().get_awaiting_my_review(
            pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
        assert isinstance(prs, list)
        assert len(prs) == 1
131 131
    def test_count_awaiting_my_review(self, pull_request):
        # Same setup as test_get_awaiting_my_review, checking the count API.
        PullRequestModel().update_reviewers(
            pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
            pull_request.author)
        Session().commit()

        pr_count = PullRequestModel().count_awaiting_my_review(
            pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
        assert pr_count == 1
141 141
    def test_delete_calls_cleanup_merge(self, pull_request):
        # Deleting a PR must remove its shadow merge workspace.
        repo_id = pull_request.target_repo.repo_id
        PullRequestModel().delete(pull_request, pull_request.author)
        Session().commit()

        self.workspace_remove_mock.assert_called_once_with(
            repo_id, self.workspace_id)
149 149
    def test_close_calls_cleanup_and_hook(self, pull_request):
        # Closing a PR must remove the merge workspace and fire the
        # 'close' pull request hook.
        PullRequestModel().close_pull_request(
            pull_request, pull_request.author)
        Session().commit()

        repo_id = pull_request.target_repo.repo_id

        self.workspace_remove_mock.assert_called_once_with(
            repo_id, self.workspace_id)
        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'close')
161 161
    def test_merge_status(self, pull_request):
        # Successful dry-run merge: status is cached on the PR so a second
        # call must not hit the backend again.
        self.merge_mock.return_value = MergeResponse(
            True, False, None, MergeFailureReason.NONE)

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is True
        assert msg == 'This pull request can be automatically merged.'
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        # The merge-state bookkeeping fields are updated after the dry run.
        assert pull_request._last_merge_source_rev == self.source_commit
        assert pull_request._last_merge_target_rev == self.target_commit
        assert pull_request.last_merge_status is MergeFailureReason.NONE

        self.merge_mock.reset_mock()
        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is True
        assert msg == 'This pull request can be automatically merged.'
        # Cached result: the backend merge must not be invoked again.
        assert self.merge_mock.called is False
189 189
    def test_merge_status_known_failure(self, pull_request):
        # A known failure (merge conflict) is also cached, like success.
        self.merge_mock.return_value = MergeResponse(
            False, False, None, MergeFailureReason.MERGE_FAILED,
            metadata={'unresolved_files': 'file1'})

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        assert pull_request._last_merge_source_rev == self.source_commit
        assert pull_request._last_merge_target_rev == self.target_commit
        assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED

        self.merge_mock.reset_mock()
        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
        # Known failures are served from cache on the second call.
        assert self.merge_mock.called is False
218 218
    def test_merge_status_unknown_failure(self, pull_request):
        # Unknown failures are NOT cached: the state fields stay unset and
        # a second call re-runs the backend merge.
        self.merge_mock.return_value = MergeResponse(
            False, False, None, MergeFailureReason.UNKNOWN,
            metadata={'exception': 'MockError'})

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because of an unhandled exception. '
            'MockError')
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        # No caching of unknown failures.
        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        self.merge_mock.reset_mock()
        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because of an unhandled exception. '
            'MockError')
        # The backend merge is invoked again on the retry.
        assert self.merge_mock.called is True
251 251
    def test_merge_status_when_target_is_locked(self, pull_request):
        # locked format: [user_id, lock_timestamp, lock_reason]
        pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because the target repository '
            'is locked by user:1.')
259 259
    def test_merge_status_requirements_check_target(self, pull_request):
        # Largefiles enabled only on source: merge is refused because the
        # target repository lacks largefiles support.
        def has_largefiles(self, repo):
            return repo == pull_request.source_repo

        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
        with patcher:
            merge_response, status, msg = PullRequestModel().merge_status(pull_request)

        assert status is False
        assert msg == 'Target repository large files support is disabled.'
271 271
    def test_merge_status_requirements_check_source(self, pull_request):
        # Largefiles enabled only on target: merge is refused because the
        # source repository lacks largefiles support.
        def has_largefiles(self, repo):
            return repo == pull_request.target_repo

        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
        with patcher:
            merge_response, status, msg = PullRequestModel().merge_status(pull_request)

        assert status is False
        assert msg == 'Source repository large files support is disabled.'
283 283
    def test_merge(self, pull_request, merge_extras):
        # Happy-path merge: backend merge is invoked with the generated
        # merge commit message, caches are invalidated, the 'merge' hook
        # fires and merge_rev is persisted on the PR.
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            True, True, merge_ref, MergeFailureReason.NONE)

        merge_extras['repository'] = pull_request.target_repo.repo_name
        PullRequestModel().merge_repo(
            pull_request, pull_request.author, extras=merge_extras)
        Session().commit()

        # Expected auto-generated merge commit message.
        message = (
            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_str(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_str(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )
        self.invalidation_mock.assert_called_once_with(
            pull_request.target_repo.repo_name)

        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'merge')

        # Re-fetch to verify the merge revision was persisted.
        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
322 322
    def test_merge_with_status_lock(self, pull_request, merge_extras):
        # Same as test_merge, but performed while the PR is state-locked via
        # set_state; the state must be restored after the context exits.
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            True, True, merge_ref, MergeFailureReason.NONE)

        merge_extras['repository'] = pull_request.target_repo.repo_name

        with pull_request.set_state(PullRequest.STATE_UPDATING):
            assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
            PullRequestModel().merge_repo(
                pull_request, pull_request.author, extras=merge_extras)
            Session().commit()

        # The context manager restores the original state.
        assert pull_request.pull_request_state == PullRequest.STATE_CREATED

        # Expected auto-generated merge commit message.
        message = (
            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_str(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_str(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )
        self.invalidation_mock.assert_called_once_with(
            pull_request.target_repo.repo_name)

        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'merge')

        # Re-fetch to verify the merge revision was persisted.
        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
366 366
    def test_merge_failed(self, pull_request, merge_extras):
        """A failed merge must neither invalidate caches nor set merge_rev."""
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        # simulate the backend reporting a merge conflict
        self.merge_mock.return_value = MergeResponse(
            False, False, merge_ref, MergeFailureReason.MERGE_FAILED)

        merge_extras['repository'] = pull_request.target_repo.repo_name
        PullRequestModel().merge_repo(
            pull_request, pull_request.author, extras=merge_extras)
        Session().commit()

        message = (
            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_str(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_str(pull_request.title)
            )
        )
        # the merge attempt itself still happens with the standard arguments
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )

        # nothing was merged: no cache invalidation, no merge revision
        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert self.invalidation_mock.called is False
        assert pull_request.merge_rev is None
401 401
402 402 def test_get_commit_ids(self, pull_request):
403 403 # The PR has been not merged yet, so expect an exception
404 404 with pytest.raises(ValueError):
405 405 PullRequestModel()._get_commit_ids(pull_request)
406 406
407 407 # Merge revision is in the revisions list
408 408 pull_request.merge_rev = pull_request.revisions[0]
409 409 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
410 410 assert commit_ids == pull_request.revisions
411 411
412 412 # Merge revision is not in the revisions list
413 413 pull_request.merge_rev = 'f000' * 10
414 414 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
415 415 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
416 416
    def test_get_diff_from_pr_version(self, pull_request):
        """The diff between the PR's source and target refs contains the
        files the fixture pull request touches."""
        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id
        diff = PullRequestModel()._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=False, diff_context=6)
        # file_1 is part of the fixture pull request, so it must show up
        assert b'file_1' in diff.raw.tobytes()
425 425
426 426 def test_generate_title_returns_unicode(self):
427 427 title = PullRequestModel().generate_pullrequest_title(
428 428 source='source-dummy',
429 429 source_ref='source-ref-dummy',
430 430 target='target-dummy',
431 431 )
432 432 assert type(title) == str
433 433
434 434 @pytest.mark.parametrize('title, has_wip', [
435 435 ('hello', False),
436 436 ('hello wip', False),
437 437 ('hello wip: xxx', False),
438 438 ('[wip] hello', True),
439 439 ('[wip] hello', True),
440 440 ('wip: hello', True),
441 441 ('wip hello', True),
442 442
443 443 ])
444 444 def test_wip_title_marker(self, pull_request, title, has_wip):
445 445 pull_request.title = title
446 446 assert pull_request.work_in_progress == has_wip
447 447
448 448
@pytest.mark.usefixtures('config_stub')
class TestIntegrationMerge(object):
    """Integration-level merge tests: merging a PR is implemented as a push
    to the target repository and must interact with the vcs push hooks."""

    @pytest.mark.parametrize('extra_config', (
        # fix: use the new v2 hooks protocol config; the old
        # 'vcs.hooks.protocol' http variant is deprecated
        {'vcs.hooks.protocol.v2': 'celery', 'vcs.hooks.direct_calls': False},
    ))
    def test_merge_triggers_push_hooks(
            self, pr_util, user_admin, capture_rcextensions, merge_extras,
            extra_config):
        """A successful merge fires the pre/post push rcextensions hooks."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
            merge_state = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert merge_state.executed
        assert '_pre_push_hook' in capture_rcextensions
        assert '_push_hook' in capture_rcextensions

    def test_merge_can_be_rejected_by_pre_push_hook(
            self, pr_util, user_admin, capture_rcextensions, merge_extras):
        """A RepositoryError raised by the pre-push hook aborts the merge,
        and no push hooks are recorded."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
            pre_pull.side_effect = RepositoryError("Disallow push!")
            merge_status = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert not merge_status.executed
        assert 'pre_push' not in capture_rcextensions
        assert 'post_push' not in capture_rcextensions

    def test_merge_fails_if_target_is_locked(
            self, pr_util, user_regular, merge_extras):
        """Merging into a repository locked by another user must fail."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # lock is held by a *different* user id than the one merging
        locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
        pull_request.target_repo.locked = locked_by
        # TODO: johbo: Check if this can work based on the database, currently
        # all data is pre-computed, that's why just updating the DB is not
        # enough.
        merge_extras['locked_by'] = locked_by
        merge_extras['repository'] = pull_request.target_repo.repo_name
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        Session().commit()
        merge_status = PullRequestModel().merge_repo(
            pull_request, user_regular, extras=merge_extras)
        Session().commit()

        assert not merge_status.executed
510 510
@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
    (False, 1, 0),
    (True, 0, 1),
])
def test_outdated_comments(
        pr_util, use_outdated, inlines_count, outdated_count, config_stub):
    """An inline comment on a file not present in the updated diff either
    stays visible or gets flagged as outdated, depending on the
    use_outdated_comments feature switch."""
    pull_request = pr_util.create_pull_request()
    pr_util.create_inline_comment(file_path='not_in_updated_diff')

    with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
        pr_util.add_one_commit()
        assert_inline_comments(
            pull_request, visible=inlines_count, outdated=outdated_count)
        # the feature switch must have been consulted for this PR
        outdated_comment_mock.assert_called_with(pull_request)
525 525
526 526
@pytest.mark.parametrize('mr_type, expected_msg', [
    (MergeFailureReason.NONE,
     'This pull request can be automatically merged.'),
    (MergeFailureReason.UNKNOWN,
     'This pull request cannot be merged because of an unhandled exception. CRASH'),
    (MergeFailureReason.MERGE_FAILED,
     'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
    (MergeFailureReason.PUSH_FAILED,
     'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
    (MergeFailureReason.TARGET_IS_NOT_HEAD,
     'This pull request cannot be merged because the target `ref_name` is not a head.'),
    (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
     'This pull request cannot be merged because the source contains more branches than the target.'),
    (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
     'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
    (MergeFailureReason.TARGET_IS_LOCKED,
     'This pull request cannot be merged because the target repository is locked by user:123.'),
    (MergeFailureReason.MISSING_TARGET_REF,
     'This pull request cannot be merged because the target reference `ref_name` is missing.'),
    (MergeFailureReason.MISSING_SOURCE_REF,
     'This pull request cannot be merged because the source reference `ref_name` is missing.'),
    (MergeFailureReason.SUBREPO_MERGE_FAILED,
     'This pull request cannot be merged because of conflicts related to sub repositories.'),

])
def test_merge_response_message(mr_type, expected_msg):
    """Each MergeFailureReason maps to its human-readable status message,
    with placeholders filled in from the response metadata."""
    merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
    # metadata supplies the values interpolated into the status messages
    metadata = {
        'unresolved_files': 'CONFLICT_FILE',
        'exception': "CRASH",
        'target': 'some-repo',
        'merge_commit': 'merge_commit',
        'target_ref': merge_ref,
        'source_ref': merge_ref,
        'heads': ','.join(['a', 'b', 'c']),
        'locked_by': 'user:123'
    }

    merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
    assert merge_response.merge_status_message == expected_msg
567 567
568 568
@pytest.fixture()
def merge_extras(user_regular):
    """
    Context for the vcs operation when running a merge.
    """
    # mirrors the 'extras' dict RhodeCode hands to vcs hooks during a push;
    # values are fakes, individual tests override e.g. 'repository' as needed
    extras = {
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'user_id': user_regular.user_id,
        'action': 'push',
        'repository': 'fake_target_repo_name',
        'scm': 'git',
        'config': 'fake_config_ini_path',
        'repo_store': '',
        'make_lock': None,
        'locked_by': [None, None, None],
        'server_url': 'http://test.example.com:5000',
        'hooks': ['push', 'pull'],
        'is_shadow_repo': False,
    }
    return extras
590 590
591 591
@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):
    """Checks how inline comments are re-positioned or flagged as outdated
    when a pull request is updated with new commits."""

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        # force the outdated-comments feature on for every test in this class
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()

        @request.addfinalizer
        def cleanup():
            config_patch.stop()

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        """A comment on a file untouched by the update stays visible."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
            {'message': 'c', 'added': [FileNode(b'file_c', b'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        # the new commit only adds file_c; file_b's diff is unchanged
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        """A change above the commented line shifts the comment's line
        number but does not flag it as outdated."""
        original_content = b''.join((b'line %d\n' % x for x in range(1, 11)))
        updated_content = b'new_line_at_top\n' + original_content
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)
        # one line was inserted above, so the comment moved from n8 to n9
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        """A change appended below the commented line leaves the comment
        visible and at its original position."""
        original_content = b''.join([b'line %d\n' % x for x in range(10)])
        updated_content = original_content + b'new_line_at_end\n'
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        """A change within the diff context around the commented line flags
        the comment as outdated."""
        base_lines = [b'line %d\n' % x for x in range(1, 13)]
        change_lines = list(base_lines)
        change_lines.insert(6, b'line 6a added\n')

        # Changes on the last line of sight
        update_lines = list(change_lines)
        update_lines[0] = b'line 1 changed\n'
        update_lines[-1] = b'line 12 changed\n'

        def file_b(lines):
            # helper building the test file node from a list of byte lines
            return FileNode(b'file_b', b''.join(lines))

        commits = [
            {'message': 'a', 'added': [file_b(base_lines)]},
            {'message': 'b', 'changed': [file_b(change_lines)]},
            {'message': 'c', 'changed': [file_b(update_lines)]},
        ]

        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)

    # NOTE(review): the b'removed' entry in ids looks like it should be a
    # plain str like its sibling — confirm before changing
    @pytest.mark.parametrize("change, content", [
        ('changed', b'changed\n'),
        ('removed', b''),
    ], ids=['changed', b'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        """Changing or removing the commented file flags the comment as
        outdated."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
            {'message': 'c', change: [FileNode(b'file_b', content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)
697 697
698 698
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):
    """Verifies the added/modified/removed file summary computed when a
    pull request is updated with new commits."""

    def test_no_changes_on_unchanged_diff(self, pr_util):
        """Adding an unrelated file only reports that file as added."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode(b'file_b', b'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode(b'file_c', b'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR adding new file file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=['file_c'],
            modified=[],
            removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        """A modification followed by its rollback reports no changes."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode(b'file_b', b'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode(b'file_b', b'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode(b'file_b', b'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR modifying file file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_b'],
            removed=[])

        # move the head again to d, which rollbacks change,
        # meaning we should indicate no changes
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        """Changing every PR file reports all of them as modified."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode(b'file_a', b'test_content a\n'),
                FileNode(b'file_b', b'test_content b\n'),
                FileNode(b'file_c', b'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode(b'file_a', b'test_content a changed\n'),
                FileNode(b'file_b', b'test_content b changed\n'),
                FileNode(b'file_c', b'test_content c changed\n')]},
        ]
        # open a PR from a to b, changing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        """Removing every PR file reports all of them as removed."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode(b'file_a', b'test_content a\n'),
                FileNode(b'file_b', b'test_content b\n'),
                FileNode(b'file_c', b'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode(b'file_a', b'test_content a changed\n'),
                FileNode(b'file_b', b'test_content b changed\n'),
                FileNode(b'file_c', b'test_content c changed\n')]},
        ]
        # open a PR from a to b, removing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])
807 807
808 808
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """Updating a PR after new source commits records one version snapshot."""
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()

    pr_model = PullRequestModel()
    pr_model.update_commits(pull_request, pull_request.author)

    # exactly one version entry must exist after the update
    versions = pr_model.get_versions(pull_request)
    assert len(versions) == 1
818 818
819 819
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """No version snapshot is created when the source has not changed."""
    pr_model = PullRequestModel()
    pull_request = pr_util.create_pull_request()

    # no update to the source repository happened, so this is a no-op
    pr_model.update_commits(pull_request, pull_request.author)

    assert len(pr_model.get_versions(pull_request)) == 0
827 827
828 828
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """Comments made before an update get attached to the version snapshot
    created by that update."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model.update_commits(pull_request, pull_request.author)

    # Expect that the comment is linked to the pr version now
    assert comment.pull_request_version == model.get_versions(pull_request)[0]
839 839
840 840
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """Updating a PR posts a status-change comment summarizing the changed
    commits and files."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    update_response = model.update_commits(pull_request, pull_request.author)

    commit_id = update_response.common_ancestor_id
    # Expect to find a new comment about the change
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c-{}-92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    ).format(commit_id[:12])
    # the newest (last modified) comment is the auto-generated one
    pull_request_comments = sorted(
        pull_request.comments, key=lambda c: c.modified_at)
    update_comment = pull_request_comments[-1]
    assert update_comment.text == expected_message
872 872
873 873
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """A version snapshot copies all relevant attributes of the PR; only
    created_on is regenerated."""
    pull_request = pr_util.create_pull_request()

    # Avoiding default values
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Remember automatic values
    created_on = pull_request.created_on
    updated_on = pull_request.updated_on

    # Create a new version of the pull request
    version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # Check attributes
    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # versions get updated created_on
    assert version.created_on != created_on

    assert version.updated_on == updated_on
    assert version.user_id == pull_request.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert version.last_merge_status == pull_request.last_merge_status
    assert version.merge_rev == pull_request.merge_rev
    assert version.pull_request == pull_request
911 911
912 912
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """``_link_comments_to_version`` must not re-link comments that already
    belong to an older pull request version."""
    old_version = pr_util.create_version_of_pull_request()
    linked_comment = pr_util.create_comment(linked_to=old_version)
    free_comment = pr_util.create_comment()
    new_version = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(new_version)
    Session().commit()

    # only the previously unlinked comment is attached to the new version
    assert (
        free_comment.pull_request_version_id ==
        new_version.pull_request_version_id)
    # the comment linked to the old version stays untouched
    assert (
        linked_comment.pull_request_version_id ==
        old_version.pull_request_version_id)
    assert (
        free_comment.pull_request_version_id !=
        linked_comment.pull_request_version_id)
932 932
933 933
def test_calculate_commits():
    """``_calculate_commit_id_changes`` partitions commit ids into
    added/common/removed and reports the union as ``total``."""
    previous_ids = [1, 2, 3]
    current_ids = [1, 3, 4, 5]

    change = PullRequestModel()._calculate_commit_id_changes(
        previous_ids, current_ids)

    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]
942 942
943 943
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """Assert the number of visible and/or outdated inline comments of a PR.

    Only the counts passed as non-None keyword arguments are checked.
    """
    repo_id = pull_request.target_repo.repo_id
    if visible is not None:
        comments = CommentsModel().get_inline_comments(
            repo_id, pull_request=pull_request)
        flat_comments = CommentsModel().get_inline_comments_as_list(comments)
        assert len(flat_comments) == visible
    if outdated is not None:
        outdated_found = CommentsModel().get_outdated_comments(
            repo_id, pull_request)
        assert len(outdated_found) == outdated
955 955
956 956
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """Assert the file changes between the original PR version and the PR's
    current state match the expected added/modified/removed file lists."""
    pr_versions = PullRequestModel().get_versions(pull_request)
    # always use first version, ie original PR to calculate changes
    pull_request_version = pr_versions[0]
    old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
        pull_request, pull_request_version)
    file_changes = PullRequestModel()._calculate_file_changes(
        old_diff_data, new_diff_data)

    assert added == file_changes.added, \
        'expected added:%s vs value:%s' % (added, file_changes.added)
    assert modified == file_changes.modified, \
        'expected modified:%s vs value:%s' % (modified, file_changes.modified)
    assert removed == file_changes.removed, \
        'expected removed:%s vs value:%s' % (removed, file_changes.removed)
973 973
974 974
def outdated_comments_patcher(use_outdated=True):
    """Patch ``CommentsModel.use_outdated_comments`` to a fixed boolean.

    Returns a mock patcher, usable as a context manager or decorator.
    """
    patcher = mock.patch.object(
        CommentsModel,
        'use_outdated_comments',
        return_value=use_outdated,
    )
    return patcher
General Comments 0
You need to be logged in to leave comments. Login now