feat(repo_path-config): moved main storage location path into ini file. Fixes: RCCE-61
ilin.s -
r5356:99a91100 default
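This change removes the `allow_repo_location_change` toggle from both bundled ini templates and introduces an explicit `repo_store.path` option under `[app:main]`, so the main repository storage location is now configured in the ini file itself. A minimal sketch of the resulting section, using the default value from this diff (adjust the path to your own storage location):

    [app:main]
    ; Filesystem location where repositories should be stored
    repo_store.path = /var/opt/rhodecode_repo_store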

The requested changes are too big and the content was truncated below.

@@ -1,871 +1,871 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = true
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34 ; ##################################################
35 35 ; WAITRESS WSGI SERVER - Recommended for Development
36 36 ; ##################################################
37 37
38 38 ; use server type
39 39 use = egg:waitress#main
40 40
41 41 ; number of worker threads
42 42 threads = 5
43 43
44 44 ; MAX BODY SIZE 100GB
45 45 max_request_body_size = 107374182400
46 46
47 47 ; Use poll instead of select, fixes file descriptors limits problems.
48 48 ; May not work on old windows systems.
49 49 asyncore_use_poll = true
50 50
51 51
52 52 ; ###########################
53 53 ; GUNICORN APPLICATION SERVER
54 54 ; ###########################
55 55
56 56 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
57 57
58 58 ; Module to use, this setting shouldn't be changed
59 59 #use = egg:gunicorn#main
60 60
61 61 ; Prefix middleware for RhodeCode.
62 62 ; recommended when using proxy setup.
63 63 ; allows to set RhodeCode under a prefix in server.
64 64 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
65 65 ; And set your prefix like: `prefix = /custom_prefix`
66 66 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
67 67 ; to make your cookies only work on prefix url
68 68 [filter:proxy-prefix]
69 69 use = egg:PasteDeploy#prefix
70 70 prefix = /
71 71
72 72 [app:main]
73 73 ; The %(here)s variable will be replaced with the absolute path of parent directory
74 74 ; of this file
75 75 ; Each option in the app:main section can be overridden by an environment variable
76 76 ;
77 77 ;To override an option:
78 78 ;
79 79 ;RC_<KeyName>
80 80 ;Everything should be uppercase, . and - should be replaced by _.
81 81 ;For example, if you have these configuration settings:
82 82 ;rc_cache.repo_object.backend = foo
83 83 ;can be overridden by
84 84 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
85 85
86 86 use = egg:rhodecode-enterprise-ce
87 87
88 88 ; enable proxy prefix middleware, defined above
89 89 #filter-with = proxy-prefix
90 90
91 91 ; #############
92 92 ; DEBUG OPTIONS
93 93 ; #############
94 94
95 95 pyramid.reload_templates = true
96 96
97 97 # During development the we want to have the debug toolbar enabled
98 98 pyramid.includes =
99 99 pyramid_debugtoolbar
100 100
101 101 debugtoolbar.hosts = 0.0.0.0/0
102 102 debugtoolbar.exclude_prefixes =
103 103 /css
104 104 /fonts
105 105 /images
106 106 /js
107 107
108 108 ## RHODECODE PLUGINS ##
109 109 rhodecode.includes =
110 110 rhodecode.api
111 111
112 112
113 113 # api prefix url
114 114 rhodecode.api.url = /_admin/api
115 115
116 116 ; enable debug style page
117 117 debug_style = true
118 118
119 119 ; #################
120 120 ; END DEBUG OPTIONS
121 121 ; #################
122 122
123 123 ; encryption key used to encrypt social plugin tokens,
124 124 ; remote_urls with credentials etc, if not set it defaults to
125 125 ; `beaker.session.secret`
126 126 #rhodecode.encrypted_values.secret =
127 127
128 128 ; decryption strict mode (enabled by default). It controls if decryption raises
129 129 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
130 130 #rhodecode.encrypted_values.strict = false
131 131
132 132 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
133 133 ; fernet is safer, and we strongly recommend switching to it.
134 134 ; Due to backward compatibility aes is used as default.
135 135 #rhodecode.encrypted_values.algorithm = fernet
136 136
137 137 ; Return gzipped responses from RhodeCode (static files/application)
138 138 gzip_responses = false
139 139
140 140 ; Auto-generate javascript routes file on startup
141 141 generate_js_files = false
142 142
143 143 ; System global default language.
144 144 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
145 145 lang = en
146 146
147 147 ; Perform a full repository scan and import on each server start.
148 148 ; Setting this to true could lead to very long startup time.
149 149 startup.import_repos = false
150 150
151 151 ; URL at which the application is running. This is used for Bootstrapping
152 152 ; requests in context when no web request is available. Used in ishell, or
153 153 ; SSH calls. Set this for events to receive proper url for SSH calls.
154 154 app.base_url = http://rhodecode.local
155 155
156 156 ; Host at which the Service API is running.
157 157 app.service_api.host = http://rhodecode.local:10020
158 158
159 159 ; Secret for Service API authentication.
160 160 app.service_api.token =
161 161
162 162 ; Unique application ID. Should be a random unique string for security.
163 163 app_instance_uuid = rc-production
164 164
165 165 ; Cut off limit for large diffs (size in bytes). If overall diff size on
166 166 ; commit, or pull request exceeds this limit this diff will be displayed
167 167 ; partially. E.g 512000 == 512Kb
168 168 cut_off_limit_diff = 512000
169 169
170 170 ; Cut off limit for large files inside diffs (size in bytes). Each individual
171 171 ; file inside diff which exceeds this limit will be displayed partially.
172 172 ; E.g 128000 == 128Kb
173 173 cut_off_limit_file = 128000
174 174
175 175 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
176 176 vcs_full_cache = true
177 177
178 178 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
179 179 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
180 180 force_https = false
181 181
182 182 ; use Strict-Transport-Security headers
183 183 use_htsts = false
184 184
185 185 ; Set to true if your repos are exposed using the dumb protocol
186 186 git_update_server_info = false
187 187
188 188 ; RSS/ATOM feed options
189 189 rss_cut_off_limit = 256000
190 190 rss_items_per_page = 10
191 191 rss_include_diff = false
192 192
193 193 ; gist URL alias, used to create nicer urls for gist. This should be an
194 194 ; url that does rewrites to _admin/gists/{gistid}.
195 195 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
196 196 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
197 197 gist_alias_url =
198 198
199 199 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
200 200 ; used for access.
201 201 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
202 202 ; came from the logged-in user who owns this authentication token.
203 203 ; Additionally, @TOKEN syntax can be used to bind the view to a specific
204 204 ; authentication token. Such a view would only be accessible when used together
205 205 ; with this authentication token
206 206 ; list of all views can be found under `/_admin/permissions/auth_token_access`
207 207 ; The list should be "," separated and on a single line.
208 208 ; Most common views to enable:
209 209
210 210 # RepoCommitsView:repo_commit_download
211 211 # RepoCommitsView:repo_commit_patch
212 212 # RepoCommitsView:repo_commit_raw
213 213 # RepoCommitsView:repo_commit_raw@TOKEN
214 214 # RepoFilesView:repo_files_diff
215 215 # RepoFilesView:repo_archivefile
216 216 # RepoFilesView:repo_file_raw
217 217 # GistView:*
218 218 api_access_controllers_whitelist =
219 219
220 220 ; Default encoding used to convert from and to unicode
221 221 ; can be also a comma separated list of encoding in case of mixed encodings
222 222 default_encoding = UTF-8
223 223
224 224 ; instance-id prefix
225 225 ; a prefix key for this instance used for cache invalidation when running
226 226 ; multiple instances of RhodeCode, make sure it's globally unique for
227 227 ; all running RhodeCode instances. Leave empty if you don't use it
228 228 instance_id =
229 229
230 230 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
231 231 ; of an authentication plugin even if it is disabled by its settings.
232 232 ; This could be useful if you are unable to log in to the system due to broken
233 233 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
234 234 ; module to log in again and fix the settings.
235 235 ; Available builtin plugin IDs (hash is part of the ID):
236 236 ; egg:rhodecode-enterprise-ce#rhodecode
237 237 ; egg:rhodecode-enterprise-ce#pam
238 238 ; egg:rhodecode-enterprise-ce#ldap
239 239 ; egg:rhodecode-enterprise-ce#jasig_cas
240 240 ; egg:rhodecode-enterprise-ce#headers
241 241 ; egg:rhodecode-enterprise-ce#crowd
242 242
243 243 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
244 244
245 245 ; Flag to control loading of legacy plugins in py:/path format
246 246 auth_plugin.import_legacy_plugins = true
247 247
248 248 ; alternative return HTTP header for failed authentication. Default HTTP
249 249 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
250 250 ; handling that causing a series of failed authentication calls.
251 251 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
252 252 ; This will be served instead of default 401 on bad authentication
253 253 auth_ret_code =
254 254
255 255 ; use special detection method when serving auth_ret_code, instead of serving
256 256 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
257 257 ; and then serve auth_ret_code to clients
258 258 auth_ret_code_detection = false
259 259
260 260 ; locking return code. When repository is locked return this HTTP code. 2XX
261 261 ; codes don't break the transactions while 4XX codes do
262 262 lock_ret_code = 423
263 263
264 ; allows to change the repository location in settings page
265 allow_repo_location_change = true
264 ; Filesystem location where repositories should be stored
265 repo_store.path = /var/opt/rhodecode_repo_store
266 266
267 267 ; allows to setup custom hooks in settings page
268 268 allow_custom_hooks_settings = true
269 269
270 270 ; Generated license token required for EE edition license.
271 271 ; New generated token value can be found in Admin > settings > license page.
272 272 license_token =
273 273
274 274 ; This flag hides sensitive information on the license page such as token, and license data
275 275 license.hide_license_info = false
276 276
277 277 ; supervisor connection uri, for managing supervisor and logs.
278 278 supervisor.uri =
279 279
280 280 ; supervisord group name/id we only want this RC instance to handle
281 281 supervisor.group_id = dev
282 282
283 283 ; Display extended labs settings
284 284 labs_settings_active = true
285 285
286 286 ; Custom exception store path, defaults to TMPDIR
287 287 ; This is used to store exception from RhodeCode in shared directory
288 288 #exception_tracker.store_path =
289 289
290 290 ; Send email with exception details when it happens
291 291 #exception_tracker.send_email = false
292 292
293 293 ; Comma separated list of recipients for exception emails,
294 294 ; e.g admin@rhodecode.com,devops@rhodecode.com
295 295 ; Can be left empty, then emails will be sent to ALL super-admins
296 296 #exception_tracker.send_email_recipients =
297 297
298 298 ; optional prefix to Add to email Subject
299 299 #exception_tracker.email_prefix = [RHODECODE ERROR]
300 300
301 301 ; File store configuration. This is used to store and serve uploaded files
302 302 file_store.enabled = true
303 303
304 304 ; Storage backend, available options are: local
305 305 file_store.backend = local
306 306
307 307 ; path to store the uploaded binaries
308 308 file_store.storage_path = %(here)s/data/file_store
309 309
310 310 ; Uncomment and set this path to control settings for archive download cache.
311 311 ; Generated repo archives will be cached at this location
312 312 ; and served from the cache during subsequent requests for the same archive of
313 313 ; the repository. This path is important to be shared across filesystems and with
314 314 ; RhodeCode and vcsserver
315 315
316 316 ; Default is $cache_dir/archive_cache if not set
317 317 archive_cache.store_dir = %(here)s/data/archive_cache
318 318
319 319 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
320 320 archive_cache.cache_size_gb = 10
321 321
322 322 ; By default cache uses sharding technique, this specifies how many shards are there
323 323 archive_cache.cache_shards = 10
324 324
325 325 ; #############
326 326 ; CELERY CONFIG
327 327 ; #############
328 328
329 329 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
330 330
331 331 use_celery = false
332 332
333 333 ; path to store schedule database
334 334 #celerybeat-schedule.path =
335 335
336 336 ; connection url to the message broker (default redis)
337 337 celery.broker_url = redis://redis:6379/8
338 338
339 339 ; results backend to get results for (default redis)
340 340 celery.result_backend = redis://redis:6379/8
341 341
342 342 ; rabbitmq example
343 343 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
344 344
345 345 ; maximum tasks to execute before worker restart
346 346 celery.max_tasks_per_child = 20
347 347
348 348 ; tasks will never be sent to the queue, but executed locally instead.
349 349 celery.task_always_eager = false
350 350
351 351 ; #############
352 352 ; DOGPILE CACHE
353 353 ; #############
354 354
355 355 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
356 356 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
357 357 cache_dir = %(here)s/data
358 358
359 359 ; *********************************************
360 360 ; `sql_cache_short` cache for heavy SQL queries
361 361 ; Only supported backend is `memory_lru`
362 362 ; *********************************************
363 363 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
364 364 rc_cache.sql_cache_short.expiration_time = 30
365 365
366 366
367 367 ; *****************************************************
368 368 ; `cache_repo_longterm` cache for repo object instances
369 369 ; Only supported backend is `memory_lru`
370 370 ; *****************************************************
371 371 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
372 372 ; by default we use 30 Days, cache is still invalidated on push
373 373 rc_cache.cache_repo_longterm.expiration_time = 2592000
374 374 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
375 375 rc_cache.cache_repo_longterm.max_size = 10000
376 376
377 377
378 378 ; *********************************************
379 379 ; `cache_general` cache for general purpose use
380 380 ; for simplicity use rc.file_namespace backend,
381 381 ; for performance and scale use rc.redis
382 382 ; *********************************************
383 383 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
384 384 rc_cache.cache_general.expiration_time = 43200
385 385 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
386 386 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
387 387
388 388 ; alternative `cache_general` redis backend with distributed lock
389 389 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
390 390 #rc_cache.cache_general.expiration_time = 300
391 391
392 392 ; redis_expiration_time needs to be greater than expiration_time
393 393 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
394 394
395 395 #rc_cache.cache_general.arguments.host = localhost
396 396 #rc_cache.cache_general.arguments.port = 6379
397 397 #rc_cache.cache_general.arguments.db = 0
398 398 #rc_cache.cache_general.arguments.socket_timeout = 30
399 399 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
400 400 #rc_cache.cache_general.arguments.distributed_lock = true
401 401
402 402 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
403 403 #rc_cache.cache_general.arguments.lock_auto_renewal = true
404 404
405 405 ; *************************************************
406 406 ; `cache_perms` cache for permission tree, auth TTL
407 407 ; for simplicity use rc.file_namespace backend,
408 408 ; for performance and scale use rc.redis
409 409 ; *************************************************
410 410 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
411 411 rc_cache.cache_perms.expiration_time = 3600
412 412 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
413 413 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
414 414
415 415 ; alternative `cache_perms` redis backend with distributed lock
416 416 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
417 417 #rc_cache.cache_perms.expiration_time = 300
418 418
419 419 ; redis_expiration_time needs to be greater than expiration_time
420 420 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
421 421
422 422 #rc_cache.cache_perms.arguments.host = localhost
423 423 #rc_cache.cache_perms.arguments.port = 6379
424 424 #rc_cache.cache_perms.arguments.db = 0
425 425 #rc_cache.cache_perms.arguments.socket_timeout = 30
426 426 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
427 427 #rc_cache.cache_perms.arguments.distributed_lock = true
428 428
429 429 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
430 430 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
431 431
432 432 ; ***************************************************
433 433 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
434 434 ; for simplicity use rc.file_namespace backend,
435 435 ; for performance and scale use rc.redis
436 436 ; ***************************************************
437 437 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
438 438 rc_cache.cache_repo.expiration_time = 2592000
439 439 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
440 440 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
441 441
442 442 ; alternative `cache_repo` redis backend with distributed lock
443 443 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
444 444 #rc_cache.cache_repo.expiration_time = 2592000
445 445
446 446 ; redis_expiration_time needs to be greater than expiration_time
447 447 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
448 448
449 449 #rc_cache.cache_repo.arguments.host = localhost
450 450 #rc_cache.cache_repo.arguments.port = 6379
451 451 #rc_cache.cache_repo.arguments.db = 1
452 452 #rc_cache.cache_repo.arguments.socket_timeout = 30
453 453 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
454 454 #rc_cache.cache_repo.arguments.distributed_lock = true
455 455
456 456 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
457 457 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
458 458
459 459 ; ##############
460 460 ; BEAKER SESSION
461 461 ; ##############
462 462
463 463 ; beaker.session.type is type of storage options for the logged users sessions. Current allowed
464 464 ; types are file, ext:redis, ext:database, ext:memcached
465 465 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
466 466 beaker.session.type = file
467 467 beaker.session.data_dir = %(here)s/data/sessions
468 468
469 469 ; Redis based sessions
470 470 #beaker.session.type = ext:redis
471 471 #beaker.session.url = redis://127.0.0.1:6379/2
472 472
473 473 ; DB based session, fast, and allows easy management over logged in users
474 474 #beaker.session.type = ext:database
475 475 #beaker.session.table_name = db_session
476 476 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
477 477 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
478 478 #beaker.session.sa.pool_recycle = 3600
479 479 #beaker.session.sa.echo = false
480 480
481 481 beaker.session.key = rhodecode
482 482 beaker.session.secret = develop-rc-uytcxaz
483 483 beaker.session.lock_dir = %(here)s/data/sessions/lock
484 484
485 485 ; Secure encrypted cookie. Requires AES and AES python libraries
486 486 ; you must disable beaker.session.secret to use this
487 487 #beaker.session.encrypt_key = key_for_encryption
488 488 #beaker.session.validate_key = validation_key
489 489
490 490 ; Sets session as invalid (also logging out the user) if it has not been
491 491 ; accessed for the given amount of time in seconds
492 492 beaker.session.timeout = 2592000
493 493 beaker.session.httponly = true
494 494
495 495 ; Path to use for the cookie. Set to prefix if you use prefix middleware
496 496 #beaker.session.cookie_path = /custom_prefix
497 497
498 498 ; Set https secure cookie
499 499 beaker.session.secure = false
500 500
501 501 ; default cookie expiration time in seconds, set to `true` to set expire
502 502 ; at browser close
503 503 #beaker.session.cookie_expires = 3600
504 504
505 505 ; #############################
506 506 ; SEARCH INDEXING CONFIGURATION
507 507 ; #############################
508 508
509 509 ; Full text search indexer is available in rhodecode-tools under
510 510 ; `rhodecode-tools index` command
511 511
512 512 ; WHOOSH Backend, doesn't require additional services to run
513 513 ; it works well with a few dozen repos
514 514 search.module = rhodecode.lib.index.whoosh
515 515 search.location = %(here)s/data/index
516 516
517 517 ; ####################
518 518 ; CHANNELSTREAM CONFIG
519 519 ; ####################
520 520
521 521 ; channelstream enables persistent connections and live notification
522 522 ; in the system. It's also used by the chat system
523 523
524 524 channelstream.enabled = false
525 525
526 526 ; server address for channelstream server on the backend
527 527 channelstream.server = 127.0.0.1:9800
528 528
529 529 ; location of the channelstream server from outside world
530 530 ; use ws:// for http or wss:// for https. This address needs to be handled
531 531 ; by external HTTP server such as Nginx or Apache
532 532 ; see Nginx/Apache configuration examples in our docs
533 533 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
534 534 channelstream.secret = secret
535 535 channelstream.history.location = %(here)s/channelstream_history
536 536
537 537 ; Internal application path that Javascript uses to connect into.
538 538 ; If you use proxy-prefix the prefix should be added before /_channelstream
539 539 channelstream.proxy_path = /_channelstream
540 540
541 541
542 542 ; ##############################
543 543 ; MAIN RHODECODE DATABASE CONFIG
544 544 ; ##############################
545 545
546 546 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
547 547 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
548 548 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
549 549 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
550 550 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
551 551
552 552 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
553 553
554 554 ; see sqlalchemy docs for other advanced settings
555 555 ; print the sql statements to output
556 556 sqlalchemy.db1.echo = false
557 557
558 558 ; recycle the connections after this amount of seconds
559 559 sqlalchemy.db1.pool_recycle = 3600
560 560
561 561 ; the number of connections to keep open inside the connection pool.
562 562 ; 0 indicates no limit
563 563 ; the general calculus with gevent is:
564 564 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
565 565 ; then increase pool size + max overflow so that they add up to 500.
566 566 #sqlalchemy.db1.pool_size = 5
567 567
568 568 ; The number of connections to allow in connection pool "overflow", that is
569 569 ; connections that can be opened above and beyond the pool_size setting,
570 570 ; which defaults to five.
571 571 #sqlalchemy.db1.max_overflow = 10
572 572
573 573 ; Connection check ping, used to detect broken database connections
574 574 ; could be enabled to better handle cases if MySQL has gone away errors
575 575 #sqlalchemy.db1.ping_connection = true
576 576
577 577 ; ##########
578 578 ; VCS CONFIG
579 579 ; ##########
580 580 vcs.server.enable = true
581 581 vcs.server = localhost:9900
582 582
583 583 ; Web server connectivity protocol, responsible for web based VCS operations
584 584 ; Available protocols are:
585 585 ; `http` - use http-rpc backend (default)
586 586 vcs.server.protocol = http
587 587
588 588 ; Push/Pull operations protocol, available options are:
589 589 ; `http` - use http-rpc backend (default)
590 590 vcs.scm_app_implementation = http
591 591
592 592 ; Push/Pull operations hooks protocol, available options are:
593 593 ; `http` - use http-rpc backend (default)
594 594 vcs.hooks.protocol = http
595 595
596 596 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
597 597 ; accessible via network.
598 598 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
599 599 vcs.hooks.host = *
600 600
601 601 ; Start VCSServer with this instance as a subprocess, useful for development
602 602 vcs.start_server = false
603 603
604 604 ; List of enabled VCS backends, available options are:
605 605 ; `hg` - mercurial
606 606 ; `git` - git
607 607 ; `svn` - subversion
608 608 vcs.backends = hg, git, svn
609 609
610 610 ; Wait this number of seconds before killing connection to the vcsserver
611 611 vcs.connection_timeout = 3600
612 612
613 613 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
614 614 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
615 615 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
616 616 #vcs.svn.compatible_version = 1.8
617 617
618 618 ; Cache flag to cache vcsserver remote calls locally
619 619 ; It uses cache_region `cache_repo`
620 620 vcs.methods.cache = true
621 621
622 622 ; ####################################################
623 623 ; Subversion proxy support (mod_dav_svn)
624 624 ; Maps RhodeCode repo groups into SVN paths for Apache
625 625 ; ####################################################
626 626
627 627 ; Enable or disable the config file generation.
628 628 svn.proxy.generate_config = false
629 629
630 630 ; Generate config file with `SVNListParentPath` set to `On`.
631 631 svn.proxy.list_parent_path = true
632 632
633 633 ; Set location and file name of generated config file.
634 634 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
635 635
636 636 ; alternative mod_dav config template. This needs to be a valid mako template
637 637 ; Example template can be found in the source code:
638 638 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
639 639 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
640 640
641 641 ; Used as a prefix to the `Location` block in the generated config file.
642 642 ; In most cases it should be set to `/`.
643 643 svn.proxy.location_root = /
644 644
645 645 ; Command to reload the mod dav svn configuration on change.
646 646 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
647 647 ; Make sure user who runs RhodeCode process is allowed to reload Apache
648 648 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
649 649
650 650 ; If the timeout expires before the reload command finishes, the command will
651 651 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
652 652 #svn.proxy.reload_timeout = 10
653 653
654 654 ; ####################
655 655 ; SSH Support Settings
656 656 ; ####################
657 657
658 658 ; Defines if a custom authorized_keys file should be created and written on
659 659 ; any change to user ssh keys. Setting this to false also disables the possibility
660 660 ; of adding SSH keys by users from web interface. Super admins can still
661 661 ; manage SSH Keys.
662 662 ssh.generate_authorized_keyfile = false
663 663
664 664 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
665 665 # ssh.authorized_keys_ssh_opts =
666 666
667 667 ; Path to the authorized_keys file where the generated entries are placed.
668 668 ; It is possible to have multiple key files specified in `sshd_config` e.g.
669 669 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
670 670 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
671 671
672 672 ; Command to execute the SSH wrapper. The binary is available in the
673 673 ; RhodeCode installation directory.
674 674 ; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
675 675 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
676 676
677 677 ; Allow shell when executing the ssh-wrapper command
678 678 ssh.wrapper_cmd_allow_shell = false
679 679
680 680 ; Enables logging, and detailed output sent back to the client during SSH
681 681 ; operations. Useful for debugging, shouldn't be used in production.
682 682 ssh.enable_debug_logging = true
683 683
684 684 ; Paths to binary executable, by default they are the names, but we can
685 685 ; override them if we want to use a custom one
686 686 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
687 687 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
688 688 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
689 689
690 690 ; Enables SSH key generator web interface. Disabling this still allows users
691 691 ; to add their own keys.
692 692 ssh.enable_ui_key_generator = true
693 693
694 694
695 695 ; #################
696 696 ; APPENLIGHT CONFIG
697 697 ; #################
698 698
699 699 ; Appenlight is tailored to work with RhodeCode, see
700 700 ; http://appenlight.rhodecode.com for details how to obtain an account
701 701
702 702 ; Appenlight integration enabled
703 703 #appenlight = false
704 704
705 705 #appenlight.server_url = https://api.appenlight.com
706 706 #appenlight.api_key = YOUR_API_KEY
707 707 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
708 708
709 709 ; used for JS client
710 710 #appenlight.api_public_key = YOUR_API_PUBLIC_KEY
711 711
712 712 ; TWEAK AMOUNT OF INFO SENT HERE
713 713
714 714 ; enables 404 error logging (default False)
715 715 #appenlight.report_404 = false
716 716
717 717 ; time in seconds after request is considered being slow (default 1)
718 718 #appenlight.slow_request_time = 1
719 719
720 720 ; record slow requests in application
721 721 ; (needs to be enabled for slow datastore recording and time tracking)
722 722 #appenlight.slow_requests = true
723 723
724 724 ; enable hooking to application loggers
725 725 #appenlight.logging = true
726 726
727 727 ; minimum log level for log capture
728 728 #appenlight.logging.level = WARNING
729 729
730 730 ; send logs only from erroneous/slow requests
731 731 ; (saves API quota for intensive logging)
732 732 #appenlight.logging_on_error = false
733 733
734 734 ; list of additional keywords that should be grabbed from environ object
735 735 ; can be string with comma separated list of words in lowercase
736 736 ; (by default client will always send following info:
737 737 ; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
738 738 ; start with HTTP*). This list can be extended with additional keywords here
739 739 #appenlight.environ_keys_whitelist =
740 740
741 741 ; list of keywords that should be blanked from request object
742 742 ; can be string with comma separated list of words in lowercase
743 743 ; (by default client will always blank keys that contain following words
744 744 ; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
745 745 ; this list can be extended with additional keywords set here
746 746 #appenlight.request_keys_blacklist =
747 747
748 748 ; list of namespaces that should be ignored when gathering log entries
749 749 ; can be string with comma separated list of namespaces
750 750 ; (by default the client ignores own entries: appenlight_client.client)
751 751 #appenlight.log_namespace_blacklist =
752 752
753 753 ; Statsd client config, this is used to send metrics to statsd
754 754 ; We recommend setting statsd_exported and scrape them using Prometheus
755 755 #statsd.enabled = false
756 756 #statsd.statsd_host = 0.0.0.0
757 757 #statsd.statsd_port = 8125
758 758 #statsd.statsd_prefix =
759 759 #statsd.statsd_ipv6 = false
760 760
761 761 ; configure logging automatically at server startup; set to false
762 762 ; to use the custom logging config below.
763 763 ; RC_LOGGING_FORMATTER
764 764 ; RC_LOGGING_LEVEL
765 765 ; env variables can control the settings for logging in case of autoconfigure
766 766
767 767 #logging.autoconfigure = true
768 768
769 769 ; specify your own custom logging config file to configure logging
770 770 #logging.logging_conf_file = /path/to/custom_logging.ini
771 771
772 772 ; Dummy marker to add new entries after.
773 773 ; Add any custom entries below. Please don't remove this marker.
774 774 custom.conf = 1
775 775
776 776
777 777 ; #####################
778 778 ; LOGGING CONFIGURATION
779 779 ; #####################
780 780
781 781 [loggers]
782 782 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
783 783
784 784 [handlers]
785 785 keys = console, console_sql
786 786
787 787 [formatters]
788 788 keys = generic, json, color_formatter, color_formatter_sql
789 789
790 790 ; #######
791 791 ; LOGGERS
792 792 ; #######
793 793 [logger_root]
794 794 level = NOTSET
795 795 handlers = console
796 796
797 797 [logger_sqlalchemy]
798 798 level = INFO
799 799 handlers = console_sql
800 800 qualname = sqlalchemy.engine
801 801 propagate = 0
802 802
803 803 [logger_beaker]
804 804 level = DEBUG
805 805 handlers =
806 806 qualname = beaker.container
807 807 propagate = 1
808 808
809 809 [logger_rhodecode]
810 810 level = DEBUG
811 811 handlers =
812 812 qualname = rhodecode
813 813 propagate = 1
814 814
815 815 [logger_ssh_wrapper]
816 816 level = DEBUG
817 817 handlers =
818 818 qualname = ssh_wrapper
819 819 propagate = 1
820 820
821 821 [logger_celery]
822 822 level = DEBUG
823 823 handlers =
824 824 qualname = celery
825 825
826 826
827 827 ; ########
828 828 ; HANDLERS
829 829 ; ########
830 830
831 831 [handler_console]
832 832 class = StreamHandler
833 833 args = (sys.stderr, )
834 834 level = DEBUG
835 835 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
836 836 ; This allows sending properly formatted logs to grafana loki or elasticsearch
837 837 formatter = color_formatter
838 838
839 839 [handler_console_sql]
840 840 ; "level = DEBUG" logs SQL queries and results.
841 841 ; "level = INFO" logs SQL queries.
842 842 ; "level = WARN" logs neither. (Recommended for production systems.)
843 843 class = StreamHandler
844 844 args = (sys.stderr, )
845 845 level = WARN
846 846 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
847 847 ; This allows sending properly formatted logs to grafana loki or elasticsearch
848 848 formatter = color_formatter_sql
849 849
850 850 ; ##########
851 851 ; FORMATTERS
852 852 ; ##########
853 853
854 854 [formatter_generic]
855 855 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
856 856 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
857 857 datefmt = %Y-%m-%d %H:%M:%S
858 858
859 859 [formatter_color_formatter]
860 860 class = rhodecode.lib.logging_formatter.ColorFormatter
861 861 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
862 862 datefmt = %Y-%m-%d %H:%M:%S
863 863
864 864 [formatter_color_formatter_sql]
865 865 class = rhodecode.lib.logging_formatter.ColorFormatterSql
866 866 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
867 867 datefmt = %Y-%m-%d %H:%M:%S
868 868
869 869 [formatter_json]
870 870 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
871 871 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
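The production template below receives the same change. Because every `[app:main]` option can also be overridden through the environment (see the RC_<KeyName> convention documented at the top of the file), the new key should be settable without editing the ini as well; a sketch, assuming the documented uppercase/underscore mapping applies to this key like any other:

    export RC_REPO_STORE_PATH=/var/opt/rhodecode_repo_store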
@@ -1,822 +1,822 b''
1 1
2 2 ; #########################################
3 3 ; RHODECODE COMMUNITY EDITION CONFIGURATION
4 4 ; #########################################
5 5
6 6 [DEFAULT]
7 7 ; Debug flag sets all loggers to debug, and enables request tracking
8 8 debug = false
9 9
10 10 ; ########################################################################
11 11 ; EMAIL CONFIGURATION
12 12 ; These settings will be used by the RhodeCode mailing system
13 13 ; ########################################################################
14 14
15 15 ; prefix all emails subjects with given prefix, helps filtering out emails
16 16 #email_prefix = [RhodeCode]
17 17
18 18 ; email FROM address all mails will be sent
19 19 #app_email_from = rhodecode-noreply@localhost
20 20
21 21 #smtp_server = mail.server.com
22 22 #smtp_username =
23 23 #smtp_password =
24 24 #smtp_port =
25 25 #smtp_use_tls = false
26 26 #smtp_use_ssl = true
27 27
28 28 [server:main]
29 29 ; COMMON HOST/IP CONFIG, This applies mostly to develop setup,
30 30 ; Host port for gunicorn are controlled by gunicorn_conf.py
31 31 host = 127.0.0.1
32 32 port = 10020
33 33
34 34
35 35 ; ###########################
36 36 ; GUNICORN APPLICATION SERVER
37 37 ; ###########################
38 38
39 39 ; run with gunicorn --paste rhodecode.ini --config gunicorn_conf.py
40 40
41 41 ; Module to use, this setting shouldn't be changed
42 42 use = egg:gunicorn#main
43 43
44 44 ; Prefix middleware for RhodeCode.
45 45 ; recommended when using proxy setup.
46 46 ; allows to set RhodeCode under a prefix in server.
47 47 ; eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
48 48 ; And set your prefix like: `prefix = /custom_prefix`
49 49 ; be sure to also set beaker.session.cookie_path = /custom_prefix if you need
50 50 ; to make your cookies only work on prefix url
51 51 [filter:proxy-prefix]
52 52 use = egg:PasteDeploy#prefix
53 53 prefix = /
54 54
55 55 [app:main]
56 56 ; The %(here)s variable will be replaced with the absolute path of parent directory
57 57 ; of this file
58 58 ; Each option in the app:main section can be overridden by an environment variable
59 59 ;
60 60 ;To override an option:
61 61 ;
62 62 ;RC_<KeyName>
63 63 ;Everything should be uppercase, . and - should be replaced by _.
64 64 ;For example, if you have these configuration settings:
65 65 ;rc_cache.repo_object.backend = foo
66 66 ;can be overridden by
67 67 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
68 68
69 69 use = egg:rhodecode-enterprise-ce
70 70
71 71 ; enable proxy prefix middleware, defined above
72 72 #filter-with = proxy-prefix
73 73
74 74 ; encryption key used to encrypt social plugin tokens,
75 75 ; remote_urls with credentials etc, if not set it defaults to
76 76 ; `beaker.session.secret`
77 77 #rhodecode.encrypted_values.secret =
78 78
79 79 ; decryption strict mode (enabled by default). It controls if decryption raises
80 80 ; `SignatureVerificationError` in case of wrong key, or damaged encryption data.
81 81 #rhodecode.encrypted_values.strict = false
82 82
83 83 ; Pick algorithm for encryption. Either fernet (more secure) or aes (default)
84 84 ; fernet is safer, and we strongly recommend switching to it.
85 85 ; Due to backward compatibility aes is used as default.
86 86 #rhodecode.encrypted_values.algorithm = fernet
87 87
88 88 ; Return gzipped responses from RhodeCode (static files/application)
89 89 gzip_responses = false
90 90
91 91 ; Auto-generate javascript routes file on startup
92 92 generate_js_files = false
93 93
94 94 ; System global default language.
95 95 ; All available languages: en (default), be, de, es, fr, it, ja, pl, pt, ru, zh
96 96 lang = en
97 97
98 98 ; Perform a full repository scan and import on each server start.
99 99 ; Setting this to true could lead to very long startup time.
100 100 startup.import_repos = false
101 101
102 102 ; URL at which the application is running. This is used for Bootstrapping
103 103 ; requests in context when no web request is available. Used in ishell, or
104 104 ; SSH calls. Set this for events to receive proper url for SSH calls.
105 105 app.base_url = http://rhodecode.local
106 106
107 107 ; Host at which the Service API is running.
108 108 app.service_api.host = http://rhodecode.local:10020
109 109
110 110 ; Secret for Service API authentication.
111 111 app.service_api.token =
112 112
113 113 ; Unique application ID. Should be a random unique string for security.
114 114 app_instance_uuid = rc-production
115 115
116 116 ; Cut off limit for large diffs (size in bytes). If overall diff size on
117 117 ; commit, or pull request exceeds this limit this diff will be displayed
118 118 ; partially. E.g 512000 == 512Kb
119 119 cut_off_limit_diff = 512000
120 120
121 121 ; Cut off limit for large files inside diffs (size in bytes). Each individual
122 122 ; file inside diff which exceeds this limit will be displayed partially.
123 123 ; E.g 128000 == 128Kb
124 124 cut_off_limit_file = 128000
125 125
126 126 ; Use cached version of vcs repositories everywhere. Recommended to be `true`
127 127 vcs_full_cache = true
128 128
129 129 ; Force https in RhodeCode, fixes https redirects, assumes it's always https.
130 130 ; Normally this is controlled by proper flags sent from http server such as Nginx or Apache
131 131 force_https = false
132 132
133 133 ; use Strict-Transport-Security headers
134 134 use_htsts = false
135 135
136 136 ; Set to true if your repos are exposed using the dumb protocol
137 137 git_update_server_info = false
138 138
139 139 ; RSS/ATOM feed options
140 140 rss_cut_off_limit = 256000
141 141 rss_items_per_page = 10
142 142 rss_include_diff = false
143 143
144 144 ; gist URL alias, used to create nicer urls for gist. This should be an
145 145 ; url that does rewrites to _admin/gists/{gistid}.
146 146 ; example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
147 147 ; RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
148 148 gist_alias_url =
149 149
150 150 ; List of views (using glob pattern syntax) that AUTH TOKENS could be
151 151 ; used for access.
152 152 ; Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
153 153 ; came from the logged-in user who owns this authentication token.
154 154 ; Additionally, @TOKEN syntax can be used to bind the view to a specific
155 155 ; authentication token. Such a view would only be accessible when used together
156 156 ; with this authentication token
157 157 ; list of all views can be found under `/_admin/permissions/auth_token_access`
158 158 ; The list should be "," separated and on a single line.
159 159 ; Most common views to enable:
160 160
161 161 # RepoCommitsView:repo_commit_download
162 162 # RepoCommitsView:repo_commit_patch
163 163 # RepoCommitsView:repo_commit_raw
164 164 # RepoCommitsView:repo_commit_raw@TOKEN
165 165 # RepoFilesView:repo_files_diff
166 166 # RepoFilesView:repo_archivefile
167 167 # RepoFilesView:repo_file_raw
168 168 # GistView:*
169 169 api_access_controllers_whitelist =
170 170
171 171 ; Default encoding used to convert from and to unicode
172 172 ; can be also a comma separated list of encoding in case of mixed encodings
173 173 default_encoding = UTF-8
174 174
175 175 ; instance-id prefix
176 176 ; a prefix key for this instance used for cache invalidation when running
177 177 ; multiple instances of RhodeCode, make sure it's globally unique for
178 178 ; all running RhodeCode instances. Leave empty if you don't use it
179 179 instance_id =
180 180
181 181 ; Fallback authentication plugin. Set this to a plugin ID to force the usage
182 182 ; of an authentication plugin even if it is disabled by its settings.
183 183 ; This could be useful if you are unable to log in to the system due to broken
184 184 ; authentication settings. Then you can enable e.g. the internal RhodeCode auth
185 185 ; module to log in again and fix the settings.
186 186 ; Available builtin plugin IDs (hash is part of the ID):
187 187 ; egg:rhodecode-enterprise-ce#rhodecode
188 188 ; egg:rhodecode-enterprise-ce#pam
189 189 ; egg:rhodecode-enterprise-ce#ldap
190 190 ; egg:rhodecode-enterprise-ce#jasig_cas
191 191 ; egg:rhodecode-enterprise-ce#headers
192 192 ; egg:rhodecode-enterprise-ce#crowd
193 193
194 194 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
195 195
196 196 ; Flag to control loading of legacy plugins in py:/path format
197 197 auth_plugin.import_legacy_plugins = true
198 198
199 199 ; alternative return HTTP header for failed authentication. Default HTTP
200 200 ; response is 401 HTTPUnauthorized. Currently HG clients have troubles with
201 201 ; handling that causing a series of failed authentication calls.
202 202 ; Set this variable to 403 to return HTTPForbidden, or any other HTTP code
203 203 ; This will be served instead of default 401 on bad authentication
204 204 auth_ret_code =
205 205
206 206 ; use special detection method when serving auth_ret_code, instead of serving
207 207 ; ret_code directly, use 401 initially (Which triggers credentials prompt)
208 208 ; and then serve auth_ret_code to clients
209 209 auth_ret_code_detection = false
210 210
211 211 ; locking return code. When repository is locked return this HTTP code. 2XX
212 212 ; codes don't break the transactions while 4XX codes do
213 213 lock_ret_code = 423
214 214
215 ; allows to change the repository location in settings page
216 allow_repo_location_change = true
215 ; Filesystem location where repositories should be stored
216 repo_store.path = /var/opt/rhodecode_repo_store
217 217
218 218 ; allows to setup custom hooks in settings page
219 219 allow_custom_hooks_settings = true
220 220
221 221 ; Generated license token required for EE edition license.
222 222 ; New generated token value can be found in Admin > settings > license page.
223 223 license_token =
224 224
225 225 ; This flag hides sensitive information on the license page such as token, and license data
226 226 license.hide_license_info = false
227 227
228 228 ; supervisor connection uri, for managing supervisor and logs.
229 229 supervisor.uri =
230 230
231 231 ; supervisord group name/id we only want this RC instance to handle
232 232 supervisor.group_id = prod
233 233
234 234 ; Display extended labs settings
235 235 labs_settings_active = true
236 236
237 237 ; Custom exception store path, defaults to TMPDIR
238 238 ; This is used to store exception from RhodeCode in shared directory
239 239 #exception_tracker.store_path =
240 240
241 241 ; Send email with exception details when it happens
242 242 #exception_tracker.send_email = false
243 243
244 244 ; Comma separated list of recipients for exception emails,
245 245 ; e.g admin@rhodecode.com,devops@rhodecode.com
246 246 ; Can be left empty, then emails will be sent to ALL super-admins
247 247 #exception_tracker.send_email_recipients =
248 248
249 249 ; optional prefix to Add to email Subject
250 250 #exception_tracker.email_prefix = [RHODECODE ERROR]
251 251
252 252 ; File store configuration. This is used to store and serve uploaded files
253 253 file_store.enabled = true
254 254
255 255 ; Storage backend, available options are: local
256 256 file_store.backend = local
257 257
258 258 ; path to store the uploaded binaries
259 259 file_store.storage_path = %(here)s/data/file_store
260 260
261 261 ; Uncomment and set this path to control settings for archive download cache.
262 262 ; Generated repo archives will be cached at this location
263 263 ; and served from the cache during subsequent requests for the same archive of
264 264 ; the repository. This path is important to be shared across filesystems and with
265 265 ; RhodeCode and vcsserver
266 266
267 267 ; Default is $cache_dir/archive_cache if not set
268 268 archive_cache.store_dir = %(here)s/data/archive_cache
269 269
270 270 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
271 271 archive_cache.cache_size_gb = 40
272 272
273 273 ; By default cache uses sharding technique, this specifies how many shards are there
274 274 archive_cache.cache_shards = 4
275 275
276 276 ; #############
277 277 ; CELERY CONFIG
278 278 ; #############
279 279
280 280 ; manually run celery: /path/to/celery worker --task-events --beat --app rhodecode.lib.celerylib.loader --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler --loglevel DEBUG --ini /path/to/rhodecode.ini
281 281
282 282 use_celery = false
283 283
284 284 ; path to store schedule database
285 285 #celerybeat-schedule.path =
286 286
287 287 ; connection url to the message broker (default redis)
288 288 celery.broker_url = redis://redis:6379/8
289 289
290 290 ; results backend to get results for (default redis)
291 291 celery.result_backend = redis://redis:6379/8
292 292
293 293 ; rabbitmq example
294 294 #celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
295 295
296 296 ; maximum tasks to execute before worker restart
297 297 celery.max_tasks_per_child = 20
298 298
299 299 ; tasks will never be sent to the queue, but executed locally instead.
300 300 celery.task_always_eager = false
301 301
302 302 ; #############
303 303 ; DOGPILE CACHE
304 304 ; #############
305 305
306 306 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
307 307 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
308 308 cache_dir = %(here)s/data
309 309
310 310 ; *********************************************
311 311 ; `sql_cache_short` cache for heavy SQL queries
312 312 ; Only supported backend is `memory_lru`
313 313 ; *********************************************
314 314 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
315 315 rc_cache.sql_cache_short.expiration_time = 30
316 316
317 317
318 318 ; *****************************************************
319 319 ; `cache_repo_longterm` cache for repo object instances
320 320 ; Only supported backend is `memory_lru`
321 321 ; *****************************************************
322 322 rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru
323 323 ; by default we use 30 Days, cache is still invalidated on push
324 324 rc_cache.cache_repo_longterm.expiration_time = 2592000
325 325 ; max items in LRU cache, set to smaller number to save memory, and expire last used caches
326 326 rc_cache.cache_repo_longterm.max_size = 10000
327 327
328 328
329 329 ; *********************************************
330 330 ; `cache_general` cache for general purpose use
331 331 ; for simplicity use rc.file_namespace backend,
332 332 ; for performance and scale use rc.redis
333 333 ; *********************************************
334 334 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
335 335 rc_cache.cache_general.expiration_time = 43200
336 336 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
337 337 #rc_cache.cache_general.arguments.filename = /tmp/cache_general_db
338 338
339 339 ; alternative `cache_general` redis backend with distributed lock
340 340 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
341 341 #rc_cache.cache_general.expiration_time = 300
342 342
343 343 ; redis_expiration_time needs to be greater than expiration_time
344 344 #rc_cache.cache_general.arguments.redis_expiration_time = 7200
345 345
346 346 #rc_cache.cache_general.arguments.host = localhost
347 347 #rc_cache.cache_general.arguments.port = 6379
348 348 #rc_cache.cache_general.arguments.db = 0
349 349 #rc_cache.cache_general.arguments.socket_timeout = 30
350 350 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
351 351 #rc_cache.cache_general.arguments.distributed_lock = true
352 352
353 353 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
354 354 #rc_cache.cache_general.arguments.lock_auto_renewal = true
355 355
356 356 ; *************************************************
357 357 ; `cache_perms` cache for permission tree, auth TTL
358 358 ; for simplicity use rc.file_namespace backend,
359 359 ; for performance and scale use rc.redis
360 360 ; *************************************************
361 361 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
362 362 rc_cache.cache_perms.expiration_time = 3600
363 363 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
364 364 #rc_cache.cache_perms.arguments.filename = /tmp/cache_perms_db
365 365
366 366 ; alternative `cache_perms` redis backend with distributed lock
367 367 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
368 368 #rc_cache.cache_perms.expiration_time = 300
369 369
370 370 ; redis_expiration_time needs to be greater than expiration_time
371 371 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
372 372
373 373 #rc_cache.cache_perms.arguments.host = localhost
374 374 #rc_cache.cache_perms.arguments.port = 6379
375 375 #rc_cache.cache_perms.arguments.db = 0
376 376 #rc_cache.cache_perms.arguments.socket_timeout = 30
377 377 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
378 378 #rc_cache.cache_perms.arguments.distributed_lock = true
379 379
380 380 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
381 381 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
382 382
383 383 ; ***************************************************
384 384 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
385 385 ; for simplicity use rc.file_namespace backend,
386 386 ; for performance and scale use rc.redis
387 387 ; ***************************************************
388 388 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
389 389 rc_cache.cache_repo.expiration_time = 2592000
390 390 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
391 391 #rc_cache.cache_repo.arguments.filename = /tmp/cache_repo_db
392 392
393 393 ; alternative `cache_repo` redis backend with distributed lock
394 394 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
395 395 #rc_cache.cache_repo.expiration_time = 2592000
396 396
397 397 ; redis_expiration_time needs to be greater than expiration_time
398 398 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
399 399
400 400 #rc_cache.cache_repo.arguments.host = localhost
401 401 #rc_cache.cache_repo.arguments.port = 6379
402 402 #rc_cache.cache_repo.arguments.db = 1
403 403 #rc_cache.cache_repo.arguments.socket_timeout = 30
404 404 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
405 405 #rc_cache.cache_repo.arguments.distributed_lock = true
406 406
407 407 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
408 408 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
409 409
410 410 ; ##############
411 411 ; BEAKER SESSION
412 412 ; ##############
413 413
414 414 ; beaker.session.type is the type of storage used for logged-in user sessions. Current allowed
415 415 ; types are file, ext:redis, ext:database, ext:memcached
416 416 ; Fastest ones are ext:redis and ext:database; DO NOT use the memory type for sessions
417 417 beaker.session.type = file
418 418 beaker.session.data_dir = %(here)s/data/sessions
419 419
420 420 ; Redis based sessions
421 421 #beaker.session.type = ext:redis
422 422 #beaker.session.url = redis://127.0.0.1:6379/2
423 423
424 424 ; DB based session, fast, and allows easy management of logged-in users
425 425 #beaker.session.type = ext:database
426 426 #beaker.session.table_name = db_session
427 427 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
428 428 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
429 429 #beaker.session.sa.pool_recycle = 3600
430 430 #beaker.session.sa.echo = false
431 431
432 432 beaker.session.key = rhodecode
433 433 beaker.session.secret = production-rc-uytcxaz
434 434 beaker.session.lock_dir = %(here)s/data/sessions/lock
435 435
436 436 ; Secure encrypted cookie. Requires AES and AES python libraries
437 437 ; you must disable beaker.session.secret to use this
438 438 #beaker.session.encrypt_key = key_for_encryption
439 439 #beaker.session.validate_key = validation_key
440 440
441 441 ; Sets session as invalid (also logs out the user) if it has not been
442 442 ; accessed for a given amount of time in seconds
443 443 beaker.session.timeout = 2592000
444 444 beaker.session.httponly = true
445 445
446 446 ; Path to use for the cookie. Set to prefix if you use prefix middleware
447 447 #beaker.session.cookie_path = /custom_prefix
448 448
449 449 ; Set https secure cookie
450 450 beaker.session.secure = false
451 451
452 452 ; default cookie expiration time in seconds, set to `true` to expire the cookie
453 453 ; at browser close
454 454 #beaker.session.cookie_expires = 3600
455 455
456 456 ; #############################
457 457 ; SEARCH INDEXING CONFIGURATION
458 458 ; #############################
459 459
460 460 ; Full text search indexer is available in rhodecode-tools under
461 461 ; `rhodecode-tools index` command
462 462
463 463 ; WHOOSH Backend, doesn't require additional services to run
464 464 ; it works well with a few dozen repos
465 465 search.module = rhodecode.lib.index.whoosh
466 466 search.location = %(here)s/data/index
467 467
468 468 ; ####################
469 469 ; CHANNELSTREAM CONFIG
470 470 ; ####################
471 471
472 472 ; channelstream enables persistent connections and live notifications
473 473 ; in the system. It's also used by the chat system
474 474
475 475 channelstream.enabled = false
476 476
477 477 ; server address for channelstream server on the backend
478 478 channelstream.server = 127.0.0.1:9800
479 479
480 480 ; location of the channelstream server from outside world
481 481 ; use ws:// for http or wss:// for https. This address needs to be handled
482 482 ; by external HTTP server such as Nginx or Apache
483 483 ; see Nginx/Apache configuration examples in our docs
484 484 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
485 485 channelstream.secret = secret
486 486 channelstream.history.location = %(here)s/channelstream_history
487 487
488 488 ; Internal application path that JavaScript uses to connect to.
489 489 ; If you use proxy-prefix the prefix should be added before /_channelstream
490 490 channelstream.proxy_path = /_channelstream
491 491
492 492
493 493 ; ##############################
494 494 ; MAIN RHODECODE DATABASE CONFIG
495 495 ; ##############################
496 496
497 497 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
498 498 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
499 499 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode?charset=utf8
500 500 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
501 501 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
502 502
503 503 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
504 504
505 505 ; see sqlalchemy docs for other advanced settings
506 506 ; print the sql statements to output
507 507 sqlalchemy.db1.echo = false
508 508
509 509 ; recycle the connections after this amount of seconds
510 510 sqlalchemy.db1.pool_recycle = 3600
511 511
512 512 ; the number of connections to keep open inside the connection pool.
513 513 ; 0 indicates no limit
514 514 ; the general calculation with gevent is:
515 515 ; if your system allows 500 concurrent greenlets (max_connections) that all do database access,
516 516 ; then increase pool size + max overflow so that they add up to 500 (see the example below).
517 517 #sqlalchemy.db1.pool_size = 5
518 518
519 519 ; The number of connections to allow in connection pool "overflow", that is
520 520 ; connections that can be opened above and beyond the pool_size setting,
521 521 ; which defaults to five.
522 522 #sqlalchemy.db1.max_overflow = 10
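
; Example of the calculation above (illustrative values only, tune them to the
; concurrency limits of your own setup): with 500 concurrent greenlets that all
; access the database, one possible split is pool_size + max_overflow = 500:
#sqlalchemy.db1.pool_size = 400
#sqlalchemy.db1.max_overflow = 100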
523 523
524 524 ; Connection check ping, used to detect broken database connections
525 525 ; could be enabled to better handle cases of "MySQL has gone away" errors
526 526 #sqlalchemy.db1.ping_connection = true
527 527
528 528 ; ##########
529 529 ; VCS CONFIG
530 530 ; ##########
531 531 vcs.server.enable = true
532 532 vcs.server = localhost:9900
533 533
534 534 ; Web server connectivity protocol, responsible for web based VCS operations
535 535 ; Available protocols are:
536 536 ; `http` - use http-rpc backend (default)
537 537 vcs.server.protocol = http
538 538
539 539 ; Push/Pull operations protocol, available options are:
540 540 ; `http` - use http-rpc backend (default)
541 541 vcs.scm_app_implementation = http
542 542
543 543 ; Push/Pull operations hooks protocol, available options are:
544 544 ; `http` - use http-rpc backend (default)
545 545 vcs.hooks.protocol = http
546 546
547 547 ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be
548 548 ; accessible via network.
549 549 ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker)
550 550 vcs.hooks.host = *
551 551
552 552 ; Start VCSServer with this instance as a subprocess, useful for development
553 553 vcs.start_server = false
554 554
555 555 ; List of enabled VCS backends, available options are:
556 556 ; `hg` - mercurial
557 557 ; `git` - git
558 558 ; `svn` - subversion
559 559 vcs.backends = hg, git, svn
560 560
561 561 ; Wait this number of seconds before killing connection to the vcsserver
562 562 vcs.connection_timeout = 3600
563 563
564 564 ; Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
565 565 ; Set a numeric version for your current SVN e.g 1.8, or 1.12
566 566 ; Legacy available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
567 567 #vcs.svn.compatible_version = 1.8
568 568
569 569 ; Cache flag to cache vcsserver remote calls locally
570 570 ; It uses cache_region `cache_repo`
571 571 vcs.methods.cache = true
572 572
573 573 ; ####################################################
574 574 ; Subversion proxy support (mod_dav_svn)
575 575 ; Maps RhodeCode repo groups into SVN paths for Apache
576 576 ; ####################################################
577 577
578 578 ; Enable or disable the config file generation.
579 579 svn.proxy.generate_config = false
580 580
581 581 ; Generate config file with `SVNListParentPath` set to `On`.
582 582 svn.proxy.list_parent_path = true
583 583
584 584 ; Set location and file name of generated config file.
585 585 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
586 586
587 587 ; alternative mod_dav config template. This needs to be a valid mako template
588 588 ; Example template can be found in the source code:
589 589 ; rhodecode/apps/svn_support/templates/mod-dav-svn.conf.mako
590 590 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
591 591
592 592 ; Used as a prefix to the `Location` block in the generated config file.
593 593 ; In most cases it should be set to `/`.
594 594 svn.proxy.location_root = /
595 595
596 596 ; Command to reload the mod dav svn configuration on change.
597 597 ; Example: `/etc/init.d/apache2 reload` or /home/USER/apache_reload.sh
598 598 ; Make sure user who runs RhodeCode process is allowed to reload Apache
599 599 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
600 600
601 601 ; If the timeout expires before the reload command finishes, the command will
602 602 ; be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
603 603 #svn.proxy.reload_timeout = 10
604 604
605 605 ; ####################
606 606 ; SSH Support Settings
607 607 ; ####################
608 608
609 609 ; Defines if a custom authorized_keys file should be created and written on
610 610 ; any change of user ssh keys. Setting this to false also disables the possibility
611 611 ; of adding SSH keys by users from the web interface. Super admins can still
612 612 ; manage SSH Keys.
613 613 ssh.generate_authorized_keyfile = false
614 614
615 615 ; Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
616 616 # ssh.authorized_keys_ssh_opts =
617 617
618 618 ; Path to the authorized_keys file where the generated entries are placed.
619 619 ; It is possible to have multiple key files specified in `sshd_config` e.g.
620 620 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
621 621 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
622 622
623 623 ; Command to execute the SSH wrapper. The binary is available in the
624 624 ; RhodeCode installation directory.
625 625 ; e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
626 626 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
627 627
628 628 ; Allow shell when executing the ssh-wrapper command
629 629 ssh.wrapper_cmd_allow_shell = false
630 630
631 631 ; Enables logging, and detailed output sent back to the client during SSH
632 632 ; operations. Useful for debugging, shouldn't be used in production.
633 633 ssh.enable_debug_logging = false
634 634
635 635 ; Paths to binary executables; by default they are just the names, but we can
636 636 ; override them if we want to use custom ones
637 637 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
638 638 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
639 639 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
640 640
641 641 ; Enables SSH key generator web interface. Disabling this still allows users
642 642 ; to add their own keys.
643 643 ssh.enable_ui_key_generator = true
644 644
645 645
646 646 ; #################
647 647 ; APPENLIGHT CONFIG
648 648 ; #################
649 649
650 650 ; Appenlight is tailored to work with RhodeCode, see
651 651 ; http://appenlight.rhodecode.com for details on how to obtain an account
652 652
653 653 ; Appenlight integration enabled
654 654 #appenlight = false
655 655
656 656 #appenlight.server_url = https://api.appenlight.com
657 657 #appenlight.api_key = YOUR_API_KEY
658 658 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
659 659
660 660 ; used for JS client
661 661 #appenlight.api_public_key = YOUR_API_PUBLIC_KEY
662 662
663 663 ; TWEAK AMOUNT OF INFO SENT HERE
664 664
665 665 ; enables 404 error logging (default False)
666 666 #appenlight.report_404 = false
667 667
668 668 ; time in seconds after which a request is considered slow (default 1)
669 669 #appenlight.slow_request_time = 1
670 670
671 671 ; record slow requests in application
672 672 ; (needs to be enabled for slow datastore recording and time tracking)
673 673 #appenlight.slow_requests = true
674 674
675 675 ; enable hooking to application loggers
676 676 #appenlight.logging = true
677 677
678 678 ; minimum log level for log capture
679 679 #appenlight.logging.level = WARNING
680 680
681 681 ; send logs only from erroneous/slow requests
682 682 ; (saves API quota for intensive logging)
683 683 #appenlight.logging_on_error = false
684 684
685 685 ; list of additional keywords that should be grabbed from the environ object
686 686 ; can be a string with a comma separated list of words in lowercase
687 687 ; (by default the client will always send the following info:
688 688 ; 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
689 689 ; start with HTTP*); this list can be extended with additional keywords here
690 690 #appenlight.environ_keys_whitelist =
691 691
692 692 ; list of keywords that should be blanked from the request object
693 693 ; can be a string with a comma separated list of words in lowercase
694 694 ; (by default the client will always blank keys that contain the following words:
695 695 ; 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');
696 696 ; this list can be extended with additional keywords set here, see the example values below
697 697 #appenlight.request_keys_blacklist =
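
; Example values for the two options above (hypothetical keywords, shown only
; to illustrate the expected comma separated, lowercase format):
#appenlight.environ_keys_whitelist = http_user_agent, http_referer
#appenlight.request_keys_blacklist = api_key, private_token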
698 698
699 699 ; list of namespaces that should be ignored when gathering log entries
700 700 ; can be a string with a comma separated list of namespaces
701 701 ; (by default the client ignores its own entries: appenlight_client.client)
702 702 #appenlight.log_namespace_blacklist =
703 703
704 704 ; Statsd client config, this is used to send metrics to statsd
705 705 ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
706 706 #statsd.enabled = false
707 707 #statsd.statsd_host = 0.0.0.0
708 708 #statsd.statsd_port = 8125
709 709 #statsd.statsd_prefix =
710 710 #statsd.statsd_ipv6 = false
711 711
712 712 ; Configure logging automatically at server startup. Set to false
713 713 ; to use the custom logging config below.
714 714 ; RC_LOGGING_FORMATTER
715 715 ; RC_LOGGING_LEVEL
716 716 ; env variables can control the logging settings when autoconfigure is used (see the example below)
717 717
718 718 #logging.autoconfigure = true
719 719
720 720 ; specify your own custom logging config file to configure logging
721 721 #logging.logging_conf_file = /path/to/custom_logging.ini
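
; Example (illustrative values only): with autoconfigure enabled, logging can
; be adjusted through environment variables before starting the server, e.g.:
; export RC_LOGGING_LEVEL=DEBUG
; export RC_LOGGING_FORMATTER=json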
722 722
723 723 ; Dummy marker to add new entries after.
724 724 ; Add any custom entries below. Please don't remove this marker.
725 725 custom.conf = 1
726 726
727 727
728 728 ; #####################
729 729 ; LOGGING CONFIGURATION
730 730 ; #####################
731 731
732 732 [loggers]
733 733 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
734 734
735 735 [handlers]
736 736 keys = console, console_sql
737 737
738 738 [formatters]
739 739 keys = generic, json, color_formatter, color_formatter_sql
740 740
741 741 ; #######
742 742 ; LOGGERS
743 743 ; #######
744 744 [logger_root]
745 745 level = NOTSET
746 746 handlers = console
747 747
748 748 [logger_sqlalchemy]
749 749 level = INFO
750 750 handlers = console_sql
751 751 qualname = sqlalchemy.engine
752 752 propagate = 0
753 753
754 754 [logger_beaker]
755 755 level = DEBUG
756 756 handlers =
757 757 qualname = beaker.container
758 758 propagate = 1
759 759
760 760 [logger_rhodecode]
761 761 level = DEBUG
762 762 handlers =
763 763 qualname = rhodecode
764 764 propagate = 1
765 765
766 766 [logger_ssh_wrapper]
767 767 level = DEBUG
768 768 handlers =
769 769 qualname = ssh_wrapper
770 770 propagate = 1
771 771
772 772 [logger_celery]
773 773 level = DEBUG
774 774 handlers =
775 775 qualname = celery
776 776
777 777
778 778 ; ########
779 779 ; HANDLERS
780 780 ; ########
781 781
782 782 [handler_console]
783 783 class = StreamHandler
784 784 args = (sys.stderr, )
785 785 level = INFO
786 786 ; To enable JSON formatted logs replace 'generic/color_formatter' with 'json'
787 787 ; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
788 788 formatter = generic
789 789
790 790 [handler_console_sql]
791 791 ; "level = DEBUG" logs SQL queries and results.
792 792 ; "level = INFO" logs SQL queries.
793 793 ; "level = WARN" logs neither. (Recommended for production systems.)
794 794 class = StreamHandler
795 795 args = (sys.stderr, )
796 796 level = WARN
797 797 ; To enable JSON formatted logs replace 'generic/color_formatter_sql' with 'json'
798 798 ; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
799 799 formatter = generic
800 800
801 801 ; ##########
802 802 ; FORMATTERS
803 803 ; ##########
804 804
805 805 [formatter_generic]
806 806 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
807 807 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
808 808 datefmt = %Y-%m-%d %H:%M:%S
809 809
810 810 [formatter_color_formatter]
811 811 class = rhodecode.lib.logging_formatter.ColorFormatter
812 812 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
813 813 datefmt = %Y-%m-%d %H:%M:%S
814 814
815 815 [formatter_color_formatter_sql]
816 816 class = rhodecode.lib.logging_formatter.ColorFormatterSql
817 817 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
818 818 datefmt = %Y-%m-%d %H:%M:%S
819 819
820 820 [formatter_json]
821 821 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
822 822 class = rhodecode.lib._vendor.jsonlogger.JsonFormatter
@@ -1,424 +1,423 b''
1 1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import logging
20 20 import itertools
21 21 import base64
22 22
23 23 from rhodecode.api import (
24 24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
25 25
26 26 from rhodecode.api.utils import (
27 27 Optional, OAttr, has_superadmin_permission, get_user_or_error)
28 from rhodecode.lib.utils import repo2db_mapper
28 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
29 29 from rhodecode.lib import system_info
30 30 from rhodecode.lib import user_sessions
31 31 from rhodecode.lib import exc_tracking
32 32 from rhodecode.lib.ext_json import json
33 33 from rhodecode.lib.utils2 import safe_int
34 34 from rhodecode.model.db import UserIpMap
35 35 from rhodecode.model.scm import ScmModel
36 from rhodecode.model.settings import VcsSettingsModel
37 36 from rhodecode.apps.file_store import utils
38 37 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
39 38 FileOverSizeException
40 39
41 40 log = logging.getLogger(__name__)
42 41
43 42
44 43 @jsonrpc_method()
45 44 def get_server_info(request, apiuser):
46 45 """
47 46 Returns the |RCE| server information.
48 47
49 48 This includes the running version of |RCE| and all installed
50 49 packages. This command takes the following options:
51 50
52 51 :param apiuser: This is filled automatically from the |authtoken|.
53 52 :type apiuser: AuthUser
54 53
55 54 Example output:
56 55
57 56 .. code-block:: bash
58 57
59 58 id : <id_given_in_input>
60 59 result : {
61 60 'modules': [<module name>,...]
62 61 'py_version': <python version>,
63 62 'platform': <platform type>,
64 63 'rhodecode_version': <rhodecode version>
65 64 }
66 65 error : null
67 66 """
68 67
69 68 if not has_superadmin_permission(apiuser):
70 69 raise JSONRPCForbidden()
71 70
72 71 server_info = ScmModel().get_server_info(request.environ)
73 72 # rhodecode-index requires those
74 73
75 74 server_info['index_storage'] = server_info['search']['value']['location']
76 75 server_info['storage'] = server_info['storage']['value']['path']
77 76
78 77 return server_info
79 78
80 79
81 80 @jsonrpc_method()
82 81 def get_repo_store(request, apiuser):
83 82 """
84 83 Returns the |RCE| repository storage information.
85 84
86 85 :param apiuser: This is filled automatically from the |authtoken|.
87 86 :type apiuser: AuthUser
88 87
89 88 Example output:
90 89
91 90 .. code-block:: bash
92 91
93 92 id : <id_given_in_input>
94 93 result : {
95 94 'modules': [<module name>,...]
96 95 'py_version': <python version>,
97 96 'platform': <platform type>,
98 97 'rhodecode_version': <rhodecode version>
99 98 }
100 99 error : null
101 100 """
102 101
103 102 if not has_superadmin_permission(apiuser):
104 103 raise JSONRPCForbidden()
105 104
106 path = VcsSettingsModel().get_repos_location()
105 path = get_rhodecode_repo_store_path()
107 106 return {"path": path}
108 107
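# Illustrative sketch, not part of this module: calling get_repo_store through
# the JSON-RPC API. The instance URL and auth token below are placeholder
# assumptions, not values taken from this repository.
#
#   import requests
#
#   payload = {
#       "id": 1,
#       "auth_token": "<auth_token>",
#       "method": "get_repo_store",
#       "args": {},
#   }
#   resp = requests.post("https://rhodecode.example.com/_admin/api", json=payload)
#   # the repo store path is now read from the ini file via get_rhodecode_repo_store_path()
#   print(resp.json()["result"]["path"])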
109 108
110 109 @jsonrpc_method()
111 110 def get_ip(request, apiuser, userid=Optional(OAttr('apiuser'))):
112 111 """
113 112 Displays the IP Address as seen from the |RCE| server.
114 113
115 114 * This command displays the IP Address, as well as all the defined IP
116 115 addresses for the specified user. If the ``userid`` is not set, the
117 116 data returned is for the user calling the method.
118 117
119 118 This command can only be run using an |authtoken| with admin rights to
120 119 the specified repository.
121 120
122 121 This command takes the following options:
123 122
124 123 :param apiuser: This is filled automatically from |authtoken|.
125 124 :type apiuser: AuthUser
126 125 :param userid: Sets the userid for which associated IP Address data
127 126 is returned.
128 127 :type userid: Optional(str or int)
129 128
130 129 Example output:
131 130
132 131 .. code-block:: bash
133 132
134 133 id : <id_given_in_input>
135 134 result : {
136 135          "server_ip_addr": "<ip_from_client>",
137 136 "user_ips": [
138 137 {
139 138 "ip_addr": "<ip_with_mask>",
140 139 "ip_range": ["<start_ip>", "<end_ip>"],
141 140 },
142 141 ...
143 142 ]
144 143 }
145 144
146 145 """
147 146 if not has_superadmin_permission(apiuser):
148 147 raise JSONRPCForbidden()
149 148
150 149 userid = Optional.extract(userid, evaluate_locals=locals())
151 150 userid = getattr(userid, 'user_id', userid)
152 151
153 152 user = get_user_or_error(userid)
154 153 ips = UserIpMap.query().filter(UserIpMap.user == user).all()
155 154 return {
156 155 'server_ip_addr': request.rpc_ip_addr,
157 156 'user_ips': ips
158 157 }
159 158
160 159
161 160 @jsonrpc_method()
162 161 def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
163 162 """
164 163 Triggers a rescan of the specified repositories.
165 164
166 165 * If the ``remove_obsolete`` option is set, it also deletes repositories
167 166 that are found in the database but not on the file system, so called
168 167 "clean zombies".
169 168
170 169 This command can only be run using an |authtoken| with admin rights to
171 170 the specified repository.
172 171
173 172 This command takes the following options:
174 173
175 174 :param apiuser: This is filled automatically from the |authtoken|.
176 175 :type apiuser: AuthUser
177 176 :param remove_obsolete: Deletes repositories from the database that
178 177 are not found on the filesystem.
179 178 :type remove_obsolete: Optional(``True`` | ``False``)
180 179
181 180 Example output:
182 181
183 182 .. code-block:: bash
184 183
185 184 id : <id_given_in_input>
186 185 result : {
187 186 'added': [<added repository name>,...]
188 187 'removed': [<removed repository name>,...]
189 188 }
190 189 error : null
191 190
192 191 Example error output:
193 192
194 193 .. code-block:: bash
195 194
196 195 id : <id_given_in_input>
197 196 result : null
198 197 error : {
199 198 'Error occurred during rescan repositories action'
200 199 }
201 200
202 201 """
203 202 if not has_superadmin_permission(apiuser):
204 203 raise JSONRPCForbidden()
205 204
206 205 try:
207 206 rm_obsolete = Optional.extract(remove_obsolete)
208 207 added, removed = repo2db_mapper(ScmModel().repo_scan(),
209 208 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
210 209 return {'added': added, 'removed': removed}
211 210 except Exception:
212 211         log.exception('Failed to run repo rescan')
213 212 raise JSONRPCError(
214 213 'Error occurred during rescan repositories action'
215 214 )
216 215
217 216
218 217 @jsonrpc_method()
219 218 def cleanup_sessions(request, apiuser, older_then=Optional(60)):
220 219 """
221 220 Triggers a session cleanup action.
222 221
223 222     If the ``older_then`` option is set, only sessions that haven't been
224 223 accessed in the given number of days will be removed.
225 224
226 225 This command can only be run using an |authtoken| with admin rights to
227 226 the specified repository.
228 227
229 228 This command takes the following options:
230 229
231 230 :param apiuser: This is filled automatically from the |authtoken|.
232 231 :type apiuser: AuthUser
233 232     :param older_then: Deletes sessions that haven't been accessed
234 233         in the given number of days.
235 234 :type older_then: Optional(int)
236 235
237 236 Example output:
238 237
239 238 .. code-block:: bash
240 239
241 240 id : <id_given_in_input>
242 241 result: {
243 242 "backend": "<type of backend>",
244 243 "sessions_removed": <number_of_removed_sessions>
245 244 }
246 245 error : null
247 246
248 247 Example error output:
249 248
250 249 .. code-block:: bash
251 250
252 251 id : <id_given_in_input>
253 252 result : null
254 253 error : {
255 254 'Error occurred during session cleanup'
256 255 }
257 256
258 257 """
259 258 if not has_superadmin_permission(apiuser):
260 259 raise JSONRPCForbidden()
261 260
262 261 older_then = safe_int(Optional.extract(older_then)) or 60
263 262 older_than_seconds = 60 * 60 * 24 * older_then
264 263
265 264 config = system_info.rhodecode_config().get_value()['value']['config']
266 265 session_model = user_sessions.get_session_handler(
267 266 config.get('beaker.session.type', 'memory'))(config)
268 267
269 268 backend = session_model.SESSION_TYPE
270 269 try:
271 270 cleaned = session_model.clean_sessions(
272 271 older_than_seconds=older_than_seconds)
273 272 return {'sessions_removed': cleaned, 'backend': backend}
274 273 except user_sessions.CleanupCommand as msg:
275 274 return {'cleanup_command': str(msg), 'backend': backend}
276 275 except Exception as e:
277 276 log.exception('Failed session cleanup')
278 277 raise JSONRPCError(
279 278 'Error occurred during session cleanup'
280 279 )
281 280
282 281
283 282 @jsonrpc_method()
284 283 def get_method(request, apiuser, pattern=Optional('*')):
285 284 """
286 285     Returns a list of all available API methods. By default the match pattern
287 286     is "*", but any other pattern can be specified, e.g. *comment* will return
288 287     all methods with "comment" inside them. If just a single method is matched,
289 288     the returned data will also include the method specification
290 289
291 290 This command can only be run using an |authtoken| with admin rights to
292 291 the specified repository.
293 292
294 293 This command takes the following options:
295 294
296 295 :param apiuser: This is filled automatically from the |authtoken|.
297 296 :type apiuser: AuthUser
298 297 :param pattern: pattern to match method names against
299 298 :type pattern: Optional("*")
300 299
301 300 Example output:
302 301
303 302 .. code-block:: bash
304 303
305 304 id : <id_given_in_input>
306 305 "result": [
307 306 "changeset_comment",
308 307 "comment_pull_request",
309 308 "comment_commit"
310 309 ]
311 310 error : null
312 311
313 312 .. code-block:: bash
314 313
315 314 id : <id_given_in_input>
316 315 "result": [
317 316 "comment_commit",
318 317 {
319 318 "apiuser": "<RequiredType>",
320 319 "comment_type": "<Optional:u'note'>",
321 320 "commit_id": "<RequiredType>",
322 321 "message": "<RequiredType>",
323 322 "repoid": "<RequiredType>",
324 323 "request": "<RequiredType>",
325 324 "resolves_comment_id": "<Optional:None>",
326 325 "status": "<Optional:None>",
327 326 "userid": "<Optional:<OptionalAttr:apiuser>>"
328 327 }
329 328 ]
330 329 error : null
331 330 """
332 331 from rhodecode.config.patches import inspect_getargspec
333 332 inspect = inspect_getargspec()
334 333
335 334 if not has_superadmin_permission(apiuser):
336 335 raise JSONRPCForbidden()
337 336
338 337 pattern = Optional.extract(pattern)
339 338
340 339 matches = find_methods(request.registry.jsonrpc_methods, pattern)
341 340
342 341 args_desc = []
343 342 matches_keys = list(matches.keys())
344 343 if len(matches_keys) == 1:
345 344 func = matches[matches_keys[0]]
346 345
347 346 argspec = inspect.getargspec(func)
348 347 arglist = argspec[0]
349 348 defaults = list(map(repr, argspec[3] or []))
350 349
351 350 default_empty = '<RequiredType>'
352 351
353 352 # kw arguments required by this method
354 353 func_kwargs = dict(itertools.zip_longest(
355 354 reversed(arglist), reversed(defaults), fillvalue=default_empty))
356 355 args_desc.append(func_kwargs)
357 356
358 357 return matches_keys + args_desc
359 358
360 359
361 360 @jsonrpc_method()
362 361 def store_exception(request, apiuser, exc_data_json, prefix=Optional('rhodecode')):
363 362 """
364 363 Stores sent exception inside the built-in exception tracker in |RCE| server.
365 364
366 365 This command can only be run using an |authtoken| with admin rights to
367 366 the specified repository.
368 367
369 368 This command takes the following options:
370 369
371 370 :param apiuser: This is filled automatically from the |authtoken|.
372 371 :type apiuser: AuthUser
373 372
374 373 :param exc_data_json: JSON data with exception e.g
375 374 {"exc_traceback": "Value `1` is not allowed", "exc_type_name": "ValueError"}
376 375 :type exc_data_json: JSON data
377 376
378 377 :param prefix: prefix for error type, e.g 'rhodecode', 'vcsserver', 'rhodecode-tools'
379 378 :type prefix: Optional("rhodecode")
380 379
381 380 Example output:
382 381
383 382 .. code-block:: bash
384 383
385 384 id : <id_given_in_input>
386 385 "result": {
387 386 "exc_id": 139718459226384,
388 387 "exc_url": "http://localhost:8080/_admin/settings/exceptions/139718459226384"
389 388 }
390 389 error : null
391 390 """
392 391 if not has_superadmin_permission(apiuser):
393 392 raise JSONRPCForbidden()
394 393
395 394 prefix = Optional.extract(prefix)
396 395 exc_id = exc_tracking.generate_id()
397 396
398 397 try:
399 398 exc_data = json.loads(exc_data_json)
400 399 except Exception:
401 400 log.error('Failed to parse JSON: %r', exc_data_json)
402 401 raise JSONRPCError('Failed to parse JSON data from exc_data_json field. '
403 402 'Please make sure it contains a valid JSON.')
404 403
405 404 try:
406 405 exc_traceback = exc_data['exc_traceback']
407 406 exc_type_name = exc_data['exc_type_name']
408 407 exc_value = ''
409 408 except KeyError as err:
410 409 raise JSONRPCError(
411 410 f'Missing exc_traceback, or exc_type_name '
412 411 f'in exc_data_json field. Missing: {err}')
413 412
414 413 class ExcType:
415 414 __name__ = exc_type_name
416 415
417 416 exc_info = (ExcType(), exc_value, exc_traceback)
418 417
419 418 exc_tracking._store_exception(
420 419 exc_id=exc_id, exc_info=exc_info, prefix=prefix)
421 420
422 421 exc_url = request.route_url(
423 422 'admin_settings_exception_tracker_show', exception_id=exc_id)
424 423 return {'exc_id': exc_id, 'exc_url': exc_url}
@@ -1,714 +1,711 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 import logging
21 21 import collections
22 22
23 23 import datetime
24 24 import formencode
25 25 import formencode.htmlfill
26 26
27 27 import rhodecode
28 28
29 29 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
30 30 from pyramid.renderers import render
31 31 from pyramid.response import Response
32 32
33 33 from rhodecode.apps._base import BaseAppView
34 34 from rhodecode.apps._base.navigation import navigation_list
35 35 from rhodecode.apps.svn_support.config_keys import generate_config
36 36 from rhodecode.lib import helpers as h
37 37 from rhodecode.lib.auth import (
38 38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
39 39 from rhodecode.lib.celerylib import tasks, run_task
40 40 from rhodecode.lib.str_utils import safe_str
41 from rhodecode.lib.utils import repo2db_mapper
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
42 42 from rhodecode.lib.utils2 import str2bool, AttributeDict
43 43 from rhodecode.lib.index import searcher_from_config
44 44
45 45 from rhodecode.model.db import RhodeCodeUi, Repository
46 46 from rhodecode.model.forms import (ApplicationSettingsForm,
47 47 ApplicationUiSettingsForm, ApplicationVisualisationForm,
48 48 LabsSettingsForm, IssueTrackerPatternsForm)
49 49 from rhodecode.model.permission import PermissionModel
50 50 from rhodecode.model.repo_group import RepoGroupModel
51 51
52 52 from rhodecode.model.scm import ScmModel
53 53 from rhodecode.model.notification import EmailNotificationModel
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.settings import (
56 56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
57 57 SettingsModel)
58 58
59 59
60 60 log = logging.getLogger(__name__)
61 61
62 62
63 63 class AdminSettingsView(BaseAppView):
64 64
65 65 def load_default_context(self):
66 66 c = self._get_local_tmpl_context()
67 67 c.labs_active = str2bool(
68 68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
69 69 c.navlist = navigation_list(self.request)
70 70 return c
71 71
72 72 @classmethod
73 73 def _get_ui_settings(cls):
74 74 ret = RhodeCodeUi.query().all()
75 75
76 76 if not ret:
77 77 raise Exception('Could not get application ui settings !')
78 78 settings = {}
79 79 for each in ret:
80 80 k = each.ui_key
81 81 v = each.ui_value
82 82 if k == '/':
83 83 k = 'root_path'
84 84
85 85 if k in ['push_ssl', 'publish', 'enabled']:
86 86 v = str2bool(v)
87 87
88 88 if k.find('.') != -1:
89 89 k = k.replace('.', '_')
90 90
91 91 if each.ui_section in ['hooks', 'extensions']:
92 92 v = each.ui_active
93 93
94 94 settings[each.ui_section + '_' + k] = v
95 95 return settings
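    # For illustration (derived from the loop above, row values are hypothetical):
    # a RhodeCodeUi row with ui_section='web', ui_key='push_ssl', ui_value='true'
    # ends up in the returned dict as settings['web_push_ssl'] = True, and the
    # special '/' key is exposed as settings['paths_root_path'].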
96 96
97 97 @classmethod
98 98 def _form_defaults(cls):
99 99 defaults = SettingsModel().get_all_settings()
100 100 defaults.update(cls._get_ui_settings())
101 101
102 102 defaults.update({
103 103 'new_svn_branch': '',
104 104 'new_svn_tag': '',
105 105 })
106 106 return defaults
107 107
108 108 @LoginRequired()
109 109 @HasPermissionAllDecorator('hg.admin')
110 110 def settings_vcs(self):
111 111 c = self.load_default_context()
112 112 c.active = 'vcs'
113 113 model = VcsSettingsModel()
114 114 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
115 115 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
116 116
117 117 settings = self.request.registry.settings
118 118 c.svn_proxy_generate_config = settings[generate_config]
119 119
120 120 defaults = self._form_defaults()
121 121
122 122 model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
123 123
124 124 data = render('rhodecode:templates/admin/settings/settings.mako',
125 125 self._get_template_context(c), self.request)
126 126 html = formencode.htmlfill.render(
127 127 data,
128 128 defaults=defaults,
129 129 encoding="UTF-8",
130 130 force_defaults=False
131 131 )
132 132 return Response(html)
133 133
134 134 @LoginRequired()
135 135 @HasPermissionAllDecorator('hg.admin')
136 136 @CSRFRequired()
137 137 def settings_vcs_update(self):
138 138 _ = self.request.translate
139 139 c = self.load_default_context()
140 140 c.active = 'vcs'
141 141
142 142 model = VcsSettingsModel()
143 143 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
144 144 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
145 145
146 146 settings = self.request.registry.settings
147 147 c.svn_proxy_generate_config = settings[generate_config]
148 148
149 149 application_form = ApplicationUiSettingsForm(self.request.translate)()
150 150
151 151 try:
152 152 form_result = application_form.to_python(dict(self.request.POST))
153 153 except formencode.Invalid as errors:
154 154 h.flash(
155 155 _("Some form inputs contain invalid data."),
156 156 category='error')
157 157 data = render('rhodecode:templates/admin/settings/settings.mako',
158 158 self._get_template_context(c), self.request)
159 159 html = formencode.htmlfill.render(
160 160 data,
161 161 defaults=errors.value,
162 162 errors=errors.unpack_errors() or {},
163 163 prefix_error=False,
164 164 encoding="UTF-8",
165 165 force_defaults=False
166 166 )
167 167 return Response(html)
168 168
169 169 try:
170 if c.visual.allow_repo_location_change:
171 model.update_global_path_setting(form_result['paths_root_path'])
172
173 170 model.update_global_ssl_setting(form_result['web_push_ssl'])
174 171 model.update_global_hook_settings(form_result)
175 172
176 173 model.create_or_update_global_svn_settings(form_result)
177 174 model.create_or_update_global_hg_settings(form_result)
178 175 model.create_or_update_global_git_settings(form_result)
179 176 model.create_or_update_global_pr_settings(form_result)
180 177 except Exception:
181 178 log.exception("Exception while updating settings")
182 179 h.flash(_('Error occurred during updating '
183 180 'application settings'), category='error')
184 181 else:
185 182 Session().commit()
186 183 h.flash(_('Updated VCS settings'), category='success')
187 184 raise HTTPFound(h.route_path('admin_settings_vcs'))
188 185
189 186 data = render('rhodecode:templates/admin/settings/settings.mako',
190 187 self._get_template_context(c), self.request)
191 188 html = formencode.htmlfill.render(
192 189 data,
193 190 defaults=self._form_defaults(),
194 191 encoding="UTF-8",
195 192 force_defaults=False
196 193 )
197 194 return Response(html)
198 195
199 196 @LoginRequired()
200 197 @HasPermissionAllDecorator('hg.admin')
201 198 @CSRFRequired()
202 199 def settings_vcs_delete_svn_pattern(self):
203 200 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
204 201 model = VcsSettingsModel()
205 202 try:
206 203 model.delete_global_svn_pattern(delete_pattern_id)
207 204 except SettingNotFound:
208 205 log.exception(
209 206 'Failed to delete svn_pattern with id %s', delete_pattern_id)
210 207 raise HTTPNotFound()
211 208
212 209 Session().commit()
213 210 return True
214 211
215 212 @LoginRequired()
216 213 @HasPermissionAllDecorator('hg.admin')
217 214 def settings_mapping(self):
218 215 c = self.load_default_context()
219 216 c.active = 'mapping'
220 c.storage_path = VcsSettingsModel().get_repos_location()
217 c.storage_path = get_rhodecode_repo_store_path()
221 218 data = render('rhodecode:templates/admin/settings/settings.mako',
222 219 self._get_template_context(c), self.request)
223 220 html = formencode.htmlfill.render(
224 221 data,
225 222 defaults=self._form_defaults(),
226 223 encoding="UTF-8",
227 224 force_defaults=False
228 225 )
229 226 return Response(html)
230 227
231 228 @LoginRequired()
232 229 @HasPermissionAllDecorator('hg.admin')
233 230 @CSRFRequired()
234 231 def settings_mapping_update(self):
235 232 _ = self.request.translate
236 233 c = self.load_default_context()
237 234 c.active = 'mapping'
238 235 rm_obsolete = self.request.POST.get('destroy', False)
239 236 invalidate_cache = self.request.POST.get('invalidate', False)
240 237 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
241 238
242 239 if invalidate_cache:
243 240 log.debug('invalidating all repositories cache')
244 241 for repo in Repository.get_all():
245 242 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
246 243
247 244 filesystem_repos = ScmModel().repo_scan()
248 245 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, force_hooks_rebuild=True)
249 246 PermissionModel().trigger_permission_flush()
250 247
251 248 def _repr(rm_repo):
252 249 return ', '.join(map(safe_str, rm_repo)) or '-'
253 250
254 251 h.flash(_('Repositories successfully '
255 252 'rescanned added: %s ; removed: %s') %
256 253 (_repr(added), _repr(removed)),
257 254 category='success')
258 255 raise HTTPFound(h.route_path('admin_settings_mapping'))
259 256
260 257 @LoginRequired()
261 258 @HasPermissionAllDecorator('hg.admin')
262 259 def settings_global(self):
263 260 c = self.load_default_context()
264 261 c.active = 'global'
265 262 c.personal_repo_group_default_pattern = RepoGroupModel()\
266 263 .get_personal_group_name_pattern()
267 264
268 265 data = render('rhodecode:templates/admin/settings/settings.mako',
269 266 self._get_template_context(c), self.request)
270 267 html = formencode.htmlfill.render(
271 268 data,
272 269 defaults=self._form_defaults(),
273 270 encoding="UTF-8",
274 271 force_defaults=False
275 272 )
276 273 return Response(html)
277 274
278 275 @LoginRequired()
279 276 @HasPermissionAllDecorator('hg.admin')
280 277 @CSRFRequired()
281 278 def settings_global_update(self):
282 279 _ = self.request.translate
283 280 c = self.load_default_context()
284 281 c.active = 'global'
285 282 c.personal_repo_group_default_pattern = RepoGroupModel()\
286 283 .get_personal_group_name_pattern()
287 284 application_form = ApplicationSettingsForm(self.request.translate)()
288 285 try:
289 286 form_result = application_form.to_python(dict(self.request.POST))
290 287 except formencode.Invalid as errors:
291 288 h.flash(
292 289 _("Some form inputs contain invalid data."),
293 290 category='error')
294 291 data = render('rhodecode:templates/admin/settings/settings.mako',
295 292 self._get_template_context(c), self.request)
296 293 html = formencode.htmlfill.render(
297 294 data,
298 295 defaults=errors.value,
299 296 errors=errors.unpack_errors() or {},
300 297 prefix_error=False,
301 298 encoding="UTF-8",
302 299 force_defaults=False
303 300 )
304 301 return Response(html)
305 302
306 303 settings = [
307 304 ('title', 'rhodecode_title', 'unicode'),
308 305 ('realm', 'rhodecode_realm', 'unicode'),
309 306 ('pre_code', 'rhodecode_pre_code', 'unicode'),
310 307 ('post_code', 'rhodecode_post_code', 'unicode'),
311 308 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
312 309 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
313 310 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
314 311 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
315 312 ]
316 313
317 314 try:
318 315 for setting, form_key, type_ in settings:
319 316 sett = SettingsModel().create_or_update_setting(
320 317 setting, form_result[form_key], type_)
321 318 Session().add(sett)
322 319
323 320 Session().commit()
324 321 SettingsModel().invalidate_settings_cache()
325 322 h.flash(_('Updated application settings'), category='success')
326 323 except Exception:
327 324 log.exception("Exception while updating application settings")
328 325 h.flash(
329 326 _('Error occurred during updating application settings'),
330 327 category='error')
331 328
332 329 raise HTTPFound(h.route_path('admin_settings_global'))
333 330
334 331 @LoginRequired()
335 332 @HasPermissionAllDecorator('hg.admin')
336 333 def settings_visual(self):
337 334 c = self.load_default_context()
338 335 c.active = 'visual'
339 336
340 337 data = render('rhodecode:templates/admin/settings/settings.mako',
341 338 self._get_template_context(c), self.request)
342 339 html = formencode.htmlfill.render(
343 340 data,
344 341 defaults=self._form_defaults(),
345 342 encoding="UTF-8",
346 343 force_defaults=False
347 344 )
348 345 return Response(html)
349 346
350 347 @LoginRequired()
351 348 @HasPermissionAllDecorator('hg.admin')
352 349 @CSRFRequired()
353 350 def settings_visual_update(self):
354 351 _ = self.request.translate
355 352 c = self.load_default_context()
356 353 c.active = 'visual'
357 354 application_form = ApplicationVisualisationForm(self.request.translate)()
358 355 try:
359 356 form_result = application_form.to_python(dict(self.request.POST))
360 357 except formencode.Invalid as errors:
361 358 h.flash(
362 359 _("Some form inputs contain invalid data."),
363 360 category='error')
364 361 data = render('rhodecode:templates/admin/settings/settings.mako',
365 362 self._get_template_context(c), self.request)
366 363 html = formencode.htmlfill.render(
367 364 data,
368 365 defaults=errors.value,
369 366 errors=errors.unpack_errors() or {},
370 367 prefix_error=False,
371 368 encoding="UTF-8",
372 369 force_defaults=False
373 370 )
374 371 return Response(html)
375 372
376 373 try:
377 374 settings = [
378 375 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
379 376 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
380 377 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
381 378 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
382 379 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
383 380 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
384 381 ('show_version', 'rhodecode_show_version', 'bool'),
385 382 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
386 383 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
387 384 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
388 385 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
389 386 ('clone_uri_id_tmpl', 'rhodecode_clone_uri_id_tmpl', 'unicode'),
390 387 ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
391 388 ('support_url', 'rhodecode_support_url', 'unicode'),
392 389 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
393 390 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
394 391 ]
395 392 for setting, form_key, type_ in settings:
396 393 sett = SettingsModel().create_or_update_setting(
397 394 setting, form_result[form_key], type_)
398 395 Session().add(sett)
399 396
400 397 Session().commit()
401 398 SettingsModel().invalidate_settings_cache()
402 399 h.flash(_('Updated visualisation settings'), category='success')
403 400 except Exception:
404 401 log.exception("Exception updating visualization settings")
405 402 h.flash(_('Error occurred during updating '
406 403 'visualisation settings'),
407 404 category='error')
408 405
409 406 raise HTTPFound(h.route_path('admin_settings_visual'))
410 407
411 408 @LoginRequired()
412 409 @HasPermissionAllDecorator('hg.admin')
413 410 def settings_issuetracker(self):
414 411 c = self.load_default_context()
415 412 c.active = 'issuetracker'
416 413 defaults = c.rc_config
417 414
418 415 entry_key = 'rhodecode_issuetracker_pat_'
419 416
420 417 c.issuetracker_entries = {}
421 418 for k, v in defaults.items():
422 419 if k.startswith(entry_key):
423 420 uid = k[len(entry_key):]
424 421 c.issuetracker_entries[uid] = None
425 422
426 423 for uid in c.issuetracker_entries:
427 424 c.issuetracker_entries[uid] = AttributeDict({
428 425 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
429 426 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
430 427 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
431 428 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
432 429 })
433 430
434 431 return self._get_template_context(c)
435 432
436 433 @LoginRequired()
437 434 @HasPermissionAllDecorator('hg.admin')
438 435 @CSRFRequired()
439 436 def settings_issuetracker_test(self):
440 437 error_container = []
441 438
442 439 urlified_commit = h.urlify_commit_message(
443 440 self.request.POST.get('test_text', ''),
444 441 'repo_group/test_repo1', error_container=error_container)
445 442 if error_container:
446 443 def converter(inp):
447 444 return h.html_escape(inp)
448 445
449 446 return 'ERRORS: ' + '\n'.join(map(converter, error_container))
450 447
451 448 return urlified_commit
452 449
453 450 @LoginRequired()
454 451 @HasPermissionAllDecorator('hg.admin')
455 452 @CSRFRequired()
456 453 def settings_issuetracker_update(self):
457 454 _ = self.request.translate
458 455 self.load_default_context()
459 456 settings_model = IssueTrackerSettingsModel()
460 457
461 458 try:
462 459 form = IssueTrackerPatternsForm(self.request.translate)()
463 460 data = form.to_python(self.request.POST)
464 461 except formencode.Invalid as errors:
465 462 log.exception('Failed to add new pattern')
466 463 error = errors
467 464 h.flash(_(f'Invalid issue tracker pattern: {error}'),
468 465 category='error')
469 466 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
470 467
471 468 if data:
472 469 for uid in data.get('delete_patterns', []):
473 470 settings_model.delete_entries(uid)
474 471
475 472 for pattern in data.get('patterns', []):
476 473 for setting, value, type_ in pattern:
477 474 sett = settings_model.create_or_update_setting(
478 475 setting, value, type_)
479 476 Session().add(sett)
480 477
481 478 Session().commit()
482 479
483 480 SettingsModel().invalidate_settings_cache()
484 481 h.flash(_('Updated issue tracker entries'), category='success')
485 482 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
486 483
487 484 @LoginRequired()
488 485 @HasPermissionAllDecorator('hg.admin')
489 486 @CSRFRequired()
490 487 def settings_issuetracker_delete(self):
491 488 _ = self.request.translate
492 489 self.load_default_context()
493 490 uid = self.request.POST.get('uid')
494 491 try:
495 492 IssueTrackerSettingsModel().delete_entries(uid)
496 493 except Exception:
497 494 log.exception('Failed to delete issue tracker setting %s', uid)
498 495 raise HTTPNotFound()
499 496
500 497 SettingsModel().invalidate_settings_cache()
501 498 h.flash(_('Removed issue tracker entry.'), category='success')
502 499
503 500 return {'deleted': uid}
504 501
505 502 @LoginRequired()
506 503 @HasPermissionAllDecorator('hg.admin')
507 504 def settings_email(self):
508 505 c = self.load_default_context()
509 506 c.active = 'email'
510 507 c.rhodecode_ini = rhodecode.CONFIG
511 508
512 509 data = render('rhodecode:templates/admin/settings/settings.mako',
513 510 self._get_template_context(c), self.request)
514 511 html = formencode.htmlfill.render(
515 512 data,
516 513 defaults=self._form_defaults(),
517 514 encoding="UTF-8",
518 515 force_defaults=False
519 516 )
520 517 return Response(html)
521 518
522 519 @LoginRequired()
523 520 @HasPermissionAllDecorator('hg.admin')
524 521 @CSRFRequired()
525 522 def settings_email_update(self):
526 523 _ = self.request.translate
527 524 c = self.load_default_context()
528 525 c.active = 'email'
529 526
530 527 test_email = self.request.POST.get('test_email')
531 528
532 529 if not test_email:
533 530 h.flash(_('Please enter email address'), category='error')
534 531 raise HTTPFound(h.route_path('admin_settings_email'))
535 532
536 533 email_kwargs = {
537 534 'date': datetime.datetime.now(),
538 535 'user': self._rhodecode_db_user
539 536 }
540 537
541 538 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
542 539 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
543 540
544 541 recipients = [test_email] if test_email else None
545 542
546 543 run_task(tasks.send_email, recipients, subject,
547 544 email_body_plaintext, email_body)
548 545
549 546 h.flash(_('Send email task created'), category='success')
550 547 raise HTTPFound(h.route_path('admin_settings_email'))
551 548
552 549 @LoginRequired()
553 550 @HasPermissionAllDecorator('hg.admin')
554 551 def settings_hooks(self):
555 552 c = self.load_default_context()
556 553 c.active = 'hooks'
557 554
558 555 model = SettingsModel()
559 556 c.hooks = model.get_builtin_hooks()
560 557 c.custom_hooks = model.get_custom_hooks()
561 558
562 559 data = render('rhodecode:templates/admin/settings/settings.mako',
563 560 self._get_template_context(c), self.request)
564 561 html = formencode.htmlfill.render(
565 562 data,
566 563 defaults=self._form_defaults(),
567 564 encoding="UTF-8",
568 565 force_defaults=False
569 566 )
570 567 return Response(html)
571 568
572 569 @LoginRequired()
573 570 @HasPermissionAllDecorator('hg.admin')
574 571 @CSRFRequired()
575 572 def settings_hooks_update(self):
576 573 _ = self.request.translate
577 574 c = self.load_default_context()
578 575 c.active = 'hooks'
579 576 if c.visual.allow_custom_hooks_settings:
580 577 ui_key = self.request.POST.get('new_hook_ui_key')
581 578 ui_value = self.request.POST.get('new_hook_ui_value')
582 579
583 580 hook_id = self.request.POST.get('hook_id')
584 581 new_hook = False
585 582
586 583 model = SettingsModel()
587 584 try:
588 585 if ui_value and ui_key:
589 586 model.create_or_update_hook(ui_key, ui_value)
590 587 h.flash(_('Added new hook'), category='success')
591 588 new_hook = True
592 589 elif hook_id:
593 590 RhodeCodeUi.delete(hook_id)
594 591 Session().commit()
595 592
596 593 # check for edits
597 594 update = False
598 595 _d = self.request.POST.dict_of_lists()
599 596 for k, v in zip(_d.get('hook_ui_key', []),
600 597 _d.get('hook_ui_value_new', [])):
601 598 model.create_or_update_hook(k, v)
602 599 update = True
603 600
604 601 if update and not new_hook:
605 602 h.flash(_('Updated hooks'), category='success')
606 603 Session().commit()
607 604 except Exception:
608 605 log.exception("Exception during hook creation")
609 606 h.flash(_('Error occurred during hook creation'),
610 607 category='error')
611 608
612 609 raise HTTPFound(h.route_path('admin_settings_hooks'))
613 610
614 611 @LoginRequired()
615 612 @HasPermissionAllDecorator('hg.admin')
616 613 def settings_search(self):
617 614 c = self.load_default_context()
618 615 c.active = 'search'
619 616
620 617 c.searcher = searcher_from_config(self.request.registry.settings)
621 618 c.statistics = c.searcher.statistics(self.request.translate)
622 619
623 620 return self._get_template_context(c)
624 621
625 622 @LoginRequired()
626 623 @HasPermissionAllDecorator('hg.admin')
627 624 def settings_labs(self):
628 625 c = self.load_default_context()
629 626 if not c.labs_active:
630 627 raise HTTPFound(h.route_path('admin_settings'))
631 628
632 629 c.active = 'labs'
633 630 c.lab_settings = _LAB_SETTINGS
634 631
635 632 data = render('rhodecode:templates/admin/settings/settings.mako',
636 633 self._get_template_context(c), self.request)
637 634 html = formencode.htmlfill.render(
638 635 data,
639 636 defaults=self._form_defaults(),
640 637 encoding="UTF-8",
641 638 force_defaults=False
642 639 )
643 640 return Response(html)
644 641
645 642 @LoginRequired()
646 643 @HasPermissionAllDecorator('hg.admin')
647 644 @CSRFRequired()
648 645 def settings_labs_update(self):
649 646 _ = self.request.translate
650 647 c = self.load_default_context()
651 648 c.active = 'labs'
652 649
653 650 application_form = LabsSettingsForm(self.request.translate)()
654 651 try:
655 652 form_result = application_form.to_python(dict(self.request.POST))
656 653 except formencode.Invalid as errors:
657 654 h.flash(
658 655 _("Some form inputs contain invalid data."),
659 656 category='error')
660 657 data = render('rhodecode:templates/admin/settings/settings.mako',
661 658 self._get_template_context(c), self.request)
662 659 html = formencode.htmlfill.render(
663 660 data,
664 661 defaults=errors.value,
665 662 errors=errors.unpack_errors() or {},
666 663 prefix_error=False,
667 664 encoding="UTF-8",
668 665 force_defaults=False
669 666 )
670 667 return Response(html)
671 668
672 669 try:
673 670 session = Session()
674 671 for setting in _LAB_SETTINGS:
675 672 setting_name = setting.key[len('rhodecode_'):]
676 673 sett = SettingsModel().create_or_update_setting(
677 674 setting_name, form_result[setting.key], setting.type)
678 675 session.add(sett)
679 676
680 677 except Exception:
681 678 log.exception('Exception while updating lab settings')
682 679 h.flash(_('Error occurred during updating labs settings'),
683 680 category='error')
684 681 else:
685 682 Session().commit()
686 683 SettingsModel().invalidate_settings_cache()
687 684 h.flash(_('Updated Labs settings'), category='success')
688 685 raise HTTPFound(h.route_path('admin_settings_labs'))
689 686
690 687 data = render('rhodecode:templates/admin/settings/settings.mako',
691 688 self._get_template_context(c), self.request)
692 689 html = formencode.htmlfill.render(
693 690 data,
694 691 defaults=self._form_defaults(),
695 692 encoding="UTF-8",
696 693 force_defaults=False
697 694 )
698 695 return Response(html)
699 696
700 697
701 698 # :param key: name of the setting including the 'rhodecode_' prefix
702 699 # :param type: the RhodeCodeSetting type to use.
703 700 # :param group: the i18ned group in which we should display this setting
704 701 # :param label: the i18ned label we should display for this setting
705 702 # :param help: the i18ned help we should display for this setting
706 703 LabSetting = collections.namedtuple(
707 704 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
708 705
709 706
710 707 # This list has to be kept in sync with the form
711 708 # rhodecode.model.forms.LabsSettingsForm.
712 709 _LAB_SETTINGS = [
713 710
714 711 ]
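For illustration only (CE ships this list empty): a hypothetical entry would look like the sketch below, and would also need a matching field in rhodecode.model.forms.LabsSettingsForm as noted above. The key, group, label and help texts are made up.

    # Hypothetical example entry (not shipped with this changeset).
    _LAB_SETTINGS = [
        LabSetting(
            key='rhodecode_use_experimental_merges',
            type='bool',
            group='Pull requests',
            label='Enable experimental merge strategy',
            help='Use the experimental merge strategy for pull requests.',
        ),
    ]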
@@ -1,97 +1,97 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import codecs
20 20 import logging
21 21 import os
22 22 from pyramid.renderers import render
23 23
24 24 from rhodecode.events import trigger
25 from rhodecode.lib.utils import get_rhodecode_realm, get_rhodecode_base_path
25 from rhodecode.lib.utils import get_rhodecode_realm, get_rhodecode_repo_store_path
26 26 from rhodecode.lib.utils2 import str2bool
27 27 from rhodecode.model.db import RepoGroup
28 28
29 29 from . import config_keys
30 30 from .events import ModDavSvnConfigChange
31 31
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 def write_mod_dav_svn_config(settings):
37 37 use_ssl = str2bool(settings['force_https'])
38 38 file_path = settings[config_keys.config_file_path]
39 39 config = _render_mod_dav_svn_config(
40 40 use_ssl=use_ssl,
41 parent_path_root=get_rhodecode_base_path(),
41 parent_path_root=get_rhodecode_repo_store_path(),
42 42 list_parent_path=settings[config_keys.list_parent_path],
43 43 location_root=settings[config_keys.location_root],
44 44 repo_groups=RepoGroup.get_all_repo_groups(),
45 45 realm=get_rhodecode_realm(), template=settings[config_keys.template])
46 46 _write_mod_dav_svn_config(config, file_path)
47 47 return file_path
48 48
49 49
50 50 def generate_mod_dav_svn_config(registry):
51 51 """
52 52 Generate the configuration file for use with subversion's mod_dav_svn
53 53 module. The configuration has to contain a <Location> block for each
54 54 available repository group because the mod_dav_svn module does not support
55 55 repositories organized in sub folders.
56 56 """
57 57 settings = registry.settings
58 58 file_path = write_mod_dav_svn_config(settings)
59 59
60 60 # Trigger an event on mod dav svn configuration change.
61 61 trigger(ModDavSvnConfigChange(), registry)
62 62 return file_path
63 63
64 64
65 65 def _render_mod_dav_svn_config(
66 66 parent_path_root, list_parent_path, location_root, repo_groups, realm,
67 67 use_ssl, template):
68 68 """
69 69 Render mod_dav_svn configuration to string.
70 70 """
71 71 repo_group_paths = []
72 72 for repo_group in repo_groups:
73 73 group_path = repo_group.full_path_splitted
74 74 location = os.path.join(location_root, *group_path)
75 75 parent_path = os.path.join(parent_path_root, *group_path)
76 76 repo_group_paths.append((location, parent_path))
77 77
78 78 context = {
79 79 'location_root': location_root,
80 80 'parent_path_root': parent_path_root,
81 81 'repo_group_paths': repo_group_paths,
82 82 'svn_list_parent_path': list_parent_path,
83 83 'rhodecode_realm': realm,
84 84 'use_https': use_ssl,
85 85 }
86 86 template = template or \
87 87 'rhodecode:apps/svn_support/templates/mod-dav-svn.conf.mako'
88 88 # Render the configuration template to string.
89 89 return render(template, context)
90 90
91 91
92 92 def _write_mod_dav_svn_config(config, filepath):
93 93 """
94 94 Write mod_dav_svn config to file.
95 95 """
96 96 with codecs.open(filepath, 'w', encoding='utf-8') as f:
97 97 f.write(config)
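To make the <Location>/parent-path mapping above concrete, here is a small self-contained sketch. The group names and store path are illustrative; the parent path root now comes from the ini-configured repo store rather than the database.

    import os

    location_root = '/'
    parent_path_root = '/var/opt/rhodecode_repo_store'  # e.g. value of repo_store.path

    # one <Location> block per repository group, mirroring the loop above
    for group_path in (['projects'], ['projects', 'backend']):
        location = os.path.join(location_root, *group_path)
        parent_path = os.path.join(parent_path_root, *group_path)
        print(f'<Location {location}>  ->  SVNParentPath {parent_path}')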
@@ -1,198 +1,200 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import tempfile
21 21 import logging
22 22
23 23 from pyramid.settings import asbool
24 24
25 25 from rhodecode.config.settings_maker import SettingsMaker
26 26 from rhodecode.config import utils as config_utils
27 27
28 28 log = logging.getLogger(__name__)
29 29
30 30
31 31 def sanitize_settings_and_apply_defaults(global_config, settings):
32 32 """
33 33 Applies settings defaults and does all type conversion.
34 34
35 35 The goal is to move all settings parsing and preparation into this place, so
36 36 that we have only one place which deals with this part. The remaining parts
37 37 of the application can then rely fully on well-prepared settings.
38 38
39 39 This piece would later be split up per topic to avoid a big fat monster
40 40 function.
41 41 """
42 42 jn = os.path.join
43 43
44 44 global_settings_maker = SettingsMaker(global_config)
45 45 global_settings_maker.make_setting('debug', default=False, parser='bool')
46 46 debug_enabled = asbool(global_config.get('debug'))
47 47
48 48 settings_maker = SettingsMaker(settings)
49 49
50 50 settings_maker.make_setting(
51 51 'logging.autoconfigure',
52 52 default=False,
53 53 parser='bool')
54 54
55 55 logging_conf = jn(os.path.dirname(global_config.get('__file__')), 'logging.ini')
56 56 settings_maker.enable_logging(logging_conf, level='INFO' if debug_enabled else 'DEBUG')
57 57
58 58 # Default includes, possible to change as a user
59 59 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
60 60 log.debug(
61 61 "Using the following pyramid.includes: %s",
62 62 pyramid_includes)
63 63
64 64 settings_maker.make_setting('rhodecode.edition', 'Community Edition')
65 65 settings_maker.make_setting('rhodecode.edition_id', 'CE')
66 66
67 67 if 'mako.default_filters' not in settings:
68 68 # set custom default filters if we don't have it defined
69 69 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
70 70 settings['mako.default_filters'] = 'h_filter'
71 71
72 72 if 'mako.directories' not in settings:
73 73 mako_directories = settings.setdefault('mako.directories', [
74 74 # Base templates of the original application
75 75 'rhodecode:templates',
76 76 ])
77 77 log.debug(
78 78 "Using the following Mako template directories: %s",
79 79 mako_directories)
80 80
81 81 # NOTE(marcink): fix redis requirement for schema of connection since 3.X
82 82 if 'beaker.session.type' in settings and settings['beaker.session.type'] == 'ext:redis':
83 83 raw_url = settings['beaker.session.url']
84 84 if not raw_url.startswith(('redis://', 'rediss://', 'unix://')):
85 85 settings['beaker.session.url'] = 'redis://' + raw_url
86 86
87 87 settings_maker.make_setting('__file__', global_config.get('__file__'))
88 88
89 89 # TODO: johbo: Re-think this, usually the call to config.include
90 90 # should allow to pass in a prefix.
91 91 settings_maker.make_setting('rhodecode.api.url', '/_admin/api')
92 92
93 93 # Sanitize generic settings.
94 94 settings_maker.make_setting('default_encoding', 'UTF-8', parser='list')
95 95 settings_maker.make_setting('is_test', False, parser='bool')
96 96 settings_maker.make_setting('gzip_responses', False, parser='bool')
97 97
98 98 # statsd
99 99 settings_maker.make_setting('statsd.enabled', False, parser='bool')
100 100 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
101 101 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
102 102 settings_maker.make_setting('statsd.statsd_prefix', '')
103 103 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
104 104
105 105 settings_maker.make_setting('vcs.svn.compatible_version', '')
106 106 settings_maker.make_setting('vcs.hooks.protocol', 'http')
107 107 settings_maker.make_setting('vcs.hooks.host', '*')
108 108 settings_maker.make_setting('vcs.scm_app_implementation', 'http')
109 109 settings_maker.make_setting('vcs.server', '')
110 110 settings_maker.make_setting('vcs.server.protocol', 'http')
111 111 settings_maker.make_setting('vcs.server.enable', 'true', parser='bool')
112 112 settings_maker.make_setting('startup.import_repos', 'false', parser='bool')
113 113 settings_maker.make_setting('vcs.hooks.direct_calls', 'false', parser='bool')
114 114 settings_maker.make_setting('vcs.start_server', 'false', parser='bool')
115 115 settings_maker.make_setting('vcs.backends', 'hg, git, svn', parser='list')
116 116 settings_maker.make_setting('vcs.connection_timeout', 3600, parser='int')
117 117
118 118 settings_maker.make_setting('vcs.methods.cache', True, parser='bool')
119 119
120 # repo_store path
121 settings_maker.make_setting('repo_store.path', '/var/opt/rhodecode_repo_store')
120 122 # Support legacy values of vcs.scm_app_implementation. Legacy
121 123 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http', or
122 124 # 'vcsserver.scm_app' (disabled since 4.13), which is now mapped to 'http'.
123 125 scm_app_impl = settings['vcs.scm_app_implementation']
124 126 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
125 127 settings['vcs.scm_app_implementation'] = 'http'
126 128
127 129 settings_maker.make_setting('appenlight', False, parser='bool')
128 130
129 131 temp_store = tempfile.gettempdir()
130 132 tmp_cache_dir = jn(temp_store, 'rc_cache')
131 133
132 134 # save the default cache dir, and use it for all backends later.
133 135 default_cache_dir = settings_maker.make_setting(
134 136 'cache_dir',
135 137 default=tmp_cache_dir, default_when_empty=True,
136 138 parser='dir:ensured')
137 139
138 140 # exception store cache
139 141 settings_maker.make_setting(
140 142 'exception_tracker.store_path',
141 143 default=jn(default_cache_dir, 'exc_store'), default_when_empty=True,
142 144 parser='dir:ensured'
143 145 )
144 146
145 147 settings_maker.make_setting(
146 148 'celerybeat-schedule.path',
147 149 default=jn(default_cache_dir, 'celerybeat_schedule', 'celerybeat-schedule.db'), default_when_empty=True,
148 150 parser='file:ensured'
149 151 )
150 152
151 153 settings_maker.make_setting('exception_tracker.send_email', False, parser='bool')
152 154 settings_maker.make_setting('exception_tracker.email_prefix', '[RHODECODE ERROR]', default_when_empty=True)
153 155
154 156 # sessions, ensure file since no-value is memory
155 157 settings_maker.make_setting('beaker.session.type', 'file')
156 158 settings_maker.make_setting('beaker.session.data_dir', jn(default_cache_dir, 'session_data'))
157 159
158 160 # cache_general
159 161 settings_maker.make_setting('rc_cache.cache_general.backend', 'dogpile.cache.rc.file_namespace')
160 162 settings_maker.make_setting('rc_cache.cache_general.expiration_time', 60 * 60 * 12, parser='int')
161 163 settings_maker.make_setting('rc_cache.cache_general.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_general.db'))
162 164
163 165 # cache_perms
164 166 settings_maker.make_setting('rc_cache.cache_perms.backend', 'dogpile.cache.rc.file_namespace')
165 167 settings_maker.make_setting('rc_cache.cache_perms.expiration_time', 60 * 60, parser='int')
166 168 settings_maker.make_setting('rc_cache.cache_perms.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_perms_db'))
167 169
168 170 # cache_repo
169 171 settings_maker.make_setting('rc_cache.cache_repo.backend', 'dogpile.cache.rc.file_namespace')
170 172 settings_maker.make_setting('rc_cache.cache_repo.expiration_time', 60 * 60 * 24 * 30, parser='int')
171 173 settings_maker.make_setting('rc_cache.cache_repo.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_repo_db'))
172 174
173 175 # cache_license
174 176 settings_maker.make_setting('rc_cache.cache_license.backend', 'dogpile.cache.rc.file_namespace')
175 177 settings_maker.make_setting('rc_cache.cache_license.expiration_time', 60 * 5, parser='int')
176 178 settings_maker.make_setting('rc_cache.cache_license.arguments.filename', jn(default_cache_dir, 'rhodecode_cache_license_db'))
177 179
178 180 # cache_repo_longterm memory, 96H
179 181 settings_maker.make_setting('rc_cache.cache_repo_longterm.backend', 'dogpile.cache.rc.memory_lru')
180 182 settings_maker.make_setting('rc_cache.cache_repo_longterm.expiration_time', 345600, parser='int')
181 183 settings_maker.make_setting('rc_cache.cache_repo_longterm.max_size', 10000, parser='int')
182 184
183 185 # sql_cache_short
184 186 settings_maker.make_setting('rc_cache.sql_cache_short.backend', 'dogpile.cache.rc.memory_lru')
185 187 settings_maker.make_setting('rc_cache.sql_cache_short.expiration_time', 30, parser='int')
186 188 settings_maker.make_setting('rc_cache.sql_cache_short.max_size', 10000, parser='int')
187 189
188 190 # archive_cache
189 191 settings_maker.make_setting('archive_cache.store_dir', jn(default_cache_dir, 'archive_cache'), default_when_empty=True,)
190 192 settings_maker.make_setting('archive_cache.cache_size_gb', 10, parser='float')
191 193 settings_maker.make_setting('archive_cache.cache_shards', 10, parser='int')
192 194
193 195 settings_maker.env_expand()
194 196
195 197 # configure instance id
196 198 config_utils.set_instance_id(settings)
197 199
198 200 return settings
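As a consequence of the new `repo_store.path` default above, application code can resolve the storage location straight from the ini-backed settings. A hypothetical helper (not part of this changeset) could look like:

    # Hypothetical helper, assuming a Pyramid registry whose settings went
    # through sanitize_settings_and_apply_defaults() above.
    def get_repo_store_path(registry):
        path = registry.settings.get('repo_store.path')
        if not path:
            raise ValueError('repo_store.path is not configured')
        return path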
@@ -1,89 +1,88 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import logging
21 21 import rhodecode
22 22 import collections
23 23
24 24 from rhodecode.config import utils
25 25
26 26 from rhodecode.lib.utils import load_rcextensions
27 27 from rhodecode.lib.utils2 import str2bool
28 28 from rhodecode.lib.vcs import connect_vcs
29 29
30 30 log = logging.getLogger(__name__)
31 31
32 32
33 33 def load_pyramid_environment(global_config, settings):
34 34 # Some parts of the code expect a merge of global and app settings.
35 35 settings_merged = global_config.copy()
36 36 settings_merged.update(settings)
37 37
38 38 # TODO(marcink): probably not required anymore
39 39 # configure channelstream,
40 40 settings_merged['channelstream_config'] = {
41 41 'enabled': str2bool(settings_merged.get('channelstream.enabled', False)),
42 42 'server': settings_merged.get('channelstream.server'),
43 43 'secret': settings_merged.get('channelstream.secret')
44 44 }
45 45
46 46 # If this is a test run we prepare the test environment like
47 47 # creating a test database, test search index and test repositories.
48 48 # This has to be done before the database connection is initialized.
49 49 if settings['is_test']:
50 50 rhodecode.is_test = True
51 51 rhodecode.disable_error_handler = True
52 52 from rhodecode import authentication
53 53 authentication.plugin_default_auth_ttl = 0
54 54
55 55 utils.initialize_test_environment(settings_merged)
56 56
57 57 # Initialize the database connection.
58 58 utils.initialize_database(settings_merged)
59 59
60 60 load_rcextensions(root_path=settings_merged['here'])
61 61
62 62 # Limit backends to `vcs.backends` from configuration, and preserve the order
63 63 for alias in rhodecode.BACKENDS.keys():
64 64 if alias not in settings['vcs.backends']:
65 65 del rhodecode.BACKENDS[alias]
66 66
67 67 _sorted_backend = sorted(rhodecode.BACKENDS.items(),
68 68 key=lambda item: settings['vcs.backends'].index(item[0]))
69 69 rhodecode.BACKENDS = collections.OrderedDict(_sorted_backend)
70 70
71 71 log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys())
72 72
73 73 # initialize vcs client and optionally run the server if enabled
74 74 vcs_server_uri = settings['vcs.server']
75 75 vcs_server_enabled = settings['vcs.server.enable']
76 76
77 77 utils.configure_vcs(settings)
78 78
79 79 # Store the settings to make them available to other modules.
80 80
81 81 rhodecode.PYRAMID_SETTINGS = settings_merged
82 82 rhodecode.CONFIG = settings_merged
83 83 rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
84 rhodecode.CONFIG['default_base_path'] = utils.get_default_base_path()
85 84
86 85 if vcs_server_enabled:
87 86 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
88 87 else:
89 88 log.warning('vcs-server not enabled, vcs connection unavailable')
@@ -1,116 +1,104 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import platform
21 21
22 22
23 23 def configure_vcs(config):
24 24 """
25 25 Patch VCS config with some RhodeCode specific stuff
26 26 """
27 27 from rhodecode.lib.vcs import conf
28 28 import rhodecode.lib.vcs.conf.settings
29 29
30 30 conf.settings.BACKENDS = {
31 31 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
32 32 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
33 33 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
34 34 }
35 35
36 36 conf.settings.HOOKS_PROTOCOL = config['vcs.hooks.protocol']
37 37 conf.settings.HOOKS_HOST = config['vcs.hooks.host']
38 38 conf.settings.DEFAULT_ENCODINGS = config['default_encoding']
39 39 conf.settings.ALIASES[:] = config['vcs.backends']
40 40 conf.settings.SVN_COMPATIBLE_VERSION = config['vcs.svn.compatible_version']
41 41
42 42
43 43 def initialize_database(config):
44 44 from rhodecode.lib.utils2 import engine_from_config, get_encryption_key
45 45 from rhodecode.model import init_model
46 46 engine = engine_from_config(config, 'sqlalchemy.db1.')
47 47 init_model(engine, encryption_key=get_encryption_key(config))
48 48
49 49
50 50 def initialize_test_environment(settings, test_env=None):
51 51 if test_env is None:
52 52 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
53 53
54 54 from rhodecode.lib.utils import (
55 55 create_test_directory, create_test_database, create_test_repositories,
56 56 create_test_index)
57 57 from rhodecode.tests import TESTS_TMP_PATH
58 58 from rhodecode.lib.vcs.backends.hg import largefiles_store
59 59 from rhodecode.lib.vcs.backends.git import lfs_store
60 60
61 61 # test repos
62 62 if test_env:
63 63 create_test_directory(TESTS_TMP_PATH)
64 64 # large object stores
65 65 create_test_directory(largefiles_store(TESTS_TMP_PATH))
66 66 create_test_directory(lfs_store(TESTS_TMP_PATH))
67 67
68 68 create_test_database(TESTS_TMP_PATH, settings)
69 69 create_test_repositories(TESTS_TMP_PATH, settings)
70 70 create_test_index(TESTS_TMP_PATH, settings)
71 71
72 72
73 73 def get_vcs_server_protocol(config):
74 74 return config['vcs.server.protocol']
75 75
76 76
77 77 def set_instance_id(config):
78 78 """
79 79 Sets a dynamic generated config['instance_id'] if missing or '*'
80 80 E.g instance_id = *cluster-1 or instance_id = *
81 81 """
82 82
83 83 config['instance_id'] = config.get('instance_id') or ''
84 84 instance_id = config['instance_id']
85 85 if instance_id.startswith('*') or not instance_id:
86 86 prefix = instance_id.lstrip('*')
87 87 _platform_id = platform.uname()[1] or 'instance'
88 88 config['instance_id'] = '{prefix}uname:{platform}-pid:{pid}'.format(
89 89 prefix=prefix,
90 90 platform=_platform_id,
91 91 pid=os.getpid())
92 92
93 93
94 94 def get_default_user_id():
95 95 DEFAULT_USER = 'default'
96 96 from sqlalchemy import text
97 97 from rhodecode.model import meta
98 98
99 99 engine = meta.get_engine()
100 100 with meta.SA_Session(engine) as session:
101 101 result = session.execute(text("SELECT user_id from users where username = :uname"), {'uname': DEFAULT_USER})
102 102 user_id = result.first()[0]
103 103
104 104 return user_id
105
106
107 def get_default_base_path():
108 from sqlalchemy import text
109 from rhodecode.model import meta
110
111 engine = meta.get_engine()
112 with meta.SA_Session(engine) as session:
113 result = session.execute(text("SELECT ui_value from rhodecode_ui where ui_key = '/'"))
114 base_path = result.first()[0]
115
116 return base_path
@@ -1,609 +1,607 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 The base Controller API
21 21 Provides the BaseController class for subclassing. And usage in different
22 22 controllers
23 23 """
24 24
25 25 import logging
26 26 import socket
27 27 import base64
28 28
29 29 import markupsafe
30 30 import ipaddress
31 31
32 32 import paste.httpheaders
33 33 from paste.auth.basic import AuthBasicAuthenticator
34 34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 35
36 36 import rhodecode
37 37 from rhodecode.authentication.base import VCS_TYPE
38 38 from rhodecode.lib import auth, utils2
39 39 from rhodecode.lib import helpers as h
40 40 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
41 41 from rhodecode.lib.exceptions import UserCreationError
42 42 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.lib.str_utils import ascii_bytes, safe_int, safe_str
45 45 from rhodecode.lib.type_utils import aslist, str2bool
46 46 from rhodecode.lib.hash_utils import sha1
47 47 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
48 48 from rhodecode.model.notification import NotificationModel
49 49 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 def _filter_proxy(ip):
55 55 """
56 56 IP addresses passed in headers can come in a special format of multiple
57 57 comma-separated IPs, appended by the various proxies in the chain of
58 58 request processing, with the left-most being the original client.
59 59 We only care about that first IP, which came from the original client.
60 60
61 61 :param ip: ip string from headers
62 62 """
63 63 if ',' in ip:
64 64 _ips = ip.split(',')
65 65 _first_ip = _ips[0].strip()
66 66 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
67 67 return _first_ip
68 68 return ip
69 69
70 70
71 71 def _filter_port(ip):
72 72 """
73 73 Removes a port from an ip; there are 4 main cases to handle here:
74 74 - ipv4 eg. 127.0.0.1
75 75 - ipv6 eg. ::1
76 76 - ipv4+port eg. 127.0.0.1:8080
77 77 - ipv6+port eg. [::1]:8080
78 78
79 79 :param ip:
80 80 """
81 81 def is_ipv6(ip_addr):
82 82 if hasattr(socket, 'inet_pton'):
83 83 try:
84 84 socket.inet_pton(socket.AF_INET6, ip_addr)
85 85 except socket.error:
86 86 return False
87 87 else:
88 88 # fallback to ipaddress
89 89 try:
90 90 ipaddress.IPv6Address(safe_str(ip_addr))
91 91 except Exception:
92 92 return False
93 93 return True
94 94
95 95 if ':' not in ip: # must be ipv4 pure ip
96 96 return ip
97 97
98 98 if '[' in ip and ']' in ip: # ipv6 with port
99 99 return ip.split(']')[0][1:].lower()
100 100
101 101 # must be ipv6 or ipv4 with port
102 102 if is_ipv6(ip):
103 103 return ip
104 104 else:
105 105 ip, _port = ip.split(':')[:2] # means ipv4+port
106 106 return ip
107 107
108 108
109 109 def get_ip_addr(environ):
110 110 proxy_key = 'HTTP_X_REAL_IP'
111 111 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
112 112 def_key = 'REMOTE_ADDR'
113 113
114 114 def ip_filters(ip_):
115 115 return _filter_port(_filter_proxy(ip_))
116 116
117 117 ip = environ.get(proxy_key)
118 118 if ip:
119 119 return ip_filters(ip)
120 120
121 121 ip = environ.get(proxy_key2)
122 122 if ip:
123 123 return ip_filters(ip)
124 124
125 125 ip = environ.get(def_key, '0.0.0.0')
126 126 return ip_filters(ip)
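A minimal sketch of the header precedence above: X-Real-IP wins over X-Forwarded-For, which wins over REMOTE_ADDR, and only the left-most (original client) address of a proxied chain is kept, with any port stripped. The addresses are illustrative.

    environ = {
        'HTTP_X_FORWARDED_FOR': '203.0.113.7, 10.0.0.2',
        'REMOTE_ADDR': '10.0.0.2',
    }
    assert get_ip_addr(environ) == '203.0.113.7'

    # ipv6 with port, taken from REMOTE_ADDR
    assert get_ip_addr({'REMOTE_ADDR': '[::1]:8080'}) == '::1'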
127 127
128 128
129 129 def get_server_ip_addr(environ, log_errors=True):
130 130 hostname = environ.get('SERVER_NAME')
131 131 try:
132 132 return socket.gethostbyname(hostname)
133 133 except Exception as e:
134 134 if log_errors:
135 135 # in some cases this lookup is not possible, and we don't want to
136 136 # make it an exception in logs
137 137 log.exception('Could not retrieve server ip address: %s', e)
138 138 return hostname
139 139
140 140
141 141 def get_server_port(environ):
142 142 return environ.get('SERVER_PORT')
143 143
144 144
145 145
146 146 def get_user_agent(environ):
147 147 return environ.get('HTTP_USER_AGENT')
148 148
149 149
150 150 def vcs_operation_context(
151 151 environ, repo_name, username, action, scm, check_locking=True,
152 152 is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
153 153 """
154 154 Generate the context for a vcs operation, e.g. push or pull.
155 155
156 156 This context is passed over the layers so that hooks triggered by the
157 157 vcs operation know details like the user, the user's IP address etc.
158 158
159 159 :param check_locking: Allows switching off the computation of the locking
160 160 data. This serves mainly the need of the simplevcs middleware to be
161 161 able to disable this for certain operations.
162 162
163 163 """
164 164 # Tri-state value: False: unlock, None: nothing, True: lock
165 165 make_lock = None
166 166 locked_by = [None, None, None]
167 167 is_anonymous = username == User.DEFAULT_USER
168 168 user = User.get_by_username(username)
169 169 if not is_anonymous and check_locking:
170 170 log.debug('Checking locking on repository "%s"', repo_name)
171 171 repo = Repository.get_by_repo_name(repo_name)
172 172 make_lock, __, locked_by = repo.get_locking_state(
173 173 action, user.user_id)
174 174 user_id = user.user_id
175 175 settings_model = VcsSettingsModel(repo=repo_name)
176 176 ui_settings = settings_model.get_ui_settings()
177 177
178 178 # NOTE(marcink): This should be also in sync with
179 179 # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
180 180 store = [x for x in ui_settings if x.key == '/']
181 181 repo_store = ''
182 182 if store:
183 183 repo_store = store[0].value
184 184
185 185 scm_data = {
186 186 'ip': get_ip_addr(environ),
187 187 'username': username,
188 188 'user_id': user_id,
189 189 'action': action,
190 190 'repository': repo_name,
191 191 'scm': scm,
192 192 'config': rhodecode.CONFIG['__file__'],
193 193 'repo_store': repo_store,
194 194 'make_lock': make_lock,
195 195 'locked_by': locked_by,
196 196 'server_url': utils2.get_server_url(environ),
197 197 'user_agent': get_user_agent(environ),
198 198 'hooks': get_enabled_hook_classes(ui_settings),
199 199 'is_shadow_repo': is_shadow_repo,
200 200 'detect_force_push': detect_force_push,
201 201 'check_branch_perms': check_branch_perms,
202 202 }
203 203 return scm_data
204 204
205 205
206 206 class BasicAuth(AuthBasicAuthenticator):
207 207
208 208 def __init__(self, realm, authfunc, registry, auth_http_code=None,
209 209 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
210 210 super().__init__(realm=realm, authfunc=authfunc)
211 211 self.realm = realm
212 212 self.rc_realm = rc_realm
213 213 self.initial_call = initial_call_detection
214 214 self.authfunc = authfunc
215 215 self.registry = registry
216 216 self.acl_repo_name = acl_repo_name
217 217 self._rc_auth_http_code = auth_http_code
218 218
219 219 def _get_response_from_code(self, http_code, fallback):
220 220 try:
221 221 return get_exception(safe_int(http_code))
222 222 except Exception:
223 223 log.exception('Failed to fetch response class for code %s, using fallback: %s', http_code, fallback)
224 224 return fallback
225 225
226 226 def get_rc_realm(self):
227 227 return safe_str(self.rc_realm)
228 228
229 229 def build_authentication(self):
230 230 header = [('WWW-Authenticate', f'Basic realm="{self.realm}"')]
231 231
232 232 # NOTE: the initial_call detection seems to be not working/not needed with latest Mercurial
233 233 # investigate if we still need it.
234 234 if self._rc_auth_http_code and not self.initial_call:
235 235 # return alternative HTTP code if alternative http return code
236 236 # is specified in RhodeCode config, but ONLY if it's not the
237 237 # FIRST call
238 238 custom_response_klass = self._get_response_from_code(self._rc_auth_http_code, fallback=HTTPUnauthorized)
239 239 log.debug('Using custom response class: %s', custom_response_klass)
240 240 return custom_response_klass(headers=header)
241 241 return HTTPUnauthorized(headers=header)
242 242
243 243 def authenticate(self, environ):
244 244 authorization = paste.httpheaders.AUTHORIZATION(environ)
245 245 if not authorization:
246 246 return self.build_authentication()
247 247 (auth_meth, auth_creds_b64) = authorization.split(' ', 1)
248 248 if 'basic' != auth_meth.lower():
249 249 return self.build_authentication()
250 250
251 251 credentials = safe_str(base64.b64decode(auth_creds_b64.strip()))
252 252 _parts = credentials.split(':', 1)
253 253 if len(_parts) == 2:
254 254 username, password = _parts
255 255 auth_data = self.authfunc(
256 256 username, password, environ, VCS_TYPE,
257 257 registry=self.registry, acl_repo_name=self.acl_repo_name)
258 258 if auth_data:
259 259 return {'username': username, 'auth_data': auth_data}
260 260 if username and password:
261 261 # we mark that we actually executed authentication once, at
262 262 # that point we can use the alternative auth code
263 263 self.initial_call = False
264 264
265 265 return self.build_authentication()
266 266
267 267 __call__ = authenticate
268 268
269 269
270 270 def calculate_version_hash(config):
271 271 return sha1(
272 272 config.get(b'beaker.session.secret', b'') + ascii_bytes(rhodecode.__version__)
273 273 )[:8]
274 274
275 275
276 276 def get_current_lang(request):
277 277 return getattr(request, '_LOCALE_', request.locale_name)
278 278
279 279
280 280 def attach_context_attributes(context, request, user_id=None, is_api=None):
281 281 """
282 282 Attach variables into template context called `c`.
283 283 """
284 284 config = request.registry.settings
285 285
286 286 rc_config = SettingsModel().get_all_settings(cache=True, from_request=False)
287 287 context.rc_config = rc_config
288 288 context.rhodecode_version = rhodecode.__version__
289 289 context.rhodecode_edition = config.get('rhodecode.edition')
290 290 context.rhodecode_edition_id = config.get('rhodecode.edition_id')
291 291 # unique secret + version does not leak the version but keep consistency
292 292 context.rhodecode_version_hash = calculate_version_hash(config)
293 293
294 294 # Default language set for the incoming request
295 295 context.language = get_current_lang(request)
296 296
297 297 # Visual options
298 298 context.visual = AttributeDict({})
299 299
300 300 # DB stored Visual Items
301 301 context.visual.show_public_icon = str2bool(
302 302 rc_config.get('rhodecode_show_public_icon'))
303 303 context.visual.show_private_icon = str2bool(
304 304 rc_config.get('rhodecode_show_private_icon'))
305 305 context.visual.stylify_metatags = str2bool(
306 306 rc_config.get('rhodecode_stylify_metatags'))
307 307 context.visual.dashboard_items = safe_int(
308 308 rc_config.get('rhodecode_dashboard_items', 100))
309 309 context.visual.admin_grid_items = safe_int(
310 310 rc_config.get('rhodecode_admin_grid_items', 100))
311 311 context.visual.show_revision_number = str2bool(
312 312 rc_config.get('rhodecode_show_revision_number', True))
313 313 context.visual.show_sha_length = safe_int(
314 314 rc_config.get('rhodecode_show_sha_length', 100))
315 315 context.visual.repository_fields = str2bool(
316 316 rc_config.get('rhodecode_repository_fields'))
317 317 context.visual.show_version = str2bool(
318 318 rc_config.get('rhodecode_show_version'))
319 319 context.visual.use_gravatar = str2bool(
320 320 rc_config.get('rhodecode_use_gravatar'))
321 321 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
322 322 context.visual.default_renderer = rc_config.get(
323 323 'rhodecode_markup_renderer', 'rst')
324 324 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
325 325 context.visual.rhodecode_support_url = \
326 326 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
327 327
328 328 context.visual.affected_files_cut_off = 60
329 329
330 330 context.pre_code = rc_config.get('rhodecode_pre_code')
331 331 context.post_code = rc_config.get('rhodecode_post_code')
332 332 context.rhodecode_name = rc_config.get('rhodecode_title')
333 333 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
334 334 # if we have specified default_encoding in the request, it has more
335 335 # priority
336 336 if request.GET.get('default_encoding'):
337 337 context.default_encodings.insert(0, request.GET.get('default_encoding'))
338 338 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
339 339 context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
340 340 context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')
341 341
342 342 # INI stored
343 343 context.labs_active = str2bool(
344 344 config.get('labs_settings_active', 'false'))
345 345 context.ssh_enabled = str2bool(
346 346 config.get('ssh.generate_authorized_keyfile', 'false'))
347 347 context.ssh_key_generator_enabled = str2bool(
348 348 config.get('ssh.enable_ui_key_generator', 'true'))
349 349
350 context.visual.allow_repo_location_change = str2bool(
351 config.get('allow_repo_location_change', True))
352 350 context.visual.allow_custom_hooks_settings = str2bool(
353 351 config.get('allow_custom_hooks_settings', True))
354 352 context.debug_style = str2bool(config.get('debug_style', False))
355 353
356 354 context.rhodecode_instanceid = config.get('instance_id')
357 355
358 356 context.visual.cut_off_limit_diff = safe_int(
359 357 config.get('cut_off_limit_diff'), default=0)
360 358 context.visual.cut_off_limit_file = safe_int(
361 359 config.get('cut_off_limit_file'), default=0)
362 360
363 361 context.license = AttributeDict({})
364 362 context.license.hide_license_info = str2bool(
365 363 config.get('license.hide_license_info', False))
366 364
367 365 # AppEnlight
368 366 context.appenlight_enabled = config.get('appenlight', False)
369 367 context.appenlight_api_public_key = config.get(
370 368 'appenlight.api_public_key', '')
371 369 context.appenlight_server_url = config.get('appenlight.server_url', '')
372 370
373 371 diffmode = {
374 372 "unified": "unified",
375 373 "sideside": "sideside"
376 374 }.get(request.GET.get('diffmode'))
377 375
378 376 if is_api is not None:
379 377 is_api = hasattr(request, 'rpc_user')
380 378 session_attrs = {
381 379 # defaults
382 380 "clone_url_format": "http",
383 381 "diffmode": "sideside",
384 382 "license_fingerprint": request.session.get('license_fingerprint')
385 383 }
386 384
387 385 if not is_api:
388 386 # don't access pyramid session for API calls
389 387 if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
390 388 request.session['rc_user_session_attr.diffmode'] = diffmode
391 389
392 390 # session settings per user
393 391
394 392 for k, v in list(request.session.items()):
395 393 pref = 'rc_user_session_attr.'
396 394 if k and k.startswith(pref):
397 395 k = k[len(pref):]
398 396 session_attrs[k] = v
399 397
400 398 context.user_session_attrs = session_attrs
401 399
402 400 # JS template context
403 401 context.template_context = {
404 402 'repo_name': None,
405 403 'repo_type': None,
406 404 'repo_landing_commit': None,
407 405 'rhodecode_user': {
408 406 'username': None,
409 407 'email': None,
410 408 'notification_status': False
411 409 },
412 410 'session_attrs': session_attrs,
413 411 'visual': {
414 412 'default_renderer': None
415 413 },
416 414 'commit_data': {
417 415 'commit_id': None
418 416 },
419 417 'pull_request_data': {'pull_request_id': None},
420 418 'timeago': {
421 419 'refresh_time': 120 * 1000,
422 420 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
423 421 },
424 422 'pyramid_dispatch': {
425 423
426 424 },
427 425 'extra': {'plugins': {}}
428 426 }
429 427 # END CONFIG VARS
430 428 if is_api:
431 429 csrf_token = None
432 430 else:
433 431 csrf_token = auth.get_csrf_token(session=request.session)
434 432
435 433 context.csrf_token = csrf_token
436 434 context.backends = list(rhodecode.BACKENDS.keys())
437 435
438 436 unread_count = 0
439 437 user_bookmark_list = []
440 438 if user_id:
441 439 unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
442 440 user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
443 441 context.unread_notifications = unread_count
444 442 context.bookmark_items = user_bookmark_list
445 443
446 444 # web case
447 445 if hasattr(request, 'user'):
448 446 context.auth_user = request.user
449 447 context.rhodecode_user = request.user
450 448
451 449 # api case
452 450 if hasattr(request, 'rpc_user'):
453 451 context.auth_user = request.rpc_user
454 452 context.rhodecode_user = request.rpc_user
455 453
456 454 # attach the whole call context to the request
457 455 request.set_call_context(context)
458 456
459 457
460 458 def get_auth_user(request):
461 459 environ = request.environ
462 460 session = request.session
463 461
464 462 ip_addr = get_ip_addr(environ)
465 463
466 464 # make sure that we update permissions each time we call controller
467 465 _auth_token = (
468 466 # ?auth_token=XXX
469 467 request.GET.get('auth_token', '')
470 468 # ?api_key=XXX !LEGACY
471 469 or request.GET.get('api_key', '')
472 470 # or headers....
473 471 or request.headers.get('X-Rc-Auth-Token', '')
474 472 )
475 473 if not _auth_token and request.matchdict:
476 474 url_auth_token = request.matchdict.get('_auth_token')
477 475 _auth_token = url_auth_token
478 476 if _auth_token:
479 477 log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])
480 478
481 479 if _auth_token:
482 480 # when using API_KEY we assume user exists, and
483 481 # doesn't need auth based on cookies.
484 482 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
485 483 authenticated = False
486 484 else:
487 485 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
488 486 try:
489 487 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
490 488 ip_addr=ip_addr)
491 489 except UserCreationError as e:
492 490 h.flash(e, 'error')
493 491 # container auth or other auth functions that create users
494 492 # on the fly can throw this exception signaling that there's
495 493 # issue with user creation, explanation should be provided
496 494 # in Exception itself. We then create a simple blank
497 495 # AuthUser
498 496 auth_user = AuthUser(ip_addr=ip_addr)
499 497
500 498 # in case someone changes a password for user it triggers session
501 499 # flush and forces a re-login
502 500 if password_changed(auth_user, session):
503 501 session.invalidate()
504 502 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
505 503 auth_user = AuthUser(ip_addr=ip_addr)
506 504
507 505 authenticated = cookie_store.get('is_authenticated')
508 506
509 507 if not auth_user.is_authenticated and auth_user.is_user_object:
510 508 # user is not authenticated and not empty
511 509 auth_user.set_authenticated(authenticated)
512 510
513 511 return auth_user, _auth_token
514 512
515 513
516 514 def h_filter(s):
517 515 """
517 515 Custom filter for Mako templates. Mako by default uses `markupsafe.escape`;
518 516 we wrap this with additional functionality that converts None to empty
519 517 strings
521 519 """
522 520 if s is None:
523 521 return markupsafe.Markup()
524 522 return markupsafe.escape(s)
525 523
526 524
527 525 def add_events_routes(config):
528 526 """
529 527 Adds routing that can be used in events. Because some events are triggered
530 528 outside of pyramid context, we need to bootstrap request with some
531 529 routing registered
532 530 """
533 531
534 532 from rhodecode.apps._base import ADMIN_PREFIX
535 533
536 534 config.add_route(name='home', pattern='/')
537 535 config.add_route(name='main_page_repos_data', pattern='/_home_repos')
538 536 config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')
539 537
540 538 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
541 539 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
542 540 config.add_route(name='repo_summary', pattern='/{repo_name}')
543 541 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
544 542 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
545 543
546 544 config.add_route(name='pullrequest_show',
547 545 pattern='/{repo_name}/pull-request/{pull_request_id}')
548 546 config.add_route(name='pull_requests_global',
549 547 pattern='/pull-request/{pull_request_id}')
550 548
551 549 config.add_route(name='repo_commit',
552 550 pattern='/{repo_name}/changeset/{commit_id}')
553 551 config.add_route(name='repo_files',
554 552 pattern='/{repo_name}/files/{commit_id}/{f_path}')
555 553
556 554 config.add_route(name='hovercard_user',
557 555 pattern='/_hovercard/user/{user_id}')
558 556
559 557 config.add_route(name='hovercard_user_group',
560 558 pattern='/_hovercard/user_group/{user_group_id}')
561 559
562 560 config.add_route(name='hovercard_pull_request',
563 561 pattern='/_hovercard/pull_request/{pull_request_id}')
564 562
565 563 config.add_route(name='hovercard_repo_commit',
566 564 pattern='/_hovercard/commit/{repo_name}/{commit_id}')
567 565
568 566
569 567 def bootstrap_config(request, registry_name='RcTestRegistry'):
570 568 from rhodecode.config.config_maker import sanitize_settings_and_apply_defaults
571 569 import pyramid.testing
572 570 registry = pyramid.testing.Registry(registry_name)
573 571
574 572 global_config = {'__file__': ''}
575 573
576 574 config = pyramid.testing.setUp(registry=registry, request=request)
577 575 sanitize_settings_and_apply_defaults(global_config, config.registry.settings)
578 576
579 577 # allow pyramid lookup in testing
580 578 config.include('pyramid_mako')
581 579 config.include('rhodecode.lib.rc_beaker')
582 580 config.include('rhodecode.lib.rc_cache')
583 581 config.include('rhodecode.lib.rc_cache.archive_cache')
584 582 add_events_routes(config)
585 583
586 584 return config
587 585
588 586
589 587 def bootstrap_request(**kwargs):
590 588 """
591 589 Returns a thin version of the Request object, used in non-web contexts like testing/celery
592 590 """
593 591
594 592 import pyramid.testing
595 593 from rhodecode.lib.request import ThinRequest as _ThinRequest
596 594
597 595 class ThinRequest(_ThinRequest):
598 596 application_url = kwargs.pop('application_url', 'http://example.com')
599 597 host = kwargs.pop('host', 'example.com:80')
600 598 domain = kwargs.pop('domain', 'example.com')
601 599
602 600 class ThinSession(pyramid.testing.DummySession):
603 601 def save(*arg, **kw):
604 602 pass
605 603
606 604 request = ThinRequest(**kwargs)
607 605 request.session = ThinSession()
608 606
609 607 return request
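A usage sketch, e.g. from a celery task or a test fixture (the URL value is a placeholder):

    request = bootstrap_request(application_url='http://rhodecode.local')
    config = bootstrap_config(request)  # registers routes, mako and caches for lookups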
@@ -1,701 +1,694 b''
1 1
2 2
3 3 # Copyright (C) 2014-2023 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 23 It's implemented with basic auth function
24 24 """
25 25
26 26 import os
27 27 import re
28 28 import io
29 29 import logging
30 30 import importlib
31 31 from functools import wraps
32 32 from lxml import etree
33 33
34 34 import time
35 35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36 36
37 37 from pyramid.httpexceptions import (
38 38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 39 from zope.cachedescriptors.property import Lazy as LazyProperty
40 40
41 41 import rhodecode
42 42 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
43 43 from rhodecode.lib import rc_cache
44 44 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 45 from rhodecode.lib.base import (
46 46 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 47 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 48 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
49 49 from rhodecode.lib.middleware import appenlight
50 50 from rhodecode.lib.middleware.utils import scm_app_http
51 51 from rhodecode.lib.str_utils import safe_bytes
52 52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
54 54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 55 from rhodecode.lib.vcs.backends import base
56 56
57 57 from rhodecode.model import meta
58 58 from rhodecode.model.db import User, Repository, PullRequest
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.pull_request import PullRequestModel
61 61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 def extract_svn_txn_id(acl_repo_name, data: bytes):
67 67 """
68 68 Helper method for extraction of svn txn_id from submitted XML data during
69 69 POST operations
70 70 """
71 71
72 72 try:
73 73 root = etree.fromstring(data)
74 74 pat = re.compile(r'/txn/(?P<txn_id>.*)')
75 75 for el in root:
76 76 if el.tag == '{DAV:}source':
77 77 for sub_el in el:
78 78 if sub_el.tag == '{DAV:}href':
79 79 match = pat.search(sub_el.text)
80 80 if match:
81 81 svn_tx_id = match.groupdict()['txn_id']
82 82 txn_id = rc_cache.utils.compute_key_from_params(
83 83 acl_repo_name, svn_tx_id)
84 84 return txn_id
85 85 except Exception:
86 86 log.exception('Failed to extract txn_id')
87 87
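An illustrative payload only; a real MERGE body sent by an SVN client carries more elements, but the txn id is pulled out of the DAV:href in the same way:

    payload = (
        b'<?xml version="1.0" encoding="utf-8"?>'
        b'<D:merge xmlns:D="DAV:">'
        b'<D:source><D:href>/myrepo/!svn/txn/120-5f</D:href></D:source>'
        b'</D:merge>'
    )
    # cache key derived from ('myrepo', '120-5f')
    txn_key = extract_svn_txn_id('myrepo', payload)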
88 88
89 89 def initialize_generator(factory):
90 90 """
91 91 Initializes the returned generator by draining its first element.
92 92
93 93 This can be used to give a generator an initializer, which is the code
94 94 up to the first yield statement. This decorator enforces that the first
95 95 produced element has the value ``"__init__"`` to make its special
96 96 purpose very explicit in the using code.
97 97 """
98 98
99 99 @wraps(factory)
100 100 def wrapper(*args, **kwargs):
101 101 gen = factory(*args, **kwargs)
102 102 try:
103 103 init = next(gen)
104 104 except StopIteration:
105 105 raise ValueError('Generator must yield at least one element.')
106 106 if init != "__init__":
107 107 raise ValueError('First yielded element must be "__init__".')
108 108 return gen
109 109 return wrapper
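A minimal usage sketch: everything before the first yield runs eagerly when the decorated factory is called, rather than on first iteration. The file-reading generator below is illustrative.

    @initialize_generator
    def stream_lines(path):
        fobj = open(path)      # eager setup; errors surface at call time
        yield "__init__"
        for line in fobj:
            yield line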
110 110
111 111
112 112 class SimpleVCS(object):
113 113 """Common functionality for SCM HTTP handlers."""
114 114
115 115 SCM = 'unknown'
116 116
117 117 acl_repo_name = None
118 118 url_repo_name = None
119 119 vcs_repo_name = None
120 120 rc_extras = {}
121 121
122 122 # We have to handle requests to shadow repositories differently than requests
123 123 # to normal repositories. Therefore we have to distinguish them. To do this
124 124 # we use this regex which will match only on URLs pointing to shadow
125 125 # repositories.
126 126 shadow_repo_re = re.compile(
127 127 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
128 128 '(?P<target>{slug_pat})/' # target repo
129 129 'pull-request/(?P<pr_id>\\d+)/' # pull request
130 130 'repository$' # shadow repo
131 131 .format(slug_pat=SLUG_RE.pattern))
132 132
133 133 def __init__(self, config, registry):
134 134 self.registry = registry
135 135 self.config = config
136 136 # re-populated by specialized middleware
137 137 self.repo_vcs_config = base.Config()
138 138
139 139 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
140 140 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
141 141
142 142 # authenticate this VCS request using authfunc
143 143 auth_ret_code_detection = \
144 144 str2bool(self.config.get('auth_ret_code_detection', False))
145 145 self.authenticate = BasicAuth(
146 146 '', authenticate, registry, config.get('auth_ret_code'),
147 147 auth_ret_code_detection, rc_realm=realm)
148 148 self.ip_addr = '0.0.0.0'
149 149
150 150 @LazyProperty
151 151 def global_vcs_config(self):
152 152 try:
153 153 return VcsSettingsModel().get_ui_settings_as_config_obj()
154 154 except Exception:
155 155 return base.Config()
156 156
157 157 @property
158 158 def base_path(self):
159 settings_path = self.repo_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
160
161 if not settings_path:
162 settings_path = self.global_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
159 settings_path = self.config.get('repo_store.path')
163 160
164 161 if not settings_path:
165 # try, maybe we passed in explicitly as config option
166 settings_path = self.config.get('base_path')
167
168 if not settings_path:
169 raise ValueError('FATAL: base_path is empty')
162 raise ValueError('FATAL: repo_store.path is empty')
170 163 return settings_path
171 164
172 165 def set_repo_names(self, environ):
173 166 """
174 167 This will populate the attributes acl_repo_name, url_repo_name,
175 168 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
176 169 shadow) repositories all names are equal. In case of requests to a
177 170 shadow repository the acl-name points to the target repo of the pull
178 171 request and the vcs-name points to the shadow repo file system path.
179 172 The url-name is always the URL used by the vcs client program.
180 173
181 174 Example in case of a shadow repo:
182 175 acl_repo_name = RepoGroup/MyRepo
183 176 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
184 177 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
185 178 """
186 179 # First we set the repo name from URL for all attributes. This is the
187 180 # default if handling normal (non shadow) repo requests.
188 181 self.url_repo_name = self._get_repository_name(environ)
189 182 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
190 183 self.is_shadow_repo = False
191 184
192 185 # Check if this is a request to a shadow repository.
193 186 match = self.shadow_repo_re.match(self.url_repo_name)
194 187 if match:
195 188 match_dict = match.groupdict()
196 189
197 190 # Build acl repo name from regex match.
198 191 acl_repo_name = safe_str('{groups}{target}'.format(
199 192 groups=match_dict['groups'] or '',
200 193 target=match_dict['target']))
201 194
202 195 # Retrieve pull request instance by ID from regex match.
203 196 pull_request = PullRequest.get(match_dict['pr_id'])
204 197
205 198 # Only proceed if we got a pull request and if acl repo name from
206 199 # URL equals the target repo name of the pull request.
207 200 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
208 201
209 202 # Get file system path to shadow repository.
210 203 workspace_id = PullRequestModel()._workspace_id(pull_request)
211 204 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
212 205
213 206 # Store names for later usage.
214 207 self.vcs_repo_name = vcs_repo_name
215 208 self.acl_repo_name = acl_repo_name
216 209 self.is_shadow_repo = True
217 210
218 211 log.debug('Setting all VCS repository names: %s', {
219 212 'acl_repo_name': self.acl_repo_name,
220 213 'url_repo_name': self.url_repo_name,
221 214 'vcs_repo_name': self.vcs_repo_name,
222 215 })
223 216
224 217 @property
225 218 def scm_app(self):
226 219 custom_implementation = self.config['vcs.scm_app_implementation']
227 220 if custom_implementation == 'http':
228 221 log.debug('Using HTTP implementation of scm app.')
229 222 scm_app_impl = scm_app_http
230 223 else:
231 224 log.debug('Using custom implementation of scm_app: "{}"'.format(
232 225 custom_implementation))
233 226 scm_app_impl = importlib.import_module(custom_implementation)
234 227 return scm_app_impl
235 228
236 229 def _get_by_id(self, repo_name):
237 230 """
238 231 Gets a special pattern _<ID> from the clone url and tries to replace it
239 232 with a repository name, to support permanent _<ID> urls that don't change
240 233 """
241 234
242 235 data = repo_name.split('/')
243 236 if len(data) >= 2:
244 237 from rhodecode.model.repo import RepoModel
245 238 by_id_match = RepoModel().get_repo_by_id(repo_name)
246 239 if by_id_match:
247 240 data[1] = by_id_match.repo_name
248 241
249 242 # Because PEP 3333 (WSGI) tunnels PATH_INFO as bytes encoded in latin-1,
250 243 # re-encode the rewritten path the same way before returning it
251 244 maybe_new_path = '/'.join(data)
252 245 return safe_bytes(maybe_new_path).decode('latin1')
253 246
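A simplified illustration of the non-changeable _<ID> URL idea from _get_by_id above; a plain dict stands in for the database lookup done by RepoModel().get_repo_by_id(), and the exact path handling of the real method is not reproduced.

import re

id_to_name = {3: 'RepoGroup/MyRepo'}  # hypothetical repo_id -> repo_name mapping

def resolve_by_id(url_path):
    # replace a leading "_<ID>" segment with the repository's current name
    m = re.match(r'^_(\d+)(?P<rest>/.*)?$', url_path)
    if not m:
        return url_path
    repo_name = id_to_name.get(int(m.group(1)))
    return (repo_name or url_path) + (m.group('rest') or '')

print(resolve_by_id('_3'))          # RepoGroup/MyRepo
print(resolve_by_id('other/repo'))  # unchanged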
254 247 def _invalidate_cache(self, repo_name):
255 248 """
256 249 Sets the cache for this repository to be invalidated on next access
257 250
258 251 :param repo_name: full repo name, also a cache key
259 252 """
260 253 ScmModel().mark_for_invalidation(repo_name)
261 254
262 255 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
263 256 db_repo = Repository.get_by_repo_name(repo_name)
264 257 if not db_repo:
265 258 log.debug('Repository `%s` not found inside the database.',
266 259 repo_name)
267 260 return False
268 261
269 262 if db_repo.repo_type != scm_type:
270 263 log.warning(
271 264 'Repository `%s` has incorrect scm_type, expected %s got %s',
272 265 repo_name, db_repo.repo_type, scm_type)
273 266 return False
274 267
275 268 config = db_repo._config
276 269 config.set('extensions', 'largefiles', '')
277 270 return is_valid_repo(
278 271 repo_name, base_path,
279 272 explicit_scm=scm_type, expect_scm=scm_type, config=config)
280 273
281 274 def valid_and_active_user(self, user):
282 275 """
283 276 Checks that the user is not empty and, if it is an actual user object,
284 277 that the user is active.
285 278
286 279 :param user: user object or None
287 280 :return: boolean
288 281 """
289 282 if user is None:
290 283 return False
291 284
292 285 elif user.active:
293 286 return True
294 287
295 288 return False
296 289
297 290 @property
298 291 def is_shadow_repo_dir(self):
299 292 return os.path.isdir(self.vcs_repo_name)
300 293
301 294 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
302 295 plugin_id='', plugin_cache_active=False, cache_ttl=0):
303 296 """
304 297 Checks permissions using the action (push/pull), user and repository
305 298 name. If plugin_cache_active and cache_ttl are set, the plugin which
306 299 authenticated the user is used to cache the permission result for
307 300 cache_ttl seconds.
308 301
309 302 :param action: push or pull action
310 303 :param user: user instance
311 304 :param repo_name: repository name
312 305 """
313 306
314 307 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
315 308 plugin_id, plugin_cache_active, cache_ttl)
316 309
317 310 user_id = user.user_id
318 311 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
319 312 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
320 313
321 314 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
322 315 expiration_time=cache_ttl,
323 316 condition=plugin_cache_active)
324 317 def compute_perm_vcs(
325 318 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
326 319
327 320 log.debug('auth: calculating permission access now...')
328 321 # check IP
329 322 inherit = user.inherit_default_permissions
330 323 ip_allowed = AuthUser.check_ip_allowed(
331 324 user_id, ip_addr, inherit_from_default=inherit)
332 325 if ip_allowed:
333 326 log.info('Access for IP:%s allowed', ip_addr)
334 327 else:
335 328 return False
336 329
337 330 if action == 'push':
338 331 perms = ('repository.write', 'repository.admin')
339 332 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
340 333 return False
341 334
342 335 else:
343 336 # any other action need at least read permission
344 337 perms = (
345 338 'repository.read', 'repository.write', 'repository.admin')
346 339 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
347 340 return False
348 341
349 342 return True
350 343
351 344 start = time.time()
352 345 log.debug('Running plugin `%s` permissions check', plugin_id)
353 346
354 347 # for environ based auth, password can be empty, but then the validation is
355 348 # on the server that fills in the env data needed for authentication
356 349 perm_result = compute_perm_vcs(
357 350 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
358 351
359 352 auth_time = time.time() - start
360 353 log.debug('Permissions for plugin `%s` completed in %.4fs, '
361 354 'expiration time of fetched cache %.1fs.',
362 355 plugin_id, auth_time, cache_ttl)
363 356
364 357 return perm_result
365 358
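A rough, standard-library-only sketch of the conditional TTL caching idea used by _check_permission above. The real code caches through a dogpile.cache region keyed by user id; the names and the compute callback below are made up for illustration.

import time

_perm_cache = {}  # (user_id, repo_name, action) -> (computed_at, result)

def cached_perm_check(user_id, repo_name, action, compute, ttl, cache_active):
    key = (user_id, repo_name, action)
    now = time.time()
    if cache_active and key in _perm_cache:
        computed_at, result = _perm_cache[key]
        if now - computed_at < ttl:
            return result  # fresh enough, skip recomputation
    result = compute(user_id, repo_name, action)
    if cache_active:
        _perm_cache[key] = (now, result)
    return result

# usage: allow pushes only for a hypothetical user id 42, cache the result for 30s
allowed = cached_perm_check(
    42, 'RepoGroup/MyRepo', 'push',
    compute=lambda uid, repo, act: uid == 42, ttl=30, cache_active=True)
print(allowed)  # True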
366 359 def _get_http_scheme(self, environ):
367 360 try:
368 361 return environ['wsgi.url_scheme']
369 362 except Exception:
370 363 log.exception('Failed to read http scheme')
371 364 return 'http'
372 365
373 366 def _check_ssl(self, environ, start_response):
374 367 """
375 368 Checks the SSL flag and returns False if SSL is required but not
376 369 present, True otherwise
377 370 """
378 371 org_proto = environ['wsgi._org_proto']
379 372 # check if SSL is required; if it is and we got plain HTTP, it's a bad request
380 373 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
381 374 if require_ssl and org_proto == 'http':
382 375 log.debug(
383 376 'Bad request: detected protocol is `%s` and '
384 377 'SSL/HTTPS is required.', org_proto)
385 378 return False
386 379 return True
387 380
388 381 def _get_default_cache_ttl(self):
389 382 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
390 383 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
391 384 plugin_settings = plugin.get_settings()
392 385 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
393 386 plugin_settings) or (False, 0)
394 387 return plugin_cache_active, cache_ttl
395 388
396 389 def __call__(self, environ, start_response):
397 390 try:
398 391 return self._handle_request(environ, start_response)
399 392 except Exception:
400 393 log.exception("Exception while handling request")
401 394 appenlight.track_exception(environ)
402 395 return HTTPInternalServerError()(environ, start_response)
403 396 finally:
404 397 meta.Session.remove()
405 398
406 399 def _handle_request(self, environ, start_response):
407 400 if not self._check_ssl(environ, start_response):
408 401 reason = ('SSL required, while RhodeCode was unable '
409 402 'to detect this as an SSL request')
410 403 log.debug('User not allowed to proceed, %s', reason)
411 404 return HTTPNotAcceptable(reason)(environ, start_response)
412 405
413 406 if not self.url_repo_name:
414 407 log.warning('Repository name is empty: %s', self.url_repo_name)
415 408 # failed to get repo name, we fail now
416 409 return HTTPNotFound()(environ, start_response)
417 410 log.debug('Extracted repo name is %s', self.url_repo_name)
418 411
419 412 ip_addr = get_ip_addr(environ)
420 413 user_agent = get_user_agent(environ)
421 414 username = None
422 415
423 416 # skip passing error to error controller
424 417 environ['pylons.status_code_redirect'] = True
425 418
426 419 # ======================================================================
427 420 # GET ACTION PULL or PUSH
428 421 # ======================================================================
429 422 action = self._get_action(environ)
430 423
431 424 # ======================================================================
432 425 # Check if this is a request to a shadow repository of a pull request.
433 426 # In this case only pull action is allowed.
434 427 # ======================================================================
435 428 if self.is_shadow_repo and action != 'pull':
436 429 reason = 'Only pull action is allowed for shadow repositories.'
437 430 log.debug('User not allowed to proceed, %s', reason)
438 431 return HTTPNotAcceptable(reason)(environ, start_response)
439 432
440 433 # Check if the shadow repo actually exists, in case someone refers
441 434 # to it, and it has been deleted because of successful merge.
442 435 if self.is_shadow_repo and not self.is_shadow_repo_dir:
443 436 log.debug(
444 437 'Shadow repo detected, and shadow repo dir `%s` is missing',
445 438 self.vcs_repo_name)
446 439 return HTTPNotFound()(environ, start_response)
447 440
448 441 # ======================================================================
449 442 # CHECK ANONYMOUS PERMISSION
450 443 # ======================================================================
451 444 detect_force_push = False
452 445 check_branch_perms = False
453 446 if action in ['pull', 'push']:
454 447 user_obj = anonymous_user = User.get_default_user()
455 448 auth_user = user_obj.AuthUser()
456 449 username = anonymous_user.username
457 450 if anonymous_user.active:
458 451 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
459 452 # ONLY check permissions if the user is activated
460 453 anonymous_perm = self._check_permission(
461 454 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
462 455 plugin_id='anonymous_access',
463 456 plugin_cache_active=plugin_cache_active,
464 457 cache_ttl=cache_ttl,
465 458 )
466 459 else:
467 460 anonymous_perm = False
468 461
469 462 if not anonymous_user.active or not anonymous_perm:
470 463 if not anonymous_user.active:
471 464 log.debug('Anonymous access is disabled, running '
472 465 'authentication')
473 466
474 467 if not anonymous_perm:
475 468 log.debug('Not enough credentials to access repository `%s` '
476 469 'as anonymous user', self.acl_repo_name)
477 470
478 471
479 472 username = None
480 473 # ==============================================================
481 474 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
482 475 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
483 476 # ==============================================================
484 477
485 478 # try to auth based on environ, container auth methods
486 479 log.debug('Running PRE-AUTH for container|headers based authentication')
487 480
488 481 # headers auth, by just reading special headers and bypass the auth with user/passwd
489 482 pre_auth = authenticate(
490 483 '', '', environ, VCS_TYPE, registry=self.registry,
491 484 acl_repo_name=self.acl_repo_name)
492 485
493 486 if pre_auth and pre_auth.get('username'):
494 487 username = pre_auth['username']
495 488 log.debug('PRE-AUTH got `%s` as username', username)
496 489 if pre_auth:
497 490 log.debug('PRE-AUTH successful from %s',
498 491 pre_auth.get('auth_data', {}).get('_plugin'))
499 492
500 493 # If not authenticated by the container, run basic auth; before that,
501 494 # inject the calling repo_name for special scope checks
502 495 self.authenticate.acl_repo_name = self.acl_repo_name
503 496
504 497 plugin_cache_active, cache_ttl = False, 0
505 498 plugin = None
506 499
507 500 # regular auth chain
508 501 if not username:
509 502 self.authenticate.realm = self.authenticate.get_rc_realm()
510 503
511 504 try:
512 505 auth_result = self.authenticate(environ)
513 506 except (UserCreationError, NotAllowedToCreateUserError) as e:
514 507 log.error(e)
515 508 reason = safe_str(e)
516 509 return HTTPNotAcceptable(reason)(environ, start_response)
517 510
518 511 if isinstance(auth_result, dict):
519 512 AUTH_TYPE.update(environ, 'basic')
520 513 REMOTE_USER.update(environ, auth_result['username'])
521 514 username = auth_result['username']
522 515 plugin = auth_result.get('auth_data', {}).get('_plugin')
523 516 log.info(
524 517 'MAIN-AUTH successful for user `%s` from %s plugin',
525 518 username, plugin)
526 519
527 520 plugin_cache_active, cache_ttl = auth_result.get(
528 521 'auth_data', {}).get('_ttl_cache') or (False, 0)
529 522 else:
530 523 return auth_result.wsgi_application(environ, start_response)
531 524
532 525 # ==============================================================
533 526 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
534 527 # ==============================================================
535 528 user = User.get_by_username(username)
536 529 if not self.valid_and_active_user(user):
537 530 return HTTPForbidden()(environ, start_response)
538 531 username = user.username
539 532 user_id = user.user_id
540 533
541 534 # check user attributes for password change flag
542 535 user_obj = user
543 536 auth_user = user_obj.AuthUser()
544 537 if user_obj and user_obj.username != User.DEFAULT_USER and \
545 538 user_obj.user_data.get('force_password_change'):
546 539 reason = 'password change required'
547 540 log.debug('User not allowed to authenticate, %s', reason)
548 541 return HTTPNotAcceptable(reason)(environ, start_response)
549 542
550 543 # check permissions for this repository
551 544 perm = self._check_permission(
552 545 action, user, auth_user, self.acl_repo_name, ip_addr,
553 546 plugin, plugin_cache_active, cache_ttl)
554 547 if not perm:
555 548 return HTTPForbidden()(environ, start_response)
556 549 environ['rc_auth_user_id'] = str(user_id)
557 550
558 551 if action == 'push':
559 552 perms = auth_user.get_branch_permissions(self.acl_repo_name)
560 553 if perms:
561 554 check_branch_perms = True
562 555 detect_force_push = True
563 556
564 557 # extras are injected into UI object and later available
565 558 # in hooks executed by RhodeCode
566 559 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
567 560
568 561 extras = vcs_operation_context(
569 562 environ, repo_name=self.acl_repo_name, username=username,
570 563 action=action, scm=self.SCM, check_locking=check_locking,
571 564 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
572 565 detect_force_push=detect_force_push
573 566 )
574 567
575 568 # ======================================================================
576 569 # REQUEST HANDLING
577 570 # ======================================================================
578 571 repo_path = os.path.join(
579 572 safe_str(self.base_path), safe_str(self.vcs_repo_name))
580 573 log.debug('Repository path is %s', repo_path)
581 574
582 575 fix_PATH()
583 576
584 577 log.info(
585 578 '%s action on %s repo "%s" by "%s" from %s %s',
586 579 action, self.SCM, safe_str(self.url_repo_name),
587 580 safe_str(username), ip_addr, user_agent)
588 581
589 582 return self._generate_vcs_response(
590 583 environ, start_response, repo_path, extras, action)
591 584
592 585 @initialize_generator
593 586 def _generate_vcs_response(
594 587 self, environ, start_response, repo_path, extras, action):
595 588 """
596 589 Returns a generator for the response content.
597 590
598 591 This method is implemented as a generator, so that it can trigger
599 592 the cache validation after all content sent back to the client. It
600 593 also handles the locking exceptions which will be triggered when
601 594 the first chunk is produced by the underlying WSGI application.
602 595 """
603 596
604 597 txn_id = ''
605 598 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
606 599 # SVN case: we want to re-use the callback daemon port, so we use
607 600 # the txn_id; for this we peek into the body and then restore it
608 601 # as wsgi.input
609 602
610 603 stream = environ['wsgi.input']
611 604
612 605 if isinstance(stream, io.BytesIO):
613 606 data: bytes = stream.getvalue()
614 607 elif hasattr(stream, 'buf'): # most likely gunicorn.http.body.Body
615 608 data: bytes = stream.buf.getvalue()
616 609 else:
617 610 # fallback to the crudest way, copy the iterator
618 611 data = safe_bytes(stream.read())
619 612 environ['wsgi.input'] = io.BytesIO(data)
620 613
621 614 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
622 615
623 616 callback_daemon, extras = self._prepare_callback_daemon(
624 617 extras, environ, action, txn_id=txn_id)
625 618 log.debug('HOOKS extras is %s', extras)
626 619
627 620 http_scheme = self._get_http_scheme(environ)
628 621
629 622 config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
630 623 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
631 624 with callback_daemon:
632 625 app.rc_extras = extras
633 626
634 627 try:
635 628 response = app(environ, start_response)
636 629 finally:
637 630 # This statement works together with the decorator
638 631 # "initialize_generator" above. The decorator ensures that
639 632 # we hit the first yield statement before the generator is
640 633 # returned back to the WSGI server. This is needed to
641 634 # ensure that the call to "app" above triggers the
642 635 # needed callback to "start_response" before the
643 636 # generator is actually used.
644 637 yield "__init__"
645 638
646 639 # iter content
647 640 for chunk in response:
648 641 yield chunk
649 642
650 643 try:
651 644 # invalidate cache on push
652 645 if action == 'push':
653 646 self._invalidate_cache(self.url_repo_name)
654 647 finally:
655 648 meta.Session.remove()
656 649
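For reference, a minimal sketch of the "peek the body, then restore wsgi.input" trick used above for SVN MERGE requests; the environ dict is hand-built and the body is a stand-in.

import io

body = b'(svn txn body bytes)\n'  # stand-in for a real SVN MERGE request body
environ = {'REQUEST_METHOD': 'MERGE',
           'CONTENT_LENGTH': str(len(body)),
           'wsgi.input': io.BytesIO(body)}

stream = environ['wsgi.input']
data = stream.read()                      # consume the body once to inspect it
environ['wsgi.input'] = io.BytesIO(data)  # restore it so the app can read it again
print(len(data), environ['wsgi.input'].read(4))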
657 650 def _get_repository_name(self, environ):
658 651 """Get repository name out of the environmnent
659 652
660 653 :param environ: WSGI environment
661 654 """
662 655 raise NotImplementedError()
663 656
664 657 def _get_action(self, environ):
665 658 """Map request commands into a pull or push command.
666 659
667 660 :param environ: WSGI environment
668 661 """
669 662 raise NotImplementedError()
670 663
671 664 def _create_wsgi_app(self, repo_path, repo_name, config):
672 665 """Return the WSGI app that will finally handle the request."""
673 666 raise NotImplementedError()
674 667
675 668 def _create_config(self, extras, repo_name, scheme='http'):
676 669 """Create a safe config representation."""
677 670 raise NotImplementedError()
678 671
679 672 def _should_use_callback_daemon(self, extras, environ, action):
680 673 if extras.get('is_shadow_repo'):
681 674 # we don't want to execute hooks or run the callback daemon for shadow repos
682 675 return False
683 676 return True
684 677
685 678 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
686 679 protocol = vcs_settings.HOOKS_PROTOCOL
687 680 if not self._should_use_callback_daemon(extras, environ, action):
688 681 # disable callback daemon for actions that don't require it
689 682 protocol = 'local'
690 683
691 684 return prepare_callback_daemon(
692 685 extras, protocol=protocol,
693 686 host=vcs_settings.HOOKS_HOST, txn_id=txn_id)
694 687
695 688
696 689 def _should_check_locking(query_string):
697 690 # this is kind of hacky, but due to how mercurial handles client-server
698 691 # communication, the server sees bookmark, phase and obsolescence-marker
699 692 # operations on commit in separate transactions; we don't want to check
700 693 # locking on those
701 694 return query_string not in ['cmd=listkeys']
@@ -1,845 +1,844 b''
1 1 # Copyright (C) 2017-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 import os
21 21 import sys
22 22 import time
23 23 import platform
24 24 import collections
25 25 import psutil
26 26 from functools import wraps
27 27
28 28 import pkg_resources
29 29 import logging
30 30 import resource
31 31
32 32 import configparser
33 33
34 34 from rc_license.models import LicenseModel
35 35 from rhodecode.lib.str_utils import safe_str
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 _NA = 'NOT AVAILABLE'
41 41 _NA_FLOAT = 0.0
42 42
43 43 STATE_OK = 'ok'
44 44 STATE_ERR = 'error'
45 45 STATE_WARN = 'warning'
46 46
47 47 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
48 48
49 49
50 50 registered_helpers = {}
51 51
52 52
53 53 def register_sysinfo(func):
54 54 """
55 55 @register_sysinfo
56 56 def db_check():
57 57 pass
58 58
59 59 db_check == registered_helpers['db_check']
60 60 """
61 61 global registered_helpers
62 62 registered_helpers[func.__name__] = func
63 63
64 64 @wraps(func)
65 65 def _wrapper(*args, **kwargs):
66 66 return func(*args, **kwargs)
67 67 return _wrapper
68 68
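A toy registry in the same spirit as register_sysinfo above; the helper name is made up for illustration.

registry = {}

def register(func):
    # store the helper under its function name so it can be looked up later
    registry[func.__name__] = func
    return func

@register
def db_check():
    return {'state': 'ok'}

print(registry['db_check']())  # {'state': 'ok'}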
69 69
70 70 # HELPERS
71 71 def percentage(part: (int, float), whole: (int, float)):
72 72 whole = float(whole)
73 73 if whole > 0:
74 74 return round(100 * float(part) / whole, 1)
75 75 return 0.0
76 76
77 77
78 78 def get_storage_size(storage_path):
79 79 sizes = []
80 80 for file_ in os.listdir(storage_path):
81 81 storage_file = os.path.join(storage_path, file_)
82 82 if os.path.isfile(storage_file):
83 83 try:
84 84 sizes.append(os.path.getsize(storage_file))
85 85 except OSError:
86 86 log.exception('Failed to get size of storage file %s', storage_file)
87 87 pass
88 88
89 89 return sum(sizes)
90 90
91 91
92 92 def get_resource(resource_type):
93 93 try:
94 94 return resource.getrlimit(resource_type)
95 95 except Exception:
96 96 return 'NOT_SUPPORTED'
97 97
98 98
99 99 def get_cert_path(ini_path):
100 100 default = '/etc/ssl/certs/ca-certificates.crt'
101 101 control_ca_bundle = os.path.join(
102 102 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
103 103 '.rccontrol-profile/etc/ca-bundle.crt')
104 104 if os.path.isfile(control_ca_bundle):
105 105 default = control_ca_bundle
106 106
107 107 return default
108 108
109 109
110 110 class SysInfoRes(object):
111 111 def __init__(self, value, state=None, human_value=None):
112 112 self.value = value
113 113 self.state = state or STATE_OK_DEFAULT
114 114 self.human_value = human_value or value
115 115
116 116 def __json__(self):
117 117 return {
118 118 'value': self.value,
119 119 'state': self.state,
120 120 'human_value': self.human_value,
121 121 }
122 122
123 123 def get_value(self):
124 124 return self.__json__()
125 125
126 126 def __str__(self):
127 127 return f'<SysInfoRes({self.__json__()})>'
128 128
129 129
130 130 class SysInfo(object):
131 131
132 132 def __init__(self, func_name, **kwargs):
133 133 self.function_name = func_name
134 134 self.value = _NA
135 135 self.state = None
136 136 self.kwargs = kwargs or {}
137 137
138 138 def __call__(self):
139 139 computed = self.compute(**self.kwargs)
140 140 if not isinstance(computed, SysInfoRes):
141 141 raise ValueError(
142 142 'computed value for {} is not instance of '
143 143 '{}, got {} instead'.format(
144 144 self.function_name, SysInfoRes, type(computed)))
145 145 return computed.__json__()
146 146
147 147 def __str__(self):
148 148 return f'<SysInfo({self.function_name})>'
149 149
150 150 def compute(self, **kwargs):
151 151 return self.function_name(**kwargs)
152 152
153 153
154 154 # SysInfo functions
155 155 @register_sysinfo
156 156 def python_info():
157 157 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
158 158 executable=sys.executable)
159 159 return SysInfoRes(value=value)
160 160
161 161
162 162 @register_sysinfo
163 163 def py_modules():
164 164 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
165 165 for p in pkg_resources.working_set])
166 166
167 167 value = sorted(mods.items(), key=lambda k: k[0].lower())
168 168 return SysInfoRes(value=value)
169 169
170 170
171 171 @register_sysinfo
172 172 def platform_type():
173 173 from rhodecode.lib.utils import generate_platform_uuid
174 174
175 175 value = dict(
176 176 name=safe_str(platform.platform()),
177 177 uuid=generate_platform_uuid()
178 178 )
179 179 return SysInfoRes(value=value)
180 180
181 181
182 182 @register_sysinfo
183 183 def locale_info():
184 184 import locale
185 185
186 186 def safe_get_locale(locale_name):
187 187 try:
188 188 return locale.getlocale(locale_name)
189 189 except TypeError:
190 190 return f'FAILED_LOCALE_GET:{locale_name}'
191 191
192 192 value = dict(
193 193 locale_default=locale.getlocale(),
194 194 locale_lc_all=safe_get_locale(locale.LC_ALL),
195 195 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
196 196 lang_env=os.environ.get('LANG'),
197 197 lc_all_env=os.environ.get('LC_ALL'),
198 198 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
199 199 )
200 200 human_value = \
201 201 f"LANG: {value['lang_env']}, \
202 202 locale LC_ALL: {value['locale_lc_all']}, \
203 203 locale LC_CTYPE: {value['locale_lc_ctype']}, \
204 204 Default locales: {value['locale_default']}"
205 205
206 206 return SysInfoRes(value=value, human_value=human_value)
207 207
208 208
209 209 @register_sysinfo
210 210 def ulimit_info():
211 211 data = collections.OrderedDict([
212 212 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
213 213 ('file size', get_resource(resource.RLIMIT_FSIZE)),
214 214 ('stack size', get_resource(resource.RLIMIT_STACK)),
215 215 ('core file size', get_resource(resource.RLIMIT_CORE)),
216 216 ('address space size', get_resource(resource.RLIMIT_AS)),
217 217 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
218 218 ('heap size', get_resource(resource.RLIMIT_DATA)),
219 219 ('rss size', get_resource(resource.RLIMIT_RSS)),
220 220 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
221 221 ('open files', get_resource(resource.RLIMIT_NOFILE)),
222 222 ])
223 223
224 224 text = ', '.join(f'{k}:{v}' for k, v in data.items())
225 225
226 226 value = {
227 227 'limits': data,
228 228 'text': text,
229 229 }
230 230 return SysInfoRes(value=value)
231 231
232 232
233 233 @register_sysinfo
234 234 def uptime():
235 235 from rhodecode.lib.helpers import age, time_to_datetime
236 236 from rhodecode.translation import TranslationString
237 237
238 238 value = dict(boot_time=0, uptime=0, text='')
239 239 state = STATE_OK_DEFAULT
240 240
241 241 boot_time = psutil.boot_time()
242 242 value['boot_time'] = boot_time
243 243 value['uptime'] = time.time() - boot_time
244 244
245 245 date_or_age = age(time_to_datetime(boot_time))
246 246 if isinstance(date_or_age, TranslationString):
247 247 date_or_age = date_or_age.interpolate()
248 248
249 249 human_value = value.copy()
250 250 human_value['boot_time'] = time_to_datetime(boot_time)
251 251 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
252 252
253 253 human_value['text'] = f'Server started {date_or_age}'
254 254 return SysInfoRes(value=value, human_value=human_value)
255 255
256 256
257 257 @register_sysinfo
258 258 def memory():
259 259 from rhodecode.lib.helpers import format_byte_size_binary
260 260 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
261 261 percent_used=0, free=0, inactive=0, active=0, shared=0,
262 262 total=0, buffers=0, text='')
263 263
264 264 state = STATE_OK_DEFAULT
265 265
266 266 value.update(dict(psutil.virtual_memory()._asdict()))
267 267 value['used_real'] = value['total'] - value['available']
268 268 value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1)
269 269
270 270 human_value = value.copy()
271 271 human_value['text'] = '{}/{}, {}% used'.format(
272 272 format_byte_size_binary(value['used_real']),
273 273 format_byte_size_binary(value['total']),
274 274 value['percent_used'])
275 275
276 276 keys = list(value.keys())[::]
277 277 keys.pop(keys.index('percent'))
278 278 keys.pop(keys.index('percent_used'))
279 279 keys.pop(keys.index('text'))
280 280 for k in keys:
281 281 human_value[k] = format_byte_size_binary(value[k])
282 282
283 283 if state['type'] == STATE_OK and value['percent_used'] > 90:
284 284 msg = 'Critical: your available RAM memory is very low.'
285 285 state = {'message': msg, 'type': STATE_ERR}
286 286
287 287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
288 288 msg = 'Warning: your available RAM memory is running low.'
289 289 state = {'message': msg, 'type': STATE_WARN}
290 290
291 291 return SysInfoRes(value=value, state=state, human_value=human_value)
292 292
293 293
294 294 @register_sysinfo
295 295 def machine_load():
296 296 value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
297 297 state = STATE_OK_DEFAULT
298 298
299 299 # load averages
300 300 if hasattr(psutil.os, 'getloadavg'):
301 301 value.update(dict(
302 302 list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
303 303 ))
304 304
305 305 human_value = value.copy()
306 306 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
307 307 value['1_min'], value['5_min'], value['15_min'])
308 308
309 309 if state['type'] == STATE_OK and value['15_min'] > 5.0:
310 310 msg = 'Warning: your machine load is very high.'
311 311 state = {'message': msg, 'type': STATE_WARN}
312 312
313 313 return SysInfoRes(value=value, state=state, human_value=human_value)
314 314
315 315
316 316 @register_sysinfo
317 317 def cpu():
318 318 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
319 319 state = STATE_OK_DEFAULT
320 320
321 321 value['cpu'] = psutil.cpu_percent(0.5)
322 322 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
323 323 value['cpu_count'] = psutil.cpu_count()
324 324
325 325 human_value = value.copy()
326 326 human_value['text'] = '{} cores at {} %'.format(value['cpu_count'], value['cpu'])
327 327
328 328 return SysInfoRes(value=value, state=state, human_value=human_value)
329 329
330 330
331 331 @register_sysinfo
332 332 def storage():
333 333 from rhodecode.lib.helpers import format_byte_size_binary
334 from rhodecode.model.settings import VcsSettingsModel
335 path = VcsSettingsModel().get_repos_location()
334 from rhodecode.lib.utils import get_rhodecode_repo_store_path
335 path = get_rhodecode_repo_store_path()
336 336
337 337 value = dict(percent=0, used=0, total=0, path=path, text='')
338 338 state = STATE_OK_DEFAULT
339 339
340 340 try:
341 341 value.update(dict(psutil.disk_usage(path)._asdict()))
342 342 except Exception as e:
343 343 log.exception('Failed to fetch disk info')
344 344 state = {'message': str(e), 'type': STATE_ERR}
345 345
346 346 human_value = value.copy()
347 347 human_value['used'] = format_byte_size_binary(value['used'])
348 348 human_value['total'] = format_byte_size_binary(value['total'])
349 349 human_value['text'] = "{}/{}, {}% used".format(
350 350 format_byte_size_binary(value['used']),
351 351 format_byte_size_binary(value['total']),
352 352 value['percent'])
353 353
354 354 if state['type'] == STATE_OK and value['percent'] > 90:
355 355 msg = 'Critical: your disk space is very low.'
356 356 state = {'message': msg, 'type': STATE_ERR}
357 357
358 358 elif state['type'] == STATE_OK and value['percent'] > 70:
359 359 msg = 'Warning: your disk space is running low.'
360 360 state = {'message': msg, 'type': STATE_WARN}
361 361
362 362 return SysInfoRes(value=value, state=state, human_value=human_value)
363 363
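Since this changeset moves the main storage location into the .ini file (an error message earlier in this diff references a repo_store.path key), here is a hedged sketch of reading such an option with configparser; the section name and example value are assumptions for illustration, not the authoritative configuration schema.

import configparser

config = configparser.ConfigParser()
config.read_string("""
[app:main]
repo_store.path = /var/opt/rhodecode_repo_store
""")

# fall back to None when the option is missing from the .ini file
repo_store_path = config.get('app:main', 'repo_store.path', fallback=None)
print(repo_store_path)  # /var/opt/rhodecode_repo_store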
364 364
365 365 @register_sysinfo
366 366 def storage_inodes():
367 from rhodecode.model.settings import VcsSettingsModel
368 path = VcsSettingsModel().get_repos_location()
367 from rhodecode.lib.utils import get_rhodecode_repo_store_path
368 path = get_rhodecode_repo_store_path()
369 369
370 370 value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
371 371 state = STATE_OK_DEFAULT
372 372
373 373 try:
374 374 i_stat = os.statvfs(path)
375 375 value['free'] = i_stat.f_ffree
376 376 value['used'] = i_stat.f_files-i_stat.f_favail
377 377 value['total'] = i_stat.f_files
378 378 value['percent'] = percentage(value['used'], value['total'])
379 379 except Exception as e:
380 380 log.exception('Failed to fetch disk inodes info')
381 381 state = {'message': str(e), 'type': STATE_ERR}
382 382
383 383 human_value = value.copy()
384 384 human_value['text'] = "{}/{}, {}% used".format(
385 385 value['used'], value['total'], value['percent'])
386 386
387 387 if state['type'] == STATE_OK and value['percent'] > 90:
388 388 msg = 'Critical: your disk free inodes are very low.'
389 389 state = {'message': msg, 'type': STATE_ERR}
390 390
391 391 elif state['type'] == STATE_OK and value['percent'] > 70:
392 392 msg = 'Warning: your disk free inodes are running low.'
393 393 state = {'message': msg, 'type': STATE_WARN}
394 394
395 395 return SysInfoRes(value=value, state=state, human_value=human_value)
396 396
397 397
398 398 @register_sysinfo
399 399 def storage_archives():
400 400 import rhodecode
401 401 from rhodecode.lib.utils import safe_str
402 402 from rhodecode.lib.helpers import format_byte_size_binary
403 403
404 404 msg = 'Archive cache storage is controlled by ' \
405 405 'archive_cache.store_dir=/path/to/cache option in the .ini file'
406 406 path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))
407 407
408 408 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
409 409 state = STATE_OK_DEFAULT
410 410 try:
411 411 items_count = 0
412 412 used = 0
413 413 for root, dirs, files in os.walk(path):
414 414 if root == path:
415 415 items_count = len(dirs)
416 416
417 417 for f in files:
418 418 try:
419 419 used += os.path.getsize(os.path.join(root, f))
420 420 except OSError:
421 421 pass
422 422 value.update({
423 423 'percent': 100,
424 424 'used': used,
425 425 'total': used,
426 426 'items': items_count
427 427 })
428 428
429 429 except Exception as e:
430 430 log.exception('failed to fetch archive cache storage')
431 431 state = {'message': str(e), 'type': STATE_ERR}
432 432
433 433 human_value = value.copy()
434 434 human_value['used'] = format_byte_size_binary(value['used'])
435 435 human_value['total'] = format_byte_size_binary(value['total'])
436 436 human_value['text'] = "{} ({} items)".format(
437 437 human_value['used'], value['items'])
438 438
439 439 return SysInfoRes(value=value, state=state, human_value=human_value)
440 440
441 441
442 442 @register_sysinfo
443 443 def storage_gist():
444 444 from rhodecode.model.gist import GIST_STORE_LOC
445 from rhodecode.model.settings import VcsSettingsModel
446 from rhodecode.lib.utils import safe_str
445 from rhodecode.lib.utils import safe_str, get_rhodecode_repo_store_path
447 446 from rhodecode.lib.helpers import format_byte_size_binary
448 447 path = safe_str(os.path.join(
449 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
448 get_rhodecode_repo_store_path(), GIST_STORE_LOC))
450 449
451 450 # gist storage
452 451 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
453 452 state = STATE_OK_DEFAULT
454 453
455 454 try:
456 455 items_count = 0
457 456 used = 0
458 457 for root, dirs, files in os.walk(path):
459 458 if root == path:
460 459 items_count = len(dirs)
461 460
462 461 for f in files:
463 462 try:
464 463 used += os.path.getsize(os.path.join(root, f))
465 464 except OSError:
466 465 pass
467 466 value.update({
468 467 'percent': 100,
469 468 'used': used,
470 469 'total': used,
471 470 'items': items_count
472 471 })
473 472 except Exception as e:
474 473 log.exception('failed to fetch gist storage items')
475 474 state = {'message': str(e), 'type': STATE_ERR}
476 475
477 476 human_value = value.copy()
478 477 human_value['used'] = format_byte_size_binary(value['used'])
479 478 human_value['total'] = format_byte_size_binary(value['total'])
480 479 human_value['text'] = "{} ({} items)".format(
481 480 human_value['used'], value['items'])
482 481
483 482 return SysInfoRes(value=value, state=state, human_value=human_value)
484 483
485 484
486 485 @register_sysinfo
487 486 def storage_temp():
488 487 import tempfile
489 488 from rhodecode.lib.helpers import format_byte_size_binary
490 489
491 490 path = tempfile.gettempdir()
492 491 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
493 492 state = STATE_OK_DEFAULT
494 493
495 494 if not psutil:
496 495 return SysInfoRes(value=value, state=state)
497 496
498 497 try:
499 498 value.update(dict(psutil.disk_usage(path)._asdict()))
500 499 except Exception as e:
501 500 log.exception('Failed to fetch temp dir info')
502 501 state = {'message': str(e), 'type': STATE_ERR}
503 502
504 503 human_value = value.copy()
505 504 human_value['used'] = format_byte_size_binary(value['used'])
506 505 human_value['total'] = format_byte_size_binary(value['total'])
507 506 human_value['text'] = "{}/{}, {}% used".format(
508 507 format_byte_size_binary(value['used']),
509 508 format_byte_size_binary(value['total']),
510 509 value['percent'])
511 510
512 511 return SysInfoRes(value=value, state=state, human_value=human_value)
513 512
514 513
515 514 @register_sysinfo
516 515 def search_info():
517 516 import rhodecode
518 517 from rhodecode.lib.index import searcher_from_config
519 518
520 519 backend = rhodecode.CONFIG.get('search.module', '')
521 520 location = rhodecode.CONFIG.get('search.location', '')
522 521
523 522 try:
524 523 searcher = searcher_from_config(rhodecode.CONFIG)
525 524 searcher = searcher.__class__.__name__
526 525 except Exception:
527 526 searcher = None
528 527
529 528 value = dict(
530 529 backend=backend, searcher=searcher, location=location, text='')
531 530 state = STATE_OK_DEFAULT
532 531
533 532 human_value = value.copy()
534 533 human_value['text'] = "backend:`{}`".format(human_value['backend'])
535 534
536 535 return SysInfoRes(value=value, state=state, human_value=human_value)
537 536
538 537
539 538 @register_sysinfo
540 539 def git_info():
541 540 from rhodecode.lib.vcs.backends import git
542 541 state = STATE_OK_DEFAULT
543 542 value = human_value = ''
544 543 try:
545 544 value = git.discover_git_version(raise_on_exc=True)
546 545 human_value = f'version reported from VCSServer: {value}'
547 546 except Exception as e:
548 547 state = {'message': str(e), 'type': STATE_ERR}
549 548
550 549 return SysInfoRes(value=value, state=state, human_value=human_value)
551 550
552 551
553 552 @register_sysinfo
554 553 def hg_info():
555 554 from rhodecode.lib.vcs.backends import hg
556 555 state = STATE_OK_DEFAULT
557 556 value = human_value = ''
558 557 try:
559 558 value = hg.discover_hg_version(raise_on_exc=True)
560 559 human_value = f'version reported from VCSServer: {value}'
561 560 except Exception as e:
562 561 state = {'message': str(e), 'type': STATE_ERR}
563 562 return SysInfoRes(value=value, state=state, human_value=human_value)
564 563
565 564
566 565 @register_sysinfo
567 566 def svn_info():
568 567 from rhodecode.lib.vcs.backends import svn
569 568 state = STATE_OK_DEFAULT
570 569 value = human_value = ''
571 570 try:
572 571 value = svn.discover_svn_version(raise_on_exc=True)
573 572 human_value = f'version reported from VCSServer: {value}'
574 573 except Exception as e:
575 574 state = {'message': str(e), 'type': STATE_ERR}
576 575 return SysInfoRes(value=value, state=state, human_value=human_value)
577 576
578 577
579 578 @register_sysinfo
580 579 def vcs_backends():
581 580 import rhodecode
582 581 value = rhodecode.CONFIG.get('vcs.backends')
583 582 human_value = 'Enabled backends in order: {}'.format(','.join(value))
584 583 return SysInfoRes(value=value, human_value=human_value)
585 584
586 585
587 586 @register_sysinfo
588 587 def vcs_server():
589 588 import rhodecode
590 589 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
591 590
592 591 server_url = rhodecode.CONFIG.get('vcs.server')
593 592 enabled = rhodecode.CONFIG.get('vcs.server.enable')
594 593 protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
595 594 state = STATE_OK_DEFAULT
596 595 version = None
597 596 workers = 0
598 597
599 598 try:
600 599 data = get_vcsserver_service_data()
601 600 if data and 'version' in data:
602 601 version = data['version']
603 602
604 603 if data and 'config' in data:
605 604 conf = data['config']
606 605 workers = conf.get('workers', 'NOT AVAILABLE')
607 606
608 607 connection = 'connected'
609 608 except Exception as e:
610 609 connection = 'failed'
611 610 state = {'message': str(e), 'type': STATE_ERR}
612 611
613 612 value = dict(
614 613 url=server_url,
615 614 enabled=enabled,
616 615 protocol=protocol,
617 616 connection=connection,
618 617 version=version,
619 618 text='',
620 619 )
621 620
622 621 human_value = value.copy()
623 622 human_value['text'] = \
624 623 '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
625 624 url=server_url, ver=version, workers=workers, mode=protocol,
626 625 conn=connection)
627 626
628 627 return SysInfoRes(value=value, state=state, human_value=human_value)
629 628
630 629
631 630 @register_sysinfo
632 631 def vcs_server_config():
633 632 from rhodecode.lib.vcs.backends import get_vcsserver_service_data
634 633 state = STATE_OK_DEFAULT
635 634
636 635 value = {}
637 636 try:
638 637 data = get_vcsserver_service_data()
639 638 value = data['app_config']
640 639 except Exception as e:
641 640 state = {'message': str(e), 'type': STATE_ERR}
642 641
643 642 human_value = value.copy()
644 643 human_value['text'] = 'VCS Server config'
645 644
646 645 return SysInfoRes(value=value, state=state, human_value=human_value)
647 646
648 647
649 648 @register_sysinfo
650 649 def rhodecode_app_info():
651 650 import rhodecode
652 651 edition = rhodecode.CONFIG.get('rhodecode.edition')
653 652
654 653 value = dict(
655 654 rhodecode_version=rhodecode.__version__,
656 655 rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
657 656 text=''
658 657 )
659 658 human_value = value.copy()
660 659 human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
661 660 edition=edition, ver=value['rhodecode_version']
662 661 )
663 662 return SysInfoRes(value=value, human_value=human_value)
664 663
665 664
666 665 @register_sysinfo
667 666 def rhodecode_config():
668 667 import rhodecode
669 668 path = rhodecode.CONFIG.get('__file__')
670 669 rhodecode_ini_safe = rhodecode.CONFIG.copy()
671 670 cert_path = get_cert_path(path)
672 671
673 672 try:
674 673 config = configparser.ConfigParser()
675 674 config.read(path)
676 675 parsed_ini = config
677 676 if parsed_ini.has_section('server:main'):
678 677 parsed_ini = dict(parsed_ini.items('server:main'))
679 678 except Exception:
680 679 log.exception('Failed to read .ini file for display')
681 680 parsed_ini = {}
682 681
683 682 rhodecode_ini_safe['server:main'] = parsed_ini
684 683
685 684 blacklist = [
686 685 f'rhodecode_{LicenseModel.LICENSE_DB_KEY}',
687 686 'routes.map',
688 687 'sqlalchemy.db1.url',
689 688 'channelstream.secret',
690 689 'beaker.session.secret',
691 690 'rhodecode.encrypted_values.secret',
692 691 'rhodecode_auth_github_consumer_key',
693 692 'rhodecode_auth_github_consumer_secret',
694 693 'rhodecode_auth_google_consumer_key',
695 694 'rhodecode_auth_google_consumer_secret',
696 695 'rhodecode_auth_bitbucket_consumer_secret',
697 696 'rhodecode_auth_bitbucket_consumer_key',
698 697 'rhodecode_auth_twitter_consumer_secret',
699 698 'rhodecode_auth_twitter_consumer_key',
700 699
701 700 'rhodecode_auth_twitter_secret',
702 701 'rhodecode_auth_github_secret',
703 702 'rhodecode_auth_google_secret',
704 703 'rhodecode_auth_bitbucket_secret',
705 704
706 705 'appenlight.api_key',
707 706 ('app_conf', 'sqlalchemy.db1.url')
708 707 ]
709 708 for k in blacklist:
710 709 if isinstance(k, tuple):
711 710 section, key = k
712 711 if section in rhodecode_ini_safe:
713 712 rhodecode_ini_safe[section] = '**OBFUSCATED**'
714 713 else:
715 714 rhodecode_ini_safe.pop(k, None)
716 715
717 716 # TODO: maybe put some CONFIG checks here ?
718 717 return SysInfoRes(value={'config': rhodecode_ini_safe,
719 718 'path': path, 'cert_path': cert_path})
720 719
721 720
722 721 @register_sysinfo
723 722 def database_info():
724 723 import rhodecode
725 724 from sqlalchemy.engine import url as engine_url
726 725 from rhodecode.model import meta
727 726 from rhodecode.model.meta import Session
728 727 from rhodecode.model.db import DbMigrateVersion
729 728
730 729 state = STATE_OK_DEFAULT
731 730
732 731 db_migrate = DbMigrateVersion.query().filter(
733 732 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
734 733
735 734 db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
736 735
737 736 try:
738 737 engine = meta.get_engine()
739 738 db_server_info = engine.dialect._get_server_version_info(
740 739 Session.connection(bind=engine))
741 740 db_version = '.'.join(map(str, db_server_info))
742 741 except Exception:
743 742 log.exception('failed to fetch db version')
744 743 db_version = 'UNKNOWN'
745 744
746 745 db_info = dict(
747 746 migrate_version=db_migrate.version,
748 747 type=db_url_obj.get_backend_name(),
749 748 version=db_version,
750 749 url=repr(db_url_obj)
751 750 )
752 751 current_version = db_migrate.version
753 752 expected_version = rhodecode.__dbversion__
754 753 if state['type'] == STATE_OK and current_version != expected_version:
755 754 msg = 'Critical: database schema mismatch, ' \
756 755 'expected version {}, got {}. ' \
757 756 'Please run migrations on your database.'.format(
758 757 expected_version, current_version)
759 758 state = {'message': msg, 'type': STATE_ERR}
760 759
761 760 human_value = db_info.copy()
762 761 human_value['url'] = "{} @ migration version: {}".format(
763 762 db_info['url'], db_info['migrate_version'])
764 763 human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
765 764 return SysInfoRes(value=db_info, state=state, human_value=human_value)
766 765
767 766
768 767 @register_sysinfo
769 768 def server_info(environ):
770 769 import rhodecode
771 770 from rhodecode.lib.base import get_server_ip_addr, get_server_port
772 771
773 772 value = {
774 773 'server_ip': '{}:{}'.format(
775 774 get_server_ip_addr(environ, log_errors=False),
776 775 get_server_port(environ)
777 776 ),
778 777 'server_id': rhodecode.CONFIG.get('instance_id'),
779 778 }
780 779 return SysInfoRes(value=value)
781 780
782 781
783 782 @register_sysinfo
784 783 def usage_info():
785 784 from rhodecode.model.db import User, Repository, true
786 785 value = {
787 786 'users': User.query().count(),
788 787 'users_active': User.query().filter(User.active == true()).count(),
789 788 'repositories': Repository.query().count(),
790 789 'repository_types': {
791 790 'hg': Repository.query().filter(
792 791 Repository.repo_type == 'hg').count(),
793 792 'git': Repository.query().filter(
794 793 Repository.repo_type == 'git').count(),
795 794 'svn': Repository.query().filter(
796 795 Repository.repo_type == 'svn').count(),
797 796 },
798 797 }
799 798 return SysInfoRes(value=value)
800 799
801 800
802 801 def get_system_info(environ):
803 802 environ = environ or {}
804 803 return {
805 804 'rhodecode_app': SysInfo(rhodecode_app_info)(),
806 805 'rhodecode_config': SysInfo(rhodecode_config)(),
807 806 'rhodecode_usage': SysInfo(usage_info)(),
808 807 'python': SysInfo(python_info)(),
809 808 'py_modules': SysInfo(py_modules)(),
810 809
811 810 'platform': SysInfo(platform_type)(),
812 811 'locale': SysInfo(locale_info)(),
813 812 'server': SysInfo(server_info, environ=environ)(),
814 813 'database': SysInfo(database_info)(),
815 814 'ulimit': SysInfo(ulimit_info)(),
816 815 'storage': SysInfo(storage)(),
817 816 'storage_inodes': SysInfo(storage_inodes)(),
818 817 'storage_archive': SysInfo(storage_archives)(),
819 818 'storage_gist': SysInfo(storage_gist)(),
820 819 'storage_temp': SysInfo(storage_temp)(),
821 820
822 821 'search': SysInfo(search_info)(),
823 822
824 823 'uptime': SysInfo(uptime)(),
825 824 'load': SysInfo(machine_load)(),
826 825 'cpu': SysInfo(cpu)(),
827 826 'memory': SysInfo(memory)(),
828 827
829 828 'vcs_backends': SysInfo(vcs_backends)(),
830 829 'vcs_server': SysInfo(vcs_server)(),
831 830
832 831 'vcs_server_config': SysInfo(vcs_server_config)(),
833 832
834 833 'git': SysInfo(git_info)(),
835 834 'hg': SysInfo(hg_info)(),
836 835 'svn': SysInfo(svn_info)(),
837 836 }
838 837
839 838
840 839 def load_system_info(key):
841 840 """
842 841 get_sys_info('vcs_server')
843 842 get_sys_info('database')
844 843 """
845 844 return SysInfo(registered_helpers[key])()
@@ -1,824 +1,824 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Utilities library for RhodeCode
21 21 """
22 22
23 23 import datetime
24 24
25 25 import decorator
26 26 import logging
27 27 import os
28 28 import re
29 29 import sys
30 30 import shutil
31 31 import socket
32 32 import tempfile
33 33 import traceback
34 34 import tarfile
35 35
36 36 from functools import wraps
37 37 from os.path import join as jn
38 38
39 39 import paste
40 40 import pkg_resources
41 41 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
42 42
43 43 from mako import exceptions
44 44
45 45 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
46 46 from rhodecode.lib.type_utils import AttributeDict
47 47 from rhodecode.lib.str_utils import safe_bytes, safe_str
48 48 from rhodecode.lib.vcs.backends.base import Config
49 49 from rhodecode.lib.vcs.exceptions import VCSError
50 50 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
51 51 from rhodecode.lib.ext_json import sjson as json
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import (
54 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 55 from rhodecode.model.meta import Session
56 56
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 # String which contains characters that are not allowed in slug names for
63 63 # repositories or repository groups. It is properly escaped to use it in
64 64 # regular expressions.
65 65 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66 66
67 67 # Regex that matches forbidden characters in repo/group slugs.
68 68 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches allowed characters in repo/group slugs.
71 71 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
72 72
73 73 # Regex that matches whole repo/group slugs.
74 74 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
75 75
76 76 _license_cache = None
77 77
78 78
79 79 def adopt_for_celery(func):
80 80 """
81 81 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
82 82 for further usage as a celery tasks.
83 83 """
84 84 @wraps(func)
85 85 def wrapper(extras):
86 86 extras = AttributeDict(extras)
87 87 # HooksResponse implements to_json method which must be used there.
88 88 return func(extras).to_json()
89 89 return wrapper
90 90
91 91
92 92 def repo_name_slug(value):
93 93 """
94 94 Return slug of name of repository
95 95 This function is called on each creation/modification
96 96 of repository to prevent bad names in repo
97 97 """
98 98
99 99 replacement_char = '-'
100 100
101 101 slug = strip_tags(value)
102 102 slug = convert_accented_entities(slug)
103 103 slug = convert_misc_entities(slug)
104 104
105 105 slug = SLUG_BAD_CHAR_RE.sub('', slug)
106 106 slug = re.sub(r'[\s]+', '-', slug)
107 107 slug = collapse(slug, replacement_char)
108 108
109 109 return slug
110 110
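A simplified, regex-only approximation of the slug idea from repo_name_slug above; the real function additionally strips tags and converts accented/misc entities via webhelpers2.

import re

BAD = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
BAD_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(BAD))

def simple_repo_slug(value):
    slug = BAD_RE.sub('', value)         # drop forbidden characters
    slug = re.sub(r'\s+', '-', slug)     # whitespace -> dashes
    slug = re.sub(r'-{2,}', '-', slug)   # collapse repeated dashes
    return slug.strip('-')

print(simple_repo_slug('My Repo! (2024)'))  # My-Repo-2024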
111 111
112 112 #==============================================================================
113 113 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
114 114 #==============================================================================
115 115 def get_repo_slug(request):
116 116 _repo = ''
117 117
118 118 if hasattr(request, 'db_repo_name'):
119 119 # if our requests has set db reference use it for name, this
120 120 # translates the example.com/_<id> into proper repo names
121 121 _repo = request.db_repo_name
122 122 elif getattr(request, 'matchdict', None):
123 123 # pyramid
124 124 _repo = request.matchdict.get('repo_name')
125 125
126 126 if _repo:
127 127 _repo = _repo.rstrip('/')
128 128 return _repo
129 129
130 130
131 131 def get_repo_group_slug(request):
132 132 _group = ''
133 133 if hasattr(request, 'db_repo_group'):
134 134 # if our requests has set db reference use it for name, this
135 135 # translates the example.com/_<id> into proper repo group names
136 136 _group = request.db_repo_group.group_name
137 137 elif getattr(request, 'matchdict', None):
138 138 # pyramid
139 139 _group = request.matchdict.get('repo_group_name')
140 140
141 141 if _group:
142 142 _group = _group.rstrip('/')
143 143 return _group
144 144
145 145
146 146 def get_user_group_slug(request):
147 147 _user_group = ''
148 148
149 149 if hasattr(request, 'db_user_group'):
150 150 _user_group = request.db_user_group.users_group_name
151 151 elif getattr(request, 'matchdict', None):
152 152 # pyramid
153 153 _user_group = request.matchdict.get('user_group_id')
154 154 _user_group_name = request.matchdict.get('user_group_name')
155 155 try:
156 156 if _user_group:
157 157 _user_group = UserGroup.get(_user_group)
158 158 elif _user_group_name:
159 159 _user_group = UserGroup.get_by_group_name(_user_group_name)
160 160
161 161 if _user_group:
162 162 _user_group = _user_group.users_group_name
163 163 except Exception:
164 164 log.exception('Failed to get user group by id and name')
165 165 # catch all failures here
166 166 return None
167 167
168 168 return _user_group
169 169
170 170
171 171 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
172 172 """
173 173 Scans given path for repos and return (name,(type,path)) tuple
174 174
175 175 :param path: path to scan for repositories
176 176 :param recursive: recursive search and return names with subdirs in front
177 177 """
178 178
179 179 # remove ending slash for better results
180 180 path = path.rstrip(os.sep)
181 181 log.debug('now scanning in %s location recursive:%s...', path, recursive)
182 182
183 183 def _get_repos(p):
184 184 dirpaths = get_dirpaths(p)
185 185 if not _is_dir_writable(p):
186 186 log.warning('repo path without write access: %s', p)
187 187
188 188 for dirpath in dirpaths:
189 189 if os.path.isfile(os.path.join(p, dirpath)):
190 190 continue
191 191 cur_path = os.path.join(p, dirpath)
192 192
193 193 # skip removed repos
194 194 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
195 195 continue
196 196
197 197 # skip .<something> dirs
198 198 if dirpath.startswith('.'):
199 199 continue
200 200
201 201 try:
202 202 scm_info = get_scm(cur_path)
203 203 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
204 204 except VCSError:
205 205 if not recursive:
206 206 continue
207 207 # check if this dir contains other repos for recursive scan
208 208 rec_path = os.path.join(p, dirpath)
209 209 if os.path.isdir(rec_path):
210 210 yield from _get_repos(rec_path)
211 211
212 212 return _get_repos(path)
213 213
214 214
215 215 def get_dirpaths(p: str) -> list:
216 216 try:
217 217 # OS-independent way of checking whether we have at least read-only
218 218 # access or not.
219 219 dirpaths = os.listdir(p)
220 220 except OSError:
221 221 log.warning('ignoring repo path without read access: %s', p)
222 222 return []
223 223
224 224 # os.listdir has a tweak: if a unicode path is passed into it, it tries to
225 225 # decode the entries and suddenly returns unicode objects itself. The items
226 226 # it cannot decode are returned undecoded and cause issues.
227 227 #
228 228 # Those paths are ignored here until a solid solution for path handling has
229 229 # been built.
230 230 expected_type = type(p)
231 231
232 232 def _has_correct_type(item):
233 233 if type(item) is not expected_type:
234 234 log.error(
235 235 "Ignoring path %s since it cannot be decoded into str.",
236 236 # Using "repr" to make sure that we see the raw byte value in case
237 237 # the item could not be decoded.
238 238 repr(item))
239 239 return False
240 240 return True
241 241
242 242 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
243 243
244 244 return dirpaths
245 245
246 246
247 247 def _is_dir_writable(path):
248 248 """
249 249 Probe if `path` is writable.
250 250
251 251 Due to trouble on Cygwin / Windows, this is actually probing if it is
252 252 possible to create a file inside of `path`, stat does not produce reliable
253 253 results in this case.
254 254 """
255 255 try:
256 256 with tempfile.TemporaryFile(dir=path):
257 257 pass
258 258 except OSError:
259 259 return False
260 260 return True
261 261
262 262
263 263 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
264 264 """
265 265     Returns True if the given path is a valid repository, False otherwise.
266 266     If the expect_scm param is given, also compare whether the detected scm
267 267     matches the expected one. If explicit_scm is given, don't try to
268 268     detect the scm; just use the given one to check if the repo is valid.
269 269
270 270 :param repo_name:
271 271 :param base_path:
272 272 :param expect_scm:
273 273 :param explicit_scm:
274 274 :param config:
275 275
276 276 :return True: if given path is a valid repository
277 277 """
278 278 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
279 279 log.debug('Checking if `%s` is a valid path for repository. '
280 280 'Explicit type: %s', repo_name, explicit_scm)
281 281
282 282 try:
283 283 if explicit_scm:
284 284 detected_scms = [get_scm_backend(explicit_scm)(
285 285 full_path, config=config).alias]
286 286 else:
287 287 detected_scms = get_scm(full_path)
288 288
289 289 if expect_scm:
290 290 return detected_scms[0] == expect_scm
291 291         log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
292 292 return True
293 293 except VCSError:
294 294 log.debug('path: %s is not a valid repo !', full_path)
295 295 return False
296 296
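A hedged usage sketch; the repo name and base path below are illustrative:

    # detect the scm and require it to be git
    is_valid_repo('my-repo', '/var/opt/rhodecode_repo_store', expect_scm='git')

    # skip detection and validate against an explicitly given backend
    is_valid_repo('my-repo', '/var/opt/rhodecode_repo_store', explicit_scm='git')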
297 297
298 298 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
299 299 """
300 300 Returns True if a given path is a repository group, False otherwise
301 301
302 302 :param repo_group_name:
303 303 :param base_path:
304 304 """
305 305 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
306 306 log.debug('Checking if `%s` is a valid path for repository group',
307 307 repo_group_name)
308 308
309 309 # check if it's not a repo
310 310 if is_valid_repo(repo_group_name, base_path):
311 311         log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
312 312 return False
313 313
314 314 try:
315 315 # we need to check bare git repos at higher level
316 316         # since we might match branches/hooks/info/objects or possibly
317 317         # other things inside a bare git repo
318 318 maybe_repo = os.path.dirname(full_path)
319 319 if maybe_repo == base_path:
320 320 # skip root level repo check; we know root location CANNOT BE a repo group
321 321 return False
322 322
323 323 scm_ = get_scm(maybe_repo)
324 324 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
325 325 return False
326 326 except VCSError:
327 327 pass
328 328
329 329 # check if it's a valid path
330 330 if skip_path_check or os.path.isdir(full_path):
331 331 log.debug('path: %s is a valid repo group !', full_path)
332 332 return True
333 333
334 334 log.debug('path: %s is not a valid repo group !', full_path)
335 335 return False
336 336
337 337
338 338 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
339 339 while True:
340 340 ok = input(prompt)
341 341 if ok.lower() in ('y', 'ye', 'yes'):
342 342 return True
343 343 if ok.lower() in ('n', 'no', 'nop', 'nope'):
344 344 return False
345 345 retries = retries - 1
346 346 if retries < 0:
347 347 raise OSError
348 348 print(complaint)
349 349
350 350 # propagated from mercurial documentation
351 351 ui_sections = [
352 352 'alias', 'auth',
353 353 'decode/encode', 'defaults',
354 354 'diff', 'email',
355 355 'extensions', 'format',
356 356 'merge-patterns', 'merge-tools',
357 357 'hooks', 'http_proxy',
358 358 'smtp', 'patch',
359 359 'paths', 'profiling',
360 360 'server', 'trusted',
361 361 'ui', 'web', ]
362 362
363 363
364 364 def config_data_from_db(clear_session=True, repo=None):
365 365 """
366 366 Read the configuration data from the database and return configuration
367 367 tuples.
368 368 """
369 369 from rhodecode.model.settings import VcsSettingsModel
370 370
371 371 config = []
372 372
373 373 sa = meta.Session()
374 374 settings_model = VcsSettingsModel(repo=repo, sa=sa)
375 375
376 376 ui_settings = settings_model.get_ui_settings()
377 377
378 378 ui_data = []
379 379 for setting in ui_settings:
380 380 if setting.active:
381 381 ui_data.append((setting.section, setting.key, setting.value))
382 382 config.append((
383 383 safe_str(setting.section), safe_str(setting.key),
384 384 safe_str(setting.value)))
385 385 if setting.key == 'push_ssl':
386 386 # force set push_ssl requirement to False, rhodecode
387 387 # handles that
388 388 config.append((
389 389 safe_str(setting.section), safe_str(setting.key), False))
390 390 log.debug(
391 391 'settings ui from db@repo[%s]: %s',
392 392 repo,
393 393 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
394 394 if clear_session:
395 395 meta.Session.remove()
396 396
397 397 # TODO: mikhail: probably it makes no sense to re-read hooks information.
398 398 # It's already there and activated/deactivated
399 399 skip_entries = []
400 400 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
401 401 if 'pull' not in enabled_hook_classes:
402 402 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
403 403 if 'push' not in enabled_hook_classes:
404 404 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
405 405 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
406 406 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
407 407
408 408 config = [entry for entry in config if entry[:2] not in skip_entries]
409 409
410 410 return config
411 411
412 412
413 413 def make_db_config(clear_session=True, repo=None):
414 414 """
415 415 Create a :class:`Config` instance based on the values in the database.
416 416 """
417 417 config = Config()
418 418 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
419 419 for section, option, value in config_data:
420 420 config.set(section, option, value)
421 421 return config
422 422
423 423
424 424 def get_enabled_hook_classes(ui_settings):
425 425 """
426 426 Return the enabled hook classes.
427 427
428 428 :param ui_settings: List of ui_settings as returned
429 429 by :meth:`VcsSettingsModel.get_ui_settings`
430 430
431 431 :return: a list with the enabled hook classes. The order is not guaranteed.
432 432 :rtype: list
433 433 """
434 434 enabled_hooks = []
435 435 active_hook_keys = [
436 436 key for section, key, value, active in ui_settings
437 437 if section == 'hooks' and active]
438 438
439 439 hook_names = {
440 440 RhodeCodeUi.HOOK_PUSH: 'push',
441 441 RhodeCodeUi.HOOK_PULL: 'pull',
442 442 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
443 443 }
444 444
445 445 for key in active_hook_keys:
446 446 hook = hook_names.get(key)
447 447 if hook:
448 448 enabled_hooks.append(hook)
449 449
450 450 return enabled_hooks
451 451
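A short sketch of the expected input shape, assuming each ui_settings item unpacks as (section, key, value, active) exactly as the loop above does; the values are made up:

    from collections import namedtuple

    UiSetting = namedtuple('UiSetting', 'section key value active')
    settings = [
        UiSetting('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
        UiSetting('hooks', RhodeCodeUi.HOOK_PULL, 'python:...', False),
    ]
    get_enabled_hook_classes(settings)  # -> ['push']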
452 452
453 453 def set_rhodecode_config(config):
454 454 """
455 455 Updates pyramid config with new settings from database
456 456
457 457 :param config:
458 458 """
459 459 from rhodecode.model.settings import SettingsModel
460 460 app_settings = SettingsModel().get_all_settings()
461 461
462 462 for k, v in list(app_settings.items()):
463 463 config[k] = v
464 464
465 465
466 466 def get_rhodecode_realm():
467 467 """
468 468 Return the rhodecode realm from database.
469 469 """
470 470 from rhodecode.model.settings import SettingsModel
471 471 realm = SettingsModel().get_setting_by_name('realm')
472 472 return safe_str(realm.app_settings_value)
473 473
474 474
475 def get_rhodecode_base_path():
475 def get_rhodecode_repo_store_path():
476 476 """
477 477     Returns the repository store path. This is the filesystem path that points
478 478     to the repository store, configured via the .ini file.
479 479 """
480 480
481 481 import rhodecode
482 return rhodecode.CONFIG['default_base_path']
482 return rhodecode.CONFIG['repo_store.path']
483 483
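A hedged sketch of what this helper reads; only the repo_store.path key comes from the code above, the value is illustrative (in a running instance it is populated from the .ini file rather than set by hand):

    import rhodecode

    rhodecode.CONFIG['repo_store.path'] = '/var/opt/rhodecode_repo_store'  # illustrative
    assert get_rhodecode_repo_store_path() == '/var/opt/rhodecode_repo_store'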
484 484
485 485 def map_groups(path):
486 486 """
487 487 Given a full path to a repository, create all nested groups that this
488 488 repo is inside. This function creates parent-child relationships between
489 489 groups and creates default perms for all new groups.
490 490
491 491     :param path: full path to repository
492 492 """
493 493 from rhodecode.model.repo_group import RepoGroupModel
494 494 sa = meta.Session()
495 495 groups = path.split(Repository.NAME_SEP)
496 496 parent = None
497 497 group = None
498 498
499 499 # last element is repo in nested groups structure
500 500 groups = groups[:-1]
501 501 rgm = RepoGroupModel(sa)
502 502 owner = User.get_first_super_admin()
503 503 for lvl, group_name in enumerate(groups):
504 504 group_name = '/'.join(groups[:lvl] + [group_name])
505 505 group = RepoGroup.get_by_group_name(group_name)
506 506 desc = '%s group' % group_name
507 507
508 508 # skip folders that are now removed repos
509 509 if REMOVED_REPO_PAT.match(group_name):
510 510 break
511 511
512 512 if group is None:
513 513 log.debug('creating group level: %s group_name: %s',
514 514 lvl, group_name)
515 515 group = RepoGroup(group_name, parent)
516 516 group.group_description = desc
517 517 group.user = owner
518 518 sa.add(group)
519 519 perm_obj = rgm._create_default_perms(group)
520 520 sa.add(perm_obj)
521 521 sa.flush()
522 522
523 523 parent = group
524 524 return group
525 525
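A hedged illustration of the behaviour, assuming '/' is Repository.NAME_SEP; the group and repo names are made up:

    # creates RepoGroup 'group' and 'group/subgroup' if they are missing and
    # returns the deepest one; the last element ('my-repo') is treated as the repo
    deepest_group = map_groups('group/subgroup/my-repo')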
526 526
527 527 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
528 528 """
529 529     Maps all repos given in initial_repo_list; non-existing repositories
530 530     are created. If remove_obsolete is True it also checks for db entries
531 531     that are not in initial_repo_list and removes them.
532 532
533 533 :param initial_repo_list: list of repositories found by scanning methods
534 534 :param remove_obsolete: check for obsolete entries in database
535 535 """
536 536 from rhodecode.model.repo import RepoModel
537 537 from rhodecode.model.repo_group import RepoGroupModel
538 538 from rhodecode.model.settings import SettingsModel
539 539
540 540 sa = meta.Session()
541 541 repo_model = RepoModel()
542 542 user = User.get_first_super_admin()
543 543 added = []
544 544
545 545 # creation defaults
546 546 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
547 547 enable_statistics = defs.get('repo_enable_statistics')
548 548 enable_locking = defs.get('repo_enable_locking')
549 549 enable_downloads = defs.get('repo_enable_downloads')
550 550 private = defs.get('repo_private')
551 551
552 552 for name, repo in list(initial_repo_list.items()):
553 553 group = map_groups(name)
554 554 str_name = safe_str(name)
555 555 db_repo = repo_model.get_by_repo_name(str_name)
556 556
557 557 # found repo that is on filesystem not in RhodeCode database
558 558 if not db_repo:
559 559 log.info('repository `%s` not found in the database, creating now', name)
560 560 added.append(name)
561 561 desc = (repo.description
562 562 if repo.description != 'unknown'
563 563 else '%s repository' % name)
564 564
565 565 db_repo = repo_model._create_repo(
566 566 repo_name=name,
567 567 repo_type=repo.alias,
568 568 description=desc,
569 569 repo_group=getattr(group, 'group_id', None),
570 570 owner=user,
571 571 enable_locking=enable_locking,
572 572 enable_downloads=enable_downloads,
573 573 enable_statistics=enable_statistics,
574 574 private=private,
575 575 state=Repository.STATE_CREATED
576 576 )
577 577 sa.commit()
578 578             # we added that repo just now; make sure the server info is updated
579 579 if db_repo.repo_type == 'git':
580 580 git_repo = db_repo.scm_instance()
581 581 # update repository server-info
582 582 log.debug('Running update server info')
583 583 git_repo._update_server_info(force=True)
584 584
585 585 db_repo.update_commit_cache()
586 586
587 587 config = db_repo._config
588 588 config.set('extensions', 'largefiles', '')
589 589 repo = db_repo.scm_instance(config=config)
590 590 repo.install_hooks(force=force_hooks_rebuild)
591 591
592 592 removed = []
593 593 if remove_obsolete:
594 594 # remove from database those repositories that are not in the filesystem
595 595 for repo in sa.query(Repository).all():
596 596 if repo.repo_name not in list(initial_repo_list.keys()):
597 597 log.debug("Removing non-existing repository found in db `%s`",
598 598 repo.repo_name)
599 599 try:
600 600 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
601 601 sa.commit()
602 602 removed.append(repo.repo_name)
603 603 except Exception:
604 604 # don't hold further removals on error
605 605 log.error(traceback.format_exc())
606 606 sa.rollback()
607 607
608 608 def splitter(full_repo_name):
609 609 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
610 610 gr_name = None
611 611 if len(_parts) == 2:
612 612 gr_name = _parts[0]
613 613 return gr_name
614 614
615 615 initial_repo_group_list = [splitter(x) for x in
616 616 list(initial_repo_list.keys()) if splitter(x)]
617 617
618 618         # remove from database those repository groups that are not in the
619 619         # filesystem; due to parent-child relationships we need to delete them
620 620         # in a specific order, most nested first
621 621 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
622 622 def nested_sort(gr):
623 623 return len(gr.split('/'))
624 624 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
625 625 if group_name not in initial_repo_group_list:
626 626 repo_group = RepoGroup.get_by_group_name(group_name)
627 627 if (repo_group.children.all() or
628 628 not RepoGroupModel().check_exist_filesystem(
629 629 group_name=group_name, exc_on_failure=False)):
630 630 continue
631 631
632 632 log.info(
633 633 'Removing non-existing repository group found in db `%s`',
634 634 group_name)
635 635 try:
636 636 RepoGroupModel(sa).delete(group_name, fs_remove=False)
637 637 sa.commit()
638 638 removed.append(group_name)
639 639 except Exception:
640 640 # don't hold further removals on error
641 641 log.exception(
642 642 'Unable to remove repository group `%s`',
643 643 group_name)
644 644 sa.rollback()
645 645 raise
646 646
647 647 return added, removed
648 648
649 649
650 650 def load_rcextensions(root_path):
651 651 import rhodecode
652 652 from rhodecode.config import conf
653 653
654 654 path = os.path.join(root_path)
655 655 sys.path.append(path)
656 656
657 657 try:
658 658 rcextensions = __import__('rcextensions')
659 659 except ImportError:
660 660 if os.path.isdir(os.path.join(path, 'rcextensions')):
661 661 log.warning('Unable to load rcextensions from %s', path)
662 662 rcextensions = None
663 663
664 664 if rcextensions:
665 665 log.info('Loaded rcextensions from %s...', rcextensions)
666 666 rhodecode.EXTENSIONS = rcextensions
667 667
668 668 # Additional mappings that are not present in the pygments lexers
669 669 conf.LANGUAGES_EXTENSIONS_MAP.update(
670 670 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
671 671
672 672
673 673 def get_custom_lexer(extension):
674 674 """
675 675 returns a custom lexer if it is defined in rcextensions module, or None
676 676 if there's no custom lexer defined
677 677 """
678 678 import rhodecode
679 679 from pygments import lexers
680 680
681 681 # custom override made by RhodeCode
682 682 if extension in ['mako']:
683 683 return lexers.get_lexer_by_name('html+mako')
684 684
685 685     # check if this extension is defined as another lexer
686 686 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
687 687 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
688 688 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
689 689 return lexers.get_lexer_by_name(_lexer_name)
690 690
691 691
692 692 #==============================================================================
693 693 # TEST FUNCTIONS AND CREATORS
694 694 #==============================================================================
695 695 def create_test_index(repo_location, config):
696 696 """
697 697 Makes default test index.
698 698 """
699 699 try:
700 700 import rc_testdata
701 701 except ImportError:
702 702 raise ImportError('Failed to import rc_testdata, '
703 703 'please make sure this package is installed from requirements_test.txt')
704 704 rc_testdata.extract_search_index(
705 705 'vcs_search_index', os.path.dirname(config['search.location']))
706 706
707 707
708 708 def create_test_directory(test_path):
709 709 """
710 710 Create test directory if it doesn't exist.
711 711 """
712 712 if not os.path.isdir(test_path):
713 713 log.debug('Creating testdir %s', test_path)
714 714 os.makedirs(test_path)
715 715
716 716
717 717 def create_test_database(test_path, config):
718 718 """
719 719 Makes a fresh database.
720 720 """
721 721 from rhodecode.lib.db_manage import DbManage
722 722 from rhodecode.lib.utils2 import get_encryption_key
723 723
724 724 # PART ONE create db
725 725 dbconf = config['sqlalchemy.db1.url']
726 726 enc_key = get_encryption_key(config)
727 727
728 728 log.debug('making test db %s', dbconf)
729 729
730 730 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
731 731 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
732 732 dbmanage.create_tables(override=True)
733 733 dbmanage.set_db_version()
734 734 # for tests dynamically set new root paths based on generated content
735 735 dbmanage.create_settings(dbmanage.config_prompt(test_path))
736 736 dbmanage.create_default_user()
737 737 dbmanage.create_test_admin_and_users()
738 738 dbmanage.create_permissions()
739 739 dbmanage.populate_default_permissions()
740 740 Session().commit()
741 741
742 742
743 743 def create_test_repositories(test_path, config):
744 744 """
745 745 Creates test repositories in the temporary directory. Repositories are
746 746 extracted from archives within the rc_testdata package.
747 747 """
748 748 import rc_testdata
749 749 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
750 750
751 751 log.debug('making test vcs repositories')
752 752
753 753 idx_path = config['search.location']
754 754 data_path = config['cache_dir']
755 755
756 756 # clean index and data
757 757 if idx_path and os.path.exists(idx_path):
758 758 log.debug('remove %s', idx_path)
759 759 shutil.rmtree(idx_path)
760 760
761 761 if data_path and os.path.exists(data_path):
762 762 log.debug('remove %s', data_path)
763 763 shutil.rmtree(data_path)
764 764
765 765 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
766 766 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
767 767
768 768     # Note: Subversion is in the process of being integrated with the system;
769 769     # until we have a properly packed version of the test svn repository, this
770 770     # copies over the repo from the "rc_testdata" package
771 771 svn_repo_path = rc_testdata.get_svn_repo_archive()
772 772 with tarfile.open(svn_repo_path) as tar:
773 773 tar.extractall(jn(test_path, SVN_REPO))
774 774
775 775
776 776 def password_changed(auth_user, session):
777 777 # Never report password change in case of default user or anonymous user.
778 778 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
779 779 return False
780 780
781 781 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
782 782 rhodecode_user = session.get('rhodecode_user', {})
783 783 session_password_hash = rhodecode_user.get('password', '')
784 784 return password_hash != session_password_hash
785 785
786 786
787 787 def read_opensource_licenses():
788 788 global _license_cache
789 789
790 790 if not _license_cache:
791 791 licenses = pkg_resources.resource_string(
792 792 'rhodecode', 'config/licenses.json')
793 793 _license_cache = json.loads(licenses)
794 794
795 795 return _license_cache
796 796
797 797
798 798 def generate_platform_uuid():
799 799 """
800 800     Generates a platform UUID based on its name
801 801 """
802 802 import platform
803 803
804 804 try:
805 805 uuid_list = [platform.platform()]
806 806 return sha256_safe(':'.join(uuid_list))
807 807 except Exception as e:
808 808 log.error('Failed to generate host uuid: %s', e)
809 809 return 'UNDEFINED'
810 810
811 811
812 812 def send_test_email(recipients, email_body='TEST EMAIL'):
813 813 """
814 814 Simple code for generating test emails.
815 815 Usage::
816 816
817 817 from rhodecode.lib import utils
818 818 utils.send_test_email()
819 819 """
820 820 from rhodecode.lib.celerylib import tasks, run_task
821 821
822 822 email_body = email_body_plaintext = email_body
823 823 subject = f'SUBJECT FROM: {socket.gethostname()}'
824 824 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
@@ -1,140 +1,149 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 import logging
21 21
22 22 import rhodecode
23 23 from rhodecode.model import meta, db
24 from rhodecode.lib.utils import get_rhodecode_repo_store_path
24 25 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
25 26
26 27 log = logging.getLogger(__name__)
27 28
28 29
29 30 def init_model(engine, encryption_key: bytes = b''):
30 31 """
31 32     Initializes the db session and binds the engine to the metadata.
32 33     Call this before using any of the tables or classes in the model,
33 34     preferably once at application start
34 35
35 36 :param engine: engine to bind to
36 37 :param encryption_key: key used for encryption
37 38 """
38 39
39 40 engine_str = obfuscate_url_pw(str(engine.url))
40 41 log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str)
41 42
42 43 meta.bind_engine_to_session(engine)
43 44 init_model_encryption(db, enc_key=encryption_key)
44 45
45 46
46 47 def init_model_encryption(*db_models, enc_key: bytes = b'', config=None):
47 48 if not enc_key:
48 49 from pyramid.threadlocal import get_current_registry
49 50 config = config or get_current_registry().settings
50 51 enc_key = get_encryption_key(config)
51 52
52 53 for db_model in db_models:
53 54 log.debug('setting encryption key for model %s', db_model)
54 55 db_model.ENCRYPTION_KEY = enc_key
55 56
56 57
57 58 class BaseModel(object):
58 59 """
59 60     Base Model for all RhodeCode models; it adds a SQLAlchemy session
60 61     to the model instance
61 62
62 63     :param sa: if passed, this session is reused instead of creating a new one
63 64 """
64 65
65 66 cls = None # override in child class
66 67
67 68 def __init__(self, sa=None):
68 69 if sa is not None:
69 70 self.sa = sa
70 71 else:
71 72 self.sa = meta.Session()
72 73
73 74 def _get_instance(self, cls, instance, callback=None):
74 75 """
75 76 Gets instance of given cls using some simple lookup mechanism.
76 77
77 78         :param cls: class (or tuple of classes) to fetch
78 79 :param instance: int or Instance
79 80 :param callback: callback to call if all lookups failed
80 81 """
81 82
82 83 if isinstance(instance, cls):
83 84 return instance
84 85 elif isinstance(instance, int):
85 86 if isinstance(cls, tuple):
86 87             # if we pass multiple classes we pick the first for .get()
87 88 cls = cls[0]
88 89 return cls.get(instance)
89 90 else:
90 91 if instance:
91 92 if callback is None:
92 93 raise Exception(
93 94 'given object must be int or Instance of %s '
94 95 'got %s, no callback provided' % (cls, type(instance))
95 96 )
96 97 else:
97 98 return callback(instance)
98 99
99 100 def _get_user(self, user):
100 101 """
101 102 Helper method to get user by ID, or username fallback
102 103
103 104 :param user: UserID, username, or User instance
104 105 """
105 106 return self._get_instance(
106 107 db.User, user, callback=db.User.get_by_username)
107 108
108 109 def _get_user_group(self, user_group):
109 110 """
110 111         Helper method to get user group by ID, or user group name fallback
111 112
112 113 :param user_group: UserGroupID, user_group_name, or UserGroup instance
113 114 """
114 115 return self._get_instance(
115 116 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
116 117
117 118 def _get_repo(self, repository):
118 119 """
119 120 Helper method to get repository by ID, or repository name
120 121
121 122 :param repository: RepoID, repository name or Repository Instance
122 123 """
123 124 return self._get_instance(
124 125 db.Repository, repository, callback=db.Repository.get_by_repo_name)
125 126
126 127 def _get_perm(self, permission):
127 128 """
128 129 Helper method to get permission by ID, or permission name
129 130
130 131 :param permission: PermissionID, permission_name or Permission instance
131 132 """
132 133 return self._get_instance(
133 134 db.Permission, permission, callback=db.Permission.get_by_key)
134 135
135 136 @classmethod
136 137 def get_all(cls):
137 138 """
138 139 Returns all instances of what is defined in `cls` class variable
139 140 """
140 141 return cls.cls.getAll()
142
143 @property
144 def repos_path(self):
145 """
146         Gets the repositories root path from the .ini file
147 """
148
149 return get_rhodecode_repo_store_path()
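A small sketch of how model code can use the new property; ExampleModel is hypothetical:

    class ExampleModel(BaseModel):
        cls = db.Repository

    # resolves to the repository store root configured in the .ini file
    store_root = ExampleModel().repos_path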
@@ -1,5887 +1,5884 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Database Models for RhodeCode Enterprise
21 21 """
22 22
23 23 import re
24 24 import os
25 25 import time
26 26 import string
27 27 import logging
28 28 import datetime
29 29 import uuid
30 30 import warnings
31 31 import ipaddress
32 32 import functools
33 33 import traceback
34 34 import collections
35 35
36 36 from sqlalchemy import (
37 37 or_, and_, not_, func, cast, TypeDecorator, event, select,
38 38 true, false, null,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType, BigInteger)
42 42 from sqlalchemy.sql.expression import case
43 43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 44 from sqlalchemy.orm import (
45 45 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from zope.cachedescriptors.property import Lazy as LazyProperty
51 51 from pyramid.threadlocal import get_current_request
52 52 from webhelpers2.text import remove_formatting
53 53
54 54 from rhodecode.lib.str_utils import safe_bytes
55 55 from rhodecode.translation import _
56 56 from rhodecode.lib.vcs import get_vcs_instance, VCSError
57 57 from rhodecode.lib.vcs.backends.base import (
58 58 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
59 59 from rhodecode.lib.utils2 import (
60 60 str2bool, safe_str, get_commit_safe, sha1_safe,
61 61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
62 62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
63 63 from rhodecode.lib.jsonalchemy import (
64 64 MutationObj, MutationList, JsonType, JsonRaw)
65 65 from rhodecode.lib.hash_utils import sha1
66 66 from rhodecode.lib import ext_json
67 67 from rhodecode.lib import enc_utils
68 68 from rhodecode.lib.ext_json import json, str_json
69 69 from rhodecode.lib.caching_query import FromCache
70 70 from rhodecode.lib.exceptions import (
71 71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 72 from rhodecode.model.meta import Base, Session
73 73
74 74 URL_SEP = '/'
75 75 log = logging.getLogger(__name__)
76 76
77 77 # =============================================================================
78 78 # BASE CLASSES
79 79 # =============================================================================
80 80
81 81 # this is propagated from the .ini file: rhodecode.encrypted_values.secret, or
82 82 # beaker.session.secret if the first is not set,
83 83 # and is initialized at environment.py
84 84 ENCRYPTION_KEY: bytes = b''
85 85
86 86 # used to sort permissions by type; '#' used here is not allowed in
87 87 # usernames, and it sorts very early in the string.printable table.
88 88 PERMISSION_TYPE_SORT = {
89 89 'admin': '####',
90 90 'write': '###',
91 91 'read': '##',
92 92 'none': '#',
93 93 }
94 94
95 95
96 96 def display_user_sort(obj):
97 97 """
98 98 Sort function used to sort permissions in .permissions() function of
99 99     Repository, RepoGroup, UserGroup. Also it puts the default user in front
100 100 of all other resources
101 101 """
102 102
103 103 if obj.username == User.DEFAULT_USER:
104 104 return '#####'
105 105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 106 extra_sort_num = '1' # default
107 107
109 109     # NOTE(dan): inactive duplicates go last
109 109 if getattr(obj, 'duplicate_perm', None):
110 110 extra_sort_num = '9'
111 111 return prefix + extra_sort_num + obj.username
112 112
113 113
114 114 def display_user_group_sort(obj):
115 115 """
116 116 Sort function used to sort permissions in .permissions() function of
117 117     Repository, RepoGroup, UserGroup. Also it puts the default user in front
118 118 of all other resources
119 119 """
120 120
121 121 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
122 122 return prefix + obj.users_group_name
123 123
124 124
125 125 def _hash_key(k):
126 126 return sha1_safe(k)
127 127
128 128
129 129 def in_filter_generator(qry, items, limit=500):
130 130 """
131 131 Splits IN() into multiple with OR
132 132 e.g.::
133 133 cnt = Repository.query().filter(
134 134 or_(
135 135 *in_filter_generator(Repository.repo_id, range(100000))
136 136 )).count()
137 137 """
138 138 if not items:
139 139 # empty list will cause empty query which might cause security issues
140 140 # this can lead to hidden unpleasant results
141 141 items = [-1]
142 142
143 143 parts = []
144 144 for chunk in range(0, len(items), limit):
145 145 parts.append(
146 146 qry.in_(items[chunk: chunk + limit])
147 147 )
148 148
149 149 return parts
150 150
151 151
152 152 base_table_args = {
153 153 'extend_existing': True,
154 154 'mysql_engine': 'InnoDB',
155 155 'mysql_charset': 'utf8',
156 156 'sqlite_autoincrement': True
157 157 }
158 158
159 159
160 160 class EncryptedTextValue(TypeDecorator):
161 161 """
162 162 Special column for encrypted long text data, use like::
163 163
164 164 value = Column("encrypted_value", EncryptedValue(), nullable=False)
165 165
166 166     This column is intelligent: if the value is in unencrypted form it returns
167 167     the unencrypted form, but on save it always encrypts
168 168 """
169 169 cache_ok = True
170 170 impl = Text
171 171
172 172 def process_bind_param(self, value, dialect):
173 173 """
174 174 Setter for storing value
175 175 """
176 176 import rhodecode
177 177 if not value:
178 178 return value
179 179
180 180         # protect against double encrypting if the value is already encrypted
181 181 if value.startswith('enc$aes$') \
182 182 or value.startswith('enc$aes_hmac$') \
183 183 or value.startswith('enc2$'):
184 184 raise ValueError('value needs to be in unencrypted format, '
185 185 'ie. not starting with enc$ or enc2$')
186 186
187 187 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
188 188 bytes_val = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
189 189 return safe_str(bytes_val)
190 190
191 191 def process_result_value(self, value, dialect):
192 192 """
193 193 Getter for retrieving value
194 194 """
195 195
196 196 import rhodecode
197 197 if not value:
198 198 return value
199 199
200 200 enc_strict_mode = rhodecode.ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True)
201 201
202 202 bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)
203 203
204 204 return safe_str(bytes_val)
205 205
206 206
207 207 class BaseModel(object):
208 208 """
209 209 Base Model for all classes
210 210 """
211 211
212 212 @classmethod
213 213 def _get_keys(cls):
214 214 """return column names for this model """
215 215 return class_mapper(cls).c.keys()
216 216
217 217 def get_dict(self):
218 218 """
219 219 return dict with keys and values corresponding
220 220 to this model data """
221 221
222 222 d = {}
223 223 for k in self._get_keys():
224 224 d[k] = getattr(self, k)
225 225
226 226 # also use __json__() if present to get additional fields
227 227 _json_attr = getattr(self, '__json__', None)
228 228 if _json_attr:
229 229 # update with attributes from __json__
230 230 if callable(_json_attr):
231 231 _json_attr = _json_attr()
232 232 for k, val in _json_attr.items():
233 233 d[k] = val
234 234 return d
235 235
236 236 def get_appstruct(self):
237 237 """return list with keys and values tuples corresponding
238 238 to this model data """
239 239
240 240 lst = []
241 241 for k in self._get_keys():
242 242 lst.append((k, getattr(self, k),))
243 243 return lst
244 244
245 245 def populate_obj(self, populate_dict):
246 246 """populate model with data from given populate_dict"""
247 247
248 248 for k in self._get_keys():
249 249 if k in populate_dict:
250 250 setattr(self, k, populate_dict[k])
251 251
252 252 @classmethod
253 253 def query(cls):
254 254 return Session().query(cls)
255 255
256 256 @classmethod
257 257 def select(cls, custom_cls=None):
258 258 """
259 259 stmt = cls.select().where(cls.user_id==1)
260 260 # optionally
261 261 stmt = cls.select(User.user_id).where(cls.user_id==1)
262 262 result = cls.execute(stmt) | cls.scalars(stmt)
263 263 """
264 264
265 265 if custom_cls:
266 266 stmt = select(custom_cls)
267 267 else:
268 268 stmt = select(cls)
269 269 return stmt
270 270
271 271 @classmethod
272 272 def execute(cls, stmt):
273 273 return Session().execute(stmt)
274 274
275 275 @classmethod
276 276 def scalars(cls, stmt):
277 277 return Session().scalars(stmt)
278 278
279 279 @classmethod
280 280 def get(cls, id_):
281 281 if id_:
282 282 return cls.query().get(id_)
283 283
284 284 @classmethod
285 285 def get_or_404(cls, id_):
286 286 from pyramid.httpexceptions import HTTPNotFound
287 287
288 288 try:
289 289 id_ = int(id_)
290 290 except (TypeError, ValueError):
291 291 raise HTTPNotFound()
292 292
293 293 res = cls.query().get(id_)
294 294 if not res:
295 295 raise HTTPNotFound()
296 296 return res
297 297
298 298 @classmethod
299 299 def getAll(cls):
300 300 # deprecated and left for backward compatibility
301 301 return cls.get_all()
302 302
303 303 @classmethod
304 304 def get_all(cls):
305 305 return cls.query().all()
306 306
307 307 @classmethod
308 308 def delete(cls, id_):
309 309 obj = cls.query().get(id_)
310 310 Session().delete(obj)
311 311
312 312 @classmethod
313 313 def identity_cache(cls, session, attr_name, value):
314 314 exist_in_session = []
315 315 for (item_cls, pkey), instance in session.identity_map.items():
316 316 if cls == item_cls and getattr(instance, attr_name) == value:
317 317 exist_in_session.append(instance)
318 318 if exist_in_session:
319 319 if len(exist_in_session) == 1:
320 320 return exist_in_session[0]
321 321 log.exception(
322 322 'multiple objects with attr %s and '
323 323 'value %s found with same name: %r',
324 324 attr_name, value, exist_in_session)
325 325
326 326 @property
327 327 def cls_name(self):
328 328 return self.__class__.__name__
329 329
330 330 def __repr__(self):
331 331 return f'<DB:{self.cls_name}>'
332 332
333 333
334 334 class RhodeCodeSetting(Base, BaseModel):
335 335 __tablename__ = 'rhodecode_settings'
336 336 __table_args__ = (
337 337 UniqueConstraint('app_settings_name'),
338 338 base_table_args
339 339 )
340 340
341 341 SETTINGS_TYPES = {
342 342 'str': safe_str,
343 343 'int': safe_int,
344 344 'unicode': safe_str,
345 345 'bool': str2bool,
346 346 'list': functools.partial(aslist, sep=',')
347 347 }
348 348 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
349 349 GLOBAL_CONF_KEY = 'app_settings'
350 350
351 351 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
352 352 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
353 353 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
354 354 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
355 355
356 356 def __init__(self, key='', val='', type='unicode'):
357 357 self.app_settings_name = key
358 358 self.app_settings_type = type
359 359 self.app_settings_value = val
360 360
361 361 @validates('_app_settings_value')
362 362 def validate_settings_value(self, key, val):
363 363 assert type(val) == str
364 364 return val
365 365
366 366 @hybrid_property
367 367 def app_settings_value(self):
368 368 v = self._app_settings_value
369 369 _type = self.app_settings_type
370 370 if _type:
371 371 _type = self.app_settings_type.split('.')[0]
372 372 # decode the encrypted value
373 373 if 'encrypted' in self.app_settings_type:
374 374 cipher = EncryptedTextValue()
375 375 v = safe_str(cipher.process_result_value(v, None))
376 376
377 377 converter = self.SETTINGS_TYPES.get(_type) or \
378 378 self.SETTINGS_TYPES['unicode']
379 379 return converter(v)
380 380
381 381 @app_settings_value.setter
382 382 def app_settings_value(self, val):
383 383 """
384 384 Setter that will always make sure we use unicode in app_settings_value
385 385
386 386 :param val:
387 387 """
388 388 val = safe_str(val)
389 389 # encode the encrypted value
390 390 if 'encrypted' in self.app_settings_type:
391 391 cipher = EncryptedTextValue()
392 392 val = safe_str(cipher.process_bind_param(val, None))
393 393 self._app_settings_value = val
394 394
395 395 @hybrid_property
396 396 def app_settings_type(self):
397 397 return self._app_settings_type
398 398
399 399 @app_settings_type.setter
400 400 def app_settings_type(self, val):
401 401 if val.split('.')[0] not in self.SETTINGS_TYPES:
402 402 raise Exception('type must be one of %s got %s'
403 403 % (self.SETTINGS_TYPES.keys(), val))
404 404 self._app_settings_type = val
405 405
406 406 @classmethod
407 407 def get_by_prefix(cls, prefix):
408 408 return RhodeCodeSetting.query()\
409 409 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
410 410 .all()
411 411
412 412 def __repr__(self):
413 413 return "<%s('%s:%s[%s]')>" % (
414 414 self.cls_name,
415 415 self.app_settings_name, self.app_settings_value,
416 416 self.app_settings_type
417 417 )
418 418
419 419
420 420 class RhodeCodeUi(Base, BaseModel):
421 421 __tablename__ = 'rhodecode_ui'
422 422 __table_args__ = (
423 423 UniqueConstraint('ui_key'),
424 424 base_table_args
425 425 )
426 426 # Sync those values with vcsserver.config.hooks
427 427
428 428 HOOK_REPO_SIZE = 'changegroup.repo_size'
429 429 # HG
430 430 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
431 431 HOOK_PULL = 'outgoing.pull_logger'
432 432 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
433 433 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
434 434 HOOK_PUSH = 'changegroup.push_logger'
435 435 HOOK_PUSH_KEY = 'pushkey.key_push'
436 436
437 437 HOOKS_BUILTIN = [
438 438 HOOK_PRE_PULL,
439 439 HOOK_PULL,
440 440 HOOK_PRE_PUSH,
441 441 HOOK_PRETX_PUSH,
442 442 HOOK_PUSH,
443 443 HOOK_PUSH_KEY,
444 444 ]
445 445
446 446 # TODO: johbo: Unify way how hooks are configured for git and hg,
447 447 # git part is currently hardcoded.
448 448
449 449 # SVN PATTERNS
450 450 SVN_BRANCH_ID = 'vcs_svn_branch'
451 451 SVN_TAG_ID = 'vcs_svn_tag'
452 452
453 453 ui_id = Column(
454 454 "ui_id", Integer(), nullable=False, unique=True, default=None,
455 455 primary_key=True)
456 456 ui_section = Column(
457 457 "ui_section", String(255), nullable=True, unique=None, default=None)
458 458 ui_key = Column(
459 459 "ui_key", String(255), nullable=True, unique=None, default=None)
460 460 ui_value = Column(
461 461 "ui_value", String(255), nullable=True, unique=None, default=None)
462 462 ui_active = Column(
463 463 "ui_active", Boolean(), nullable=True, unique=None, default=True)
464 464
465 465 def __repr__(self):
466 466 return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
467 467 self.ui_key, self.ui_value)
468 468
469 469
470 470 class RepoRhodeCodeSetting(Base, BaseModel):
471 471 __tablename__ = 'repo_rhodecode_settings'
472 472 __table_args__ = (
473 473 UniqueConstraint(
474 474 'app_settings_name', 'repository_id',
475 475 name='uq_repo_rhodecode_setting_name_repo_id'),
476 476 base_table_args
477 477 )
478 478
479 479 repository_id = Column(
480 480 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
481 481 nullable=False)
482 482 app_settings_id = Column(
483 483 "app_settings_id", Integer(), nullable=False, unique=True,
484 484 default=None, primary_key=True)
485 485 app_settings_name = Column(
486 486 "app_settings_name", String(255), nullable=True, unique=None,
487 487 default=None)
488 488 _app_settings_value = Column(
489 489 "app_settings_value", String(4096), nullable=True, unique=None,
490 490 default=None)
491 491 _app_settings_type = Column(
492 492 "app_settings_type", String(255), nullable=True, unique=None,
493 493 default=None)
494 494
495 495 repository = relationship('Repository', viewonly=True)
496 496
497 497 def __init__(self, repository_id, key='', val='', type='unicode'):
498 498 self.repository_id = repository_id
499 499 self.app_settings_name = key
500 500 self.app_settings_type = type
501 501 self.app_settings_value = val
502 502
503 503 @validates('_app_settings_value')
504 504 def validate_settings_value(self, key, val):
505 505 assert type(val) == str
506 506 return val
507 507
508 508 @hybrid_property
509 509 def app_settings_value(self):
510 510 v = self._app_settings_value
511 511 type_ = self.app_settings_type
512 512 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
513 513 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
514 514 return converter(v)
515 515
516 516 @app_settings_value.setter
517 517 def app_settings_value(self, val):
518 518 """
519 519 Setter that will always make sure we use unicode in app_settings_value
520 520
521 521 :param val:
522 522 """
523 523 self._app_settings_value = safe_str(val)
524 524
525 525 @hybrid_property
526 526 def app_settings_type(self):
527 527 return self._app_settings_type
528 528
529 529 @app_settings_type.setter
530 530 def app_settings_type(self, val):
531 531 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
532 532 if val not in SETTINGS_TYPES:
533 533 raise Exception('type must be one of %s got %s'
534 534 % (SETTINGS_TYPES.keys(), val))
535 535 self._app_settings_type = val
536 536
537 537 def __repr__(self):
538 538 return "<%s('%s:%s:%s[%s]')>" % (
539 539 self.cls_name, self.repository.repo_name,
540 540 self.app_settings_name, self.app_settings_value,
541 541 self.app_settings_type
542 542 )
543 543
544 544
545 545 class RepoRhodeCodeUi(Base, BaseModel):
546 546 __tablename__ = 'repo_rhodecode_ui'
547 547 __table_args__ = (
548 548 UniqueConstraint(
549 549 'repository_id', 'ui_section', 'ui_key',
550 550 name='uq_repo_rhodecode_ui_repository_id_section_key'),
551 551 base_table_args
552 552 )
553 553
554 554 repository_id = Column(
555 555 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
556 556 nullable=False)
557 557 ui_id = Column(
558 558 "ui_id", Integer(), nullable=False, unique=True, default=None,
559 559 primary_key=True)
560 560 ui_section = Column(
561 561 "ui_section", String(255), nullable=True, unique=None, default=None)
562 562 ui_key = Column(
563 563 "ui_key", String(255), nullable=True, unique=None, default=None)
564 564 ui_value = Column(
565 565 "ui_value", String(255), nullable=True, unique=None, default=None)
566 566 ui_active = Column(
567 567 "ui_active", Boolean(), nullable=True, unique=None, default=True)
568 568
569 569 repository = relationship('Repository', viewonly=True)
570 570
571 571 def __repr__(self):
572 572 return '<%s[%s:%s]%s=>%s]>' % (
573 573 self.cls_name, self.repository.repo_name,
574 574 self.ui_section, self.ui_key, self.ui_value)
575 575
576 576
577 577 class User(Base, BaseModel):
578 578 __tablename__ = 'users'
579 579 __table_args__ = (
580 580 UniqueConstraint('username'), UniqueConstraint('email'),
581 581 Index('u_username_idx', 'username'),
582 582 Index('u_email_idx', 'email'),
583 583 base_table_args
584 584 )
585 585
586 586 DEFAULT_USER = 'default'
587 587 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
588 588 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
589 589
590 590 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
591 591 username = Column("username", String(255), nullable=True, unique=None, default=None)
592 592 password = Column("password", String(255), nullable=True, unique=None, default=None)
593 593 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
594 594 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
595 595 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
596 596 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
597 597 _email = Column("email", String(255), nullable=True, unique=None, default=None)
598 598 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
599 599 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
600 600 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
601 601
602 602 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
603 603 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
604 604 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
605 605 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
606 606 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
607 607 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
608 608
609 609 user_log = relationship('UserLog', back_populates='user')
610 610 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
611 611
612 612 repositories = relationship('Repository', back_populates='user')
613 613 repository_groups = relationship('RepoGroup', back_populates='user')
614 614 user_groups = relationship('UserGroup', back_populates='user')
615 615
616 616 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user')
617 617 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user')
618 618
619 619 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
620 620 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
621 621 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
622 622
623 623 group_member = relationship('UserGroupMember', cascade='all', back_populates='user')
624 624
625 625 notifications = relationship('UserNotification', cascade='all', back_populates='user')
626 626 # notifications assigned to this user
627 627 user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user')
628 628 # comments created by this user
629 629 user_comments = relationship('ChangesetComment', cascade='all', back_populates='author')
630 630 # user profile extra info
631 631 user_emails = relationship('UserEmailMap', cascade='all', back_populates='user')
632 632 user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user')
633 633 user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user')
634 634 user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user')
635 635
636 636 # gists
637 637 user_gists = relationship('Gist', cascade='all', back_populates='owner')
638 638 # user pull requests
639 639 user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author')
640 640
641 641 # external identities
642 642 external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all')
643 643 # review rules
644 644 user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user')
645 645
646 646 # artifacts owned
647 647 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user')
648 648
649 649 # no cascade, set NULL
650 650 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user')
651 651
652 652 def __repr__(self):
653 653 return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>"
654 654
655 655 @hybrid_property
656 656 def email(self):
657 657 return self._email
658 658
659 659 @email.setter
660 660 def email(self, val):
661 661 self._email = val.lower() if val else None
662 662
663 663 @hybrid_property
664 664 def first_name(self):
665 665 from rhodecode.lib import helpers as h
666 666 if self.name:
667 667 return h.escape(self.name)
668 668 return self.name
669 669
670 670 @hybrid_property
671 671 def last_name(self):
672 672 from rhodecode.lib import helpers as h
673 673 if self.lastname:
674 674 return h.escape(self.lastname)
675 675 return self.lastname
676 676
677 677 @hybrid_property
678 678 def api_key(self):
679 679 """
680 680         Fetch an auth-token with role ALL connected to this user, if one exists
681 681 """
682 682 user_auth_token = UserApiKeys.query()\
683 683 .filter(UserApiKeys.user_id == self.user_id)\
684 684 .filter(or_(UserApiKeys.expires == -1,
685 685 UserApiKeys.expires >= time.time()))\
686 686 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
687 687 if user_auth_token:
688 688 user_auth_token = user_auth_token.api_key
689 689
690 690 return user_auth_token
691 691
692 692 @api_key.setter
693 693 def api_key(self, val):
694 694         # don't allow setting the API key; this is deprecated for now
695 695 self._api_key = None
696 696
697 697 @property
698 698 def reviewer_pull_requests(self):
699 699 return PullRequestReviewers.query() \
700 700 .options(joinedload(PullRequestReviewers.pull_request)) \
701 701 .filter(PullRequestReviewers.user_id == self.user_id) \
702 702 .all()
703 703
704 704 @property
705 705 def firstname(self):
706 706 # alias for future
707 707 return self.name
708 708
709 709 @property
710 710 def emails(self):
711 711 other = UserEmailMap.query()\
712 712 .filter(UserEmailMap.user == self) \
713 713 .order_by(UserEmailMap.email_id.asc()) \
714 714 .all()
715 715 return [self.email] + [x.email for x in other]
716 716
717 717 def emails_cached(self):
718 718 emails = []
719 719 if self.user_id != self.get_default_user_id():
720 720 emails = UserEmailMap.query()\
721 721 .filter(UserEmailMap.user == self) \
722 722 .order_by(UserEmailMap.email_id.asc())
723 723
724 724 emails = emails.options(
725 725 FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
726 726 )
727 727
728 728 return [self.email] + [x.email for x in emails]
729 729
730 730 @property
731 731 def auth_tokens(self):
732 732 auth_tokens = self.get_auth_tokens()
733 733 return [x.api_key for x in auth_tokens]
734 734
735 735 def get_auth_tokens(self):
736 736 return UserApiKeys.query()\
737 737 .filter(UserApiKeys.user == self)\
738 738 .order_by(UserApiKeys.user_api_key_id.asc())\
739 739 .all()
740 740
741 741 @LazyProperty
742 742 def feed_token(self):
743 743 return self.get_feed_token()
744 744
745 745 def get_feed_token(self, cache=True):
746 746 feed_tokens = UserApiKeys.query()\
747 747 .filter(UserApiKeys.user == self)\
748 748 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
749 749 if cache:
750 750 feed_tokens = feed_tokens.options(
751 751 FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))
752 752
753 753 feed_tokens = feed_tokens.all()
754 754 if feed_tokens:
755 755 return feed_tokens[0].api_key
756 756 return 'NO_FEED_TOKEN_AVAILABLE'
757 757
758 758 @LazyProperty
759 759 def artifact_token(self):
760 760 return self.get_artifact_token()
761 761
762 762 def get_artifact_token(self, cache=True):
763 763 artifacts_tokens = UserApiKeys.query()\
764 764 .filter(UserApiKeys.user == self) \
765 765 .filter(or_(UserApiKeys.expires == -1,
766 766 UserApiKeys.expires >= time.time())) \
767 767 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
768 768
769 769 if cache:
770 770 artifacts_tokens = artifacts_tokens.options(
771 771 FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))
772 772
773 773 artifacts_tokens = artifacts_tokens.all()
774 774 if artifacts_tokens:
775 775 return artifacts_tokens[0].api_key
776 776 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
777 777
778 778 def get_or_create_artifact_token(self):
779 779 artifacts_tokens = UserApiKeys.query()\
780 780 .filter(UserApiKeys.user == self) \
781 781 .filter(or_(UserApiKeys.expires == -1,
782 782 UserApiKeys.expires >= time.time())) \
783 783 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
784 784
785 785 artifacts_tokens = artifacts_tokens.all()
786 786 if artifacts_tokens:
787 787 return artifacts_tokens[0].api_key
788 788 else:
789 789 from rhodecode.model.auth_token import AuthTokenModel
790 790 artifact_token = AuthTokenModel().create(
791 791 self, 'auto-generated-artifact-token',
792 792 lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
793 793 Session.commit()
794 794 return artifact_token.api_key
795 795
796 796 @classmethod
797 797 def get(cls, user_id, cache=False):
798 798 if not user_id:
799 799 return
800 800
801 801 user = cls.query()
802 802 if cache:
803 803 user = user.options(
804 804 FromCache("sql_cache_short", f"get_users_{user_id}"))
805 805 return user.get(user_id)
806 806
807 807 @classmethod
808 808 def extra_valid_auth_tokens(cls, user, role=None):
809 809 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
810 810 .filter(or_(UserApiKeys.expires == -1,
811 811 UserApiKeys.expires >= time.time()))
812 812 if role:
813 813 tokens = tokens.filter(or_(UserApiKeys.role == role,
814 814 UserApiKeys.role == UserApiKeys.ROLE_ALL))
815 815 return tokens.all()
816 816
817 817 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
818 818 from rhodecode.lib import auth
819 819
820 820 log.debug('Trying to authenticate user: %s via auth-token, '
821 821 'and roles: %s', self, roles)
822 822
823 823 if not auth_token:
824 824 return False
825 825
826 826 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
827 827 tokens_q = UserApiKeys.query()\
828 828 .filter(UserApiKeys.user_id == self.user_id)\
829 829 .filter(or_(UserApiKeys.expires == -1,
830 830 UserApiKeys.expires >= time.time()))
831 831
832 832 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
833 833
834 834 crypto_backend = auth.crypto_backend()
835 835 enc_token_map = {}
836 836 plain_token_map = {}
837 837 for token in tokens_q:
838 838 if token.api_key.startswith(crypto_backend.ENC_PREF):
839 839 enc_token_map[token.api_key] = token
840 840 else:
841 841 plain_token_map[token.api_key] = token
842 842 log.debug(
843 843 'Found %s plain and %s encrypted tokens to check for authentication for this user',
844 844 len(plain_token_map), len(enc_token_map))
845 845
846 846 # plain token match comes first
847 847 match = plain_token_map.get(auth_token)
848 848
849 849 # check encrypted tokens now
850 850 if not match:
851 851 for token_hash, token in enc_token_map.items():
852 852 # NOTE(marcink): this is expensive to calculate, but most secure
853 853 if crypto_backend.hash_check(auth_token, token_hash):
854 854 match = token
855 855 break
856 856
857 857 if match:
858 858 log.debug('Found matching token %s', match)
859 859 if match.repo_id:
860 860 log.debug('Found scope, checking for scope match of token %s', match)
861 861 if match.repo_id == scope_repo_id:
862 862 return True
863 863 else:
864 864 log.debug(
865 865 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
866 866 'and calling scope is:%s, skipping further checks',
867 867 match.repo, scope_repo_id)
868 868 return False
869 869 else:
870 870 return True
871 871
872 872 return False
873 873
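An illustrative sketch of calling the method above during a VCS request; `request_token`, `user` and `repo` are assumed to exist and are not part of this diff:

# Accept either a VCS-scoped token or a ROLE_ALL token (ROLE_ALL is added implicitly);
# when the token carries a repo scope it must match the repository being accessed.
allowed = user.authenticate_by_token(
    request_token,
    roles=[UserApiKeys.ROLE_VCS],
    scope_repo_id=repo.repo_id)
if not allowed:
    log.debug('token rejected for repo %s', repo.repo_name)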
874 874 @property
875 875 def ip_addresses(self):
876 876 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
877 877 return [x.ip_addr for x in ret]
878 878
879 879 @property
880 880 def username_and_name(self):
881 881 return f'{self.username} ({self.first_name} {self.last_name})'
882 882
883 883 @property
884 884 def username_or_name_or_email(self):
885 885 full_name = self.full_name if self.full_name != ' ' else None
886 886 return self.username or full_name or self.email
887 887
888 888 @property
889 889 def full_name(self):
890 890 return f'{self.first_name} {self.last_name}'
891 891
892 892 @property
893 893 def full_name_or_username(self):
894 894 return (f'{self.first_name} {self.last_name}'
895 895 if (self.first_name and self.last_name) else self.username)
896 896
897 897 @property
898 898 def full_contact(self):
899 899 return f'{self.first_name} {self.last_name} <{self.email}>'
900 900
901 901 @property
902 902 def short_contact(self):
903 903 return f'{self.first_name} {self.last_name}'
904 904
905 905 @property
906 906 def is_admin(self):
907 907 return self.admin
908 908
909 909 @property
910 910 def language(self):
911 911 return self.user_data.get('language')
912 912
913 913 def AuthUser(self, **kwargs):
914 914 """
915 915 Returns instance of AuthUser for this user
916 916 """
917 917 from rhodecode.lib.auth import AuthUser
918 918 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
919 919
920 920 @hybrid_property
921 921 def user_data(self):
922 922 if not self._user_data:
923 923 return {}
924 924
925 925 try:
926 926 return json.loads(self._user_data) or {}
927 927 except TypeError:
928 928 return {}
929 929
930 930 @user_data.setter
931 931 def user_data(self, val):
932 932 if not isinstance(val, dict):
933 933 raise Exception('user_data must be dict, got %s' % type(val))
934 934 try:
935 935 self._user_data = safe_bytes(json.dumps(val))
936 936 except Exception:
937 937 log.error(traceback.format_exc())
938 938
939 939 @classmethod
940 940 def get_by_username(cls, username, case_insensitive=False,
941 941 cache=False):
942 942
943 943 if case_insensitive:
944 944 q = cls.select().where(
945 945 func.lower(cls.username) == func.lower(username))
946 946 else:
947 947 q = cls.select().where(cls.username == username)
948 948
949 949 if cache:
950 950 hash_key = _hash_key(username)
951 951 q = q.options(
952 952 FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))
953 953
954 954 return cls.execute(q).scalar_one_or_none()
955 955
956 956 @classmethod
957 957 def get_by_auth_token(cls, auth_token, cache=False):
958 958
959 959 q = cls.select(User)\
960 960 .join(UserApiKeys)\
961 961 .where(UserApiKeys.api_key == auth_token)\
962 962 .where(or_(UserApiKeys.expires == -1,
963 963 UserApiKeys.expires >= time.time()))
964 964
965 965 if cache:
966 966 q = q.options(
967 967 FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))
968 968
969 969 matched_user = cls.execute(q).scalar_one_or_none()
970 970
971 971 return matched_user
972 972
973 973 @classmethod
974 974 def get_by_email(cls, email, case_insensitive=False, cache=False):
975 975
976 976 if case_insensitive:
977 977 q = cls.select().where(func.lower(cls.email) == func.lower(email))
978 978 else:
979 979 q = cls.select().where(cls.email == email)
980 980
981 981 if cache:
982 982 email_key = _hash_key(email)
983 983 q = q.options(
984 984 FromCache("sql_cache_short", f"get_email_key_{email_key}"))
985 985
986 986 ret = cls.execute(q).scalar_one_or_none()
987 987
988 988 if ret is None:
989 989 q = cls.select(UserEmailMap)
990 990 # try fetching in alternate email map
991 991 if case_insensitive:
992 992 q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
993 993 else:
994 994 q = q.where(UserEmailMap.email == email)
995 995 q = q.options(joinedload(UserEmailMap.user))
996 996 if cache:
997 997 q = q.options(
998 998 FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))
999 999
1000 1000 result = cls.execute(q).scalar_one_or_none()
1001 1001 ret = getattr(result, 'user', None)
1002 1002
1003 1003 return ret
1004 1004
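A short sketch of the two-step lookup above; the email address is made up for illustration:

# First tries the main User.email column, then falls back to UserEmailMap
user = User.get_by_email('Jane.Doe@Example.com', case_insensitive=True)
if user is None:
    log.debug('no account registered under that address')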
1005 1005 @classmethod
1006 1006 def get_from_cs_author(cls, author):
1007 1007 """
1008 1008 Tries to get User objects out of commit author string
1009 1009
1010 1010 :param author:
1011 1011 """
1012 1012 from rhodecode.lib.helpers import email, author_name
1013 1013 # If the passed attribute contains a valid email, see if it's in the system
1014 1014 _email = email(author)
1015 1015 if _email:
1016 1016 user = cls.get_by_email(_email, case_insensitive=True)
1017 1017 if user:
1018 1018 return user
1019 1019 # Maybe we can match by username?
1020 1020 _author = author_name(author)
1021 1021 user = cls.get_by_username(_author, case_insensitive=True)
1022 1022 if user:
1023 1023 return user
1024 1024
1025 1025 def update_userdata(self, **kwargs):
1026 1026 usr = self
1027 1027 old = usr.user_data
1028 1028 old.update(**kwargs)
1029 1029 usr.user_data = old
1030 1030 Session().add(usr)
1031 1031 log.debug('updated userdata with %s', kwargs)
1032 1032
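A sketch of how the JSON-backed `user_data` field is typically updated through the helper above; the keys used here are purely illustrative:

# Merges the given keys into the existing user_data dict and re-serializes it
user.update_userdata(language='en', dashboard_items=25)
Session().commit()
# Reading goes through the hybrid property, which deserializes the JSON blob
assert user.user_data.get('language') == 'en'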
1033 1033 def update_lastlogin(self):
1034 1034 """Update user lastlogin"""
1035 1035 self.last_login = datetime.datetime.now()
1036 1036 Session().add(self)
1037 1037 log.debug('updated user %s lastlogin', self.username)
1038 1038
1039 1039 def update_password(self, new_password):
1040 1040 from rhodecode.lib.auth import get_crypt_password
1041 1041
1042 1042 self.password = get_crypt_password(new_password)
1043 1043 Session().add(self)
1044 1044
1045 1045 @classmethod
1046 1046 def get_first_super_admin(cls):
1047 1047 stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
1048 1048 user = cls.scalars(stmt).first()
1049 1049
1050 1050 if user is None:
1051 1051 raise Exception('FATAL: Missing administrative account!')
1052 1052 return user
1053 1053
1054 1054 @classmethod
1055 1055 def get_all_super_admins(cls, only_active=False):
1056 1056 """
1057 1057 Returns all admin accounts sorted by username
1058 1058 """
1059 1059 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1060 1060 if only_active:
1061 1061 qry = qry.filter(User.active == true())
1062 1062 return qry.all()
1063 1063
1064 1064 @classmethod
1065 1065 def get_all_user_ids(cls, only_active=True):
1066 1066 """
1067 1067 Returns all users IDs
1068 1068 """
1069 1069 qry = Session().query(User.user_id)
1070 1070
1071 1071 if only_active:
1072 1072 qry = qry.filter(User.active == true())
1073 1073 return [x.user_id for x in qry]
1074 1074
1075 1075 @classmethod
1076 1076 def get_default_user(cls, cache=False, refresh=False):
1077 1077 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1078 1078 if user is None:
1079 1079 raise Exception('FATAL: Missing default account!')
1080 1080 if refresh:
1081 1081 # The default user might be based on outdated state which
1082 1082 # has been loaded from the cache.
1083 1083 # A call to refresh() ensures that the
1084 1084 # latest state from the database is used.
1085 1085 Session().refresh(user)
1086 1086
1087 1087 return user
1088 1088
1089 1089 @classmethod
1090 1090 def get_default_user_id(cls):
1091 1091 import rhodecode
1092 1092 return rhodecode.CONFIG['default_user_id']
1093 1093
1094 1094 def _get_default_perms(self, user, suffix=''):
1095 1095 from rhodecode.model.permission import PermissionModel
1096 1096 return PermissionModel().get_default_perms(user.user_perms, suffix)
1097 1097
1098 1098 def get_default_perms(self, suffix=''):
1099 1099 return self._get_default_perms(self, suffix)
1100 1100
1101 1101 def get_api_data(self, include_secrets=False, details='full'):
1102 1102 """
1103 1103 Common function for generating user related data for API
1104 1104
1105 1105 :param include_secrets: By default secrets in the API data will be replaced
1106 1106 by a placeholder value to prevent exposing this data by accident. If
1107 1107 this data should be exposed, set this flag to ``True``.
1108 1108
1109 1109 :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
1110 1110 the available user information: user_id, name and emails.
1111 1111 """
1112 1112 user = self
1113 1113 user_data = self.user_data
1114 1114 data = {
1115 1115 'user_id': user.user_id,
1116 1116 'username': user.username,
1117 1117 'firstname': user.name,
1118 1118 'lastname': user.lastname,
1119 1119 'description': user.description,
1120 1120 'email': user.email,
1121 1121 'emails': user.emails,
1122 1122 }
1123 1123 if details == 'basic':
1124 1124 return data
1125 1125
1126 1126 auth_token_length = 40
1127 1127 auth_token_replacement = '*' * auth_token_length
1128 1128
1129 1129 extras = {
1130 1130 'auth_tokens': [auth_token_replacement],
1131 1131 'active': user.active,
1132 1132 'admin': user.admin,
1133 1133 'extern_type': user.extern_type,
1134 1134 'extern_name': user.extern_name,
1135 1135 'last_login': user.last_login,
1136 1136 'last_activity': user.last_activity,
1137 1137 'ip_addresses': user.ip_addresses,
1138 1138 'language': user_data.get('language')
1139 1139 }
1140 1140 data.update(extras)
1141 1141
1142 1142 if include_secrets:
1143 1143 data['auth_tokens'] = user.auth_tokens
1144 1144 return data
1145 1145
1146 1146 def __json__(self):
1147 1147 data = {
1148 1148 'full_name': self.full_name,
1149 1149 'full_name_or_username': self.full_name_or_username,
1150 1150 'short_contact': self.short_contact,
1151 1151 'full_contact': self.full_contact,
1152 1152 }
1153 1153 data.update(self.get_api_data())
1154 1154 return data
1155 1155
1156 1156
1157 1157 class UserApiKeys(Base, BaseModel):
1158 1158 __tablename__ = 'user_api_keys'
1159 1159 __table_args__ = (
1160 1160 Index('uak_api_key_idx', 'api_key'),
1161 1161 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1162 1162 base_table_args
1163 1163 )
1164 1164
1165 1165 # ApiKey role
1166 1166 ROLE_ALL = 'token_role_all'
1167 1167 ROLE_VCS = 'token_role_vcs'
1168 1168 ROLE_API = 'token_role_api'
1169 1169 ROLE_HTTP = 'token_role_http'
1170 1170 ROLE_FEED = 'token_role_feed'
1171 1171 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1172 1172 # The last one is excluded from the ROLES list below, as it is only
1173 1173 # used for one action and cannot be created by users
1174 1174 ROLE_PASSWORD_RESET = 'token_password_reset'
1175 1175
1176 1176 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1177 1177
1178 1178 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1179 1179 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1180 1180 api_key = Column("api_key", String(255), nullable=False, unique=True)
1181 1181 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1182 1182 expires = Column('expires', Float(53), nullable=False)
1183 1183 role = Column('role', String(255), nullable=True)
1184 1184 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1185 1185
1186 1186 # scope columns
1187 1187 repo_id = Column(
1188 1188 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1189 1189 nullable=True, unique=None, default=None)
1190 1190 repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')
1191 1191
1192 1192 repo_group_id = Column(
1193 1193 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1194 1194 nullable=True, unique=None, default=None)
1195 1195 repo_group = relationship('RepoGroup', lazy='joined')
1196 1196
1197 1197 user = relationship('User', lazy='joined', back_populates='user_auth_tokens')
1198 1198
1199 1199 def __repr__(self):
1200 1200 return f"<{self.cls_name}('{self.role}')>"
1201 1201
1202 1202 def __json__(self):
1203 1203 data = {
1204 1204 'auth_token': self.api_key,
1205 1205 'role': self.role,
1206 1206 'scope': self.scope_humanized,
1207 1207 'expired': self.expired
1208 1208 }
1209 1209 return data
1210 1210
1211 1211 def get_api_data(self, include_secrets=False):
1212 1212 data = self.__json__()
1213 1213 if include_secrets:
1214 1214 return data
1215 1215 else:
1216 1216 data['auth_token'] = self.token_obfuscated
1217 1217 return data
1218 1218
1219 1219 @hybrid_property
1220 1220 def description_safe(self):
1221 1221 from rhodecode.lib import helpers as h
1222 1222 return h.escape(self.description)
1223 1223
1224 1224 @property
1225 1225 def expired(self):
1226 1226 if self.expires == -1:
1227 1227 return False
1228 1228 return time.time() > self.expires
1229 1229
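A small sketch of the expiry convention used by `UserApiKeys.expires` (a unix timestamp, with `-1` treated as never expiring); it reuses the `AuthTokenModel.create()` call already visible above and assumes `lifetime=-1` maps to `expires == -1`, as in `get_or_create_artifact_token`:

from rhodecode.model.auth_token import AuthTokenModel

token = AuthTokenModel().create(
    user, 'ci token', lifetime=-1, role=UserApiKeys.ROLE_API)
# a never-expiring token keeps `expired` False; others flip once time.time() passes expires
assert token.expired is False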
1230 1230 @classmethod
1231 1231 def _get_role_name(cls, role):
1232 1232 return {
1233 1233 cls.ROLE_ALL: _('all'),
1234 1234 cls.ROLE_HTTP: _('http/web interface'),
1235 1235 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1236 1236 cls.ROLE_API: _('api calls'),
1237 1237 cls.ROLE_FEED: _('feed access'),
1238 1238 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1239 1239 }.get(role, role)
1240 1240
1241 1241 @classmethod
1242 1242 def _get_role_description(cls, role):
1243 1243 return {
1244 1244 cls.ROLE_ALL: _('Token for all actions.'),
1245 1245 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1246 1246 'login using `api_access_controllers_whitelist` functionality.'),
1247 1247 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1248 1248 'Requires auth_token authentication plugin to be active. <br/>'
1249 1249 'Such a token should then be used instead of a password to '
1250 1250 'interact with a repository, and additionally it can be '
1251 1251 'limited to a single repository using the repo scope.'),
1252 1252 cls.ROLE_API: _('Token limited to api calls.'),
1253 1253 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1254 1254 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1255 1255 }.get(role, role)
1256 1256
1257 1257 @property
1258 1258 def role_humanized(self):
1259 1259 return self._get_role_name(self.role)
1260 1260
1261 1261 def _get_scope(self):
1262 1262 if self.repo:
1263 1263 return 'Repository: {}'.format(self.repo.repo_name)
1264 1264 if self.repo_group:
1265 1265 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1266 1266 return 'Global'
1267 1267
1268 1268 @property
1269 1269 def scope_humanized(self):
1270 1270 return self._get_scope()
1271 1271
1272 1272 @property
1273 1273 def token_obfuscated(self):
1274 1274 if self.api_key:
1275 1275 return self.api_key[:4] + "****"
1276 1276
1277 1277
1278 1278 class UserEmailMap(Base, BaseModel):
1279 1279 __tablename__ = 'user_email_map'
1280 1280 __table_args__ = (
1281 1281 Index('uem_email_idx', 'email'),
1282 1282 Index('uem_user_id_idx', 'user_id'),
1283 1283 UniqueConstraint('email'),
1284 1284 base_table_args
1285 1285 )
1286 1286
1287 1287 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1288 1288 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1289 1289 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1290 1290 user = relationship('User', lazy='joined', back_populates='user_emails')
1291 1291
1292 1292 @validates('_email')
1293 1293 def validate_email(self, key, email):
1294 1294 # check that this email is not the main one
1295 1295 main_email = Session().query(User).filter(User.email == email).scalar()
1296 1296 if main_email is not None:
1297 1297 raise AttributeError('email %s is present in user table' % email)
1298 1298 return email
1299 1299
1300 1300 @hybrid_property
1301 1301 def email(self):
1302 1302 return self._email
1303 1303
1304 1304 @email.setter
1305 1305 def email(self, val):
1306 1306 self._email = val.lower() if val else None
1307 1307
1308 1308
1309 1309 class UserIpMap(Base, BaseModel):
1310 1310 __tablename__ = 'user_ip_map'
1311 1311 __table_args__ = (
1312 1312 UniqueConstraint('user_id', 'ip_addr'),
1313 1313 base_table_args
1314 1314 )
1315 1315
1316 1316 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1317 1317 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1318 1318 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1319 1319 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1320 1320 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1321 1321 user = relationship('User', lazy='joined', back_populates='user_ip_map')
1322 1322
1323 1323 @hybrid_property
1324 1324 def description_safe(self):
1325 1325 from rhodecode.lib import helpers as h
1326 1326 return h.escape(self.description)
1327 1327
1328 1328 @classmethod
1329 1329 def _get_ip_range(cls, ip_addr):
1330 1330 net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
1331 1331 return [str(net.network_address), str(net.broadcast_address)]
1332 1332
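A quick sketch of what `_get_ip_range` produces; `ip_addr` may hold a single address or a CIDR network:

# For a /24 network the stored range spans the network and broadcast addresses
UserIpMap._get_ip_range('192.168.1.0/24')   # -> ['192.168.1.0', '192.168.1.255']
# A single address collapses to itself
UserIpMap._get_ip_range('10.0.0.5')         # -> ['10.0.0.5', '10.0.0.5']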
1333 1333 def __json__(self):
1334 1334 return {
1335 1335 'ip_addr': self.ip_addr,
1336 1336 'ip_range': self._get_ip_range(self.ip_addr),
1337 1337 }
1338 1338
1339 1339 def __repr__(self):
1340 1340 return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
1341 1341
1342 1342
1343 1343 class UserSshKeys(Base, BaseModel):
1344 1344 __tablename__ = 'user_ssh_keys'
1345 1345 __table_args__ = (
1346 1346 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1347 1347
1348 1348 UniqueConstraint('ssh_key_fingerprint'),
1349 1349
1350 1350 base_table_args
1351 1351 )
1352 1352
1353 1353 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1354 1354 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1355 1355 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1356 1356
1357 1357 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1358 1358
1359 1359 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1360 1360 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1361 1361 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1362 1362
1363 1363 user = relationship('User', lazy='joined', back_populates='user_ssh_keys')
1364 1364
1365 1365 def __json__(self):
1366 1366 data = {
1367 1367 'ssh_fingerprint': self.ssh_key_fingerprint,
1368 1368 'description': self.description,
1369 1369 'created_on': self.created_on
1370 1370 }
1371 1371 return data
1372 1372
1373 1373 def get_api_data(self):
1374 1374 data = self.__json__()
1375 1375 return data
1376 1376
1377 1377
1378 1378 class UserLog(Base, BaseModel):
1379 1379 __tablename__ = 'user_logs'
1380 1380 __table_args__ = (
1381 1381 base_table_args,
1382 1382 )
1383 1383
1384 1384 VERSION_1 = 'v1'
1385 1385 VERSION_2 = 'v2'
1386 1386 VERSIONS = [VERSION_1, VERSION_2]
1387 1387
1388 1388 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1389 1389 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1390 1390 username = Column("username", String(255), nullable=True, unique=None, default=None)
1391 1391 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1392 1392 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1393 1393 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1394 1394 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1395 1395 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1396 1396
1397 1397 version = Column("version", String(255), nullable=True, default=VERSION_1)
1398 1398 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1399 1399 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1400 1400 user = relationship('User', cascade='', back_populates='user_log')
1401 1401 repository = relationship('Repository', cascade='', back_populates='logs')
1402 1402
1403 1403 def __repr__(self):
1404 1404 return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"
1405 1405
1406 1406 def __json__(self):
1407 1407 return {
1408 1408 'user_id': self.user_id,
1409 1409 'username': self.username,
1410 1410 'repository_id': self.repository_id,
1411 1411 'repository_name': self.repository_name,
1412 1412 'user_ip': self.user_ip,
1413 1413 'action_date': self.action_date,
1414 1414 'action': self.action,
1415 1415 }
1416 1416
1417 1417 @hybrid_property
1418 1418 def entry_id(self):
1419 1419 return self.user_log_id
1420 1420
1421 1421 @property
1422 1422 def action_as_day(self):
1423 1423 return datetime.date(*self.action_date.timetuple()[:3])
1424 1424
1425 1425
1426 1426 class UserGroup(Base, BaseModel):
1427 1427 __tablename__ = 'users_groups'
1428 1428 __table_args__ = (
1429 1429 base_table_args,
1430 1430 )
1431 1431
1432 1432 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1433 1433 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1434 1434 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1435 1435 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1436 1436 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1437 1437 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1438 1438 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1439 1439 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1440 1440
1441 1441 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
1442 1442 users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
1443 1443 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
1444 1444 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
1445 1445 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')
1446 1446
1447 1447 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')
1448 1448
1449 1449 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
1450 1450 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')
1451 1451
1452 1452 @classmethod
1453 1453 def _load_group_data(cls, column):
1454 1454 if not column:
1455 1455 return {}
1456 1456
1457 1457 try:
1458 1458 return json.loads(column) or {}
1459 1459 except TypeError:
1460 1460 return {}
1461 1461
1462 1462 @hybrid_property
1463 1463 def description_safe(self):
1464 1464 from rhodecode.lib import helpers as h
1465 1465 return h.escape(self.user_group_description)
1466 1466
1467 1467 @hybrid_property
1468 1468 def group_data(self):
1469 1469 return self._load_group_data(self._group_data)
1470 1470
1471 1471 @group_data.expression
1472 1472 def group_data(self, **kwargs):
1473 1473 return self._group_data
1474 1474
1475 1475 @group_data.setter
1476 1476 def group_data(self, val):
1477 1477 try:
1478 1478 self._group_data = json.dumps(val)
1479 1479 except Exception:
1480 1480 log.error(traceback.format_exc())
1481 1481
1482 1482 @classmethod
1483 1483 def _load_sync(cls, group_data):
1484 1484 if group_data:
1485 1485 return group_data.get('extern_type')
1486 1486
1487 1487 @property
1488 1488 def sync(self):
1489 1489 return self._load_sync(self.group_data)
1490 1490
1491 1491 def __repr__(self):
1492 1492 return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"
1493 1493
1494 1494 @classmethod
1495 1495 def get_by_group_name(cls, group_name, cache=False,
1496 1496 case_insensitive=False):
1497 1497 if case_insensitive:
1498 1498 q = cls.query().filter(func.lower(cls.users_group_name) ==
1499 1499 func.lower(group_name))
1500 1500
1501 1501 else:
1502 1502 q = cls.query().filter(cls.users_group_name == group_name)
1503 1503 if cache:
1504 1504 name_key = _hash_key(group_name)
1505 1505 q = q.options(
1506 1506 FromCache("sql_cache_short", f"get_group_{name_key}"))
1507 1507 return q.scalar()
1508 1508
1509 1509 @classmethod
1510 1510 def get(cls, user_group_id, cache=False):
1511 1511 if not user_group_id:
1512 1512 return
1513 1513
1514 1514 user_group = cls.query()
1515 1515 if cache:
1516 1516 user_group = user_group.options(
1517 1517 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1518 1518 return user_group.get(user_group_id)
1519 1519
1520 1520 def permissions(self, with_admins=True, with_owner=True,
1521 1521 expand_from_user_groups=False):
1522 1522 """
1523 1523 Permissions for user groups
1524 1524 """
1525 1525 _admin_perm = 'usergroup.admin'
1526 1526
1527 1527 owner_row = []
1528 1528 if with_owner:
1529 1529 usr = AttributeDict(self.user.get_dict())
1530 1530 usr.owner_row = True
1531 1531 usr.permission = _admin_perm
1532 1532 owner_row.append(usr)
1533 1533
1534 1534 super_admin_ids = []
1535 1535 super_admin_rows = []
1536 1536 if with_admins:
1537 1537 for usr in User.get_all_super_admins():
1538 1538 super_admin_ids.append(usr.user_id)
1539 1539 # if this admin is also owner, don't double the record
1540 1540 if usr.user_id == owner_row[0].user_id:
1541 1541 owner_row[0].admin_row = True
1542 1542 else:
1543 1543 usr = AttributeDict(usr.get_dict())
1544 1544 usr.admin_row = True
1545 1545 usr.permission = _admin_perm
1546 1546 super_admin_rows.append(usr)
1547 1547
1548 1548 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1549 1549 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1550 1550 joinedload(UserUserGroupToPerm.user),
1551 1551 joinedload(UserUserGroupToPerm.permission),)
1552 1552
1553 1553 # get owners, admins and their permissions. We do a trick of re-writing
1554 1554 # objects from sqlalchemy to named-tuples because the sqlalchemy session
1555 1555 # has a global reference, and changing one object propagates to all
1556 1556 # others. This means that if an admin is also an owner, setting admin_row
1557 1557 # would propagate the change to both objects
1558 1558 perm_rows = []
1559 1559 for _usr in q.all():
1560 1560 usr = AttributeDict(_usr.user.get_dict())
1561 1561 # if this user is also owner/admin, mark as duplicate record
1562 1562 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1563 1563 usr.duplicate_perm = True
1564 1564 usr.permission = _usr.permission.permission_name
1565 1565 perm_rows.append(usr)
1566 1566
1567 1567 # filter the perm rows by 'default' first and then sort them by
1568 1568 # admin,write,read,none permissions sorted again alphabetically in
1569 1569 # each group
1570 1570 perm_rows = sorted(perm_rows, key=display_user_sort)
1571 1571
1572 1572 user_groups_rows = []
1573 1573 if expand_from_user_groups:
1574 1574 for ug in self.permission_user_groups(with_members=True):
1575 1575 for user_data in ug.members:
1576 1576 user_groups_rows.append(user_data)
1577 1577
1578 1578 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1579 1579
1580 1580 def permission_user_groups(self, with_members=False):
1581 1581 q = UserGroupUserGroupToPerm.query()\
1582 1582 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1583 1583 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1584 1584 joinedload(UserGroupUserGroupToPerm.target_user_group),
1585 1585 joinedload(UserGroupUserGroupToPerm.permission),)
1586 1586
1587 1587 perm_rows = []
1588 1588 for _user_group in q.all():
1589 1589 entry = AttributeDict(_user_group.user_group.get_dict())
1590 1590 entry.permission = _user_group.permission.permission_name
1591 1591 if with_members:
1592 1592 entry.members = [x.user.get_dict()
1593 1593 for x in _user_group.user_group.members]
1594 1594 perm_rows.append(entry)
1595 1595
1596 1596 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1597 1597 return perm_rows
1598 1598
1599 1599 def _get_default_perms(self, user_group, suffix=''):
1600 1600 from rhodecode.model.permission import PermissionModel
1601 1601 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1602 1602
1603 1603 def get_default_perms(self, suffix=''):
1604 1604 return self._get_default_perms(self, suffix)
1605 1605
1606 1606 def get_api_data(self, with_group_members=True, include_secrets=False):
1607 1607 """
1608 1608 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1609 1609 basically forwarded.
1610 1610
1611 1611 """
1612 1612 user_group = self
1613 1613 data = {
1614 1614 'users_group_id': user_group.users_group_id,
1615 1615 'group_name': user_group.users_group_name,
1616 1616 'group_description': user_group.user_group_description,
1617 1617 'active': user_group.users_group_active,
1618 1618 'owner': user_group.user.username,
1619 1619 'sync': user_group.sync,
1620 1620 'owner_email': user_group.user.email,
1621 1621 }
1622 1622
1623 1623 if with_group_members:
1624 1624 users = []
1625 1625 for user in user_group.members:
1626 1626 user = user.user
1627 1627 users.append(user.get_api_data(include_secrets=include_secrets))
1628 1628 data['users'] = users
1629 1629
1630 1630 return data
1631 1631
1632 1632
1633 1633 class UserGroupMember(Base, BaseModel):
1634 1634 __tablename__ = 'users_groups_members'
1635 1635 __table_args__ = (
1636 1636 base_table_args,
1637 1637 )
1638 1638
1639 1639 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1640 1640 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1641 1641 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1642 1642
1643 1643 user = relationship('User', lazy='joined', back_populates='group_member')
1644 1644 users_group = relationship('UserGroup', back_populates='members')
1645 1645
1646 1646 def __init__(self, gr_id='', u_id=''):
1647 1647 self.users_group_id = gr_id
1648 1648 self.user_id = u_id
1649 1649
1650 1650
1651 1651 class RepositoryField(Base, BaseModel):
1652 1652 __tablename__ = 'repositories_fields'
1653 1653 __table_args__ = (
1654 1654 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1655 1655 base_table_args,
1656 1656 )
1657 1657
1658 1658 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1659 1659
1660 1660 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1661 1661 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1662 1662 field_key = Column("field_key", String(250))
1663 1663 field_label = Column("field_label", String(1024), nullable=False)
1664 1664 field_value = Column("field_value", String(10000), nullable=False)
1665 1665 field_desc = Column("field_desc", String(1024), nullable=False)
1666 1666 field_type = Column("field_type", String(255), nullable=False, unique=None)
1667 1667 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1668 1668
1669 1669 repository = relationship('Repository', back_populates='extra_fields')
1670 1670
1671 1671 @property
1672 1672 def field_key_prefixed(self):
1673 1673 return 'ex_%s' % self.field_key
1674 1674
1675 1675 @classmethod
1676 1676 def un_prefix_key(cls, key):
1677 1677 if key.startswith(cls.PREFIX):
1678 1678 return key[len(cls.PREFIX):]
1679 1679 return key
1680 1680
1681 1681 @classmethod
1682 1682 def get_by_key_name(cls, key, repo):
1683 1683 row = cls.query()\
1684 1684 .filter(cls.repository == repo)\
1685 1685 .filter(cls.field_key == key).scalar()
1686 1686 return row
1687 1687
1688 1688
1689 1689 class Repository(Base, BaseModel):
1690 1690 __tablename__ = 'repositories'
1691 1691 __table_args__ = (
1692 1692 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1693 1693 base_table_args,
1694 1694 )
1695 1695 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1696 1696 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1697 1697 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1698 1698
1699 1699 STATE_CREATED = 'repo_state_created'
1700 1700 STATE_PENDING = 'repo_state_pending'
1701 1701 STATE_ERROR = 'repo_state_error'
1702 1702
1703 1703 LOCK_AUTOMATIC = 'lock_auto'
1704 1704 LOCK_API = 'lock_api'
1705 1705 LOCK_WEB = 'lock_web'
1706 1706 LOCK_PULL = 'lock_pull'
1707 1707
1708 1708 NAME_SEP = URL_SEP
1709 1709
1710 1710 repo_id = Column(
1711 1711 "repo_id", Integer(), nullable=False, unique=True, default=None,
1712 1712 primary_key=True)
1713 1713 _repo_name = Column(
1714 1714 "repo_name", Text(), nullable=False, default=None)
1715 1715 repo_name_hash = Column(
1716 1716 "repo_name_hash", String(255), nullable=False, unique=True)
1717 1717 repo_state = Column("repo_state", String(255), nullable=True)
1718 1718
1719 1719 clone_uri = Column(
1720 1720 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1721 1721 default=None)
1722 1722 push_uri = Column(
1723 1723 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1724 1724 default=None)
1725 1725 repo_type = Column(
1726 1726 "repo_type", String(255), nullable=False, unique=False, default=None)
1727 1727 user_id = Column(
1728 1728 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1729 1729 unique=False, default=None)
1730 1730 private = Column(
1731 1731 "private", Boolean(), nullable=True, unique=None, default=None)
1732 1732 archived = Column(
1733 1733 "archived", Boolean(), nullable=True, unique=None, default=None)
1734 1734 enable_statistics = Column(
1735 1735 "statistics", Boolean(), nullable=True, unique=None, default=True)
1736 1736 enable_downloads = Column(
1737 1737 "downloads", Boolean(), nullable=True, unique=None, default=True)
1738 1738 description = Column(
1739 1739 "description", String(10000), nullable=True, unique=None, default=None)
1740 1740 created_on = Column(
1741 1741 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1742 1742 default=datetime.datetime.now)
1743 1743 updated_on = Column(
1744 1744 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1745 1745 default=datetime.datetime.now)
1746 1746 _landing_revision = Column(
1747 1747 "landing_revision", String(255), nullable=False, unique=False,
1748 1748 default=None)
1749 1749 enable_locking = Column(
1750 1750 "enable_locking", Boolean(), nullable=False, unique=None,
1751 1751 default=False)
1752 1752 _locked = Column(
1753 1753 "locked", String(255), nullable=True, unique=False, default=None)
1754 1754 _changeset_cache = Column(
1755 1755 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1756 1756
1757 1757 fork_id = Column(
1758 1758 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1759 1759 nullable=True, unique=False, default=None)
1760 1760 group_id = Column(
1761 1761 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1762 1762 unique=False, default=None)
1763 1763
1764 1764 user = relationship('User', lazy='joined', back_populates='repositories')
1765 1765 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1766 1766 group = relationship('RepoGroup', lazy='joined')
1767 1767 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
1768 1768 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
1769 1769 stats = relationship('Statistics', cascade='all', uselist=False)
1770 1770
1771 1771 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
1772 1772 extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')
1773 1773
1774 1774 logs = relationship('UserLog', back_populates='repository')
1775 1775
1776 1776 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')
1777 1777
1778 1778 pull_requests_source = relationship(
1779 1779 'PullRequest',
1780 1780 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1781 1781 cascade="all, delete-orphan",
1782 1782 overlaps="source_repo"
1783 1783 )
1784 1784 pull_requests_target = relationship(
1785 1785 'PullRequest',
1786 1786 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1787 1787 cascade="all, delete-orphan",
1788 1788 overlaps="target_repo"
1789 1789 )
1790 1790
1791 1791 ui = relationship('RepoRhodeCodeUi', cascade="all")
1792 1792 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1793 1793 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')
1794 1794
1795 1795 scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')
1796 1796
1797 1797 # no cascade, set NULL
1798 1798 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)
1799 1799
1800 1800 review_rules = relationship('RepoReviewRule')
1801 1801 user_branch_perms = relationship('UserToRepoBranchPermission')
1802 1802 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
1803 1803
1804 1804 def __repr__(self):
1805 1805 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1806 1806
1807 1807 @hybrid_property
1808 1808 def description_safe(self):
1809 1809 from rhodecode.lib import helpers as h
1810 1810 return h.escape(self.description)
1811 1811
1812 1812 @hybrid_property
1813 1813 def landing_rev(self):
1814 1814 # should always return [rev_type, rev], e.g. ['branch', 'master']
1815 1815 if self._landing_revision:
1816 1816 _rev_info = self._landing_revision.split(':')
1817 1817 if len(_rev_info) < 2:
1818 1818 _rev_info.insert(0, 'rev')
1819 1819 return [_rev_info[0], _rev_info[1]]
1820 1820 return [None, None]
1821 1821
1822 1822 @property
1823 1823 def landing_ref_type(self):
1824 1824 return self.landing_rev[0]
1825 1825
1826 1826 @property
1827 1827 def landing_ref_name(self):
1828 1828 return self.landing_rev[1]
1829 1829
1830 1830 @landing_rev.setter
1831 1831 def landing_rev(self, val):
1832 1832 if ':' not in val:
1833 1833 raise ValueError('value must be delimited with `:` and consist '
1834 1834 'of <rev_type>:<rev>, got %s instead' % val)
1835 1835 self._landing_revision = val
1836 1836
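A brief sketch of the landing-revision serialization handled by the hybrid property above; `repo` is an already-loaded Repository instance:

# Stored internally as '<rev_type>:<rev>'; assigning a value without ':' raises ValueError
repo.landing_rev = 'branch:master'
assert repo.landing_ref_type == 'branch'
assert repo.landing_ref_name == 'master'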
1837 1837 @hybrid_property
1838 1838 def locked(self):
1839 1839 if self._locked:
1840 1840 user_id, timelocked, reason = self._locked.split(':')
1841 1841 lock_values = int(user_id), timelocked, reason
1842 1842 else:
1843 1843 lock_values = [None, None, None]
1844 1844 return lock_values
1845 1845
1846 1846 @locked.setter
1847 1847 def locked(self, val):
1848 1848 if val and isinstance(val, (list, tuple)):
1849 1849 self._locked = ':'.join(map(str, val))
1850 1850 else:
1851 1851 self._locked = None
1852 1852
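A sketch of the lock round-trip implied by the `locked` hybrid property; `repo` and the user id are illustrative:

# Serialized as 'user_id:timestamp:reason' in the `locked` column
repo.locked = [2, time.time(), Repository.LOCK_API]
user_id, locked_since, reason = repo.locked
# Assigning a falsy value clears the lock again
repo.locked = None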
1853 1853 @classmethod
1854 1854 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1855 1855 from rhodecode.lib.vcs.backends.base import EmptyCommit
1856 1856 dummy = EmptyCommit().__json__()
1857 1857 if not changeset_cache_raw:
1858 1858 dummy['source_repo_id'] = repo_id
1859 1859 return json.loads(json.dumps(dummy))
1860 1860
1861 1861 try:
1862 1862 return json.loads(changeset_cache_raw)
1863 1863 except TypeError:
1864 1864 return dummy
1865 1865 except Exception:
1866 1866 log.error(traceback.format_exc())
1867 1867 return dummy
1868 1868
1869 1869 @hybrid_property
1870 1870 def changeset_cache(self):
1871 1871 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1872 1872
1873 1873 @changeset_cache.setter
1874 1874 def changeset_cache(self, val):
1875 1875 try:
1876 1876 self._changeset_cache = json.dumps(val)
1877 1877 except Exception:
1878 1878 log.error(traceback.format_exc())
1879 1879
1880 1880 @hybrid_property
1881 1881 def repo_name(self):
1882 1882 return self._repo_name
1883 1883
1884 1884 @repo_name.setter
1885 1885 def repo_name(self, value):
1886 1886 self._repo_name = value
1887 1887 self.repo_name_hash = sha1(safe_bytes(value))
1888 1888
1889 1889 @classmethod
1890 1890 def normalize_repo_name(cls, repo_name):
1891 1891 """
1892 1892 Normalizes an OS-specific repo_name to the format stored internally in the
1893 1893 database, using URL_SEP
1894 1894
1895 1895 :param cls:
1896 1896 :param repo_name:
1897 1897 """
1898 1898 return cls.NAME_SEP.join(repo_name.split(os.sep))
1899 1899
1900 1900 @classmethod
1901 1901 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1902 1902 session = Session()
1903 1903 q = session.query(cls).filter(cls.repo_name == repo_name)
1904 1904
1905 1905 if cache:
1906 1906 if identity_cache:
1907 1907 val = cls.identity_cache(session, 'repo_name', repo_name)
1908 1908 if val:
1909 1909 return val
1910 1910 else:
1911 1911 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1912 1912 q = q.options(
1913 1913 FromCache("sql_cache_short", cache_key))
1914 1914
1915 1915 return q.scalar()
1916 1916
1917 1917 @classmethod
1918 1918 def get_by_id_or_repo_name(cls, repoid):
1919 1919 if isinstance(repoid, int):
1920 1920 try:
1921 1921 repo = cls.get(repoid)
1922 1922 except ValueError:
1923 1923 repo = None
1924 1924 else:
1925 1925 repo = cls.get_by_repo_name(repoid)
1926 1926 return repo
1927 1927
1928 1928 @classmethod
1929 1929 def get_by_full_path(cls, repo_full_path):
1930 1930 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1931 1931 repo_name = cls.normalize_repo_name(repo_name)
1932 1932 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1933 1933
1934 1934 @classmethod
1935 1935 def get_repo_forks(cls, repo_id):
1936 1936 return cls.query().filter(Repository.fork_id == repo_id)
1937 1937
1938 1938 @classmethod
1939 1939 def base_path(cls):
1940 1940 """
1941 1941 Returns base path where all repos are stored
1942 1942
1943 1943 :param cls:
1944 1944 """
1945 from rhodecode.lib.utils import get_rhodecode_base_path
1946 return get_rhodecode_base_path()
1945 from rhodecode.lib.utils import get_rhodecode_repo_store_path
1946 return get_rhodecode_repo_store_path()
1947 1947
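This hunk is the heart of the change described in the commit message: the repository store root is no longer resolved through the old database-backed lookup but comes from the .ini file. A minimal sketch of the new lookup, assuming an ini key along the lines of `repo_store.path` (the exact key name is not shown in this diff) and an illustrative store location:

from rhodecode.lib.utils import get_rhodecode_repo_store_path

# Resolved from the .ini configuration instead of a per-call SQL query
storage_root = get_rhodecode_repo_store_path()
full_repo_path = os.path.join(storage_root, 'my-group', 'my-repo')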
1948 1948 @classmethod
1949 1949 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1950 1950 case_insensitive=True, archived=False):
1951 1951 q = Repository.query()
1952 1952
1953 1953 if not archived:
1954 1954 q = q.filter(Repository.archived.isnot(true()))
1955 1955
1956 1956 if not isinstance(user_id, Optional):
1957 1957 q = q.filter(Repository.user_id == user_id)
1958 1958
1959 1959 if not isinstance(group_id, Optional):
1960 1960 q = q.filter(Repository.group_id == group_id)
1961 1961
1962 1962 if case_insensitive:
1963 1963 q = q.order_by(func.lower(Repository.repo_name))
1964 1964 else:
1965 1965 q = q.order_by(Repository.repo_name)
1966 1966
1967 1967 return q.all()
1968 1968
1969 1969 @property
1970 1970 def repo_uid(self):
1971 1971 return '_{}'.format(self.repo_id)
1972 1972
1973 1973 @property
1974 1974 def forks(self):
1975 1975 """
1976 1976 Return forks of this repo
1977 1977 """
1978 1978 return Repository.get_repo_forks(self.repo_id)
1979 1979
1980 1980 @property
1981 1981 def parent(self):
1982 1982 """
1983 1983 Returns fork parent
1984 1984 """
1985 1985 return self.fork
1986 1986
1987 1987 @property
1988 1988 def just_name(self):
1989 1989 return self.repo_name.split(self.NAME_SEP)[-1]
1990 1990
1991 1991 @property
1992 1992 def groups_with_parents(self):
1993 1993 groups = []
1994 1994 if self.group is None:
1995 1995 return groups
1996 1996
1997 1997 cur_gr = self.group
1998 1998 groups.insert(0, cur_gr)
1999 1999 while 1:
2000 2000 gr = getattr(cur_gr, 'parent_group', None)
2001 2001 cur_gr = cur_gr.parent_group
2002 2002 if gr is None:
2003 2003 break
2004 2004 groups.insert(0, gr)
2005 2005
2006 2006 return groups
2007 2007
2008 2008 @property
2009 2009 def groups_and_repo(self):
2010 2010 return self.groups_with_parents, self
2011 2011
2012 @LazyProperty
2012 @property
2013 2013 def repo_path(self):
2014 2014 """
2015 2015 Returns the base full path for this repository, i.e. where it actually
2016 2016 exists on the filesystem
2017 2017 """
2018 q = Session().query(RhodeCodeUi).filter(
2019 RhodeCodeUi.ui_key == self.NAME_SEP)
2020 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2021 return q.one().ui_value
2018 return self.base_path()
2022 2019
2023 2020 @property
2024 2021 def repo_full_path(self):
2025 2022 p = [self.repo_path]
2026 2023 # we need to split the name by / since this is how we store the
2027 2024 # names in the database, but that eventually needs to be converted
2028 2025 # into a valid system path
2029 2026 p += self.repo_name.split(self.NAME_SEP)
2030 2027 return os.path.join(*map(safe_str, p))
2031 2028
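For orientation, a small sketch of what `repo_full_path` resolves to; the store root and repo name are made up:

# With the store root at '/var/opt/repo_store' and repo_name 'team/backend/api',
# repo_full_path resolves to '/var/opt/repo_store/team/backend/api'
on_disk_path = repo.repo_full_path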
2032 2029 @property
2033 2030 def cache_keys(self):
2034 2031 """
2035 2032 Returns associated cache keys for that repo
2036 2033 """
2037 2034 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2038 2035 return CacheKey.query()\
2039 2036 .filter(CacheKey.cache_key == repo_namespace_key)\
2040 2037 .order_by(CacheKey.cache_key)\
2041 2038 .all()
2042 2039
2043 2040 @property
2044 2041 def cached_diffs_relative_dir(self):
2045 2042 """
2046 2043 Return the path of cached diffs, relative to the repository store,
2047 2044 used for safe display to users, who shouldn't know the absolute store
2048 2045 path
2049 2046 """
2050 2047 return os.path.join(
2051 2048 os.path.dirname(self.repo_name),
2052 2049 self.cached_diffs_dir.split(os.path.sep)[-1])
2053 2050
2054 2051 @property
2055 2052 def cached_diffs_dir(self):
2056 2053 path = self.repo_full_path
2057 2054 return os.path.join(
2058 2055 os.path.dirname(path),
2059 2056 f'.__shadow_diff_cache_repo_{self.repo_id}')
2060 2057
2061 2058 def cached_diffs(self):
2062 2059 diff_cache_dir = self.cached_diffs_dir
2063 2060 if os.path.isdir(diff_cache_dir):
2064 2061 return os.listdir(diff_cache_dir)
2065 2062 return []
2066 2063
2067 2064 def shadow_repos(self):
2068 2065 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2069 2066 return [
2070 2067 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2071 2068 if x.startswith(shadow_repos_pattern)
2072 2069 ]
2073 2070
2074 2071 def get_new_name(self, repo_name):
2075 2072 """
2076 2073 returns new full repository name based on assigned group and the new name
2077 2074
2078 2075 :param repo_name:
2079 2076 """
2080 2077 path_prefix = self.group.full_path_splitted if self.group else []
2081 2078 return self.NAME_SEP.join(path_prefix + [repo_name])
2082 2079
2083 2080 @property
2084 2081 def _config(self):
2085 2082 """
2086 2083 Returns db based config object.
2087 2084 """
2088 2085 from rhodecode.lib.utils import make_db_config
2089 2086 return make_db_config(clear_session=False, repo=self)
2090 2087
2091 2088 def permissions(self, with_admins=True, with_owner=True,
2092 2089 expand_from_user_groups=False):
2093 2090 """
2094 2091 Permissions for repositories
2095 2092 """
2096 2093 _admin_perm = 'repository.admin'
2097 2094
2098 2095 owner_row = []
2099 2096 if with_owner:
2100 2097 usr = AttributeDict(self.user.get_dict())
2101 2098 usr.owner_row = True
2102 2099 usr.permission = _admin_perm
2103 2100 usr.permission_id = None
2104 2101 owner_row.append(usr)
2105 2102
2106 2103 super_admin_ids = []
2107 2104 super_admin_rows = []
2108 2105 if with_admins:
2109 2106 for usr in User.get_all_super_admins():
2110 2107 super_admin_ids.append(usr.user_id)
2111 2108 # if this admin is also owner, don't double the record
2112 2109 if usr.user_id == owner_row[0].user_id:
2113 2110 owner_row[0].admin_row = True
2114 2111 else:
2115 2112 usr = AttributeDict(usr.get_dict())
2116 2113 usr.admin_row = True
2117 2114 usr.permission = _admin_perm
2118 2115 usr.permission_id = None
2119 2116 super_admin_rows.append(usr)
2120 2117
2121 2118 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2122 2119 q = q.options(joinedload(UserRepoToPerm.repository),
2123 2120 joinedload(UserRepoToPerm.user),
2124 2121 joinedload(UserRepoToPerm.permission),)
2125 2122
2126 2123 # get owners, admins and their permissions. We do a trick of re-writing
2127 2124 # objects from sqlalchemy to named-tuples because the sqlalchemy session
2128 2125 # has a global reference, and changing one object propagates to all
2129 2126 # others. This means that if an admin is also an owner, setting admin_row
2130 2127 # would propagate the change to both objects
2131 2128 perm_rows = []
2132 2129 for _usr in q.all():
2133 2130 usr = AttributeDict(_usr.user.get_dict())
2134 2131 # if this user is also owner/admin, mark as duplicate record
2135 2132 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2136 2133 usr.duplicate_perm = True
2137 2134 # also check if this permission is maybe used by branch_permissions
2138 2135 if _usr.branch_perm_entry:
2139 2136 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2140 2137
2141 2138 usr.permission = _usr.permission.permission_name
2142 2139 usr.permission_id = _usr.repo_to_perm_id
2143 2140 perm_rows.append(usr)
2144 2141
2145 2142 # filter the perm rows by 'default' first and then sort them by
2146 2143 # admin,write,read,none permissions sorted again alphabetically in
2147 2144 # each group
2148 2145 perm_rows = sorted(perm_rows, key=display_user_sort)
2149 2146
2150 2147 user_groups_rows = []
2151 2148 if expand_from_user_groups:
2152 2149 for ug in self.permission_user_groups(with_members=True):
2153 2150 for user_data in ug.members:
2154 2151 user_groups_rows.append(user_data)
2155 2152
2156 2153 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2157 2154
2158 2155 def permission_user_groups(self, with_members=True):
2159 2156 q = UserGroupRepoToPerm.query()\
2160 2157 .filter(UserGroupRepoToPerm.repository == self)
2161 2158 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2162 2159 joinedload(UserGroupRepoToPerm.users_group),
2163 2160 joinedload(UserGroupRepoToPerm.permission),)
2164 2161
2165 2162 perm_rows = []
2166 2163 for _user_group in q.all():
2167 2164 entry = AttributeDict(_user_group.users_group.get_dict())
2168 2165 entry.permission = _user_group.permission.permission_name
2169 2166 if with_members:
2170 2167 entry.members = [x.user.get_dict()
2171 2168 for x in _user_group.users_group.members]
2172 2169 perm_rows.append(entry)
2173 2170
2174 2171 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2175 2172 return perm_rows
2176 2173
2177 2174 def get_api_data(self, include_secrets=False):
2178 2175 """
2179 2176 Common function for generating repo api data
2180 2177
2181 2178 :param include_secrets: See :meth:`User.get_api_data`.
2182 2179
2183 2180 """
2184 2181 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2185 2182 # move these methods to the model level.
2186 2183 from rhodecode.model.settings import SettingsModel
2187 2184 from rhodecode.model.repo import RepoModel
2188 2185
2189 2186 repo = self
2190 2187 _user_id, _time, _reason = self.locked
2191 2188
2192 2189 data = {
2193 2190 'repo_id': repo.repo_id,
2194 2191 'repo_name': repo.repo_name,
2195 2192 'repo_type': repo.repo_type,
2196 2193 'clone_uri': repo.clone_uri or '',
2197 2194 'push_uri': repo.push_uri or '',
2198 2195 'url': RepoModel().get_url(self),
2199 2196 'private': repo.private,
2200 2197 'created_on': repo.created_on,
2201 2198 'description': repo.description_safe,
2202 2199 'landing_rev': repo.landing_rev,
2203 2200 'owner': repo.user.username,
2204 2201 'fork_of': repo.fork.repo_name if repo.fork else None,
2205 2202 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2206 2203 'enable_statistics': repo.enable_statistics,
2207 2204 'enable_locking': repo.enable_locking,
2208 2205 'enable_downloads': repo.enable_downloads,
2209 2206 'last_changeset': repo.changeset_cache,
2210 2207 'locked_by': User.get(_user_id).get_api_data(
2211 2208 include_secrets=include_secrets) if _user_id else None,
2212 2209 'locked_date': time_to_datetime(_time) if _time else None,
2213 2210 'lock_reason': _reason if _reason else None,
2214 2211 }
2215 2212
2216 2213 # TODO: mikhail: should be per-repo settings here
2217 2214 rc_config = SettingsModel().get_all_settings()
2218 2215 repository_fields = str2bool(
2219 2216 rc_config.get('rhodecode_repository_fields'))
2220 2217 if repository_fields:
2221 2218 for f in self.extra_fields:
2222 2219 data[f.field_key_prefixed] = f.field_value
2223 2220
2224 2221 return data
2225 2222
2226 2223 @classmethod
2227 2224 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2228 2225 if not lock_time:
2229 2226 lock_time = time.time()
2230 2227 if not lock_reason:
2231 2228 lock_reason = cls.LOCK_AUTOMATIC
2232 2229 repo.locked = [user_id, lock_time, lock_reason]
2233 2230 Session().add(repo)
2234 2231 Session().commit()
2235 2232
2236 2233 @classmethod
2237 2234 def unlock(cls, repo):
2238 2235 repo.locked = None
2239 2236 Session().add(repo)
2240 2237 Session().commit()
2241 2238
2242 2239 @classmethod
2243 2240 def getlock(cls, repo):
2244 2241 return repo.locked
2245 2242
2246 2243 def get_locking_state(self, action, user_id, only_when_enabled=True):
2247 2244 """
2248 2245 Checks locking on this repository. If locking is enabled and a lock is
2249 2246 present, returns a tuple of make_lock, locked, locked_by.
2250 2247 make_lock can have 3 states: None (do nothing), True (make lock),
2251 2248 False (release lock). This value is later propagated to hooks, which
2252 2249 do the locking. Think of it as a signal passed to hooks telling them what to do.
2253 2250
2254 2251 """
2255 2252 # TODO: johbo: This is part of the business logic and should be moved
2256 2253 # into the RepositoryModel.
2257 2254
2258 2255 if action not in ('push', 'pull'):
2259 2256 raise ValueError("Invalid action value: %s" % repr(action))
2260 2257
2261 2258 # defines if locked error should be thrown to user
2262 2259 currently_locked = False
2263 2260 # defines if new lock should be made, tri-state
2264 2261 make_lock = None
2265 2262 repo = self
2266 2263 user = User.get(user_id)
2267 2264
2268 2265 lock_info = repo.locked
2269 2266
2270 2267 if repo and (repo.enable_locking or not only_when_enabled):
2271 2268 if action == 'push':
2272 2269                 # check if it's already locked; if it is, compare users
2273 2270 locked_by_user_id = lock_info[0]
2274 2271 if user.user_id == locked_by_user_id:
2275 2272 log.debug(
2276 2273 'Got `push` action from user %s, now unlocking', user)
2277 2274 # unlock if we have push from user who locked
2278 2275 make_lock = False
2279 2276 else:
2280 2277                     # we're not the user who locked it; reject with the
2281 2278                     # code defined in settings (default is 423 HTTP Locked)
2282 2279 log.debug('Repo %s is currently locked by %s', repo, user)
2283 2280 currently_locked = True
2284 2281 elif action == 'pull':
2285 2282 # [0] user [1] date
2286 2283 if lock_info[0] and lock_info[1]:
2287 2284 log.debug('Repo %s is currently locked by %s', repo, user)
2288 2285 currently_locked = True
2289 2286 else:
2290 2287 log.debug('Setting lock on repo %s by %s', repo, user)
2291 2288 make_lock = True
2292 2289
2293 2290 else:
2294 2291             log.debug('Repository %s does not have locking enabled', repo)
2295 2292
2296 2293 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2297 2294 make_lock, currently_locked, lock_info)
2298 2295
2299 2296 from rhodecode.lib.auth import HasRepoPermissionAny
2300 2297 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2301 2298 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2302 2299 # if we don't have at least write permission we cannot make a lock
2303 2300 log.debug('lock state reset back to FALSE due to lack '
2304 2301                       'of at least write permission')
2305 2302 make_lock = False
2306 2303
2307 2304 return make_lock, currently_locked, lock_info
2308 2305
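The push/pull branches above boil down to a small decision table. Below is a standalone sketch of the same tri-state logic with the permission and persistence concerns stripped out; the function and variable names are illustrative only, not part of the model:

def locking_decision(action, acting_user_id, lock_info):
    # lock_info mirrors Repository.locked: [user_id, lock_time, lock_reason]
    make_lock, currently_locked = None, False
    locked_by_user_id, locked_at = lock_info[0], lock_info[1]
    if action == 'push':
        if acting_user_id == locked_by_user_id:
            make_lock = False            # push from the lock owner releases the lock
        else:
            currently_locked = True      # somebody else holds the lock
    elif action == 'pull':
        if locked_by_user_id and locked_at:
            currently_locked = True      # already locked, just report it
        else:
            make_lock = True             # first pull takes the lock
    return make_lock, currently_locked

print(locking_decision('pull', 2, [None, None, None]))       # (True, False) - take the lock
print(locking_decision('push', 2, [2, 1700000000, 'auto']))  # (False, False) - release it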
2309 2306 @property
2310 2307 def last_commit_cache_update_diff(self):
2311 2308 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2312 2309
2313 2310 @classmethod
2314 2311 def _load_commit_change(cls, last_commit_cache):
2315 2312 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2316 2313 empty_date = datetime.datetime.fromtimestamp(0)
2317 2314 date_latest = last_commit_cache.get('date', empty_date)
2318 2315 try:
2319 2316 return parse_datetime(date_latest)
2320 2317 except Exception:
2321 2318 return empty_date
2322 2319
2323 2320 @property
2324 2321 def last_commit_change(self):
2325 2322 return self._load_commit_change(self.changeset_cache)
2326 2323
2327 2324 @property
2328 2325 def last_db_change(self):
2329 2326 return self.updated_on
2330 2327
2331 2328 @property
2332 2329 def clone_uri_hidden(self):
2333 2330 clone_uri = self.clone_uri
2334 2331 if clone_uri:
2335 2332 import urlobject
2336 2333 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2337 2334 if url_obj.password:
2338 2335 clone_uri = url_obj.with_password('*****')
2339 2336 return clone_uri
2340 2337
2341 2338 @property
2342 2339 def push_uri_hidden(self):
2343 2340 push_uri = self.push_uri
2344 2341 if push_uri:
2345 2342 import urlobject
2346 2343 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2347 2344 if url_obj.password:
2348 2345 push_uri = url_obj.with_password('*****')
2349 2346 return push_uri
2350 2347
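Both hidden-URI properties delegate the masking to the third-party urlobject package; a runnable sketch of the same idea, with invented credentials (requires urlobject to be installed):

import urlobject

uri = 'https://deploy:s3cr3t@code.example.com/backend/api.git'
url_obj = urlobject.URLObject(uri)
if url_obj.password:                       # only rewrite when credentials are embedded
    uri = str(url_obj.with_password('*****'))
print(uri)   # https://deploy:*****@code.example.com/backend/api.git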
2351 2348 def clone_url(self, **override):
2352 2349 from rhodecode.model.settings import SettingsModel
2353 2350
2354 2351 uri_tmpl = None
2355 2352 if 'with_id' in override:
2356 2353 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2357 2354 del override['with_id']
2358 2355
2359 2356 if 'uri_tmpl' in override:
2360 2357 uri_tmpl = override['uri_tmpl']
2361 2358 del override['uri_tmpl']
2362 2359
2363 2360 ssh = False
2364 2361 if 'ssh' in override:
2365 2362 ssh = True
2366 2363 del override['ssh']
2367 2364
2368 2365         # we didn't override our tmpl via **override
2369 2366 request = get_current_request()
2370 2367 if not uri_tmpl:
2371 2368 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2372 2369 rc_config = request.call_context.rc_config
2373 2370 else:
2374 2371 rc_config = SettingsModel().get_all_settings(cache=True)
2375 2372
2376 2373 if ssh:
2377 2374 uri_tmpl = rc_config.get(
2378 2375 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2379 2376
2380 2377 else:
2381 2378 uri_tmpl = rc_config.get(
2382 2379 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2383 2380
2384 2381 return get_clone_url(request=request,
2385 2382 uri_tmpl=uri_tmpl,
2386 2383 repo_name=self.repo_name,
2387 2384 repo_id=self.repo_id,
2388 2385 repo_type=self.repo_type,
2389 2386 **override)
2390 2387
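The heavy lifting is done by get_clone_url(), which expands a template string with request data. A simplified stand-in, under the assumption that the default template looks like '{scheme}://{user}@{netloc}/{repo}'; the real templates and placeholder set live in the instance settings:

def render_clone_url(uri_tmpl, **parts):
    # str.format stand-in for get_clone_url(); the real helper also handles
    # ssh templates, id-based URLs and proxy prefixes
    return uri_tmpl.format(**parts)

tmpl = '{scheme}://{user}@{netloc}/{repo}'   # assumed default-style template
print(render_clone_url(tmpl, scheme='https', user='dev',
                       netloc='code.example.com', repo='backend/api'))
# https://dev@code.example.com/backend/api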
2391 2388 def set_state(self, state):
2392 2389 self.repo_state = state
2393 2390 Session().add(self)
2394 2391 #==========================================================================
2395 2392 # SCM PROPERTIES
2396 2393 #==========================================================================
2397 2394
2398 2395 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2399 2396 return get_commit_safe(
2400 2397 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2401 2398 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2402 2399
2403 2400 def get_changeset(self, rev=None, pre_load=None):
2404 2401 warnings.warn("Use get_commit", DeprecationWarning)
2405 2402 commit_id = None
2406 2403 commit_idx = None
2407 2404 if isinstance(rev, str):
2408 2405 commit_id = rev
2409 2406 else:
2410 2407 commit_idx = rev
2411 2408 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2412 2409 pre_load=pre_load)
2413 2410
2414 2411 def get_landing_commit(self):
2415 2412 """
2416 2413 Returns landing commit, or if that doesn't exist returns the tip
2417 2414 """
2418 2415 _rev_type, _rev = self.landing_rev
2419 2416 commit = self.get_commit(_rev)
2420 2417 if isinstance(commit, EmptyCommit):
2421 2418 return self.get_commit()
2422 2419 return commit
2423 2420
2424 2421 def flush_commit_cache(self):
2425 2422 self.update_commit_cache(cs_cache={'raw_id':'0'})
2426 2423 self.update_commit_cache()
2427 2424
2428 2425 def update_commit_cache(self, cs_cache=None, config=None):
2429 2426 """
2430 2427 Update cache of last commit for repository
2431 2428 cache_keys should be::
2432 2429
2433 2430 source_repo_id
2434 2431 short_id
2435 2432 raw_id
2436 2433 revision
2437 2434 parents
2438 2435 message
2439 2436 date
2440 2437 author
2441 2438 updated_on
2442 2439
2443 2440 """
2444 2441 from rhodecode.lib.vcs.backends.base import BaseCommit
2445 2442 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2446 2443 empty_date = datetime.datetime.fromtimestamp(0)
2447 2444 repo_commit_count = 0
2448 2445
2449 2446 if cs_cache is None:
2450 2447 # use no-cache version here
2451 2448 try:
2452 2449 scm_repo = self.scm_instance(cache=False, config=config)
2453 2450 except VCSError:
2454 2451 scm_repo = None
2455 2452 empty = scm_repo is None or scm_repo.is_empty()
2456 2453
2457 2454 if not empty:
2458 2455 cs_cache = scm_repo.get_commit(
2459 2456 pre_load=["author", "date", "message", "parents", "branch"])
2460 2457 repo_commit_count = scm_repo.count()
2461 2458 else:
2462 2459 cs_cache = EmptyCommit()
2463 2460
2464 2461 if isinstance(cs_cache, BaseCommit):
2465 2462 cs_cache = cs_cache.__json__()
2466 2463
2467 2464 def is_outdated(new_cs_cache):
2468 2465 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2469 2466 new_cs_cache['revision'] != self.changeset_cache['revision']):
2470 2467 return True
2471 2468 return False
2472 2469
2473 2470         # check if we maybe already have the latest cached revision
2474 2471 if is_outdated(cs_cache) or not self.changeset_cache:
2475 2472 _current_datetime = datetime.datetime.utcnow()
2476 2473 last_change = cs_cache.get('date') or _current_datetime
2477 2474 # we check if last update is newer than the new value
2478 2475 # if yes, we use the current timestamp instead. Imagine you get
2479 2476             # old commit pushed 1y ago, we'd set last update to 1y ago.
2480 2477 last_change_timestamp = datetime_to_time(last_change)
2481 2478             current_timestamp = datetime_to_time(_current_datetime)
2482 2479 if last_change_timestamp > current_timestamp and not empty:
2483 2480 cs_cache['date'] = _current_datetime
2484 2481
2485 2482 # also store size of repo
2486 2483 cs_cache['repo_commit_count'] = repo_commit_count
2487 2484
2488 2485 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2489 2486 cs_cache['updated_on'] = time.time()
2490 2487 self.changeset_cache = cs_cache
2491 2488 self.updated_on = last_change
2492 2489 Session().add(self)
2493 2490 Session().commit()
2494 2491
2495 2492 else:
2496 2493 if empty:
2497 2494 cs_cache = EmptyCommit().__json__()
2498 2495 else:
2499 2496 cs_cache = self.changeset_cache
2500 2497
2501 2498 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2502 2499
2503 2500 cs_cache['updated_on'] = time.time()
2504 2501 self.changeset_cache = cs_cache
2505 2502 self.updated_on = _date_latest
2506 2503 Session().add(self)
2507 2504 Session().commit()
2508 2505
2509 2506 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2510 2507 self.repo_name, cs_cache, _date_latest)
2511 2508
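The is_outdated() closure above is the whole staleness test: the cache is rewritten only when the head commit id or the revision number moved. A standalone sketch of the same comparison with hand-written cache dicts:

def commit_cache_outdated(new_cache, stored_cache):
    # same comparison as is_outdated() inside update_commit_cache()
    return (new_cache['raw_id'] != stored_cache['raw_id']
            or new_cache['revision'] != stored_cache['revision'])

stored = {'raw_id': 'a' * 40, 'revision': 41}
fresh = {'raw_id': 'b' * 40, 'revision': 42}
print(commit_cache_outdated(fresh, stored))   # True -> cache gets refreshed and committed
print(commit_cache_outdated(stored, stored))  # False -> only updated_on is bumped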
2512 2509 @property
2513 2510 def tip(self):
2514 2511 return self.get_commit('tip')
2515 2512
2516 2513 @property
2517 2514 def author(self):
2518 2515 return self.tip.author
2519 2516
2520 2517 @property
2521 2518 def last_change(self):
2522 2519 return self.scm_instance().last_change
2523 2520
2524 2521 def get_comments(self, revisions=None):
2525 2522 """
2526 2523 Returns comments for this repository grouped by revisions
2527 2524
2528 2525 :param revisions: filter query by revisions only
2529 2526 """
2530 2527 cmts = ChangesetComment.query()\
2531 2528 .filter(ChangesetComment.repo == self)
2532 2529 if revisions:
2533 2530 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2534 2531 grouped = collections.defaultdict(list)
2535 2532 for cmt in cmts.all():
2536 2533 grouped[cmt.revision].append(cmt)
2537 2534 return grouped
2538 2535
2539 2536 def statuses(self, revisions=None):
2540 2537 """
2541 2538 Returns statuses for this repository
2542 2539
2543 2540 :param revisions: list of revisions to get statuses for
2544 2541 """
2545 2542 statuses = ChangesetStatus.query()\
2546 2543 .filter(ChangesetStatus.repo == self)\
2547 2544 .filter(ChangesetStatus.version == 0)
2548 2545
2549 2546 if revisions:
2550 2547 # Try doing the filtering in chunks to avoid hitting limits
2551 2548 size = 500
2552 2549 status_results = []
2553 2550 for chunk in range(0, len(revisions), size):
2554 2551 status_results += statuses.filter(
2555 2552 ChangesetStatus.revision.in_(
2556 2553 revisions[chunk: chunk+size])
2557 2554 ).all()
2558 2555 else:
2559 2556 status_results = statuses.all()
2560 2557
2561 2558 grouped = {}
2562 2559
2563 2560         # maybe we have an open new pull request without a status?
2564 2561 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2565 2562 status_lbl = ChangesetStatus.get_status_lbl(stat)
2566 2563 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2567 2564 for rev in pr.revisions:
2568 2565 pr_id = pr.pull_request_id
2569 2566 pr_repo = pr.target_repo.repo_name
2570 2567 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2571 2568
2572 2569 for stat in status_results:
2573 2570 pr_id = pr_repo = None
2574 2571 if stat.pull_request:
2575 2572 pr_id = stat.pull_request.pull_request_id
2576 2573 pr_repo = stat.pull_request.target_repo.repo_name
2577 2574 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2578 2575 pr_id, pr_repo]
2579 2576 return grouped
2580 2577
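The chunked .in_() filtering above keeps the generated SQL under typical bind-parameter limits; the slicing itself is plain Python. A runnable sketch of the same pattern:

def chunked(items, size=500):
    # yield consecutive slices, mirroring the loop used in statuses()
    for start in range(0, len(items), size):
        yield items[start:start + size]

revisions = ['rev-%d' % i for i in range(1200)]
print([len(chunk) for chunk in chunked(revisions)])   # [500, 500, 200]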
2581 2578 # ==========================================================================
2582 2579 # SCM CACHE INSTANCE
2583 2580 # ==========================================================================
2584 2581
2585 2582 def scm_instance(self, **kwargs):
2586 2583 import rhodecode
2587 2584
2588 2585         # Passing a config will not hit the cache; currently this is only
2589 2586         # used for repo2dbmapper
2590 2587 config = kwargs.pop('config', None)
2591 2588 cache = kwargs.pop('cache', None)
2592 2589 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2593 2590 if vcs_full_cache is not None:
2594 2591 # allows override global config
2595 2592 full_cache = vcs_full_cache
2596 2593 else:
2597 2594 full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
2598 2595         # if cache is NOT defined use the global default, else we have full
2599 2596         # control over cache behaviour
2600 2597 if cache is None and full_cache and not config:
2601 2598 log.debug('Initializing pure cached instance for %s', self.repo_path)
2602 2599 return self._get_instance_cached()
2603 2600
2604 2601 # cache here is sent to the "vcs server"
2605 2602 return self._get_instance(cache=bool(cache), config=config)
2606 2603
2607 2604 def _get_instance_cached(self):
2608 2605 from rhodecode.lib import rc_cache
2609 2606
2610 2607 cache_namespace_uid = f'repo_instance.{self.repo_id}'
2611 2608 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2612 2609
2613 2610         # we must use a thread-scoped cache here,
2614 2611         # because each gevent thread needs its own, not shared, connection and cache
2615 2612 # we also alter `args` so the cache key is individual for every green thread.
2616 2613 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2617 2614 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key, thread_scoped=True)
2618 2615
2619 2616         # our wrapped caching function that takes state_uid to save the previous state
2620 2617 def cache_generator(_state_uid):
2621 2618
2622 2619 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2623 2620 def get_instance_cached(_repo_id, _process_context_id):
2624 2621                 # we save the generation state in the cached func so we can detect a change and invalidate caches
2625 2622 return _state_uid, self._get_instance(repo_state_uid=_state_uid)
2626 2623
2627 2624 return get_instance_cached
2628 2625
2629 2626 with inv_context_manager as invalidation_context:
2630 2627 cache_state_uid = invalidation_context.state_uid
2631 2628 cache_func = cache_generator(cache_state_uid)
2632 2629
2633 2630 args = self.repo_id, inv_context_manager.proc_key
2634 2631
2635 2632 previous_state_uid, instance = cache_func(*args)
2636 2633
2637 2634 # now compare keys, the "cache" state vs expected state.
2638 2635 if previous_state_uid != cache_state_uid:
2639 2636 log.warning('Cached state uid %s is different than current state uid %s',
2640 2637 previous_state_uid, cache_state_uid)
2641 2638 _, instance = cache_func.refresh(*args)
2642 2639
2643 2640 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2644 2641 return instance
2645 2642
2646 2643 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2647 2644 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2648 2645 self.repo_type, self.repo_path, cache)
2649 2646 config = config or self._config
2650 2647 custom_wire = {
2651 2648 'cache': cache, # controls the vcs.remote cache
2652 2649 'repo_state_uid': repo_state_uid
2653 2650 }
2654 2651
2655 2652 repo = get_vcs_instance(
2656 2653 repo_path=safe_str(self.repo_full_path),
2657 2654 config=config,
2658 2655 with_wire=custom_wire,
2659 2656 create=False,
2660 2657 _vcs_alias=self.repo_type)
2661 2658 if repo is not None:
2662 2659 repo.count() # cache rebuild
2663 2660
2664 2661 return repo
2665 2662
2666 2663 def get_shadow_repository_path(self, workspace_id):
2667 2664 from rhodecode.lib.vcs.backends.base import BaseRepository
2668 2665 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2669 2666 self.repo_full_path, self.repo_id, workspace_id)
2670 2667 return shadow_repo_path
2671 2668
2672 2669 def __json__(self):
2673 2670 return {'landing_rev': self.landing_rev}
2674 2671
2675 2672 def get_dict(self):
2676 2673
2677 2674 # Since we transformed `repo_name` to a hybrid property, we need to
2678 2675 # keep compatibility with the code which uses `repo_name` field.
2679 2676
2680 2677 result = super(Repository, self).get_dict()
2681 2678 result['repo_name'] = result.pop('_repo_name', None)
2682 2679 result.pop('_changeset_cache', '')
2683 2680 return result
2684 2681
2685 2682
2686 2683 class RepoGroup(Base, BaseModel):
2687 2684 __tablename__ = 'groups'
2688 2685 __table_args__ = (
2689 2686 UniqueConstraint('group_name', 'group_parent_id'),
2690 2687 base_table_args,
2691 2688 )
2692 2689
2693 2690 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2694 2691
2695 2692 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2696 2693 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2697 2694 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2698 2695 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2699 2696 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2700 2697 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2701 2698 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2702 2699 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2703 2700 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2704 2701 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2705 2702 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2706 2703
2707 2704 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
2708 2705 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
2709 2706 parent_group = relationship('RepoGroup', remote_side=group_id)
2710 2707 user = relationship('User', back_populates='repository_groups')
2711 2708 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')
2712 2709
2713 2710 # no cascade, set NULL
2714 2711 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)
2715 2712
2716 2713 def __init__(self, group_name='', parent_group=None):
2717 2714 self.group_name = group_name
2718 2715 self.parent_group = parent_group
2719 2716
2720 2717 def __repr__(self):
2721 2718 return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"
2722 2719
2723 2720 @hybrid_property
2724 2721 def group_name(self):
2725 2722 return self._group_name
2726 2723
2727 2724 @group_name.setter
2728 2725 def group_name(self, value):
2729 2726 self._group_name = value
2730 2727 self.group_name_hash = self.hash_repo_group_name(value)
2731 2728
2732 2729 @classmethod
2733 2730 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2734 2731 from rhodecode.lib.vcs.backends.base import EmptyCommit
2735 2732 dummy = EmptyCommit().__json__()
2736 2733 if not changeset_cache_raw:
2737 2734 dummy['source_repo_id'] = repo_id
2738 2735 return json.loads(json.dumps(dummy))
2739 2736
2740 2737 try:
2741 2738 return json.loads(changeset_cache_raw)
2742 2739 except TypeError:
2743 2740 return dummy
2744 2741 except Exception:
2745 2742 log.error(traceback.format_exc())
2746 2743 return dummy
2747 2744
2748 2745 @hybrid_property
2749 2746 def changeset_cache(self):
2750 2747 return self._load_changeset_cache('', self._changeset_cache)
2751 2748
2752 2749 @changeset_cache.setter
2753 2750 def changeset_cache(self, val):
2754 2751 try:
2755 2752 self._changeset_cache = json.dumps(val)
2756 2753 except Exception:
2757 2754 log.error(traceback.format_exc())
2758 2755
2759 2756 @validates('group_parent_id')
2760 2757 def validate_group_parent_id(self, key, val):
2761 2758 """
2762 2759 Check cycle references for a parent group to self
2763 2760 """
2764 2761 if self.group_id and val:
2765 2762 assert val != self.group_id
2766 2763
2767 2764 return val
2768 2765
2769 2766 @hybrid_property
2770 2767 def description_safe(self):
2771 2768 from rhodecode.lib import helpers as h
2772 2769 return h.escape(self.group_description)
2773 2770
2774 2771 @classmethod
2775 2772 def hash_repo_group_name(cls, repo_group_name):
2776 2773 val = remove_formatting(repo_group_name)
2777 2774 val = safe_str(val).lower()
2778 2775 chars = []
2779 2776 for c in val:
2780 2777 if c not in string.ascii_letters:
2781 2778 c = str(ord(c))
2782 2779 chars.append(c)
2783 2780
2784 2781 return ''.join(chars)
2785 2782
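hash_repo_group_name() keeps ASCII letters and swaps everything else for its code point, so the result only depends on the lower-cased name. A standalone sketch of the same transformation, leaving out the remove_formatting() normalisation the real helper applies first:

import string

def hash_group_name(repo_group_name):
    chars = []
    for c in repo_group_name.lower():
        if c not in string.ascii_letters:
            c = str(ord(c))              # e.g. ' ' -> '32', '/' -> '47'
        chars.append(c)
    return ''.join(chars)

print(hash_group_name('Web Apps/Prod'))   # web32apps47prod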
2786 2783 @classmethod
2787 2784 def _generate_choice(cls, repo_group):
2788 2785 from webhelpers2.html import literal as _literal
2789 2786
2790 2787 def _name(k):
2791 2788 return _literal(cls.CHOICES_SEPARATOR.join(k))
2792 2789
2793 2790 return repo_group.group_id, _name(repo_group.full_path_splitted)
2794 2791
2795 2792 @classmethod
2796 2793 def groups_choices(cls, groups=None, show_empty_group=True):
2797 2794 if not groups:
2798 2795 groups = cls.query().all()
2799 2796
2800 2797 repo_groups = []
2801 2798 if show_empty_group:
2802 2799 repo_groups = [(-1, '-- %s --' % _('No parent'))]
2803 2800
2804 2801 repo_groups.extend([cls._generate_choice(x) for x in groups])
2805 2802
2806 2803 repo_groups = sorted(
2807 2804 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2808 2805 return repo_groups
2809 2806
2810 2807 @classmethod
2811 2808 def url_sep(cls):
2812 2809 return URL_SEP
2813 2810
2814 2811 @classmethod
2815 2812 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2816 2813 if case_insensitive:
2817 2814 gr = cls.query().filter(func.lower(cls.group_name)
2818 2815 == func.lower(group_name))
2819 2816 else:
2820 2817 gr = cls.query().filter(cls.group_name == group_name)
2821 2818 if cache:
2822 2819 name_key = _hash_key(group_name)
2823 2820 gr = gr.options(
2824 2821 FromCache("sql_cache_short", f"get_group_{name_key}"))
2825 2822 return gr.scalar()
2826 2823
2827 2824 @classmethod
2828 2825 def get_user_personal_repo_group(cls, user_id):
2829 2826 user = User.get(user_id)
2830 2827 if user.username == User.DEFAULT_USER:
2831 2828 return None
2832 2829
2833 2830 return cls.query()\
2834 2831 .filter(cls.personal == true()) \
2835 2832 .filter(cls.user == user) \
2836 2833 .order_by(cls.group_id.asc()) \
2837 2834 .first()
2838 2835
2839 2836 @classmethod
2840 2837 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2841 2838 case_insensitive=True):
2842 2839 q = RepoGroup.query()
2843 2840
2844 2841 if not isinstance(user_id, Optional):
2845 2842 q = q.filter(RepoGroup.user_id == user_id)
2846 2843
2847 2844 if not isinstance(group_id, Optional):
2848 2845 q = q.filter(RepoGroup.group_parent_id == group_id)
2849 2846
2850 2847 if case_insensitive:
2851 2848 q = q.order_by(func.lower(RepoGroup.group_name))
2852 2849 else:
2853 2850 q = q.order_by(RepoGroup.group_name)
2854 2851 return q.all()
2855 2852
2856 2853 @property
2857 2854 def parents(self, parents_recursion_limit=10):
2858 2855 groups = []
2859 2856 if self.parent_group is None:
2860 2857 return groups
2861 2858 cur_gr = self.parent_group
2862 2859 groups.insert(0, cur_gr)
2863 2860 cnt = 0
2864 2861 while 1:
2865 2862 cnt += 1
2866 2863 gr = getattr(cur_gr, 'parent_group', None)
2867 2864 cur_gr = cur_gr.parent_group
2868 2865 if gr is None:
2869 2866 break
2870 2867 if cnt == parents_recursion_limit:
2871 2868                 # this will prevent accidental infinite loops
2872 2869 log.error('more than %s parents found for group %s, stopping '
2873 2870 'recursive parent fetching', parents_recursion_limit, self)
2874 2871 break
2875 2872
2876 2873 groups.insert(0, gr)
2877 2874 return groups
2878 2875
2879 2876 @property
2880 2877 def last_commit_cache_update_diff(self):
2881 2878 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2882 2879
2883 2880 @classmethod
2884 2881 def _load_commit_change(cls, last_commit_cache):
2885 2882 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2886 2883 empty_date = datetime.datetime.fromtimestamp(0)
2887 2884 date_latest = last_commit_cache.get('date', empty_date)
2888 2885 try:
2889 2886 return parse_datetime(date_latest)
2890 2887 except Exception:
2891 2888 return empty_date
2892 2889
2893 2890 @property
2894 2891 def last_commit_change(self):
2895 2892 return self._load_commit_change(self.changeset_cache)
2896 2893
2897 2894 @property
2898 2895 def last_db_change(self):
2899 2896 return self.updated_on
2900 2897
2901 2898 @property
2902 2899 def children(self):
2903 2900 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2904 2901
2905 2902 @property
2906 2903 def name(self):
2907 2904 return self.group_name.split(RepoGroup.url_sep())[-1]
2908 2905
2909 2906 @property
2910 2907 def full_path(self):
2911 2908 return self.group_name
2912 2909
2913 2910 @property
2914 2911 def full_path_splitted(self):
2915 2912 return self.group_name.split(RepoGroup.url_sep())
2916 2913
2917 2914 @property
2918 2915 def repositories(self):
2919 2916 return Repository.query()\
2920 2917 .filter(Repository.group == self)\
2921 2918 .order_by(Repository.repo_name)
2922 2919
2923 2920 @property
2924 2921 def repositories_recursive_count(self):
2925 2922 cnt = self.repositories.count()
2926 2923
2927 2924 def children_count(group):
2928 2925 cnt = 0
2929 2926 for child in group.children:
2930 2927 cnt += child.repositories.count()
2931 2928 cnt += children_count(child)
2932 2929 return cnt
2933 2930
2934 2931 return cnt + children_count(self)
2935 2932
2936 2933 def _recursive_objects(self, include_repos=True, include_groups=True):
2937 2934 all_ = []
2938 2935
2939 2936 def _get_members(root_gr):
2940 2937 if include_repos:
2941 2938 for r in root_gr.repositories:
2942 2939 all_.append(r)
2943 2940 childs = root_gr.children.all()
2944 2941 if childs:
2945 2942 for gr in childs:
2946 2943 if include_groups:
2947 2944 all_.append(gr)
2948 2945 _get_members(gr)
2949 2946
2950 2947 root_group = []
2951 2948 if include_groups:
2952 2949 root_group = [self]
2953 2950
2954 2951 _get_members(self)
2955 2952 return root_group + all_
2956 2953
2957 2954 def recursive_groups_and_repos(self):
2958 2955 """
2959 2956         Recursively return all groups, with the repositories in those groups
2960 2957 """
2961 2958 return self._recursive_objects()
2962 2959
2963 2960 def recursive_groups(self):
2964 2961 """
2965 2962         Returns all child groups for this group, including children of children
2966 2963 """
2967 2964 return self._recursive_objects(include_repos=False)
2968 2965
2969 2966 def recursive_repos(self):
2970 2967 """
2971 2968         Returns all child repositories of this group
2972 2969 """
2973 2970 return self._recursive_objects(include_groups=False)
2974 2971
2975 2972 def get_new_name(self, group_name):
2976 2973 """
2977 2974 returns new full group name based on parent and new name
2978 2975
2979 2976 :param group_name:
2980 2977 """
2981 2978 path_prefix = (self.parent_group.full_path_splitted if
2982 2979 self.parent_group else [])
2983 2980 return RepoGroup.url_sep().join(path_prefix + [group_name])
2984 2981
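get_new_name() simply joins the parent's path parts with the new leaf name. A sketch of the same joining rule, with hard-coded example paths:

URL_SEP = '/'   # the separator RepoGroup.url_sep() returns

def build_new_name(parent_path_splitted, group_name):
    return URL_SEP.join(parent_path_splitted + [group_name])

print(build_new_name(['web', 'apps'], 'backend'))   # web/apps/backend
print(build_new_name([], 'tools'))                  # tools (top-level group)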
2985 2982 def update_commit_cache(self, config=None):
2986 2983 """
2987 2984 Update cache of last commit for newest repository inside this repository group.
2988 2985 cache_keys should be::
2989 2986
2990 2987 source_repo_id
2991 2988 short_id
2992 2989 raw_id
2993 2990 revision
2994 2991 parents
2995 2992 message
2996 2993 date
2997 2994 author
2998 2995
2999 2996 """
3000 2997 from rhodecode.lib.vcs.utils.helpers import parse_datetime
3001 2998 empty_date = datetime.datetime.fromtimestamp(0)
3002 2999
3003 3000 def repo_groups_and_repos(root_gr):
3004 3001 for _repo in root_gr.repositories:
3005 3002 yield _repo
3006 3003 for child_group in root_gr.children.all():
3007 3004 yield child_group
3008 3005
3009 3006 latest_repo_cs_cache = {}
3010 3007 for obj in repo_groups_and_repos(self):
3011 3008 repo_cs_cache = obj.changeset_cache
3012 3009 date_latest = latest_repo_cs_cache.get('date', empty_date)
3013 3010 date_current = repo_cs_cache.get('date', empty_date)
3014 3011 current_timestamp = datetime_to_time(parse_datetime(date_latest))
3015 3012 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
3016 3013 latest_repo_cs_cache = repo_cs_cache
3017 3014 if hasattr(obj, 'repo_id'):
3018 3015 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
3019 3016 else:
3020 3017 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
3021 3018
3022 3019 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
3023 3020
3024 3021 latest_repo_cs_cache['updated_on'] = time.time()
3025 3022 self.changeset_cache = latest_repo_cs_cache
3026 3023 self.updated_on = _date_latest
3027 3024 Session().add(self)
3028 3025 Session().commit()
3029 3026
3030 3027 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
3031 3028 self.group_name, latest_repo_cs_cache, _date_latest)
3032 3029
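The loop above keeps whichever repository (or child group) cache carries the newest commit date. The selection rule on its own, a sketch using hand-written cache dicts and ISO date strings for brevity instead of parse_datetime():

import datetime

EMPTY_DATE = datetime.datetime.fromtimestamp(0).isoformat()

def newest_cache(caches):
    # ISO-8601 strings of the same format compare correctly as plain strings
    return max(caches, key=lambda c: c.get('date') or EMPTY_DATE)

caches = [
    {'source_repo_id': 1, 'date': '2024-01-05T10:00:00'},
    {'source_repo_id': 2, 'date': '2024-03-12T09:30:00'},
]
print(newest_cache(caches)['source_repo_id'])   # 2 - this cache becomes the group's cache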
3033 3030 def permissions(self, with_admins=True, with_owner=True,
3034 3031 expand_from_user_groups=False):
3035 3032 """
3036 3033 Permissions for repository groups
3037 3034 """
3038 3035 _admin_perm = 'group.admin'
3039 3036
3040 3037 owner_row = []
3041 3038 if with_owner:
3042 3039 usr = AttributeDict(self.user.get_dict())
3043 3040 usr.owner_row = True
3044 3041 usr.permission = _admin_perm
3045 3042 owner_row.append(usr)
3046 3043
3047 3044 super_admin_ids = []
3048 3045 super_admin_rows = []
3049 3046 if with_admins:
3050 3047 for usr in User.get_all_super_admins():
3051 3048 super_admin_ids.append(usr.user_id)
3052 3049 # if this admin is also owner, don't double the record
3053 3050 if usr.user_id == owner_row[0].user_id:
3054 3051 owner_row[0].admin_row = True
3055 3052 else:
3056 3053 usr = AttributeDict(usr.get_dict())
3057 3054 usr.admin_row = True
3058 3055 usr.permission = _admin_perm
3059 3056 super_admin_rows.append(usr)
3060 3057
3061 3058 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3062 3059 q = q.options(joinedload(UserRepoGroupToPerm.group),
3063 3060 joinedload(UserRepoGroupToPerm.user),
3064 3061 joinedload(UserRepoGroupToPerm.permission),)
3065 3062
3066 3063         # get owners, admins and their permissions. We do a trick of re-writing
3067 3064         # objects from sqlalchemy to named-tuples because the sqlalchemy session
3068 3065         # has a global reference and changing one object propagates to all
3069 3066         # others. This means that if an admin is also an owner, an admin_row change
3070 3067         # would propagate to both objects.
3071 3068 perm_rows = []
3072 3069 for _usr in q.all():
3073 3070 usr = AttributeDict(_usr.user.get_dict())
3074 3071 # if this user is also owner/admin, mark as duplicate record
3075 3072 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3076 3073 usr.duplicate_perm = True
3077 3074 usr.permission = _usr.permission.permission_name
3078 3075 perm_rows.append(usr)
3079 3076
3080 3077         # filter the perm rows by 'default' first and then sort them by
3081 3078         # admin, write, read, none permissions, sorted again alphabetically
3082 3079         # within each group
3083 3080 perm_rows = sorted(perm_rows, key=display_user_sort)
3084 3081
3085 3082 user_groups_rows = []
3086 3083 if expand_from_user_groups:
3087 3084 for ug in self.permission_user_groups(with_members=True):
3088 3085 for user_data in ug.members:
3089 3086 user_groups_rows.append(user_data)
3090 3087
3091 3088 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3092 3089
3093 3090 def permission_user_groups(self, with_members=False):
3094 3091 q = UserGroupRepoGroupToPerm.query()\
3095 3092 .filter(UserGroupRepoGroupToPerm.group == self)
3096 3093 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3097 3094 joinedload(UserGroupRepoGroupToPerm.users_group),
3098 3095 joinedload(UserGroupRepoGroupToPerm.permission),)
3099 3096
3100 3097 perm_rows = []
3101 3098 for _user_group in q.all():
3102 3099 entry = AttributeDict(_user_group.users_group.get_dict())
3103 3100 entry.permission = _user_group.permission.permission_name
3104 3101 if with_members:
3105 3102 entry.members = [x.user.get_dict()
3106 3103 for x in _user_group.users_group.members]
3107 3104 perm_rows.append(entry)
3108 3105
3109 3106 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3110 3107 return perm_rows
3111 3108
3112 3109 def get_api_data(self):
3113 3110 """
3114 3111 Common function for generating api data
3115 3112
3116 3113 """
3117 3114 group = self
3118 3115 data = {
3119 3116 'group_id': group.group_id,
3120 3117 'group_name': group.group_name,
3121 3118 'group_description': group.description_safe,
3122 3119 'parent_group': group.parent_group.group_name if group.parent_group else None,
3123 3120 'repositories': [x.repo_name for x in group.repositories],
3124 3121 'owner': group.user.username,
3125 3122 }
3126 3123 return data
3127 3124
3128 3125 def get_dict(self):
3129 3126 # Since we transformed `group_name` to a hybrid property, we need to
3130 3127 # keep compatibility with the code which uses `group_name` field.
3131 3128 result = super(RepoGroup, self).get_dict()
3132 3129 result['group_name'] = result.pop('_group_name', None)
3133 3130 result.pop('_changeset_cache', '')
3134 3131 return result
3135 3132
3136 3133
3137 3134 class Permission(Base, BaseModel):
3138 3135 __tablename__ = 'permissions'
3139 3136 __table_args__ = (
3140 3137 Index('p_perm_name_idx', 'permission_name'),
3141 3138 base_table_args,
3142 3139 )
3143 3140
3144 3141 PERMS = [
3145 3142 ('hg.admin', _('RhodeCode Super Administrator')),
3146 3143
3147 3144 ('repository.none', _('Repository no access')),
3148 3145 ('repository.read', _('Repository read access')),
3149 3146 ('repository.write', _('Repository write access')),
3150 3147 ('repository.admin', _('Repository admin access')),
3151 3148
3152 3149 ('group.none', _('Repository group no access')),
3153 3150 ('group.read', _('Repository group read access')),
3154 3151 ('group.write', _('Repository group write access')),
3155 3152 ('group.admin', _('Repository group admin access')),
3156 3153
3157 3154 ('usergroup.none', _('User group no access')),
3158 3155 ('usergroup.read', _('User group read access')),
3159 3156 ('usergroup.write', _('User group write access')),
3160 3157 ('usergroup.admin', _('User group admin access')),
3161 3158
3162 3159 ('branch.none', _('Branch no permissions')),
3163 3160 ('branch.merge', _('Branch access by web merge')),
3164 3161 ('branch.push', _('Branch access by push')),
3165 3162 ('branch.push_force', _('Branch access by push with force')),
3166 3163
3167 3164 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3168 3165 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3169 3166
3170 3167 ('hg.usergroup.create.false', _('User Group creation disabled')),
3171 3168 ('hg.usergroup.create.true', _('User Group creation enabled')),
3172 3169
3173 3170 ('hg.create.none', _('Repository creation disabled')),
3174 3171 ('hg.create.repository', _('Repository creation enabled')),
3175 3172 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3176 3173 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3177 3174
3178 3175 ('hg.fork.none', _('Repository forking disabled')),
3179 3176 ('hg.fork.repository', _('Repository forking enabled')),
3180 3177
3181 3178 ('hg.register.none', _('Registration disabled')),
3182 3179 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3183 3180 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3184 3181
3185 3182 ('hg.password_reset.enabled', _('Password reset enabled')),
3186 3183 ('hg.password_reset.hidden', _('Password reset hidden')),
3187 3184 ('hg.password_reset.disabled', _('Password reset disabled')),
3188 3185
3189 3186 ('hg.extern_activate.manual', _('Manual activation of external account')),
3190 3187 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3191 3188
3192 3189 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3193 3190 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3194 3191 ]
3195 3192
3196 3193 # definition of system default permissions for DEFAULT user, created on
3197 3194 # system setup
3198 3195 DEFAULT_USER_PERMISSIONS = [
3199 3196 # object perms
3200 3197 'repository.read',
3201 3198 'group.read',
3202 3199 'usergroup.read',
3203 3200         # branch; for backward compat we need the same value as before, so forced push
3204 3201 'branch.push_force',
3205 3202 # global
3206 3203 'hg.create.repository',
3207 3204 'hg.repogroup.create.false',
3208 3205 'hg.usergroup.create.false',
3209 3206 'hg.create.write_on_repogroup.true',
3210 3207 'hg.fork.repository',
3211 3208 'hg.register.manual_activate',
3212 3209 'hg.password_reset.enabled',
3213 3210 'hg.extern_activate.auto',
3214 3211 'hg.inherit_default_perms.true',
3215 3212 ]
3216 3213
3218 3215     # Weight defines which permissions are more important.
3219 3216     # The higher the number, the more important.
3220 3217 PERM_WEIGHTS = {
3221 3218 'repository.none': 0,
3222 3219 'repository.read': 1,
3223 3220 'repository.write': 3,
3224 3221 'repository.admin': 4,
3225 3222
3226 3223 'group.none': 0,
3227 3224 'group.read': 1,
3228 3225 'group.write': 3,
3229 3226 'group.admin': 4,
3230 3227
3231 3228 'usergroup.none': 0,
3232 3229 'usergroup.read': 1,
3233 3230 'usergroup.write': 3,
3234 3231 'usergroup.admin': 4,
3235 3232
3236 3233 'branch.none': 0,
3237 3234 'branch.merge': 1,
3238 3235 'branch.push': 3,
3239 3236 'branch.push_force': 4,
3240 3237
3241 3238 'hg.repogroup.create.false': 0,
3242 3239 'hg.repogroup.create.true': 1,
3243 3240
3244 3241 'hg.usergroup.create.false': 0,
3245 3242 'hg.usergroup.create.true': 1,
3246 3243
3247 3244 'hg.fork.none': 0,
3248 3245 'hg.fork.repository': 1,
3249 3246 'hg.create.none': 0,
3250 3247 'hg.create.repository': 1
3251 3248 }
3252 3249
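With these weights, resolving several overlapping grants to the effective one is just a max() over the permission names. A sketch using a trimmed copy of the table above:

PERM_WEIGHTS = {
    'repository.none': 0,
    'repository.read': 1,
    'repository.write': 3,
    'repository.admin': 4,
}

granted = ['repository.read', 'repository.write']   # e.g. a direct grant plus a user-group grant
print(max(granted, key=PERM_WEIGHTS.get))            # repository.write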
3253 3250 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3254 3251 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3255 3252 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3256 3253
3257 3254 def __repr__(self):
3258 3255 return "<%s('%s:%s')>" % (
3259 3256 self.cls_name, self.permission_id, self.permission_name
3260 3257 )
3261 3258
3262 3259 @classmethod
3263 3260 def get_by_key(cls, key):
3264 3261 return cls.query().filter(cls.permission_name == key).scalar()
3265 3262
3266 3263 @classmethod
3267 3264 def get_default_repo_perms(cls, user_id, repo_id=None):
3268 3265 q = Session().query(UserRepoToPerm, Repository, Permission)\
3269 3266 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3270 3267 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3271 3268 .filter(UserRepoToPerm.user_id == user_id)
3272 3269 if repo_id:
3273 3270 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3274 3271 return q.all()
3275 3272
3276 3273 @classmethod
3277 3274 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3278 3275 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3279 3276 .join(
3280 3277 Permission,
3281 3278 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3282 3279 .join(
3283 3280 UserRepoToPerm,
3284 3281 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3285 3282 .filter(UserRepoToPerm.user_id == user_id)
3286 3283
3287 3284 if repo_id:
3288 3285 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3289 3286 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3290 3287
3291 3288 @classmethod
3292 3289 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3293 3290 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3294 3291 .join(
3295 3292 Permission,
3296 3293 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3297 3294 .join(
3298 3295 Repository,
3299 3296 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3300 3297 .join(
3301 3298 UserGroup,
3302 3299 UserGroupRepoToPerm.users_group_id ==
3303 3300 UserGroup.users_group_id)\
3304 3301 .join(
3305 3302 UserGroupMember,
3306 3303 UserGroupRepoToPerm.users_group_id ==
3307 3304 UserGroupMember.users_group_id)\
3308 3305 .filter(
3309 3306 UserGroupMember.user_id == user_id,
3310 3307 UserGroup.users_group_active == true())
3311 3308 if repo_id:
3312 3309 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3313 3310 return q.all()
3314 3311
3315 3312 @classmethod
3316 3313 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3317 3314 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3318 3315 .join(
3319 3316 Permission,
3320 3317 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3321 3318 .join(
3322 3319 UserGroupRepoToPerm,
3323 3320 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3324 3321 .join(
3325 3322 UserGroup,
3326 3323 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3327 3324 .join(
3328 3325 UserGroupMember,
3329 3326 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3330 3327 .filter(
3331 3328 UserGroupMember.user_id == user_id,
3332 3329 UserGroup.users_group_active == true())
3333 3330
3334 3331 if repo_id:
3335 3332 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3336 3333 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3337 3334
3338 3335 @classmethod
3339 3336 def get_default_group_perms(cls, user_id, repo_group_id=None):
3340 3337 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3341 3338 .join(
3342 3339 Permission,
3343 3340 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3344 3341 .join(
3345 3342 RepoGroup,
3346 3343 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3347 3344 .filter(UserRepoGroupToPerm.user_id == user_id)
3348 3345 if repo_group_id:
3349 3346 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3350 3347 return q.all()
3351 3348
3352 3349 @classmethod
3353 3350 def get_default_group_perms_from_user_group(
3354 3351 cls, user_id, repo_group_id=None):
3355 3352 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3356 3353 .join(
3357 3354 Permission,
3358 3355 UserGroupRepoGroupToPerm.permission_id ==
3359 3356 Permission.permission_id)\
3360 3357 .join(
3361 3358 RepoGroup,
3362 3359 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3363 3360 .join(
3364 3361 UserGroup,
3365 3362 UserGroupRepoGroupToPerm.users_group_id ==
3366 3363 UserGroup.users_group_id)\
3367 3364 .join(
3368 3365 UserGroupMember,
3369 3366 UserGroupRepoGroupToPerm.users_group_id ==
3370 3367 UserGroupMember.users_group_id)\
3371 3368 .filter(
3372 3369 UserGroupMember.user_id == user_id,
3373 3370 UserGroup.users_group_active == true())
3374 3371 if repo_group_id:
3375 3372 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3376 3373 return q.all()
3377 3374
3378 3375 @classmethod
3379 3376 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3380 3377 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3381 3378 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3382 3379 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3383 3380 .filter(UserUserGroupToPerm.user_id == user_id)
3384 3381 if user_group_id:
3385 3382 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3386 3383 return q.all()
3387 3384
3388 3385 @classmethod
3389 3386 def get_default_user_group_perms_from_user_group(
3390 3387 cls, user_id, user_group_id=None):
3391 3388 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3392 3389 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3393 3390 .join(
3394 3391 Permission,
3395 3392 UserGroupUserGroupToPerm.permission_id ==
3396 3393 Permission.permission_id)\
3397 3394 .join(
3398 3395 TargetUserGroup,
3399 3396 UserGroupUserGroupToPerm.target_user_group_id ==
3400 3397 TargetUserGroup.users_group_id)\
3401 3398 .join(
3402 3399 UserGroup,
3403 3400 UserGroupUserGroupToPerm.user_group_id ==
3404 3401 UserGroup.users_group_id)\
3405 3402 .join(
3406 3403 UserGroupMember,
3407 3404 UserGroupUserGroupToPerm.user_group_id ==
3408 3405 UserGroupMember.users_group_id)\
3409 3406 .filter(
3410 3407 UserGroupMember.user_id == user_id,
3411 3408 UserGroup.users_group_active == true())
3412 3409 if user_group_id:
3413 3410 q = q.filter(
3414 3411 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3415 3412
3416 3413 return q.all()
3417 3414
3418 3415
3419 3416 class UserRepoToPerm(Base, BaseModel):
3420 3417 __tablename__ = 'repo_to_perm'
3421 3418 __table_args__ = (
3422 3419 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3423 3420 base_table_args
3424 3421 )
3425 3422
3426 3423 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3427 3424 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3428 3425 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3429 3426 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3430 3427
3431 3428 user = relationship('User', back_populates="repo_to_perm")
3432 3429 repository = relationship('Repository', back_populates="repo_to_perm")
3433 3430 permission = relationship('Permission')
3434 3431
3435 3432 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')
3436 3433
3437 3434 @classmethod
3438 3435 def create(cls, user, repository, permission):
3439 3436 n = cls()
3440 3437 n.user = user
3441 3438 n.repository = repository
3442 3439 n.permission = permission
3443 3440 Session().add(n)
3444 3441 return n
3445 3442
3446 3443 def __repr__(self):
3447 3444 return f'<{self.user} => {self.repository} >'
3448 3445
3449 3446
3450 3447 class UserUserGroupToPerm(Base, BaseModel):
3451 3448 __tablename__ = 'user_user_group_to_perm'
3452 3449 __table_args__ = (
3453 3450 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3454 3451 base_table_args
3455 3452 )
3456 3453
3457 3454 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3458 3455 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3459 3456 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3460 3457 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3461 3458
3462 3459 user = relationship('User', back_populates='user_group_to_perm')
3463 3460 user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
3464 3461 permission = relationship('Permission')
3465 3462
3466 3463 @classmethod
3467 3464 def create(cls, user, user_group, permission):
3468 3465 n = cls()
3469 3466 n.user = user
3470 3467 n.user_group = user_group
3471 3468 n.permission = permission
3472 3469 Session().add(n)
3473 3470 return n
3474 3471
3475 3472 def __repr__(self):
3476 3473 return f'<{self.user} => {self.user_group} >'
3477 3474
3478 3475
3479 3476 class UserToPerm(Base, BaseModel):
3480 3477 __tablename__ = 'user_to_perm'
3481 3478 __table_args__ = (
3482 3479 UniqueConstraint('user_id', 'permission_id'),
3483 3480 base_table_args
3484 3481 )
3485 3482
3486 3483 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3487 3484 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3488 3485 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3489 3486
3490 3487 user = relationship('User', back_populates='user_perms')
3491 3488 permission = relationship('Permission', lazy='joined')
3492 3489
3493 3490 def __repr__(self):
3494 3491 return f'<{self.user} => {self.permission} >'
3495 3492
3496 3493
3497 3494 class UserGroupRepoToPerm(Base, BaseModel):
3498 3495 __tablename__ = 'users_group_repo_to_perm'
3499 3496 __table_args__ = (
3500 3497 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3501 3498 base_table_args
3502 3499 )
3503 3500
3504 3501 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3505 3502 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3506 3503 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3507 3504 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3508 3505
3509 3506 users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
3510 3507 permission = relationship('Permission')
3511 3508 repository = relationship('Repository', back_populates='users_group_to_perm')
3512 3509 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')
3513 3510
3514 3511 @classmethod
3515 3512 def create(cls, users_group, repository, permission):
3516 3513 n = cls()
3517 3514 n.users_group = users_group
3518 3515 n.repository = repository
3519 3516 n.permission = permission
3520 3517 Session().add(n)
3521 3518 return n
3522 3519
3523 3520 def __repr__(self):
3524 3521 return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'
3525 3522
3526 3523
3527 3524 class UserGroupUserGroupToPerm(Base, BaseModel):
3528 3525 __tablename__ = 'user_group_user_group_to_perm'
3529 3526 __table_args__ = (
3530 3527 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3531 3528 CheckConstraint('target_user_group_id != user_group_id'),
3532 3529 base_table_args
3533 3530 )
3534 3531
3535 3532 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3536 3533 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3537 3534 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3538 3535 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3539 3536
3540 3537 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
3541 3538 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3542 3539 permission = relationship('Permission')
3543 3540
3544 3541 @classmethod
3545 3542 def create(cls, target_user_group, user_group, permission):
3546 3543 n = cls()
3547 3544 n.target_user_group = target_user_group
3548 3545 n.user_group = user_group
3549 3546 n.permission = permission
3550 3547 Session().add(n)
3551 3548 return n
3552 3549
3553 3550 def __repr__(self):
3554 3551 return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'
3555 3552
3556 3553
3557 3554 class UserGroupToPerm(Base, BaseModel):
3558 3555 __tablename__ = 'users_group_to_perm'
3559 3556 __table_args__ = (
3560 3557 UniqueConstraint('users_group_id', 'permission_id',),
3561 3558 base_table_args
3562 3559 )
3563 3560
3564 3561 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3565 3562 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3566 3563 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3567 3564
3568 3565 users_group = relationship('UserGroup', back_populates='users_group_to_perm')
3569 3566 permission = relationship('Permission')
3570 3567
3571 3568
3572 3569 class UserRepoGroupToPerm(Base, BaseModel):
3573 3570 __tablename__ = 'user_repo_group_to_perm'
3574 3571 __table_args__ = (
3575 3572 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3576 3573 base_table_args
3577 3574 )
3578 3575
3579 3576 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3580 3577 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3581 3578 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3582 3579 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3583 3580
3584 3581 user = relationship('User', back_populates='repo_group_to_perm')
3585 3582 group = relationship('RepoGroup', back_populates='repo_group_to_perm')
3586 3583 permission = relationship('Permission')
3587 3584
3588 3585 @classmethod
3589 3586 def create(cls, user, repository_group, permission):
3590 3587 n = cls()
3591 3588 n.user = user
3592 3589 n.group = repository_group
3593 3590 n.permission = permission
3594 3591 Session().add(n)
3595 3592 return n
3596 3593
3597 3594
3598 3595 class UserGroupRepoGroupToPerm(Base, BaseModel):
3599 3596 __tablename__ = 'users_group_repo_group_to_perm'
3600 3597 __table_args__ = (
3601 3598 UniqueConstraint('users_group_id', 'group_id'),
3602 3599 base_table_args
3603 3600 )
3604 3601
3605 3602 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3606 3603 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3607 3604 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3608 3605 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3609 3606
3610 3607 users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
3611 3608 permission = relationship('Permission')
3612 3609 group = relationship('RepoGroup', back_populates='users_group_to_perm')
3613 3610
3614 3611 @classmethod
3615 3612 def create(cls, user_group, repository_group, permission):
3616 3613 n = cls()
3617 3614 n.users_group = user_group
3618 3615 n.group = repository_group
3619 3616 n.permission = permission
3620 3617 Session().add(n)
3621 3618 return n
3622 3619
3623 3620 def __repr__(self):
3624 3621 return '<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3625 3622
3626 3623
3627 3624 class Statistics(Base, BaseModel):
3628 3625 __tablename__ = 'statistics'
3629 3626 __table_args__ = (
3630 3627 base_table_args
3631 3628 )
3632 3629
3633 3630 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3634 3631 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3635 3632 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3636 3633 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False) #JSON data
3637 3634 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False) #JSON data
3638 3635 languages = Column("languages", LargeBinary(1000000), nullable=False) #JSON data
3639 3636
3640 3637 repository = relationship('Repository', single_parent=True, viewonly=True)
3641 3638
3642 3639
3643 3640 class UserFollowing(Base, BaseModel):
3644 3641 __tablename__ = 'user_followings'
3645 3642 __table_args__ = (
3646 3643 UniqueConstraint('user_id', 'follows_repository_id'),
3647 3644 UniqueConstraint('user_id', 'follows_user_id'),
3648 3645 base_table_args
3649 3646 )
3650 3647
3651 3648 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3652 3649 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3653 3650 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3654 3651 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3655 3652 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3656 3653
3657 3654 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')
3658 3655
3659 3656 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3660 3657 follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')
3661 3658
3662 3659 @classmethod
3663 3660 def get_repo_followers(cls, repo_id):
3664 3661 return cls.query().filter(cls.follows_repo_id == repo_id)
3665 3662
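
# Illustrative sketch (not part of the original module) of get_repo_followers()
# above: it returns a query of UserFollowing rows, so the follower User objects
# are reached via the `user` relationship. `repo_id` is a hypothetical repository id.
def _example_list_repo_followers(repo_id):
    return [f.user for f in UserFollowing.get_repo_followers(repo_id).all()]
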
3666 3663
3667 3664 class CacheKey(Base, BaseModel):
3668 3665 __tablename__ = 'cache_invalidation'
3669 3666 __table_args__ = (
3670 3667 UniqueConstraint('cache_key'),
3671 3668 Index('key_idx', 'cache_key'),
3672 3669 Index('cache_args_idx', 'cache_args'),
3673 3670 base_table_args,
3674 3671 )
3675 3672
3676 3673 CACHE_TYPE_FEED = 'FEED'
3677 3674
3678 3675 # namespaces used to register process/thread aware caches
3679 3676 REPO_INVALIDATION_NAMESPACE = 'repo_cache.v1:{repo_id}'
3680 3677
3681 3678 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3682 3679 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3683 3680 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3684 3681 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3685 3682 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3686 3683
3687 3684 def __init__(self, cache_key, cache_args='', cache_state_uid=None, cache_active=False):
3688 3685 self.cache_key = cache_key
3689 3686 self.cache_args = cache_args
3690 3687 self.cache_active = cache_active
3691 3688         # the first key should be the same for all entries, since all workers should share it
3692 3689 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3693 3690
3694 3691 def __repr__(self):
3695 3692 return "<%s('%s:%s[%s]')>" % (
3696 3693 self.cls_name,
3697 3694 self.cache_id, self.cache_key, self.cache_active)
3698 3695
3699 3696 def _cache_key_partition(self):
3700 3697 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3701 3698 return prefix, repo_name, suffix
3702 3699
3703 3700 def get_prefix(self):
3704 3701 """
3705 3702         Try to extract the prefix from an existing cache key. The key could
3706 3703         consist of a prefix, repo_name, and suffix
3707 3704 """
3708 3705 # this returns prefix, repo_name, suffix
3709 3706 return self._cache_key_partition()[0]
3710 3707
3711 3708 def get_suffix(self):
3712 3709 """
3713 3710         Get the suffix that might have been used in _get_cache_key to
3714 3711         generate self.cache_key. Only used for informational purposes
3715 3712         in repo_edit.mako.
3716 3713 """
3717 3714 # prefix, repo_name, suffix
3718 3715 return self._cache_key_partition()[2]
3719 3716
3720 3717 @classmethod
3721 3718 def generate_new_state_uid(cls, based_on=None):
3722 3719 if based_on:
3723 3720 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3724 3721 else:
3725 3722 return str(uuid.uuid4())
3726 3723
3727 3724 @classmethod
3728 3725 def delete_all_cache(cls):
3729 3726 """
3730 3727         Delete all cache keys from the database.
3731 3728         Should only be run when all instances are down and all entries
3732 3729         are thus stale.
3733 3730 """
3734 3731 cls.query().delete()
3735 3732 Session().commit()
3736 3733
3737 3734 @classmethod
3738 3735 def set_invalidate(cls, cache_uid, delete=False):
3739 3736 """
3740 3737 Mark all caches of a repo as invalid in the database.
3741 3738 """
3742 3739 try:
3743 3740 qry = Session().query(cls).filter(cls.cache_key == cache_uid)
3744 3741 if delete:
3745 3742 qry.delete()
3746 3743 log.debug('cache objects deleted for cache args %s',
3747 3744 safe_str(cache_uid))
3748 3745 else:
3749 3746 new_uid = cls.generate_new_state_uid()
3750 3747 qry.update({"cache_state_uid": new_uid,
3751 3748 "cache_args": f"repo_state:{time.time()}"})
3752 3749 log.debug('cache object %s set new UID %s',
3753 3750 safe_str(cache_uid), new_uid)
3754 3751
3755 3752 Session().commit()
3756 3753 except Exception:
3757 3754 log.exception(
3758 3755 'Cache key invalidation failed for cache args %s',
3759 3756 safe_str(cache_uid))
3760 3757 Session().rollback()
3761 3758
3762 3759 @classmethod
3763 3760 def get_active_cache(cls, cache_key):
3764 3761 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3765 3762 if inv_obj:
3766 3763 return inv_obj
3767 3764 return None
3768 3765
3769 3766 @classmethod
3770 3767 def get_namespace_map(cls, namespace):
3771 3768 return {
3772 3769 x.cache_key: x
3773 3770 for x in cls.query().filter(cls.cache_args == namespace)}
3774 3771
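
# Illustrative sketch (not part of the original module) of cache invalidation
# via CacheKey.set_invalidate() defined above. It assumes the repository's
# cache entry was registered under the REPO_INVALIDATION_NAMESPACE key pattern;
# `repo_id` is a hypothetical repository id.
def _example_invalidate_repo_caches(repo_id, delete=False):
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
    # either drop the rows (delete=True) or rotate cache_state_uid to mark them stale
    CacheKey.set_invalidate(invalidation_namespace, delete=delete)
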
3775 3772
3776 3773 class ChangesetComment(Base, BaseModel):
3777 3774 __tablename__ = 'changeset_comments'
3778 3775 __table_args__ = (
3779 3776 Index('cc_revision_idx', 'revision'),
3780 3777 base_table_args,
3781 3778 )
3782 3779
3783 3780 COMMENT_OUTDATED = 'comment_outdated'
3784 3781 COMMENT_TYPE_NOTE = 'note'
3785 3782 COMMENT_TYPE_TODO = 'todo'
3786 3783 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3787 3784
3788 3785 OP_IMMUTABLE = 'immutable'
3789 3786 OP_CHANGEABLE = 'changeable'
3790 3787
3791 3788 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3792 3789 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3793 3790 revision = Column('revision', String(40), nullable=True)
3794 3791 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3795 3792 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3796 3793 line_no = Column('line_no', Unicode(10), nullable=True)
3797 3794 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3798 3795 f_path = Column('f_path', Unicode(1000), nullable=True)
3799 3796 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3800 3797 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3801 3798 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3802 3799 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3803 3800 renderer = Column('renderer', Unicode(64), nullable=True)
3804 3801 display_state = Column('display_state', Unicode(128), nullable=True)
3805 3802 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3806 3803 draft = Column('draft', Boolean(), nullable=True, default=False)
3807 3804
3808 3805 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3809 3806 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3810 3807
3811 3808 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3812 3809 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3813 3810
3814 3811 author = relationship('User', lazy='select', back_populates='user_comments')
3815 3812 repo = relationship('Repository', back_populates='comments')
3816 3813 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
3817 3814 pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
3818 3815 pull_request_version = relationship('PullRequestVersion', lazy='select')
3819 3816 history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")
3820 3817
3821 3818 @classmethod
3822 3819 def get_users(cls, revision=None, pull_request_id=None):
3823 3820 """
3824 3821         Returns the users associated with this ChangesetComment, i.e. those
3825 3822         who actually commented
3826 3823
3827 3824 :param cls:
3828 3825 :param revision:
3829 3826 """
3830 3827 q = Session().query(User).join(ChangesetComment.author)
3831 3828 if revision:
3832 3829 q = q.filter(cls.revision == revision)
3833 3830 elif pull_request_id:
3834 3831 q = q.filter(cls.pull_request_id == pull_request_id)
3835 3832 return q.all()
3836 3833
3837 3834 @classmethod
3838 3835 def get_index_from_version(cls, pr_version, versions=None, num_versions=None) -> int:
3839 3836 if pr_version is None:
3840 3837 return 0
3841 3838
3842 3839 if versions is not None:
3843 3840 num_versions = [x.pull_request_version_id for x in versions]
3844 3841
3845 3842 num_versions = num_versions or []
3846 3843 try:
3847 3844 return num_versions.index(pr_version) + 1
3848 3845 except (IndexError, ValueError):
3849 3846 return 0
3850 3847
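    # Illustrative example of get_index_from_version() above, not part of the
    # original code; the version ids below are hypothetical.
    #
    #   ChangesetComment.get_index_from_version(11, num_versions=[10, 11, 12])  # -> 2
    #   ChangesetComment.get_index_from_version(None)                           # -> 0
    #   ChangesetComment.get_index_from_version(99, num_versions=[10, 11, 12])  # -> 0
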
3851 3848 @property
3852 3849 def outdated(self):
3853 3850 return self.display_state == self.COMMENT_OUTDATED
3854 3851
3855 3852 @property
3856 3853 def outdated_js(self):
3857 3854 return str_json(self.display_state == self.COMMENT_OUTDATED)
3858 3855
3859 3856 @property
3860 3857 def immutable(self):
3861 3858 return self.immutable_state == self.OP_IMMUTABLE
3862 3859
3863 3860 def outdated_at_version(self, version: int) -> bool:
3864 3861 """
3865 3862 Checks if comment is outdated for given pull request version
3866 3863 """
3867 3864
3868 3865 def version_check():
3869 3866 return self.pull_request_version_id and self.pull_request_version_id != version
3870 3867
3871 3868 if self.is_inline:
3872 3869 return self.outdated and version_check()
3873 3870 else:
3874 3871             # general comments don't have .outdated set; also, the latest ones don't have a version
3875 3872 return version_check()
3876 3873
3877 3874 def outdated_at_version_js(self, version):
3878 3875 """
3879 3876 Checks if comment is outdated for given pull request version
3880 3877 """
3881 3878 return str_json(self.outdated_at_version(version))
3882 3879
3883 3880 def older_than_version(self, version: int) -> bool:
3884 3881 """
3885 3882         Checks if the comment was made on an earlier version than the given one.
3886 3883 Assumes self.pull_request_version.pull_request_version_id is an integer if not None.
3887 3884 """
3888 3885
3889 3886 # If version is None, return False as the current version cannot be less than None
3890 3887 if version is None:
3891 3888 return False
3892 3889
3893 3890 # Ensure that the version is an integer to prevent TypeError on comparison
3894 3891 if not isinstance(version, int):
3895 3892 raise ValueError("The provided version must be an integer.")
3896 3893
3897 3894 # Initialize current version to 0 or pull_request_version_id if it's available
3898 3895 cur_ver = 0
3899 3896 if self.pull_request_version and self.pull_request_version.pull_request_version_id is not None:
3900 3897 cur_ver = self.pull_request_version.pull_request_version_id
3901 3898
3902 3899 # Return True if the current version is less than the given version
3903 3900 return cur_ver < version
3904 3901
3905 3902 def older_than_version_js(self, version):
3906 3903 """
3907 3904         Checks if the comment was made on an earlier version than the given one
3908 3905 """
3909 3906 return str_json(self.older_than_version(version))
3910 3907
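    # Illustrative sketch of older_than_version() above, not part of the
    # original code; `comment` is a hypothetical loaded instance attached to
    # pull request version 2.
    #
    #   comment.older_than_version(3)     # True, 2 < 3
    #   comment.older_than_version(2)     # False
    #   comment.older_than_version(None)  # False, guarded explicitly
    #   comment.older_than_version('3')   # raises ValueError, must be an int
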
3911 3908 @property
3912 3909 def commit_id(self):
3913 3910 """New style naming to stop using .revision"""
3914 3911 return self.revision
3915 3912
3916 3913 @property
3917 3914 def resolved(self):
3918 3915 return self.resolved_by[0] if self.resolved_by else None
3919 3916
3920 3917 @property
3921 3918 def is_todo(self):
3922 3919 return self.comment_type == self.COMMENT_TYPE_TODO
3923 3920
3924 3921 @property
3925 3922 def is_inline(self):
3926 3923 if self.line_no and self.f_path:
3927 3924 return True
3928 3925 return False
3929 3926
3930 3927 @property
3931 3928 def last_version(self):
3932 3929 version = 0
3933 3930 if self.history:
3934 3931 version = self.history[-1].version
3935 3932 return version
3936 3933
3937 3934 def get_index_version(self, versions):
3938 3935 return self.get_index_from_version(
3939 3936 self.pull_request_version_id, versions)
3940 3937
3941 3938 @property
3942 3939 def review_status(self):
3943 3940 if self.status_change:
3944 3941 return self.status_change[0].status
3945 3942
3946 3943 @property
3947 3944 def review_status_lbl(self):
3948 3945 if self.status_change:
3949 3946 return self.status_change[0].status_lbl
3950 3947
3951 3948 def __repr__(self):
3952 3949 if self.comment_id:
3953 3950 return f'<DB:Comment #{self.comment_id}>'
3954 3951 else:
3955 3952 return f'<DB:Comment at {id(self)!r}>'
3956 3953
3957 3954 def get_api_data(self):
3958 3955 comment = self
3959 3956
3960 3957 data = {
3961 3958 'comment_id': comment.comment_id,
3962 3959 'comment_type': comment.comment_type,
3963 3960 'comment_text': comment.text,
3964 3961 'comment_status': comment.status_change,
3965 3962 'comment_f_path': comment.f_path,
3966 3963 'comment_lineno': comment.line_no,
3967 3964 'comment_author': comment.author,
3968 3965 'comment_created_on': comment.created_on,
3969 3966 'comment_resolved_by': self.resolved,
3970 3967 'comment_commit_id': comment.revision,
3971 3968 'comment_pull_request_id': comment.pull_request_id,
3972 3969 'comment_last_version': self.last_version
3973 3970 }
3974 3971 return data
3975 3972
3976 3973 def __json__(self):
3977 3974 data = dict()
3978 3975 data.update(self.get_api_data())
3979 3976 return data
3980 3977
3981 3978
3982 3979 class ChangesetCommentHistory(Base, BaseModel):
3983 3980 __tablename__ = 'changeset_comments_history'
3984 3981 __table_args__ = (
3985 3982 Index('cch_comment_id_idx', 'comment_id'),
3986 3983 base_table_args,
3987 3984 )
3988 3985
3989 3986 comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
3990 3987 comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
3991 3988 version = Column("version", Integer(), nullable=False, default=0)
3992 3989 created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3993 3990 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3994 3991 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3995 3992 deleted = Column('deleted', Boolean(), default=False)
3996 3993
3997 3994 author = relationship('User', lazy='joined')
3998 3995 comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history")
3999 3996
4000 3997 @classmethod
4001 3998 def get_version(cls, comment_id):
4002 3999 q = Session().query(ChangesetCommentHistory).filter(
4003 4000 ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
4004 4001 if q.count() == 0:
4005 4002 return 1
4006 4003 elif q.count() >= q[0].version:
4007 4004 return q.count() + 1
4008 4005 else:
4009 4006 return q[0].version + 1
4010 4007
4011 4008
4012 4009 class ChangesetStatus(Base, BaseModel):
4013 4010 __tablename__ = 'changeset_statuses'
4014 4011 __table_args__ = (
4015 4012 Index('cs_revision_idx', 'revision'),
4016 4013 Index('cs_version_idx', 'version'),
4017 4014 UniqueConstraint('repo_id', 'revision', 'version'),
4018 4015 base_table_args
4019 4016 )
4020 4017
4021 4018 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
4022 4019 STATUS_APPROVED = 'approved'
4023 4020 STATUS_REJECTED = 'rejected'
4024 4021 STATUS_UNDER_REVIEW = 'under_review'
4025 4022
4026 4023 STATUSES = [
4027 4024 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
4028 4025 (STATUS_APPROVED, _("Approved")),
4029 4026 (STATUS_REJECTED, _("Rejected")),
4030 4027 (STATUS_UNDER_REVIEW, _("Under Review")),
4031 4028 ]
4032 4029
4033 4030 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
4034 4031 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
4035 4032 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
4036 4033 revision = Column('revision', String(40), nullable=False)
4037 4034 status = Column('status', String(128), nullable=False, default=DEFAULT)
4038 4035 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
4039 4036 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
4040 4037 version = Column('version', Integer(), nullable=False, default=0)
4041 4038 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
4042 4039
4043 4040 author = relationship('User', lazy='select')
4044 4041 repo = relationship('Repository', lazy='select')
4045 4042 comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
4046 4043 pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')
4047 4044
4048 4045 def __repr__(self):
4049 4046 return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"
4050 4047
4051 4048 @classmethod
4052 4049 def get_status_lbl(cls, value):
4053 4050 return dict(cls.STATUSES).get(value)
4054 4051
4055 4052 @property
4056 4053 def status_lbl(self):
4057 4054 return ChangesetStatus.get_status_lbl(self.status)
4058 4055
4059 4056 def get_api_data(self):
4060 4057 status = self
4061 4058 data = {
4062 4059 'status_id': status.changeset_status_id,
4063 4060 'status': status.status,
4064 4061 }
4065 4062 return data
4066 4063
4067 4064 def __json__(self):
4068 4065 data = dict()
4069 4066 data.update(self.get_api_data())
4070 4067 return data
4071 4068
4072 4069
4073 4070 class _SetState(object):
4074 4071 """
4075 4072     Context manager allowing the state to be changed for sensitive operations
4076 4073     such as pull request update or merge
4077 4074 """
4078 4075
4079 4076 def __init__(self, pull_request, pr_state, back_state=None):
4080 4077 self._pr = pull_request
4081 4078 self._org_state = back_state or pull_request.pull_request_state
4082 4079 self._pr_state = pr_state
4083 4080 self._current_state = None
4084 4081
4085 4082 def __enter__(self):
4086 4083 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4087 4084 self._pr, self._pr_state)
4088 4085 self.set_pr_state(self._pr_state)
4089 4086 return self
4090 4087
4091 4088 def __exit__(self, exc_type, exc_val, exc_tb):
4092 4089 if exc_val is not None or exc_type is not None:
4093 4090 log.error(traceback.format_tb(exc_tb))
4094 4091 return None
4095 4092
4096 4093 self.set_pr_state(self._org_state)
4097 4094 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4098 4095 self._pr, self._org_state)
4099 4096
4100 4097 @property
4101 4098 def state(self):
4102 4099 return self._current_state
4103 4100
4104 4101 def set_pr_state(self, pr_state):
4105 4102 try:
4106 4103 self._pr.pull_request_state = pr_state
4107 4104 Session().add(self._pr)
4108 4105 Session().commit()
4109 4106 self._current_state = pr_state
4110 4107 except Exception:
4111 4108 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4112 4109 raise
4113 4110
4114 4111
4115 4112 class _PullRequestBase(BaseModel):
4116 4113 """
4117 4114 Common attributes of pull request and version entries.
4118 4115 """
4119 4116
4120 4117 # .status values
4121 4118 STATUS_NEW = 'new'
4122 4119 STATUS_OPEN = 'open'
4123 4120 STATUS_CLOSED = 'closed'
4124 4121
4125 4122 # available states
4126 4123 STATE_CREATING = 'creating'
4127 4124 STATE_UPDATING = 'updating'
4128 4125 STATE_MERGING = 'merging'
4129 4126 STATE_CREATED = 'created'
4130 4127
4131 4128 title = Column('title', Unicode(255), nullable=True)
4132 4129 description = Column(
4133 4130 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4134 4131 nullable=True)
4135 4132 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4136 4133
4137 4134 # new/open/closed status of pull request (not approve/reject/etc)
4138 4135 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4139 4136 created_on = Column(
4140 4137 'created_on', DateTime(timezone=False), nullable=False,
4141 4138 default=datetime.datetime.now)
4142 4139 updated_on = Column(
4143 4140 'updated_on', DateTime(timezone=False), nullable=False,
4144 4141 default=datetime.datetime.now)
4145 4142
4146 4143 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4147 4144
4148 4145 @declared_attr
4149 4146 def user_id(cls):
4150 4147 return Column(
4151 4148 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4152 4149 unique=None)
4153 4150
4154 4151 # 500 revisions max
4155 4152 _revisions = Column(
4156 4153 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4157 4154
4158 4155 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4159 4156
4160 4157 @declared_attr
4161 4158 def source_repo_id(cls):
4162 4159 # TODO: dan: rename column to source_repo_id
4163 4160 return Column(
4164 4161 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4165 4162 nullable=False)
4166 4163
4167 4164 @declared_attr
4168 4165 def pr_source(cls):
4169 4166 return relationship(
4170 4167 'Repository',
4171 4168 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4172 4169 overlaps="pull_requests_source"
4173 4170 )
4174 4171
4175 4172 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4176 4173
4177 4174 @hybrid_property
4178 4175 def source_ref(self):
4179 4176 return self._source_ref
4180 4177
4181 4178 @source_ref.setter
4182 4179 def source_ref(self, val):
4183 4180 parts = (val or '').split(':')
4184 4181 if len(parts) != 3:
4185 4182 raise ValueError(
4186 4183 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4187 4184 self._source_ref = safe_str(val)
4188 4185
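    # Illustrative example of the reference format enforced by the setter
    # above, not part of the original code; the branch name and commit id are
    # hypothetical.
    #
    #   pr.source_ref = 'branch:feature-x:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'  # type:name:commit_id
    #   pr.source_ref = 'feature-x'  # raises ValueError, expected X:Y:Z
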
4189 4186 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4190 4187
4191 4188 @hybrid_property
4192 4189 def target_ref(self):
4193 4190 return self._target_ref
4194 4191
4195 4192 @target_ref.setter
4196 4193 def target_ref(self, val):
4197 4194 parts = (val or '').split(':')
4198 4195 if len(parts) != 3:
4199 4196 raise ValueError(
4200 4197 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4201 4198 self._target_ref = safe_str(val)
4202 4199
4203 4200 @declared_attr
4204 4201 def target_repo_id(cls):
4205 4202 # TODO: dan: rename column to target_repo_id
4206 4203 return Column(
4207 4204 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4208 4205 nullable=False)
4209 4206
4210 4207 @declared_attr
4211 4208 def pr_target(cls):
4212 4209 return relationship(
4213 4210 'Repository',
4214 4211 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4215 4212 overlaps="pull_requests_target"
4216 4213 )
4217 4214
4218 4215 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4219 4216
4220 4217 # TODO: dan: rename column to last_merge_source_rev
4221 4218 _last_merge_source_rev = Column(
4222 4219 'last_merge_org_rev', String(40), nullable=True)
4223 4220 # TODO: dan: rename column to last_merge_target_rev
4224 4221 _last_merge_target_rev = Column(
4225 4222 'last_merge_other_rev', String(40), nullable=True)
4226 4223 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4227 4224 last_merge_metadata = Column(
4228 4225 'last_merge_metadata', MutationObj.as_mutable(
4229 4226 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4230 4227
4231 4228 merge_rev = Column('merge_rev', String(40), nullable=True)
4232 4229
4233 4230 reviewer_data = Column(
4234 4231 'reviewer_data_json', MutationObj.as_mutable(
4235 4232 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4236 4233
4237 4234 @property
4238 4235 def reviewer_data_json(self):
4239 4236 return str_json(self.reviewer_data)
4240 4237
4241 4238 @property
4242 4239 def last_merge_metadata_parsed(self):
4243 4240 metadata = {}
4244 4241 if not self.last_merge_metadata:
4245 4242 return metadata
4246 4243
4247 4244 if hasattr(self.last_merge_metadata, 'de_coerce'):
4248 4245 for k, v in self.last_merge_metadata.de_coerce().items():
4249 4246 if k in ['target_ref', 'source_ref']:
4250 4247 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4251 4248 else:
4252 4249 if hasattr(v, 'de_coerce'):
4253 4250 metadata[k] = v.de_coerce()
4254 4251 else:
4255 4252 metadata[k] = v
4256 4253 return metadata
4257 4254
4258 4255 @property
4259 4256 def work_in_progress(self):
4260 4257         """checks if the pull request is a work in progress by checking the title"""
4261 4258 title = self.title.upper()
4262 4259 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4263 4260 return True
4264 4261 return False
4265 4262
4266 4263 @property
4267 4264 def title_safe(self):
4268 4265 return self.title\
4269 4266 .replace('{', '{{')\
4270 4267 .replace('}', '}}')
4271 4268
4272 4269 @hybrid_property
4273 4270 def description_safe(self):
4274 4271 from rhodecode.lib import helpers as h
4275 4272 return h.escape(self.description)
4276 4273
4277 4274 @hybrid_property
4278 4275 def revisions(self):
4279 4276 return self._revisions.split(':') if self._revisions else []
4280 4277
4281 4278 @revisions.setter
4282 4279 def revisions(self, val):
4283 4280 self._revisions = ':'.join(val)
4284 4281
4285 4282 @hybrid_property
4286 4283 def last_merge_status(self):
4287 4284 return safe_int(self._last_merge_status)
4288 4285
4289 4286 @last_merge_status.setter
4290 4287 def last_merge_status(self, val):
4291 4288 self._last_merge_status = val
4292 4289
4293 4290 @declared_attr
4294 4291 def author(cls):
4295 4292 return relationship(
4296 4293 'User', lazy='joined',
4297 4294             # TODO: enabling back_populates here is somehow problematic:
4298 4295             # back_populates='user_pull_requests'
4299 4296 )
4300 4297
4301 4298 @declared_attr
4302 4299 def source_repo(cls):
4303 4300 return relationship(
4304 4301 'Repository',
4305 4302 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4306 4303 overlaps="pr_source"
4307 4304 )
4308 4305
4309 4306 @property
4310 4307 def source_ref_parts(self):
4311 4308 return self.unicode_to_reference(self.source_ref)
4312 4309
4313 4310 @declared_attr
4314 4311 def target_repo(cls):
4315 4312 return relationship(
4316 4313 'Repository',
4317 4314 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4318 4315 overlaps="pr_target"
4319 4316 )
4320 4317
4321 4318 @property
4322 4319 def target_ref_parts(self):
4323 4320 return self.unicode_to_reference(self.target_ref)
4324 4321
4325 4322 @property
4326 4323 def shadow_merge_ref(self):
4327 4324 return self.unicode_to_reference(self._shadow_merge_ref)
4328 4325
4329 4326 @shadow_merge_ref.setter
4330 4327 def shadow_merge_ref(self, ref):
4331 4328 self._shadow_merge_ref = self.reference_to_unicode(ref)
4332 4329
4333 4330 @staticmethod
4334 4331 def unicode_to_reference(raw):
4335 4332 return unicode_to_reference(raw)
4336 4333
4337 4334 @staticmethod
4338 4335 def reference_to_unicode(ref):
4339 4336 return reference_to_unicode(ref)
4340 4337
4341 4338 def get_api_data(self, with_merge_state=True):
4342 4339 from rhodecode.model.pull_request import PullRequestModel
4343 4340
4344 4341 pull_request = self
4345 4342 if with_merge_state:
4346 4343 merge_response, merge_status, msg = \
4347 4344 PullRequestModel().merge_status(pull_request)
4348 4345 merge_state = {
4349 4346 'status': merge_status,
4350 4347 'message': safe_str(msg),
4351 4348 }
4352 4349 else:
4353 4350 merge_state = {'status': 'not_available',
4354 4351 'message': 'not_available'}
4355 4352
4356 4353 merge_data = {
4357 4354 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4358 4355 'reference': (
4359 4356 pull_request.shadow_merge_ref.asdict()
4360 4357 if pull_request.shadow_merge_ref else None),
4361 4358 }
4362 4359
4363 4360 data = {
4364 4361 'pull_request_id': pull_request.pull_request_id,
4365 4362 'url': PullRequestModel().get_url(pull_request),
4366 4363 'title': pull_request.title,
4367 4364 'description': pull_request.description,
4368 4365 'status': pull_request.status,
4369 4366 'state': pull_request.pull_request_state,
4370 4367 'created_on': pull_request.created_on,
4371 4368 'updated_on': pull_request.updated_on,
4372 4369 'commit_ids': pull_request.revisions,
4373 4370 'review_status': pull_request.calculated_review_status(),
4374 4371 'mergeable': merge_state,
4375 4372 'source': {
4376 4373 'clone_url': pull_request.source_repo.clone_url(),
4377 4374 'repository': pull_request.source_repo.repo_name,
4378 4375 'reference': {
4379 4376 'name': pull_request.source_ref_parts.name,
4380 4377 'type': pull_request.source_ref_parts.type,
4381 4378 'commit_id': pull_request.source_ref_parts.commit_id,
4382 4379 },
4383 4380 },
4384 4381 'target': {
4385 4382 'clone_url': pull_request.target_repo.clone_url(),
4386 4383 'repository': pull_request.target_repo.repo_name,
4387 4384 'reference': {
4388 4385 'name': pull_request.target_ref_parts.name,
4389 4386 'type': pull_request.target_ref_parts.type,
4390 4387 'commit_id': pull_request.target_ref_parts.commit_id,
4391 4388 },
4392 4389 },
4393 4390 'merge': merge_data,
4394 4391 'author': pull_request.author.get_api_data(include_secrets=False,
4395 4392 details='basic'),
4396 4393 'reviewers': [
4397 4394 {
4398 4395 'user': reviewer.get_api_data(include_secrets=False,
4399 4396 details='basic'),
4400 4397 'reasons': reasons,
4401 4398 'review_status': st[0][1].status if st else 'not_reviewed',
4402 4399 }
4403 4400 for obj, reviewer, reasons, mandatory, st in
4404 4401 pull_request.reviewers_statuses()
4405 4402 ]
4406 4403 }
4407 4404
4408 4405 return data
4409 4406
4410 4407 def set_state(self, pull_request_state, final_state=None):
4411 4408 """
4412 4409         # goes from the initial state to `updating`, then back to the initial state.
4413 4410         # the initial state can be changed by specifying back_state=
4414 4411 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4415 4412 pull_request.merge()
4416 4413
4417 4414 :param pull_request_state:
4418 4415 :param final_state:
4419 4416
4420 4417 """
4421 4418
4422 4419 return _SetState(self, pull_request_state, back_state=final_state)
4423 4420
4424 4421
4425 4422 class PullRequest(Base, _PullRequestBase):
4426 4423 __tablename__ = 'pull_requests'
4427 4424 __table_args__ = (
4428 4425 base_table_args,
4429 4426 )
4430 4427 LATEST_VER = 'latest'
4431 4428
4432 4429 pull_request_id = Column(
4433 4430 'pull_request_id', Integer(), nullable=False, primary_key=True)
4434 4431
4435 4432 def __repr__(self):
4436 4433 if self.pull_request_id:
4437 4434 return f'<DB:PullRequest #{self.pull_request_id}>'
4438 4435 else:
4439 4436 return f'<DB:PullRequest at {id(self)!r}>'
4440 4437
4441 4438 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
4442 4439 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
4443 4440 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
4444 4441 versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
4445 4442
4446 4443 @classmethod
4447 4444 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4448 4445 internal_methods=None):
4449 4446
4450 4447 class PullRequestDisplay(object):
4451 4448 """
4452 4449             Special object wrapper for showing PullRequest data via Versions.
4453 4450             It mimics the PR object as closely as possible. This is a read-only
4454 4451             object, just for display.
4455 4452 """
4456 4453
4457 4454 def __init__(self, attrs, internal=None):
4458 4455 self.attrs = attrs
4459 4456                 # internal attributes have priority over the ones given via attrs
4460 4457 self.internal = internal or ['versions']
4461 4458
4462 4459 def __getattr__(self, item):
4463 4460 if item in self.internal:
4464 4461 return getattr(self, item)
4465 4462 try:
4466 4463 return self.attrs[item]
4467 4464 except KeyError:
4468 4465 raise AttributeError(
4469 4466 '%s object has no attribute %s' % (self, item))
4470 4467
4471 4468 def __repr__(self):
4472 4469 pr_id = self.attrs.get('pull_request_id')
4473 4470 return f'<DB:PullRequestDisplay #{pr_id}>'
4474 4471
4475 4472 def versions(self):
4476 4473 return pull_request_obj.versions.order_by(
4477 4474 PullRequestVersion.pull_request_version_id).all()
4478 4475
4479 4476 def is_closed(self):
4480 4477 return pull_request_obj.is_closed()
4481 4478
4482 4479 def is_state_changing(self):
4483 4480 return pull_request_obj.is_state_changing()
4484 4481
4485 4482 @property
4486 4483 def pull_request_version_id(self):
4487 4484 return getattr(pull_request_obj, 'pull_request_version_id', None)
4488 4485
4489 4486 @property
4490 4487 def pull_request_last_version(self):
4491 4488 return pull_request_obj.pull_request_last_version
4492 4489
4493 4490 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4494 4491
4495 4492 attrs.author = StrictAttributeDict(
4496 4493 pull_request_obj.author.get_api_data())
4497 4494 if pull_request_obj.target_repo:
4498 4495 attrs.target_repo = StrictAttributeDict(
4499 4496 pull_request_obj.target_repo.get_api_data())
4500 4497 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4501 4498
4502 4499 if pull_request_obj.source_repo:
4503 4500 attrs.source_repo = StrictAttributeDict(
4504 4501 pull_request_obj.source_repo.get_api_data())
4505 4502 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4506 4503
4507 4504 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4508 4505 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4509 4506 attrs.revisions = pull_request_obj.revisions
4510 4507 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4511 4508 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4512 4509 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4513 4510 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4514 4511
4515 4512 return PullRequestDisplay(attrs, internal=internal_methods)
4516 4513
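    # Illustrative sketch of get_pr_display_object() above, not part of the
    # original code; `pr_version` and `pull_request` are hypothetical,
    # already-loaded PullRequestVersion / PullRequest rows.
    #
    #   display = PullRequest.get_pr_display_object(pr_version, pull_request)
    #   display.title        # resolved from the version's API data (attrs)
    #   display.versions()   # resolved internally against the version object
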
4517 4514 def is_closed(self):
4518 4515 return self.status == self.STATUS_CLOSED
4519 4516
4520 4517 def is_state_changing(self):
4521 4518 return self.pull_request_state != PullRequest.STATE_CREATED
4522 4519
4523 4520 def __json__(self):
4524 4521 return {
4525 4522 'revisions': self.revisions,
4526 4523 'versions': self.versions_count
4527 4524 }
4528 4525
4529 4526 def calculated_review_status(self):
4530 4527 from rhodecode.model.changeset_status import ChangesetStatusModel
4531 4528 return ChangesetStatusModel().calculated_review_status(self)
4532 4529
4533 4530 def reviewers_statuses(self, user=None):
4534 4531 from rhodecode.model.changeset_status import ChangesetStatusModel
4535 4532 return ChangesetStatusModel().reviewers_statuses(self, user=user)
4536 4533
4537 4534 def get_pull_request_reviewers(self, role=None):
4538 4535 qry = PullRequestReviewers.query()\
4539 4536 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4540 4537 if role:
4541 4538 qry = qry.filter(PullRequestReviewers.role == role)
4542 4539
4543 4540 return qry.all()
4544 4541
4545 4542 @property
4546 4543 def reviewers_count(self):
4547 4544 qry = PullRequestReviewers.query()\
4548 4545 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4549 4546 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4550 4547 return qry.count()
4551 4548
4552 4549 @property
4553 4550 def observers_count(self):
4554 4551 qry = PullRequestReviewers.query()\
4555 4552 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4556 4553 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4557 4554 return qry.count()
4558 4555
4559 4556 def observers(self):
4560 4557 qry = PullRequestReviewers.query()\
4561 4558 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4562 4559 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4563 4560 .all()
4564 4561
4565 4562 for entry in qry:
4566 4563 yield entry, entry.user
4567 4564
4568 4565 @property
4569 4566 def workspace_id(self):
4570 4567 from rhodecode.model.pull_request import PullRequestModel
4571 4568 return PullRequestModel()._workspace_id(self)
4572 4569
4573 4570 def get_shadow_repo(self):
4574 4571 workspace_id = self.workspace_id
4575 4572 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4576 4573 if os.path.isdir(shadow_repository_path):
4577 4574 vcs_obj = self.target_repo.scm_instance()
4578 4575 return vcs_obj.get_shadow_instance(shadow_repository_path)
4579 4576
4580 4577 @property
4581 4578 def versions_count(self):
4582 4579 """
4583 4580         return the number of versions this PR has, e.g. a PR that has been
4584 4581         updated once will have 2 versions
4585 4582 """
4586 4583 return self.versions.count() + 1
4587 4584
4588 4585 @property
4589 4586 def pull_request_last_version(self):
4590 4587 return self.versions_count
4591 4588
4592 4589
4593 4590 class PullRequestVersion(Base, _PullRequestBase):
4594 4591 __tablename__ = 'pull_request_versions'
4595 4592 __table_args__ = (
4596 4593 base_table_args,
4597 4594 )
4598 4595
4599 4596 pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
4600 4597 pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
4601 4598 pull_request = relationship('PullRequest', back_populates='versions')
4602 4599
4603 4600 def __repr__(self):
4604 4601 if self.pull_request_version_id:
4605 4602 return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
4606 4603 else:
4607 4604 return f'<DB:PullRequestVersion at {id(self)!r}>'
4608 4605
4609 4606 @property
4610 4607 def reviewers(self):
4611 4608 return self.pull_request.reviewers
4612 4609
4613 4610 @property
4614 4611 def versions(self):
4615 4612 return self.pull_request.versions
4616 4613
4617 4614 def is_closed(self):
4618 4615 # calculate from original
4619 4616 return self.pull_request.status == self.STATUS_CLOSED
4620 4617
4621 4618 def is_state_changing(self):
4622 4619 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4623 4620
4624 4621 def calculated_review_status(self):
4625 4622 return self.pull_request.calculated_review_status()
4626 4623
4627 4624 def reviewers_statuses(self):
4628 4625 return self.pull_request.reviewers_statuses()
4629 4626
4630 4627 def observers(self):
4631 4628 return self.pull_request.observers()
4632 4629
4633 4630
4634 4631 class PullRequestReviewers(Base, BaseModel):
4635 4632 __tablename__ = 'pull_request_reviewers'
4636 4633 __table_args__ = (
4637 4634 base_table_args,
4638 4635 )
4639 4636 ROLE_REVIEWER = 'reviewer'
4640 4637 ROLE_OBSERVER = 'observer'
4641 4638 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4642 4639
4643 4640 @hybrid_property
4644 4641 def reasons(self):
4645 4642 if not self._reasons:
4646 4643 return []
4647 4644 return self._reasons
4648 4645
4649 4646 @reasons.setter
4650 4647 def reasons(self, val):
4651 4648 val = val or []
4652 4649 if any(not isinstance(x, str) for x in val):
4653 4650 raise Exception('invalid reasons type, must be list of strings')
4654 4651 self._reasons = val
4655 4652
4656 4653 pull_requests_reviewers_id = Column(
4657 4654 'pull_requests_reviewers_id', Integer(), nullable=False,
4658 4655 primary_key=True)
4659 4656 pull_request_id = Column(
4660 4657 "pull_request_id", Integer(),
4661 4658 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4662 4659 user_id = Column(
4663 4660 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4664 4661 _reasons = Column(
4665 4662 'reason', MutationList.as_mutable(
4666 4663 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4667 4664
4668 4665 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4669 4666 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4670 4667
4671 4668 user = relationship('User')
4672 4669 pull_request = relationship('PullRequest', back_populates='reviewers')
4673 4670
4674 4671 rule_data = Column(
4675 4672 'rule_data_json',
4676 4673 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4677 4674
4678 4675 def rule_user_group_data(self):
4679 4676 """
4680 4677 Returns the voting user group rule data for this reviewer
4681 4678 """
4682 4679
4683 4680 if self.rule_data and 'vote_rule' in self.rule_data:
4684 4681 user_group_data = {}
4685 4682 if 'rule_user_group_entry_id' in self.rule_data:
4686 4683 # means a group with voting rules !
4687 4684 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4688 4685 user_group_data['name'] = self.rule_data['rule_name']
4689 4686 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4690 4687
4691 4688 return user_group_data
4692 4689
4693 4690 @classmethod
4694 4691 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4695 4692 qry = PullRequestReviewers.query()\
4696 4693 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4697 4694 if role:
4698 4695 qry = qry.filter(PullRequestReviewers.role == role)
4699 4696
4700 4697 return qry.all()
4701 4698
4702 4699 def __repr__(self):
4703 4700 return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
4704 4701
4705 4702
4706 4703 class Notification(Base, BaseModel):
4707 4704 __tablename__ = 'notifications'
4708 4705 __table_args__ = (
4709 4706 Index('notification_type_idx', 'type'),
4710 4707 base_table_args,
4711 4708 )
4712 4709
4713 4710 TYPE_CHANGESET_COMMENT = 'cs_comment'
4714 4711 TYPE_MESSAGE = 'message'
4715 4712 TYPE_MENTION = 'mention'
4716 4713 TYPE_REGISTRATION = 'registration'
4717 4714 TYPE_PULL_REQUEST = 'pull_request'
4718 4715 TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
4719 4716 TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'
4720 4717
4721 4718 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4722 4719 subject = Column('subject', Unicode(512), nullable=True)
4723 4720 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4724 4721 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4725 4722 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4726 4723 type_ = Column('type', Unicode(255))
4727 4724
4728 4725 created_by_user = relationship('User', back_populates='user_created_notifications')
4729 4726 notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')
4730 4727
4731 4728 @property
4732 4729 def recipients(self):
4733 4730 return [x.user for x in UserNotification.query()\
4734 4731 .filter(UserNotification.notification == self)\
4735 4732 .order_by(UserNotification.user_id.asc()).all()]
4736 4733
4737 4734 @classmethod
4738 4735 def create(cls, created_by, subject, body, recipients, type_=None):
4739 4736 if type_ is None:
4740 4737 type_ = Notification.TYPE_MESSAGE
4741 4738
4742 4739 notification = cls()
4743 4740 notification.created_by_user = created_by
4744 4741 notification.subject = subject
4745 4742 notification.body = body
4746 4743 notification.type_ = type_
4747 4744 notification.created_on = datetime.datetime.now()
4748 4745
4749 4746         # For each recipient, link the created notification to their account
4750 4747 for u in recipients:
4751 4748 assoc = UserNotification()
4752 4749 assoc.user_id = u.user_id
4753 4750 assoc.notification = notification
4754 4751
4755 4752             # if created_by is among the recipients, mark their notification
4756 4753             # as read
4757 4754 if u.user_id == created_by.user_id:
4758 4755 assoc.read = True
4759 4756 Session().add(assoc)
4760 4757
4761 4758 Session().add(notification)
4762 4759
4763 4760 return notification
4764 4761
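
# Minimal usage sketch (not part of the original module) of Notification.create()
# above; `admin_user` and `recipient_users` are hypothetical, already-loaded User rows.
def _example_send_message_notification(admin_user, recipient_users):
    notification = Notification.create(
        created_by=admin_user, subject='Example subject', body='Example body',
        recipients=recipient_users, type_=Notification.TYPE_MESSAGE)
    Session().commit()  # create() only adds the rows; the caller commits
    return notification
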
4765 4762
4766 4763 class UserNotification(Base, BaseModel):
4767 4764 __tablename__ = 'user_to_notification'
4768 4765 __table_args__ = (
4769 4766 UniqueConstraint('user_id', 'notification_id'),
4770 4767 base_table_args
4771 4768 )
4772 4769
4773 4770 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4774 4771 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4775 4772 read = Column('read', Boolean, default=False)
4776 4773 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4777 4774
4778 4775 user = relationship('User', lazy="joined", back_populates='notifications')
4779 4776 notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')
4780 4777
4781 4778 def mark_as_read(self):
4782 4779 self.read = True
4783 4780 Session().add(self)
4784 4781
4785 4782
4786 4783 class UserNotice(Base, BaseModel):
4787 4784 __tablename__ = 'user_notices'
4788 4785 __table_args__ = (
4789 4786 base_table_args
4790 4787 )
4791 4788
4792 4789 NOTIFICATION_TYPE_MESSAGE = 'message'
4793 4790 NOTIFICATION_TYPE_NOTICE = 'notice'
4794 4791
4795 4792 NOTIFICATION_LEVEL_INFO = 'info'
4796 4793 NOTIFICATION_LEVEL_WARNING = 'warning'
4797 4794 NOTIFICATION_LEVEL_ERROR = 'error'
4798 4795
4799 4796 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4800 4797
4801 4798 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4802 4799 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4803 4800
4804 4801 notice_read = Column('notice_read', Boolean, default=False)
4805 4802
4806 4803 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4807 4804 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4808 4805
4809 4806 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4810 4807 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4811 4808
4812 4809 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4813 4810 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4814 4811
4815 4812 @classmethod
4816 4813 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4817 4814
4818 4815 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4819 4816 cls.NOTIFICATION_LEVEL_WARNING,
4820 4817 cls.NOTIFICATION_LEVEL_INFO]:
4821 4818 return
4822 4819
4823 4820 from rhodecode.model.user import UserModel
4824 4821 user = UserModel().get_user(user)
4825 4822
4826 4823 new_notice = UserNotice()
4827 4824 if not allow_duplicate:
4828 4825 existing_msg = UserNotice().query() \
4829 4826 .filter(UserNotice.user == user) \
4830 4827 .filter(UserNotice.notice_body == body) \
4831 4828 .filter(UserNotice.notice_read == false()) \
4832 4829 .scalar()
4833 4830 if existing_msg:
4834 4831 log.warning('Ignoring duplicate notice for user %s', user)
4835 4832 return
4836 4833
4837 4834 new_notice.user = user
4838 4835 new_notice.notice_subject = subject
4839 4836 new_notice.notice_body = body
4840 4837 new_notice.notification_level = notice_level
4841 4838 Session().add(new_notice)
4842 4839 Session().commit()
4843 4840
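
# Minimal usage sketch (not part of the original module) of
# UserNotice.create_for_user() above. It commits internally and silently skips
# duplicates of an unread notice with the same body; `user` is a hypothetical
# value accepted by UserModel().get_user() (e.g. a User object).
def _example_warn_user(user):
    UserNotice.create_for_user(
        user, 'Example subject', 'Example body',
        notice_level=UserNotice.NOTIFICATION_LEVEL_WARNING)
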
4844 4841
4845 4842 class Gist(Base, BaseModel):
4846 4843 __tablename__ = 'gists'
4847 4844 __table_args__ = (
4848 4845 Index('g_gist_access_id_idx', 'gist_access_id'),
4849 4846 Index('g_created_on_idx', 'created_on'),
4850 4847 base_table_args
4851 4848 )
4852 4849
4853 4850 GIST_PUBLIC = 'public'
4854 4851 GIST_PRIVATE = 'private'
4855 4852 DEFAULT_FILENAME = 'gistfile1.txt'
4856 4853
4857 4854 ACL_LEVEL_PUBLIC = 'acl_public'
4858 4855 ACL_LEVEL_PRIVATE = 'acl_private'
4859 4856
4860 4857 gist_id = Column('gist_id', Integer(), primary_key=True)
4861 4858 gist_access_id = Column('gist_access_id', Unicode(250))
4862 4859 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4863 4860 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4864 4861 gist_expires = Column('gist_expires', Float(53), nullable=False)
4865 4862 gist_type = Column('gist_type', Unicode(128), nullable=False)
4866 4863 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4867 4864 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4868 4865 acl_level = Column('acl_level', Unicode(128), nullable=True)
4869 4866
4870 4867 owner = relationship('User', back_populates='user_gists')
4871 4868
4872 4869 def __repr__(self):
4873 4870 return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'
4874 4871
4875 4872 @hybrid_property
4876 4873 def description_safe(self):
4877 4874 from rhodecode.lib import helpers as h
4878 4875 return h.escape(self.gist_description)
4879 4876
4880 4877 @classmethod
4881 4878 def get_or_404(cls, id_):
4882 4879 from pyramid.httpexceptions import HTTPNotFound
4883 4880
4884 4881 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4885 4882 if not res:
4886 4883 log.debug('WARN: No DB entry with id %s', id_)
4887 4884 raise HTTPNotFound()
4888 4885 return res
4889 4886
4890 4887 @classmethod
4891 4888 def get_by_access_id(cls, gist_access_id):
4892 4889 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4893 4890
4894 4891 def gist_url(self):
4895 4892 from rhodecode.model.gist import GistModel
4896 4893 return GistModel().get_url(self)
4897 4894
4898 4895 @classmethod
4899 4896 def base_path(cls):
4900 4897 """
4901 4898         Returns the base path where all gists are stored
4902 4899
4903 4900 :param cls:
4904 4901 """
4905 4902 from rhodecode.model.gist import GIST_STORE_LOC
4906 4903 q = Session().query(RhodeCodeUi)\
4907 4904 .filter(RhodeCodeUi.ui_key == URL_SEP)
4908 4905 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4909 4906 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4910 4907
4911 4908 def get_api_data(self):
4912 4909 """
4913 4910 Common function for generating gist related data for API
4914 4911 """
4915 4912 gist = self
4916 4913 data = {
4917 4914 'gist_id': gist.gist_id,
4918 4915 'type': gist.gist_type,
4919 4916 'access_id': gist.gist_access_id,
4920 4917 'description': gist.gist_description,
4921 4918 'url': gist.gist_url(),
4922 4919 'expires': gist.gist_expires,
4923 4920 'created_on': gist.created_on,
4924 4921 'modified_at': gist.modified_at,
4925 4922 'content': None,
4926 4923 'acl_level': gist.acl_level,
4927 4924 }
4928 4925 return data
4929 4926
4930 4927 def __json__(self):
4931 4928 data = dict(
4932 4929 )
4933 4930 data.update(self.get_api_data())
4934 4931 return data
4935 4932 # SCM functions
4936 4933
4937 4934 def scm_instance(self, **kwargs):
4938 4935 """
4939 4936 Get an instance of VCS Repository
4940 4937
4941 4938 :param kwargs:
4942 4939 """
4943 4940 from rhodecode.model.gist import GistModel
4944 4941 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4945 4942 return get_vcs_instance(
4946 4943 repo_path=safe_str(full_repo_path), create=False,
4947 4944 _vcs_alias=GistModel.vcs_backend)
4948 4945
4949 4946
4950 4947 class ExternalIdentity(Base, BaseModel):
4951 4948 __tablename__ = 'external_identities'
4952 4949 __table_args__ = (
4953 4950 Index('local_user_id_idx', 'local_user_id'),
4954 4951 Index('external_id_idx', 'external_id'),
4955 4952 base_table_args
4956 4953 )
4957 4954
4958 4955 external_id = Column('external_id', Unicode(255), default='', primary_key=True)
4959 4956 external_username = Column('external_username', Unicode(1024), default='')
4960 4957 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4961 4958 provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
4962 4959 access_token = Column('access_token', String(1024), default='')
4963 4960 alt_token = Column('alt_token', String(1024), default='')
4964 4961 token_secret = Column('token_secret', String(1024), default='')
4965 4962
4966 4963 @classmethod
4967 4964 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4968 4965 """
4969 4966 Returns ExternalIdentity instance based on search params
4970 4967
4971 4968 :param external_id:
4972 4969 :param provider_name:
4973 4970 :return: ExternalIdentity
4974 4971 """
4975 4972 query = cls.query()
4976 4973 query = query.filter(cls.external_id == external_id)
4977 4974 query = query.filter(cls.provider_name == provider_name)
4978 4975 if local_user_id:
4979 4976 query = query.filter(cls.local_user_id == local_user_id)
4980 4977 return query.first()
4981 4978
4982 4979 @classmethod
4983 4980 def user_by_external_id_and_provider(cls, external_id, provider_name):
4984 4981 """
4985 4982 Returns User instance based on search params
4986 4983
4987 4984 :param external_id:
4988 4985 :param provider_name:
4989 4986 :return: User
4990 4987 """
4991 4988 query = User.query()
4992 4989 query = query.filter(cls.external_id == external_id)
4993 4990 query = query.filter(cls.provider_name == provider_name)
4994 4991 query = query.filter(User.user_id == cls.local_user_id)
4995 4992 return query.first()
4996 4993
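    # Illustrative sketch of the two lookup classmethods above, not part of the
    # original code; the provider name and external id values are hypothetical.
    #
    #   identity = ExternalIdentity.by_external_id_and_provider('12345', 'github')
    #   user = ExternalIdentity.user_by_external_id_and_provider('12345', 'github')
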
4997 4994 @classmethod
4998 4995 def by_local_user_id(cls, local_user_id):
4999 4996 """
5000 4997 Returns all tokens for user
5001 4998
5002 4999 :param local_user_id:
5003 5000 :return: ExternalIdentity
5004 5001 """
5005 5002 query = cls.query()
5006 5003 query = query.filter(cls.local_user_id == local_user_id)
5007 5004 return query
5008 5005
5009 5006 @classmethod
5010 5007 def load_provider_plugin(cls, plugin_id):
5011 5008 from rhodecode.authentication.base import loadplugin
5012 5009 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
5013 5010 auth_plugin = loadplugin(_plugin_id)
5014 5011 return auth_plugin
5015 5012
5016 5013
5017 5014 class Integration(Base, BaseModel):
5018 5015 __tablename__ = 'integrations'
5019 5016 __table_args__ = (
5020 5017 base_table_args
5021 5018 )
5022 5019
5023 5020 integration_id = Column('integration_id', Integer(), primary_key=True)
5024 5021 integration_type = Column('integration_type', String(255))
5025 5022 enabled = Column('enabled', Boolean(), nullable=False)
5026 5023 name = Column('name', String(255), nullable=False)
5027 5024 child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)
5028 5025
5029 5026 settings = Column(
5030 5027 'settings_json', MutationObj.as_mutable(
5031 5028 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
5032 5029 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
5033 5030 repo = relationship('Repository', lazy='joined', back_populates='integrations')
5034 5031
5035 5032 repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
5036 5033 repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')
5037 5034
5038 5035 @property
5039 5036 def scope(self):
5040 5037 if self.repo:
5041 5038 return repr(self.repo)
5042 5039 if self.repo_group:
5043 5040 if self.child_repos_only:
5044 5041 return repr(self.repo_group) + ' (child repos only)'
5045 5042 else:
5046 5043 return repr(self.repo_group) + ' (recursive)'
5047 5044 if self.child_repos_only:
5048 5045 return 'root_repos'
5049 5046 return 'global'
5050 5047
5051 5048 def __repr__(self):
5052 5049 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5053 5050
5054 5051
5055 5052 class RepoReviewRuleUser(Base, BaseModel):
5056 5053 __tablename__ = 'repo_review_rules_users'
5057 5054 __table_args__ = (
5058 5055 base_table_args
5059 5056 )
5060 5057 ROLE_REVIEWER = 'reviewer'
5061 5058 ROLE_OBSERVER = 'observer'
5062 5059 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5063 5060
5064 5061 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5065 5062 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5066 5063 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5067 5064 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5068 5065 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5069 5066 user = relationship('User', back_populates='user_review_rules')
5070 5067
5071 5068 def rule_data(self):
5072 5069 return {
5073 5070 'mandatory': self.mandatory,
5074 5071 'role': self.role,
5075 5072 }
5076 5073
5077 5074
5078 5075 class RepoReviewRuleUserGroup(Base, BaseModel):
5079 5076 __tablename__ = 'repo_review_rules_users_groups'
5080 5077 __table_args__ = (
5081 5078 base_table_args
5082 5079 )
5083 5080
5084 5081 VOTE_RULE_ALL = -1
5085 5082 ROLE_REVIEWER = 'reviewer'
5086 5083 ROLE_OBSERVER = 'observer'
5087 5084 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5088 5085
5089 5086 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5090 5087 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5091 5088 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5092 5089 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5093 5090 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5094 5091 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5095 5092 users_group = relationship('UserGroup')
5096 5093
5097 5094 def rule_data(self):
5098 5095 return {
5099 5096 'mandatory': self.mandatory,
5100 5097 'role': self.role,
5101 5098 'vote_rule': self.vote_rule
5102 5099 }
5103 5100
5104 5101 @property
5105 5102 def vote_rule_label(self):
5106 5103 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5107 5104 return 'all must vote'
5108 5105 else:
5109 5106 return 'min. vote {}'.format(self.vote_rule)
5110 5107
5111 5108
5112 5109 class RepoReviewRule(Base, BaseModel):
5113 5110 __tablename__ = 'repo_review_rules'
5114 5111 __table_args__ = (
5115 5112 base_table_args
5116 5113 )
5117 5114
5118 5115 repo_review_rule_id = Column(
5119 5116 'repo_review_rule_id', Integer(), primary_key=True)
5120 5117 repo_id = Column(
5121 5118 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5122 5119 repo = relationship('Repository', back_populates='review_rules')
5123 5120
5124 5121 review_rule_name = Column('review_rule_name', String(255))
5125 5122 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5126 5123 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5127 5124 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5128 5125
5129 5126 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5130 5127
5131 5128 # Legacy fields, just for backward compat
5132 5129 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5133 5130 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5134 5131
5135 5132 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5136 5133 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5137 5134
5138 5135 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5139 5136
5140 5137 rule_users = relationship('RepoReviewRuleUser')
5141 5138 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5142 5139
5143 5140 def _validate_pattern(self, value):
5144 5141 re.compile('^' + glob2re(value) + '$')
5145 5142
5146 5143 @hybrid_property
5147 5144 def source_branch_pattern(self):
5148 5145 return self._branch_pattern or '*'
5149 5146
5150 5147 @source_branch_pattern.setter
5151 5148 def source_branch_pattern(self, value):
5152 5149 self._validate_pattern(value)
5153 5150 self._branch_pattern = value or '*'
5154 5151
5155 5152 @hybrid_property
5156 5153 def target_branch_pattern(self):
5157 5154 return self._target_branch_pattern or '*'
5158 5155
5159 5156 @target_branch_pattern.setter
5160 5157 def target_branch_pattern(self, value):
5161 5158 self._validate_pattern(value)
5162 5159 self._target_branch_pattern = value or '*'
5163 5160
5164 5161 @hybrid_property
5165 5162 def file_pattern(self):
5166 5163 return self._file_pattern or '*'
5167 5164
5168 5165 @file_pattern.setter
5169 5166 def file_pattern(self, value):
5170 5167 self._validate_pattern(value)
5171 5168 self._file_pattern = value or '*'
5172 5169
5173 5170 @hybrid_property
5174 5171 def forbid_pr_author_to_review(self):
5175 5172 return self.pr_author == 'forbid_pr_author'
5176 5173
5177 5174 @hybrid_property
5178 5175 def include_pr_author_to_review(self):
5179 5176 return self.pr_author == 'include_pr_author'
5180 5177
5181 5178 @hybrid_property
5182 5179 def forbid_commit_author_to_review(self):
5183 5180 return self.commit_author == 'forbid_commit_author'
5184 5181
5185 5182 @hybrid_property
5186 5183 def include_commit_author_to_review(self):
5187 5184 return self.commit_author == 'include_commit_author'
5188 5185
5189 5186 def matches(self, source_branch, target_branch, files_changed):
5190 5187 """
5191 5188 Check if this review rule matches a branch/files in a pull request
5192 5189
5193 5190 :param source_branch: source branch name for the commit
5194 5191 :param target_branch: target branch name for the commit
5195 5192 :param files_changed: list of file paths changed in the pull request
5196 5193 """
5197 5194
5198 5195 source_branch = source_branch or ''
5199 5196 target_branch = target_branch or ''
5200 5197 files_changed = files_changed or []
5201 5198
5202 5199 branch_matches = True
5203 5200 if source_branch or target_branch:
5204 5201 if self.source_branch_pattern == '*':
5205 5202 source_branch_match = True
5206 5203 else:
5207 5204 if self.source_branch_pattern.startswith('re:'):
5208 5205 source_pattern = self.source_branch_pattern[3:]
5209 5206 else:
5210 5207 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5211 5208 source_branch_regex = re.compile(source_pattern)
5212 5209 source_branch_match = bool(source_branch_regex.search(source_branch))
5213 5210 if self.target_branch_pattern == '*':
5214 5211 target_branch_match = True
5215 5212 else:
5216 5213 if self.target_branch_pattern.startswith('re:'):
5217 5214 target_pattern = self.target_branch_pattern[3:]
5218 5215 else:
5219 5216 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5220 5217 target_branch_regex = re.compile(target_pattern)
5221 5218 target_branch_match = bool(target_branch_regex.search(target_branch))
5222 5219
5223 5220 branch_matches = source_branch_match and target_branch_match
5224 5221
5225 5222 files_matches = True
5226 5223 if self.file_pattern != '*':
5227 5224 files_matches = False
5228 5225 if self.file_pattern.startswith('re:'):
5229 5226 file_pattern = self.file_pattern[3:]
5230 5227 else:
5231 5228 file_pattern = glob2re(self.file_pattern)
5232 5229 file_regex = re.compile(file_pattern)
5233 5230 for file_data in files_changed:
5234 5231 filename = file_data.get('filename')
5235 5232
5236 5233 if file_regex.search(filename):
5237 5234 files_matches = True
5238 5235 break
5239 5236
5240 5237 return branch_matches and files_matches
5241 5238
5242 5239 @property
5243 5240 def review_users(self):
5244 5241 """ Returns the users which this rule applies to """
5245 5242
5246 5243 users = collections.OrderedDict()
5247 5244
5248 5245 for rule_user in self.rule_users:
5249 5246 if rule_user.user.active:
5250 5247 if rule_user.user not in users:
5251 5248 users[rule_user.user.username] = {
5252 5249 'user': rule_user.user,
5253 5250 'source': 'user',
5254 5251 'source_data': {},
5255 5252 'data': rule_user.rule_data()
5256 5253 }
5257 5254
5258 5255 for rule_user_group in self.rule_user_groups:
5259 5256 source_data = {
5260 5257 'user_group_id': rule_user_group.users_group.users_group_id,
5261 5258 'name': rule_user_group.users_group.users_group_name,
5262 5259 'members': len(rule_user_group.users_group.members)
5263 5260 }
5264 5261 for member in rule_user_group.users_group.members:
5265 5262 if member.user.active:
5266 5263 key = member.user.username
5267 5264 if key in users:
5268 5265 # skip this member as we already have them
5269 5266 # this prevents overriding the "first" matched
5270 5267 # users with duplicates from multiple groups
5271 5268 continue
5272 5269
5273 5270 users[key] = {
5274 5271 'user': member.user,
5275 5272 'source': 'user_group',
5276 5273 'source_data': source_data,
5277 5274 'data': rule_user_group.rule_data()
5278 5275 }
5279 5276
5280 5277 return users
5281 5278
5282 5279 def user_group_vote_rule(self, user_id):
5283 5280
5284 5281 rules = []
5285 5282 if not self.rule_user_groups:
5286 5283 return rules
5287 5284
5288 5285 for user_group in self.rule_user_groups:
5289 5286 user_group_members = [x.user_id for x in user_group.users_group.members]
5290 5287 if user_id in user_group_members:
5291 5288 rules.append(user_group)
5292 5289 return rules
5293 5290
5294 5291 def __repr__(self):
5295 5292 return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
5296 5293
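# --- Illustrative sketch (not part of this changeset) ----------------------
# How the pattern matching in RepoReviewRule.matches() above behaves: plain
# values are treated as globs (via glob2re), while a 're:' prefix switches to
# a raw regular expression. The rule instance and branch names below are
# assumptions for demonstration only.
rule = RepoReviewRule()
rule.source_branch_pattern = 'release-*'                # glob pattern
rule.target_branch_pattern = 're:^(default|stable)$'    # raw regex pattern
rule.matches('release-1.2', 'stable', [])               # expected: True
rule.matches('feature-x', 'stable', [])                 # expected: False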
5297 5294
5298 5295 class ScheduleEntry(Base, BaseModel):
5299 5296 __tablename__ = 'schedule_entries'
5300 5297 __table_args__ = (
5301 5298 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5302 5299 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5303 5300 base_table_args,
5304 5301 )
5305 5302 SCHEDULE_TYPE_INTEGER = "integer"
5306 5303 SCHEDULE_TYPE_CRONTAB = "crontab"
5307 5304
5308 5305 schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
5309 5306 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5310 5307
5311 5308 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5312 5309 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5313 5310 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5314 5311
5315 5312 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5316 5313 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5317 5314
5318 5315 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5319 5316 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5320 5317
5321 5318 # task
5322 5319 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5323 5320 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5324 5321 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5325 5322 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5326 5323
5327 5324 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5328 5325 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5329 5326
5330 5327 @hybrid_property
5331 5328 def schedule_type(self):
5332 5329 return self._schedule_type
5333 5330
5334 5331 @schedule_type.setter
5335 5332 def schedule_type(self, val):
5336 5333 if val not in self.schedule_types:
5337 5334 raise ValueError('Value must be one of `{}` and got `{}`'.format(
5338 5335 self.schedule_types, val))
5339 5336
5340 5337 self._schedule_type = val
5341 5338
5342 5339 @classmethod
5343 5340 def get_uid(cls, obj):
5344 5341 args = obj.task_args
5345 5342 kwargs = obj.task_kwargs
5346 5343 if isinstance(args, JsonRaw):
5347 5344 try:
5348 5345 args = json.loads(args)
5349 5346 except ValueError:
5350 5347 args = tuple()
5351 5348
5352 5349 if isinstance(kwargs, JsonRaw):
5353 5350 try:
5354 5351 kwargs = json.loads(kwargs)
5355 5352 except ValueError:
5356 5353 kwargs = dict()
5357 5354
5358 5355 dot_notation = obj.task_dot_notation
5359 5356 val = '.'.join(map(safe_str, [
5360 5357 sorted(dot_notation), args, sorted(kwargs.items())]))
5361 5358 return sha1(safe_bytes(val))
5362 5359
5363 5360 @classmethod
5364 5361 def get_by_schedule_name(cls, schedule_name):
5365 5362 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5366 5363
5367 5364 @classmethod
5368 5365 def get_by_schedule_id(cls, schedule_id):
5369 5366 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5370 5367
5371 5368 @property
5372 5369 def task(self):
5373 5370 return self.task_dot_notation
5374 5371
5375 5372 @property
5376 5373 def schedule(self):
5377 5374 from rhodecode.lib.celerylib.utils import raw_2_schedule
5378 5375 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5379 5376 return schedule
5380 5377
5381 5378 @property
5382 5379 def args(self):
5383 5380 try:
5384 5381 return list(self.task_args or [])
5385 5382 except ValueError:
5386 5383 return list()
5387 5384
5388 5385 @property
5389 5386 def kwargs(self):
5390 5387 try:
5391 5388 return dict(self.task_kwargs or {})
5392 5389 except ValueError:
5393 5390 return dict()
5394 5391
5395 5392 def _as_raw(self, val, indent=False):
5396 5393 if hasattr(val, 'de_coerce'):
5397 5394 val = val.de_coerce()
5398 5395 if val:
5399 5396 if indent:
5400 5397 val = ext_json.formatted_str_json(val)
5401 5398 else:
5402 5399 val = ext_json.str_json(val)
5403 5400
5404 5401 return val
5405 5402
5406 5403 @property
5407 5404 def schedule_definition_raw(self):
5408 5405 return self._as_raw(self.schedule_definition)
5409 5406
5410 5407 def args_raw(self, indent=False):
5411 5408 return self._as_raw(self.task_args, indent)
5412 5409
5413 5410 def kwargs_raw(self, indent=False):
5414 5411 return self._as_raw(self.task_kwargs, indent)
5415 5412
5416 5413 def __repr__(self):
5417 5414 return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
5418 5415
5419 5416
5420 5417 @event.listens_for(ScheduleEntry, 'before_update')
5421 5418 def update_task_uid(mapper, connection, target):
5422 5419 target.task_uid = ScheduleEntry.get_uid(target)
5423 5420
5424 5421
5425 5422 @event.listens_for(ScheduleEntry, 'before_insert')
5426 5423 def set_task_uid(mapper, connection, target):
5427 5424 target.task_uid = ScheduleEntry.get_uid(target)
5428 5425
5429 5426
5430 5427 class _BaseBranchPerms(BaseModel):
5431 5428 @classmethod
5432 5429 def compute_hash(cls, value):
5433 5430 return sha1_safe(value)
5434 5431
5435 5432 @hybrid_property
5436 5433 def branch_pattern(self):
5437 5434 return self._branch_pattern or '*'
5438 5435
5439 5436 @hybrid_property
5440 5437 def branch_hash(self):
5441 5438 return self._branch_hash
5442 5439
5443 5440 def _validate_glob(self, value):
5444 5441 re.compile('^' + glob2re(value) + '$')
5445 5442
5446 5443 @branch_pattern.setter
5447 5444 def branch_pattern(self, value):
5448 5445 self._validate_glob(value)
5449 5446 self._branch_pattern = value or '*'
5450 5447 # set the Hash when setting the branch pattern
5451 5448 self._branch_hash = self.compute_hash(self._branch_pattern)
5452 5449
5453 5450 def matches(self, branch):
5454 5451 """
5455 5452 Check if the branch matches this entry
5456 5453
5457 5454 :param branch: branch name for the commit
5458 5455 """
5459 5456
5460 5457 branch = branch or ''
5461 5458
5462 5459 branch_matches = True
5463 5460 if branch:
5464 5461 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5465 5462 branch_matches = bool(branch_regex.search(branch))
5466 5463
5467 5464 return branch_matches
5468 5465
5469 5466
5470 5467 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5471 5468 __tablename__ = 'user_to_repo_branch_permissions'
5472 5469 __table_args__ = (
5473 5470 base_table_args
5474 5471 )
5475 5472
5476 5473 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5477 5474
5478 5475 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5479 5476 repo = relationship('Repository', back_populates='user_branch_perms')
5480 5477
5481 5478 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5482 5479 permission = relationship('Permission')
5483 5480
5484 5481 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5485 5482 user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')
5486 5483
5487 5484 rule_order = Column('rule_order', Integer(), nullable=False)
5488 5485 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5489 5486 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5490 5487
5491 5488 def __repr__(self):
5492 5489 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5493 5490
5494 5491
5495 5492 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5496 5493 __tablename__ = 'user_group_to_repo_branch_permissions'
5497 5494 __table_args__ = (
5498 5495 base_table_args
5499 5496 )
5500 5497
5501 5498 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5502 5499
5503 5500 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5504 5501 repo = relationship('Repository', back_populates='user_group_branch_perms')
5505 5502
5506 5503 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5507 5504 permission = relationship('Permission')
5508 5505
5509 5506 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5510 5507 user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')
5511 5508
5512 5509 rule_order = Column('rule_order', Integer(), nullable=False)
5513 5510 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5514 5511 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5515 5512
5516 5513 def __repr__(self):
5517 5514 return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5518 5515
5519 5516
5520 5517 class UserBookmark(Base, BaseModel):
5521 5518 __tablename__ = 'user_bookmarks'
5522 5519 __table_args__ = (
5523 5520 UniqueConstraint('user_id', 'bookmark_repo_id'),
5524 5521 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5525 5522 UniqueConstraint('user_id', 'bookmark_position'),
5526 5523 base_table_args
5527 5524 )
5528 5525
5529 5526 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5530 5527 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5531 5528 position = Column("bookmark_position", Integer(), nullable=False)
5532 5529 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5533 5530 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5534 5531 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5535 5532
5536 5533 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5537 5534 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5538 5535
5539 5536 user = relationship("User")
5540 5537
5541 5538 repository = relationship("Repository")
5542 5539 repository_group = relationship("RepoGroup")
5543 5540
5544 5541 @classmethod
5545 5542 def get_by_position_for_user(cls, position, user_id):
5546 5543 return cls.query() \
5547 5544 .filter(UserBookmark.user_id == user_id) \
5548 5545 .filter(UserBookmark.position == position).scalar()
5549 5546
5550 5547 @classmethod
5551 5548 def get_bookmarks_for_user(cls, user_id, cache=True):
5552 5549 bookmarks = cls.query() \
5553 5550 .filter(UserBookmark.user_id == user_id) \
5554 5551 .options(joinedload(UserBookmark.repository)) \
5555 5552 .options(joinedload(UserBookmark.repository_group)) \
5556 5553 .order_by(UserBookmark.position.asc())
5557 5554
5558 5555 if cache:
5559 5556 bookmarks = bookmarks.options(
5560 5557 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5561 5558 )
5562 5559
5563 5560 return bookmarks.all()
5564 5561
5565 5562 def __repr__(self):
5566 5563 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5567 5564
5568 5565
5569 5566 class FileStore(Base, BaseModel):
5570 5567 __tablename__ = 'file_store'
5571 5568 __table_args__ = (
5572 5569 base_table_args
5573 5570 )
5574 5571
5575 5572 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5576 5573 file_uid = Column('file_uid', String(1024), nullable=False)
5577 5574 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5578 5575 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5579 5576 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5580 5577
5581 5578 # sha256 hash
5582 5579 file_hash = Column('file_hash', String(512), nullable=False)
5583 5580 file_size = Column('file_size', BigInteger(), nullable=False)
5584 5581
5585 5582 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5586 5583 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5587 5584 accessed_count = Column('accessed_count', Integer(), default=0)
5588 5585
5589 5586 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5590 5587
5591 5588 # if repo/repo_group reference is set, check for permissions
5592 5589 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5593 5590
5594 5591 # hidden defines an attachment that should be hidden from the artifact listing
5595 5592 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5596 5593
5597 5594 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5598 5595 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')
5599 5596
5600 5597 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5601 5598
5602 5599 # scope limited to a user that the requester has access to
5603 5600 scope_user_id = Column(
5604 5601 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5605 5602 nullable=True, unique=None, default=None)
5606 5603 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')
5607 5604
5608 5605 # scope limited to a user group that the requester has access to
5609 5606 scope_user_group_id = Column(
5610 5607 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5611 5608 nullable=True, unique=None, default=None)
5612 5609 user_group = relationship('UserGroup', lazy='joined')
5613 5610
5614 5611 # scope limited to a repo that the requester has access to
5615 5612 scope_repo_id = Column(
5616 5613 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5617 5614 nullable=True, unique=None, default=None)
5618 5615 repo = relationship('Repository', lazy='joined')
5619 5616
5620 5617 # scope limited to a repo group that the requester has access to
5621 5618 scope_repo_group_id = Column(
5622 5619 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5623 5620 nullable=True, unique=None, default=None)
5624 5621 repo_group = relationship('RepoGroup', lazy='joined')
5625 5622
5626 5623 @classmethod
5627 5624 def get_scope(cls, scope_type, scope_id):
5628 5625 if scope_type == 'repo':
5629 5626 return f'repo:{scope_id}'
5630 5627 elif scope_type == 'repo-group':
5631 5628 return f'repo-group:{scope_id}'
5632 5629 elif scope_type == 'user':
5633 5630 return f'user:{scope_id}'
5634 5631 elif scope_type == 'user-group':
5635 5632 return f'user-group:{scope_id}'
5636 5633 else:
5637 5634 return scope_type
5638 5635
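# Hypothetical usage of get_scope() above (not part of this changeset): it
# renders the short "type:id" strings used when describing an artifact's scope.
FileStore.get_scope('repo', 42)         # -> 'repo:42'
FileStore.get_scope('repo-group', 7)    # -> 'repo-group:7'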
5639 5636 @classmethod
5640 5637 def get_by_store_uid(cls, file_store_uid, safe=False):
5641 5638 if safe:
5642 5639 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5643 5640 else:
5644 5641 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5645 5642
5646 5643 @classmethod
5647 5644 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5648 5645 file_description='', enabled=True, hidden=False, check_acl=True,
5649 5646 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5650 5647
5651 5648 store_entry = FileStore()
5652 5649 store_entry.file_uid = file_uid
5653 5650 store_entry.file_display_name = file_display_name
5654 5651 store_entry.file_org_name = filename
5655 5652 store_entry.file_size = file_size
5656 5653 store_entry.file_hash = file_hash
5657 5654 store_entry.file_description = file_description
5658 5655
5659 5656 store_entry.check_acl = check_acl
5660 5657 store_entry.enabled = enabled
5661 5658 store_entry.hidden = hidden
5662 5659
5663 5660 store_entry.user_id = user_id
5664 5661 store_entry.scope_user_id = scope_user_id
5665 5662 store_entry.scope_repo_id = scope_repo_id
5666 5663 store_entry.scope_repo_group_id = scope_repo_group_id
5667 5664
5668 5665 return store_entry
5669 5666
5670 5667 @classmethod
5671 5668 def store_metadata(cls, file_store_id, args, commit=True):
5672 5669 file_store = FileStore.get(file_store_id)
5673 5670 if file_store is None:
5674 5671 return
5675 5672
5676 5673 for section, key, value, value_type in args:
5677 5674 has_key = FileStoreMetadata().query() \
5678 5675 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5679 5676 .filter(FileStoreMetadata.file_store_meta_section == section) \
5680 5677 .filter(FileStoreMetadata.file_store_meta_key == key) \
5681 5678 .scalar()
5682 5679 if has_key:
5683 5680 msg = 'key `{}` already defined under section `{}` for this file.'\
5684 5681 .format(key, section)
5685 5682 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5686 5683
5687 5684 # NOTE(marcink): raises ArtifactMetadataBadValueType
5688 5685 FileStoreMetadata.valid_value_type(value_type)
5689 5686
5690 5687 meta_entry = FileStoreMetadata()
5691 5688 meta_entry.file_store = file_store
5692 5689 meta_entry.file_store_meta_section = section
5693 5690 meta_entry.file_store_meta_key = key
5694 5691 meta_entry.file_store_meta_value_type = value_type
5695 5692 meta_entry.file_store_meta_value = value
5696 5693
5697 5694 Session().add(meta_entry)
5698 5695
5699 5696 try:
5700 5697 if commit:
5701 5698 Session().commit()
5702 5699 except IntegrityError:
5703 5700 Session().rollback()
5704 5701 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5705 5702
5706 5703 @classmethod
5707 5704 def bump_access_counter(cls, file_uid, commit=True):
5708 5705 FileStore().query()\
5709 5706 .filter(FileStore.file_uid == file_uid)\
5710 5707 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5711 5708 FileStore.accessed_on: datetime.datetime.now()})
5712 5709 if commit:
5713 5710 Session().commit()
5714 5711
5715 5712 def __json__(self):
5716 5713 data = {
5717 5714 'filename': self.file_display_name,
5718 5715 'filename_org': self.file_org_name,
5719 5716 'file_uid': self.file_uid,
5720 5717 'description': self.file_description,
5721 5718 'hidden': self.hidden,
5722 5719 'size': self.file_size,
5723 5720 'created_on': self.created_on,
5724 5721 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5725 5722 'downloaded_times': self.accessed_count,
5726 5723 'sha256': self.file_hash,
5727 5724 'metadata': self.file_metadata,
5728 5725 }
5729 5726
5730 5727 return data
5731 5728
5732 5729 def __repr__(self):
5733 5730 return f'<FileStore({self.file_store_id})>'
5734 5731
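# --- Illustrative sketch (assumed usage, not part of this changeset) --------
# Creating a FileStore row and attaching metadata. store_metadata() expects an
# iterable of (section, key, value, value_type) tuples and raises
# ArtifactMetadataDuplicate if the section/key pair already exists for the file.
# The uid, hash and user_id values below are placeholders.
artifact = FileStore.create(
    file_uid='artifact-uid-placeholder', filename='report.pdf',
    file_hash='sha256-placeholder', file_size=1024,
    file_description='monthly report', user_id=1)
Session().add(artifact)
Session().commit()

FileStore.store_metadata(
    artifact.file_store_id,
    [('reports', 'period', '2023-01', 'str')],
    commit=True)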
5735 5732
5736 5733 class FileStoreMetadata(Base, BaseModel):
5737 5734 __tablename__ = 'file_store_metadata'
5738 5735 __table_args__ = (
5739 5736 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5740 5737 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5741 5738 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5742 5739 base_table_args
5743 5740 )
5744 5741 SETTINGS_TYPES = {
5745 5742 'str': safe_str,
5746 5743 'int': safe_int,
5747 5744 'unicode': safe_str,
5748 5745 'bool': str2bool,
5749 5746 'list': functools.partial(aslist, sep=',')
5750 5747 }
5751 5748
5752 5749 file_store_meta_id = Column(
5753 5750 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5754 5751 primary_key=True)
5755 5752 _file_store_meta_section = Column(
5756 5753 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5757 5754 nullable=True, unique=None, default=None)
5758 5755 _file_store_meta_section_hash = Column(
5759 5756 "file_store_meta_section_hash", String(255),
5760 5757 nullable=True, unique=None, default=None)
5761 5758 _file_store_meta_key = Column(
5762 5759 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5763 5760 nullable=True, unique=None, default=None)
5764 5761 _file_store_meta_key_hash = Column(
5765 5762 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5766 5763 _file_store_meta_value = Column(
5767 5764 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5768 5765 nullable=True, unique=None, default=None)
5769 5766 _file_store_meta_value_type = Column(
5770 5767 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5771 5768 default='unicode')
5772 5769
5773 5770 file_store_id = Column(
5774 5771 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5775 5772 nullable=True, unique=None, default=None)
5776 5773
5777 5774 file_store = relationship('FileStore', lazy='joined', viewonly=True)
5778 5775
5779 5776 @classmethod
5780 5777 def valid_value_type(cls, value):
5781 5778 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5782 5779 raise ArtifactMetadataBadValueType(
5783 5780 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5784 5781
5785 5782 @hybrid_property
5786 5783 def file_store_meta_section(self):
5787 5784 return self._file_store_meta_section
5788 5785
5789 5786 @file_store_meta_section.setter
5790 5787 def file_store_meta_section(self, value):
5791 5788 self._file_store_meta_section = value
5792 5789 self._file_store_meta_section_hash = _hash_key(value)
5793 5790
5794 5791 @hybrid_property
5795 5792 def file_store_meta_key(self):
5796 5793 return self._file_store_meta_key
5797 5794
5798 5795 @file_store_meta_key.setter
5799 5796 def file_store_meta_key(self, value):
5800 5797 self._file_store_meta_key = value
5801 5798 self._file_store_meta_key_hash = _hash_key(value)
5802 5799
5803 5800 @hybrid_property
5804 5801 def file_store_meta_value(self):
5805 5802 val = self._file_store_meta_value
5806 5803
5807 5804 if self._file_store_meta_value_type:
5808 5805 # e.g. unicode.encrypted == unicode
5809 5806 _type = self._file_store_meta_value_type.split('.')[0]
5810 5807 # decode the encrypted value if it's an encrypted field type
5811 5808 if '.encrypted' in self._file_store_meta_value_type:
5812 5809 cipher = EncryptedTextValue()
5813 5810 val = safe_str(cipher.process_result_value(val, None))
5814 5811 # do final type conversion
5815 5812 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5816 5813 val = converter(val)
5817 5814
5818 5815 return val
5819 5816
5820 5817 @file_store_meta_value.setter
5821 5818 def file_store_meta_value(self, val):
5822 5819 val = safe_str(val)
5823 5820 # encode the encrypted value
5824 5821 if '.encrypted' in self.file_store_meta_value_type:
5825 5822 cipher = EncryptedTextValue()
5826 5823 val = safe_str(cipher.process_bind_param(val, None))
5827 5824 self._file_store_meta_value = val
5828 5825
5829 5826 @hybrid_property
5830 5827 def file_store_meta_value_type(self):
5831 5828 return self._file_store_meta_value_type
5832 5829
5833 5830 @file_store_meta_value_type.setter
5834 5831 def file_store_meta_value_type(self, val):
5835 5832 # e.g. unicode.encrypted
5836 5833 self.valid_value_type(val)
5837 5834 self._file_store_meta_value_type = val
5838 5835
5839 5836 def __json__(self):
5840 5837 data = {
5841 5838 'artifact': self.file_store.file_uid,
5842 5839 'section': self.file_store_meta_section,
5843 5840 'key': self.file_store_meta_key,
5844 5841 'value': self.file_store_meta_value,
5845 5842 }
5846 5843
5847 5844 return data
5848 5845
5849 5846 def __repr__(self):
5850 5847 return '<%s[%s]%s=>%s>' % (self.cls_name, self.file_store_meta_section,
5851 5848 self.file_store_meta_key, self.file_store_meta_value)
5852 5849
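# Illustrative sketch (not part of this changeset) of the typed-value handling
# above: value_type drives conversion on read, so an 'int' entry is stored as
# text but comes back as an integer via safe_int. Values are placeholders.
meta = FileStoreMetadata()
meta.file_store_meta_section = 'limits'
meta.file_store_meta_key = 'max_downloads'
meta.file_store_meta_value_type = 'int'   # must resolve to a SETTINGS_TYPES key
meta.file_store_meta_value = 10           # stored internally as the string '10'
meta.file_store_meta_value                # read back as the integer 10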
5853 5850
5854 5851 class DbMigrateVersion(Base, BaseModel):
5855 5852 __tablename__ = 'db_migrate_version'
5856 5853 __table_args__ = (
5857 5854 base_table_args,
5858 5855 )
5859 5856
5860 5857 repository_id = Column('repository_id', String(250), primary_key=True)
5861 5858 repository_path = Column('repository_path', Text)
5862 5859 version = Column('version', Integer)
5863 5860
5864 5861 @classmethod
5865 5862 def set_version(cls, version):
5866 5863 """
5867 5864 Helper for forcing a different version, usually for debugging purposes via ishell.
5868 5865 """
5869 5866 ver = DbMigrateVersion.query().first()
5870 5867 ver.version = version
5871 5868 Session().commit()
5872 5869
5873 5870
5874 5871 class DbSession(Base, BaseModel):
5875 5872 __tablename__ = 'db_session'
5876 5873 __table_args__ = (
5877 5874 base_table_args,
5878 5875 )
5879 5876
5880 5877 def __repr__(self):
5881 5878 return f'<DB:DbSession({self.id})>'
5882 5879
5883 5880 id = Column('id', Integer())
5884 5881 namespace = Column('namespace', String(255), primary_key=True)
5885 5882 accessed = Column('accessed', DateTime, nullable=False)
5886 5883 created = Column('created', DateTime, nullable=False)
5887 5884 data = Column('data', PickleType, nullable=False)
@@ -1,638 +1,634 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
19 19 these are the form validation classes
21 21 http://formencode.org/module-formencode.validators.html
21 21 for a list of all available validators
23 23
24 24 we can create our own validators
25 25
26 26 The table below outlines the options which can be used in a schema in addition to the validators themselves
27 27 pre_validators [] These validators will be applied before the schema
28 28 chained_validators [] These validators will be applied after the schema
29 29 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
30 30 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
31 31 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
32 32 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
33 33
34 34
35 35 <name> = formencode.validators.<name of validator>
36 36 <name> must equal form name
37 37 list=[1,2,3,4,5]
38 38 for SELECT use formencode.All(OneOf(list), Int())
39 39
40 40 """
41 41
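# A minimal, hypothetical sketch of the schema options described in the
# docstring above (not one of the RhodeCode forms): allow_extra_fields keeps
# unknown keys from raising an error and filter_extra_fields drops them from
# the result; the field names here are examples only.
def _example_schema_usage():
    import formencode
    from formencode import validators

    class _ExampleForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        username = validators.UnicodeString(strip=True, min=1, not_empty=True)
        age = validators.Int(min=0, if_missing=None)

    # the unknown key 'junk' is accepted and silently dropped from the result
    return _ExampleForm().to_python({'username': ' bob ', 'age': '42', 'junk': 'x'})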
42 42 import deform
43 43 import logging
44 44 import formencode
45 45
46 46 from pkg_resources import resource_filename
47 47 from formencode import All, Pipe
48 48
49 49 from pyramid.threadlocal import get_current_request
50 50
51 51 from rhodecode import BACKENDS
52 52 from rhodecode.lib import helpers
53 53 from rhodecode.model import validators as v
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 deform_templates = resource_filename('deform', 'templates')
59 59 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
60 60 search_path = (rhodecode_templates, deform_templates)
61 61
62 62
63 63 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
64 64 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
65 65 def __call__(self, template_name, **kw):
66 66 kw['h'] = helpers
67 67 kw['request'] = get_current_request()
68 68 return self.load(template_name)(**kw)
69 69
70 70
71 71 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
72 72 deform.Form.set_default_renderer(form_renderer)
73 73
74 74
75 75 def LoginForm(localizer):
76 76 _ = localizer
77 77
78 78 class _LoginForm(formencode.Schema):
79 79 allow_extra_fields = True
80 80 filter_extra_fields = True
81 81 username = v.UnicodeString(
82 82 strip=True,
83 83 min=1,
84 84 not_empty=True,
85 85 messages={
86 86 'empty': _('Please enter a login'),
87 87 'tooShort': _('Enter a value %(min)i characters long or more')
88 88 }
89 89 )
90 90
91 91 password = v.UnicodeString(
92 92 strip=False,
93 93 min=3,
94 94 max=72,
95 95 not_empty=True,
96 96 messages={
97 97 'empty': _('Please enter a password'),
98 98 'tooShort': _('Enter %(min)i characters or more')}
99 99 )
100 100
101 101 remember = v.StringBoolean(if_missing=False)
102 102
103 103 chained_validators = [v.ValidAuth(localizer)]
104 104 return _LoginForm
105 105
106 106
107 107 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
108 108 old_data = old_data or {}
109 109 available_languages = available_languages or []
110 110 _ = localizer
111 111
112 112 class _UserForm(formencode.Schema):
113 113 allow_extra_fields = True
114 114 filter_extra_fields = True
115 115 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
116 116 v.ValidUsername(localizer, edit, old_data))
117 117 if edit:
118 118 new_password = All(
119 119 v.ValidPassword(localizer),
120 120 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
121 121 )
122 122 password_confirmation = All(
123 123 v.ValidPassword(localizer),
124 124 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
125 125 )
126 126 admin = v.StringBoolean(if_missing=False)
127 127 else:
128 128 password = All(
129 129 v.ValidPassword(localizer),
130 130 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
131 131 )
132 132 password_confirmation = All(
133 133 v.ValidPassword(localizer),
134 134 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
135 135 )
136 136
137 137 password_change = v.StringBoolean(if_missing=False)
138 138 create_repo_group = v.StringBoolean(if_missing=False)
139 139
140 140 active = v.StringBoolean(if_missing=False)
141 141 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
142 142 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
143 143 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
144 144 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
145 145 if_missing='')
146 146 extern_name = v.UnicodeString(strip=True)
147 147 extern_type = v.UnicodeString(strip=True)
148 148 language = v.OneOf(available_languages, hideList=False,
149 149 testValueList=True, if_missing=None)
150 150 chained_validators = [v.ValidPasswordsMatch(localizer)]
151 151 return _UserForm
152 152
153 153
154 154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
155 155 old_data = old_data or {}
156 156 _ = localizer
157 157
158 158 class _UserGroupForm(formencode.Schema):
159 159 allow_extra_fields = True
160 160 filter_extra_fields = True
161 161
162 162 users_group_name = All(
163 163 v.UnicodeString(strip=True, min=1, not_empty=True),
164 164 v.ValidUserGroup(localizer, edit, old_data)
165 165 )
166 166 user_group_description = v.UnicodeString(strip=True, min=1,
167 167 not_empty=False)
168 168
169 169 users_group_active = v.StringBoolean(if_missing=False)
170 170
171 171 if edit:
172 172 # this is user group owner
173 173 user = All(
174 174 v.UnicodeString(not_empty=True),
175 175 v.ValidRepoUser(localizer, allow_disabled))
176 176 return _UserGroupForm
177 177
178 178
179 179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
180 180 can_create_in_root=False, allow_disabled=False):
181 181 _ = localizer
182 182 old_data = old_data or {}
183 183 available_groups = available_groups or []
184 184
185 185 class _RepoGroupForm(formencode.Schema):
186 186 allow_extra_fields = True
187 187 filter_extra_fields = False
188 188
189 189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
190 190 v.SlugifyName(localizer),)
191 191 group_description = v.UnicodeString(strip=True, min=1,
192 192 not_empty=False)
193 193 group_copy_permissions = v.StringBoolean(if_missing=False)
194 194
195 195 group_parent_id = v.OneOf(available_groups, hideList=False,
196 196 testValueList=True, not_empty=True)
197 197 enable_locking = v.StringBoolean(if_missing=False)
198 198 chained_validators = [
199 199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
200 200
201 201 if edit:
202 202 # this is repo group owner
203 203 user = All(
204 204 v.UnicodeString(not_empty=True),
205 205 v.ValidRepoUser(localizer, allow_disabled))
206 206 return _RepoGroupForm
207 207
208 208
209 209 def RegisterForm(localizer, edit=False, old_data=None):
210 210 _ = localizer
211 211 old_data = old_data or {}
212 212
213 213 class _RegisterForm(formencode.Schema):
214 214 allow_extra_fields = True
215 215 filter_extra_fields = True
216 216 username = All(
217 217 v.ValidUsername(localizer, edit, old_data),
218 218 v.UnicodeString(strip=True, min=1, not_empty=True)
219 219 )
220 220 password = All(
221 221 v.ValidPassword(localizer),
222 222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
223 223 )
224 224 password_confirmation = All(
225 225 v.ValidPassword(localizer),
226 226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
227 227 )
228 228 active = v.StringBoolean(if_missing=False)
229 229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
232 232
233 233 chained_validators = [v.ValidPasswordsMatch(localizer)]
234 234 return _RegisterForm
235 235
236 236
237 237 def PasswordResetForm(localizer):
238 238 _ = localizer
239 239
240 240 class _PasswordResetForm(formencode.Schema):
241 241 allow_extra_fields = True
242 242 filter_extra_fields = True
243 243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
244 244 return _PasswordResetForm
245 245
246 246
247 247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
248 248 _ = localizer
249 249 old_data = old_data or {}
250 250 repo_groups = repo_groups or []
251 251 supported_backends = BACKENDS.keys()
252 252
253 253 class _RepoForm(formencode.Schema):
254 254 allow_extra_fields = True
255 255 filter_extra_fields = False
256 256 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
257 257 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
258 258 repo_group = All(v.CanWriteGroup(localizer, old_data),
259 259 v.OneOf(repo_groups, hideList=True))
260 260 repo_type = v.OneOf(supported_backends, required=False,
261 261 if_missing=old_data.get('repo_type'))
262 262 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
263 263 repo_private = v.StringBoolean(if_missing=False)
264 264 repo_copy_permissions = v.StringBoolean(if_missing=False)
265 265 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
266 266
267 267 repo_enable_statistics = v.StringBoolean(if_missing=False)
268 268 repo_enable_downloads = v.StringBoolean(if_missing=False)
269 269 repo_enable_locking = v.StringBoolean(if_missing=False)
270 270
271 271 if edit:
272 272 # this is repo owner
273 273 user = All(
274 274 v.UnicodeString(not_empty=True),
275 275 v.ValidRepoUser(localizer, allow_disabled))
276 276 clone_uri_change = v.UnicodeString(
277 277 not_empty=False, if_missing=v.Missing)
278 278
279 279 chained_validators = [v.ValidCloneUri(localizer),
280 280 v.ValidRepoName(localizer, edit, old_data)]
281 281 return _RepoForm
282 282
283 283
284 284 def RepoPermsForm(localizer):
285 285 _ = localizer
286 286
287 287 class _RepoPermsForm(formencode.Schema):
288 288 allow_extra_fields = True
289 289 filter_extra_fields = False
290 290 chained_validators = [v.ValidPerms(localizer, type_='repo')]
291 291 return _RepoPermsForm
292 292
293 293
294 294 def RepoGroupPermsForm(localizer, valid_recursive_choices):
295 295 _ = localizer
296 296
297 297 class _RepoGroupPermsForm(formencode.Schema):
298 298 allow_extra_fields = True
299 299 filter_extra_fields = False
300 300 recursive = v.OneOf(valid_recursive_choices)
301 301 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
302 302 return _RepoGroupPermsForm
303 303
304 304
305 305 def UserGroupPermsForm(localizer):
306 306 _ = localizer
307 307
308 308 class _UserPermsForm(formencode.Schema):
309 309 allow_extra_fields = True
310 310 filter_extra_fields = False
311 311 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
312 312 return _UserPermsForm
313 313
314 314
315 315 def RepoFieldForm(localizer):
316 316 _ = localizer
317 317
318 318 class _RepoFieldForm(formencode.Schema):
319 319 filter_extra_fields = True
320 320 allow_extra_fields = True
321 321
322 322 new_field_key = All(v.FieldKey(localizer),
323 323 v.UnicodeString(strip=True, min=3, not_empty=True))
324 324 new_field_value = v.UnicodeString(not_empty=False, if_missing='')
325 325 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
326 326 if_missing='str')
327 327 new_field_label = v.UnicodeString(not_empty=False)
328 328 new_field_desc = v.UnicodeString(not_empty=False)
329 329 return _RepoFieldForm
330 330
331 331
332 332 def RepoForkForm(localizer, edit=False, old_data=None,
333 333 supported_backends=BACKENDS.keys(), repo_groups=None):
334 334 _ = localizer
335 335 old_data = old_data or {}
336 336 repo_groups = repo_groups or []
337 337
338 338 class _RepoForkForm(formencode.Schema):
339 339 allow_extra_fields = True
340 340 filter_extra_fields = False
341 341 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
342 342 v.SlugifyName(localizer))
343 343 repo_group = All(v.CanWriteGroup(localizer, ),
344 344 v.OneOf(repo_groups, hideList=True))
345 345 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
346 346 description = v.UnicodeString(strip=True, min=1, not_empty=True)
347 347 private = v.StringBoolean(if_missing=False)
348 348 copy_permissions = v.StringBoolean(if_missing=False)
349 349 fork_parent_id = v.UnicodeString()
350 350 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
351 351 return _RepoForkForm
352 352
353 353
354 354 def ApplicationSettingsForm(localizer):
355 355 _ = localizer
356 356
357 357 class _ApplicationSettingsForm(formencode.Schema):
358 358 allow_extra_fields = True
359 359 filter_extra_fields = False
360 360 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
361 361 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
362 362 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
363 363 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
364 364 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
365 365 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
366 366 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
367 367 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
368 368 return _ApplicationSettingsForm
369 369
370 370
371 371 def ApplicationVisualisationForm(localizer):
372 372 from rhodecode.model.db import Repository
373 373 _ = localizer
374 374
375 375 class _ApplicationVisualisationForm(formencode.Schema):
376 376 allow_extra_fields = True
377 377 filter_extra_fields = False
378 378 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
379 379 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
380 380 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
381 381
382 382 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
383 383 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
384 384 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
385 385 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
386 386 rhodecode_show_version = v.StringBoolean(if_missing=False)
387 387 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
388 388 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
389 389 rhodecode_gravatar_url = v.UnicodeString(min=3)
390 390 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
391 391 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
392 392 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
393 393 rhodecode_support_url = v.UnicodeString()
394 394 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
395 395 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
396 396 return _ApplicationVisualisationForm
397 397
398 398
399 399 class _BaseVcsSettingsForm(formencode.Schema):
400 400
401 401 allow_extra_fields = True
402 402 filter_extra_fields = False
403 403 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
404 404 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
405 405 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
406 406
407 407 # PR/Code-review
408 408 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
409 409 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
410 410
411 411 # hg
412 412 extensions_largefiles = v.StringBoolean(if_missing=False)
413 413 extensions_evolve = v.StringBoolean(if_missing=False)
414 414 phases_publish = v.StringBoolean(if_missing=False)
415 415
416 416 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
417 417 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
418 418
419 419 # git
420 420 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
421 421 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
422 422 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
423 423
424 424 # svn
425 425 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
426 426 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
427 427
428 428 # cache
429 429 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
430 430
431 431
432 432 def ApplicationUiSettingsForm(localizer):
433 433 _ = localizer
434 434
435 435 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
436 436 web_push_ssl = v.StringBoolean(if_missing=False)
437 paths_root_path = All(
438 v.ValidPath(localizer),
439 v.UnicodeString(strip=True, min=1, not_empty=True)
440 )
441 437 largefiles_usercache = All(
442 438 v.ValidPath(localizer),
443 439 v.UnicodeString(strip=True, min=2, not_empty=True))
444 440 vcs_git_lfs_store_location = All(
445 441 v.ValidPath(localizer),
446 442 v.UnicodeString(strip=True, min=2, not_empty=True))
447 443 extensions_hggit = v.StringBoolean(if_missing=False)
448 444 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
449 445 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
450 446 return _ApplicationUiSettingsForm
451 447
452 448
453 449 def RepoVcsSettingsForm(localizer, repo_name):
454 450 _ = localizer
455 451
456 452 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
457 453 inherit_global_settings = v.StringBoolean(if_missing=False)
458 454 new_svn_branch = v.ValidSvnPattern(localizer,
459 455 section='vcs_svn_branch', repo_name=repo_name)
460 456 new_svn_tag = v.ValidSvnPattern(localizer,
461 457 section='vcs_svn_tag', repo_name=repo_name)
462 458 return _RepoVcsSettingsForm
463 459
464 460
465 461 def LabsSettingsForm(localizer):
466 462 _ = localizer
467 463
468 464 class _LabSettingsForm(formencode.Schema):
469 465 allow_extra_fields = True
470 466 filter_extra_fields = False
471 467 return _LabSettingsForm
472 468
473 469
474 470 def ApplicationPermissionsForm(
475 471 localizer, register_choices, password_reset_choices,
476 472 extern_activate_choices):
477 473 _ = localizer
478 474
479 475 class _DefaultPermissionsForm(formencode.Schema):
480 476 allow_extra_fields = True
481 477 filter_extra_fields = True
482 478
483 479 anonymous = v.StringBoolean(if_missing=False)
484 480 default_register = v.OneOf(register_choices)
485 481 default_register_message = v.UnicodeString()
486 482 default_password_reset = v.OneOf(password_reset_choices)
487 483 default_extern_activate = v.OneOf(extern_activate_choices)
488 484 return _DefaultPermissionsForm
489 485
490 486
491 487 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
492 488 user_group_perms_choices):
493 489 _ = localizer
494 490
495 491 class _ObjectPermissionsForm(formencode.Schema):
496 492 allow_extra_fields = True
497 493 filter_extra_fields = True
498 494 overwrite_default_repo = v.StringBoolean(if_missing=False)
499 495 overwrite_default_group = v.StringBoolean(if_missing=False)
500 496 overwrite_default_user_group = v.StringBoolean(if_missing=False)
501 497
502 498 default_repo_perm = v.OneOf(repo_perms_choices)
503 499 default_group_perm = v.OneOf(group_perms_choices)
504 500 default_user_group_perm = v.OneOf(user_group_perms_choices)
505 501
506 502 return _ObjectPermissionsForm
507 503
508 504
509 505 def BranchPermissionsForm(localizer, branch_perms_choices):
510 506 _ = localizer
511 507
512 508 class _BranchPermissionsForm(formencode.Schema):
513 509 allow_extra_fields = True
514 510 filter_extra_fields = True
515 511 overwrite_default_branch = v.StringBoolean(if_missing=False)
516 512 default_branch_perm = v.OneOf(branch_perms_choices)
517 513
518 514 return _BranchPermissionsForm
519 515
520 516
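Each permissions form receives its allowed values as a parameter and guards them with v.OneOf, so anything outside the configured choice list is rejected at validation time. A brief sketch with formencode's OneOf and an illustrative branch-permission choice list:

from formencode import validators, Invalid

default_branch_perm = validators.OneOf(
    ['branch.none', 'branch.merge', 'branch.push', 'branch.push_force'])

print(default_branch_perm.to_python('branch.push'))  # accepted as-is
try:
    default_branch_perm.to_python('branch.admin')  # not in the allowed set
except Invalid as exc:
    print(exc)
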
521 517 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
522 518 repo_group_create_choices, user_group_create_choices,
523 519 fork_choices, inherit_default_permissions_choices):
524 520 _ = localizer
525 521
526 522 class _DefaultPermissionsForm(formencode.Schema):
527 523 allow_extra_fields = True
528 524 filter_extra_fields = True
529 525
530 526 anonymous = v.StringBoolean(if_missing=False)
531 527
532 528 default_repo_create = v.OneOf(create_choices)
533 529 default_repo_create_on_write = v.OneOf(create_on_write_choices)
534 530 default_user_group_create = v.OneOf(user_group_create_choices)
535 531 default_repo_group_create = v.OneOf(repo_group_create_choices)
536 532 default_fork_create = v.OneOf(fork_choices)
537 533 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
538 534 return _DefaultPermissionsForm
539 535
540 536
541 537 def UserIndividualPermissionsForm(localizer):
542 538 _ = localizer
543 539
544 540 class _DefaultPermissionsForm(formencode.Schema):
545 541 allow_extra_fields = True
546 542 filter_extra_fields = True
547 543
548 544 inherit_default_permissions = v.StringBoolean(if_missing=False)
549 545 return _DefaultPermissionsForm
550 546
551 547
552 548 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
553 549 _ = localizer
554 550 old_data = old_data or {}
555 551
556 552 class _DefaultsForm(formencode.Schema):
557 553 allow_extra_fields = True
558 554 filter_extra_fields = True
559 555 default_repo_type = v.OneOf(supported_backends)
560 556 default_repo_private = v.StringBoolean(if_missing=False)
561 557 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
562 558 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
563 559 default_repo_enable_locking = v.StringBoolean(if_missing=False)
564 560 return _DefaultsForm
565 561
566 562
567 563 def AuthSettingsForm(localizer):
568 564 _ = localizer
569 565
570 566 class _AuthSettingsForm(formencode.Schema):
571 567 allow_extra_fields = True
572 568 filter_extra_fields = True
573 569 auth_plugins = All(v.ValidAuthPlugins(localizer),
574 570 v.UniqueListFromString(localizer)(not_empty=True))
575 571 return _AuthSettingsForm
576 572
577 573
578 574 def UserExtraEmailForm(localizer):
579 575 _ = localizer
580 576
581 577 class _UserExtraEmailForm(formencode.Schema):
582 578 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
583 579 return _UserExtraEmailForm
584 580
585 581
586 582 def UserExtraIpForm(localizer):
587 583 _ = localizer
588 584
589 585 class _UserExtraIpForm(formencode.Schema):
590 586 ip = v.ValidIp(localizer)(not_empty=True)
591 587 return _UserExtraIpForm
592 588
593 589
594 590 def PullRequestForm(localizer, repo_id):
595 591 _ = localizer
596 592
597 593 class ReviewerForm(formencode.Schema):
598 594 user_id = v.Int(not_empty=True)
599 595 reasons = All()
600 596 rules = All(v.UniqueList(localizer, convert=int)())
601 597 mandatory = v.StringBoolean()
602 598 role = v.String(if_missing='reviewer')
603 599
604 600 class ObserverForm(formencode.Schema):
605 601 user_id = v.Int(not_empty=True)
606 602 reasons = All()
607 603 rules = All(v.UniqueList(localizer, convert=int)())
608 604 mandatory = v.StringBoolean()
609 605 role = v.String(if_missing='observer')
610 606
611 607 class _PullRequestForm(formencode.Schema):
612 608 allow_extra_fields = True
613 609 filter_extra_fields = True
614 610
615 611 common_ancestor = v.UnicodeString(strip=True, required=True)
616 612 source_repo = v.UnicodeString(strip=True, required=True)
617 613 source_ref = v.UnicodeString(strip=True, required=True)
618 614 target_repo = v.UnicodeString(strip=True, required=True)
619 615 target_ref = v.UnicodeString(strip=True, required=True)
620 616 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
621 617 v.UniqueList(localizer)(not_empty=True))
622 618 review_members = formencode.ForEach(ReviewerForm())
623 619 observer_members = formencode.ForEach(ObserverForm())
624 620 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
625 621 pullrequest_desc = v.UnicodeString(strip=True, required=False)
626 622 description_renderer = v.UnicodeString(strip=True, required=False)
627 623
628 624 return _PullRequestForm
629 625
630 626
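review_members and observer_members are wrapped in formencode.ForEach, so each element of the submitted list is run through the nested ReviewerForm or ObserverForm schema on its own. A hedged sketch with a stripped-down reviewer schema; the sample payload and the expected output in the comment are illustrative:

import formencode
from formencode import validators

class _ReviewerForm(formencode.Schema):
    # stripped-down copy of the nested ReviewerForm above
    user_id = validators.Int(not_empty=True)
    mandatory = validators.StringBool(if_missing=False)
    role = validators.String(if_missing='reviewer')

review_members = formencode.ForEach(_ReviewerForm())

# each list element is validated and converted independently
print(review_members.to_python([
    {'user_id': '2', 'mandatory': 'true'},
    {'user_id': '7', 'role': 'observer'},
]))
# -> [{'user_id': 2, 'mandatory': True, 'role': 'reviewer'},
#     {'user_id': 7, 'mandatory': False, 'role': 'observer'}]
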
631 627 def IssueTrackerPatternsForm(localizer):
632 628 _ = localizer
633 629
634 630 class _IssueTrackerPatternsForm(formencode.Schema):
635 631 allow_extra_fields = True
636 632 filter_extra_fields = False
637 633 chained_validators = [v.ValidPattern(localizer)]
638 634 return _IssueTrackerPatternsForm
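All of these factories share one calling convention: they close over a localizer, define a formencode.Schema subclass, and return the class; the caller then instantiates it and runs to_python() over the submitted data, catching formencode.Invalid for per-field errors. A sketch of that flow under assumed names (handle_settings_post, the lambda translator stub, and the POST dict are invented; LabsSettingsForm is repeated from above only because it needs no custom validators):

import formencode

def LabsSettingsForm(localizer):
    _ = localizer

    class _LabSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
    return _LabSettingsForm

def handle_settings_post(post_data):
    # instantiate the schema class the factory returns, then validate the POST dict
    schema = LabsSettingsForm(localizer=lambda msg: msg)()
    try:
        return schema.to_python(post_data)
    except formencode.Invalid as exc:
        # unpack_errors() maps field names to their error messages
        return {'errors': exc.unpack_errors()}

print(handle_settings_post({'rhodecode_hidden_fields': 'on'}))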