caches: use dogpile for sql_cache_short region.
marcink
r2883:f2837b35 default
@@ -1,738 +1,738 @@
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all email subjects with a given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address from which all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send an error email from this address
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 127.0.0.1
47 47 port = 5000
48 48
49 49 ##################################
50 50 ## WAITRESS WSGI SERVER ##
51 51 ## Recommended for Development ##
52 52 ##################################
53 53
54 54 use = egg:waitress#main
55 55 ## number of worker threads
56 56 threads = 5
57 57 ## MAX BODY SIZE 100GB
58 58 max_request_body_size = 107374182400
59 59 ## Use poll instead of select, fixes file descriptor limit problems.
60 60 ## May not work on old windows systems.
61 61 asyncore_use_poll = true
62 62
63 63
64 64 ##########################
65 65 ## GUNICORN WSGI SERVER ##
66 66 ##########################
67 67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68 68
69 69 #use = egg:gunicorn#main
70 70 ## Sets the number of process workers. You must set `instance_id = *`
71 71 ## when this option is set to more than one worker, recommended
72 72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 73 ## The `instance_id = *` must be set in the [app:main] section below
74 74 #workers = 2
75 75 ## number of threads for each of the workers, must be set to 1 for gevent
76 76 ## generally recommended to be at 1
77 77 #threads = 1
78 78 ## process name
79 79 #proc_name = rhodecode
80 80 ## type of worker class, one of sync, gevent
81 81 ## for bigger setups it is recommended to use a worker class other than sync
82 82 #worker_class = gevent
83 83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 84 #worker_connections = 10
85 85 ## max number of requests that worker will handle before being gracefully
86 86 ## restarted, could prevent memory leaks
87 87 #max_requests = 1000
88 88 #max_requests_jitter = 30
89 89 ## amount of time a worker can spend handling a request before it
90 90 ## gets killed and restarted. Set to 6hrs
91 91 #timeout = 21600
92 92
93 93
94 94 ## prefix middleware for RhodeCode.
95 95 ## recommended when using proxy setup.
96 96 ## allows serving RhodeCode under a URL prefix on the server.
97 97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 98 ## And set your prefix like: `prefix = /custom_prefix`
99 99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 100 ## to make your cookies only work on prefix url
101 101 [filter:proxy-prefix]
102 102 use = egg:PasteDeploy#prefix
103 103 prefix = /
104 104
105 105 [app:main]
106 106 use = egg:rhodecode-enterprise-ce
107 107
108 108 ## enable proxy prefix middleware, defined above
109 109 #filter-with = proxy-prefix
110 110
111 111 # During development we want to have the debug toolbar enabled
112 112 pyramid.includes =
113 113 pyramid_debugtoolbar
114 114 rhodecode.lib.middleware.request_wrapper
115 115
116 116 pyramid.reload_templates = true
117 117
118 118 debugtoolbar.hosts = 0.0.0.0/0
119 119 debugtoolbar.exclude_prefixes =
120 120 /css
121 121 /fonts
122 122 /images
123 123 /js
124 124
125 125 ## RHODECODE PLUGINS ##
126 126 rhodecode.includes =
127 127 rhodecode.api
128 128
129 129
130 130 # api prefix url
131 131 rhodecode.api.url = /_admin/api
132 132
133 133
134 134 ## END RHODECODE PLUGINS ##
135 135
136 136 ## encryption key used to encrypt social plugin tokens,
137 137 ## remote_urls with credentials etc, if not set it defaults to
138 138 ## `beaker.session.secret`
139 139 #rhodecode.encrypted_values.secret =
140 140
141 141 ## decryption strict mode (enabled by default). It controls if decryption raises
142 142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 143 #rhodecode.encrypted_values.strict = false
144 144
145 145 ## return gzipped responses from Rhodecode (static files/application)
146 146 gzip_responses = false
147 147
148 148 ## autogenerate javascript routes file on startup
149 149 generate_js_files = false
150 150
151 151 ## Optional Languages
152 152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
153 153 lang = en
154 154
155 155 ## perform a full repository scan on each server start, this should be
156 156 ## set to false after first startup, to allow faster server restarts.
157 157 startup.import_repos = false
158 158
159 159 ## Uncomment and set this path to use archive download cache.
160 160 ## Once enabled, generated archives will be cached at this location
161 161 ## and served from the cache during subsequent requests for the same archive of
162 162 ## the repository.
163 163 #archive_cache_dir = /tmp/tarballcache
164 164
165 165 ## URL at which the application is running. This is used for bootstrapping
166 166 ## requests in context when no web request is available. Used in ishell, or
167 167 ## SSH calls. Set this for events to receive proper url for SSH calls.
168 168 app.base_url = http://rhodecode.local
169 169
170 170 ## change this to unique ID for security
171 171 app_instance_uuid = rc-production
172 172
173 173 ## cut off limit for large diffs (size in bytes). If overall diff size on
174 174 ## commit, or pull request exceeds this limit this diff will be displayed
175 175 ## partially. E.g 512000 == 512Kb
176 176 cut_off_limit_diff = 512000
177 177
178 178 ## cut off limit for large files inside diffs (size in bytes). Each individual
179 179 ## file inside diff which exceeds this limit will be displayed partially.
180 180 ## E.g 128000 == 128Kb
181 181 cut_off_limit_file = 128000
182 182
183 183 ## use cached version of the scm repo everywhere
184 184 vcs_full_cache = true
185 185
186 186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
187 187 ## Normally this is controlled by proper http flags sent from http server
188 188 force_https = false
189 189
190 190 ## use Strict-Transport-Security headers
191 191 use_htsts = false
192 192
193 193 ## git rev filter option, --all is the default filter, if you need to
194 194 ## hide all refs in changelog switch this to --branches --tags
195 195 git_rev_filter = --branches --tags
196 196
197 197 # Set to true if your repos are exposed using the dumb protocol
198 198 git_update_server_info = false
199 199
200 200 ## RSS/ATOM feed options
201 201 rss_cut_off_limit = 256000
202 202 rss_items_per_page = 10
203 203 rss_include_diff = false
204 204
205 205 ## gist URL alias, used to create nicer urls for gist. This should be an
206 206 ## url that does rewrites to _admin/gists/{gistid}.
207 207 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
208 208 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
209 209 gist_alias_url =
210 210
211 211 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
212 212 ## used for access.
213 213 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
214 214 ## came from the logged in user who owns this authentication token.
215 215 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
216 216 ## authentication token. Such a view would only be accessible when used together
217 217 ## with this authentication token
218 218 ##
219 219 ## list of all views can be found under `/_admin/permissions/auth_token_access`
220 220 ## The list should be "," separated and on a single line.
221 221 ##
222 222 ## Most common views to enable:
223 223 # RepoCommitsView:repo_commit_download
224 224 # RepoCommitsView:repo_commit_patch
225 225 # RepoCommitsView:repo_commit_raw
226 226 # RepoCommitsView:repo_commit_raw@TOKEN
227 227 # RepoFilesView:repo_files_diff
228 228 # RepoFilesView:repo_archivefile
229 229 # RepoFilesView:repo_file_raw
230 230 # GistView:*
231 231 api_access_controllers_whitelist =
232 232
233 233 ## default encoding used to convert from and to unicode
234 234 ## can also be a comma separated list of encodings in case of mixed encodings
235 235 default_encoding = UTF-8
236 236
237 237 ## instance-id prefix
238 238 ## a prefix key for this instance used for cache invalidation when running
239 239 ## multiple instances of rhodecode, make sure it's globally unique for
240 240 ## all running rhodecode instances. Leave empty if you don't use it
241 241 instance_id =
242 242
243 243 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
244 244 ## of an authentication plugin even if it is disabled by its settings.
245 245 ## This could be useful if you are unable to log in to the system due to broken
246 246 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
247 247 ## module to log in again and fix the settings.
248 248 ##
249 249 ## Available builtin plugin IDs (hash is part of the ID):
250 250 ## egg:rhodecode-enterprise-ce#rhodecode
251 251 ## egg:rhodecode-enterprise-ce#pam
252 252 ## egg:rhodecode-enterprise-ce#ldap
253 253 ## egg:rhodecode-enterprise-ce#jasig_cas
254 254 ## egg:rhodecode-enterprise-ce#headers
255 255 ## egg:rhodecode-enterprise-ce#crowd
256 256 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
257 257
258 258 ## alternative return HTTP header for failed authentication. Default HTTP
259 259 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble with
260 260 ## handling that, causing a series of failed authentication calls.
261 261 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
262 262 ## This will be served instead of the default 401 on bad authentication
263 263 auth_ret_code =
264 264
265 265 ## use special detection method when serving auth_ret_code, instead of serving
266 266 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
267 267 ## and then serve auth_ret_code to clients
268 268 auth_ret_code_detection = false
269 269
270 270 ## locking return code. When repository is locked return this HTTP code. 2XX
271 271 ## codes don't break the transactions while 4XX codes do
272 272 lock_ret_code = 423
273 273
274 274 ## allows changing the repository location in the settings page
275 275 allow_repo_location_change = true
276 276
277 277 ## allows setting up custom hooks in the settings page
278 278 allow_custom_hooks_settings = true
279 279
280 280 ## generated license token, go to the license page in RhodeCode settings to obtain
281 281 ## new token
282 282 license_token =
283 283
284 284 ## supervisor connection uri, for managing supervisor and logs.
285 285 supervisor.uri =
286 286 ## supervisord group name/id we only want this RC instance to handle
287 287 supervisor.group_id = dev
288 288
289 289 ## Display extended labs settings
290 290 labs_settings_active = true
291 291
292 292 ####################################
293 293 ### CELERY CONFIG ####
294 294 ####################################
295 295 ## run: /path/to/celery worker \
296 296 ## -E --beat --app rhodecode.lib.celerylib.loader \
297 297 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
298 298 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
299 299
300 300 use_celery = false
301 301
302 302 ## connection url to the message broker (default rabbitmq)
303 303 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
304 304
305 305 ## maximum tasks to execute before worker restart
306 306 celery.max_tasks_per_child = 100
307 307
308 308 ## tasks will never be sent to the queue, but executed locally instead.
309 309 celery.task_always_eager = false
310 310
311 311 #####################################
312 312 ### DOGPILE CACHE ####
313 313 #####################################
314 314 ## Default cache dir for caches. Putting this into a ramdisk
315 315 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
316 316 ## of space
317 317 cache_dir = /tmp/rcdev/data
318 318
319 319 ## cache settings for permission tree, auth TTL.
320 320 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
321 321 rc_cache.cache_perms.expiration_time = 300
322 322 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
323 323
324 324 ## redis backend with distributed locks
325 325 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
326 326 #rc_cache.cache_perms.expiration_time = 300
327 327 #rc_cache.cache_perms.arguments.host = localhost
328 328 #rc_cache.cache_perms.arguments.port = 6379
329 329 #rc_cache.cache_perms.arguments.db = 0
330 330 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
331 331 #rc_cache.cache_perms.arguments.distributed_lock = true
332 332
333 333
334 334 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
335 335 rc_cache.cache_repo.expiration_time = 2592000
336 336 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
337 337
338 338 ## redis backend with distributed locks
339 339 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
340 340 #rc_cache.cache_repo.expiration_time = 2592000
341 341 ## this needs to be greater than expiration_time
342 342 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
343 343 #rc_cache.cache_repo.arguments.host = localhost
344 344 #rc_cache.cache_repo.arguments.port = 6379
345 345 #rc_cache.cache_repo.arguments.db = 1
346 346 #rc_cache.cache_repo.arguments.distributed_lock = true
347 347
348 ## cache settings for SQL queries
349 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
350 rc_cache.sql_cache_short.expiration_time = 30
351
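The three added lines above are the core of this commit: the short-lived SQL cache region now goes through dogpile.cache instead of Beaker. As a rough illustration of what such a region does, here is a minimal sketch using the stock dogpile.cache memory backend; the `dogpile.cache.rc.memory_lru` backend named in the ini is RhodeCode's own variant, and the query function below is made up for the example:

```python
# Minimal sketch of a dogpile.cache region matching the ini settings above.
# Assumptions: the stock `dogpile.cache.memory` backend stands in for RhodeCode's
# `dogpile.cache.rc.memory_lru`, and `load_user_by_id` is a made-up query function.
from dogpile.cache import make_region

sql_cache_short = make_region().configure(
    'dogpile.cache.memory',   # per-process in-memory backend
    expiration_time=30,       # rc_cache.sql_cache_short.expiration_time
)

def load_user_by_id(user_id):
    def _query():
        # pretend this runs a SELECT via SQLAlchemy
        return {'user_id': user_id, 'username': 'example'}
    # runs _query once, then serves the cached value until it is 30 seconds old
    return sql_cache_short.get_or_create('user_%s' % user_id, _query)
```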
348 352
349 353 ####################################
350 354 ### BEAKER CACHE ####
351 355 ####################################
352 356
353 357 ## locking and default file storage for Beaker. Putting this into a ramdisk
354 358 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
355 359 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
356 360 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
357 361
358 beaker.cache.regions = long_term, sql_cache_short
362 beaker.cache.regions = long_term
359 363
360 364 beaker.cache.long_term.type = memorylru_base
361 365 beaker.cache.long_term.expire = 172800
362 366 beaker.cache.long_term.key_length = 256
363 367
364 beaker.cache.sql_cache_short.type = memorylru_base
365 beaker.cache.sql_cache_short.expire = 10
366 beaker.cache.sql_cache_short.key_length = 256
367
368 368
369 369 ####################################
370 370 ### BEAKER SESSION ####
371 371 ####################################
372 372
373 373 ## .session.type is the type of storage used for the session, currently allowed
374 374 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
375 375 beaker.session.type = file
376 376 beaker.session.data_dir = %(here)s/data/sessions
377 377
378 378 ## db based session, fast, and allows easy management over logged in users
379 379 #beaker.session.type = ext:database
380 380 #beaker.session.table_name = db_session
381 381 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
382 382 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
383 383 #beaker.session.sa.pool_recycle = 3600
384 384 #beaker.session.sa.echo = false
385 385
386 386 beaker.session.key = rhodecode
387 387 beaker.session.secret = develop-rc-uytcxaz
388 388 beaker.session.lock_dir = %(here)s/data/sessions/lock
389 389
390 390 ## Secure encrypted cookie. Requires AES and AES python libraries
391 391 ## you must disable beaker.session.secret to use this
392 392 #beaker.session.encrypt_key = key_for_encryption
393 393 #beaker.session.validate_key = validation_key
394 394
395 395 ## sets session as invalid (also logging out the user) if it has not been
396 396 ## accessed for the given amount of time in seconds
397 397 beaker.session.timeout = 2592000
398 398 beaker.session.httponly = true
399 399 ## Path to use for the cookie. Set to prefix if you use prefix middleware
400 400 #beaker.session.cookie_path = /custom_prefix
401 401
402 402 ## uncomment for https secure cookie
403 403 beaker.session.secure = false
404 404
405 405 ## auto save the session so you don't have to call .save()
406 406 beaker.session.auto = false
407 407
408 408 ## default cookie expiration time in seconds, set to `true` to set expire
409 409 ## at browser close
410 410 #beaker.session.cookie_expires = 3600
411 411
412 412 ###################################
413 413 ## SEARCH INDEXING CONFIGURATION ##
414 414 ###################################
415 415 ## Full text search indexer is available in rhodecode-tools under
416 416 ## `rhodecode-tools index` command
417 417
418 418 ## WHOOSH Backend, doesn't require additional services to run
419 419 ## it works well with a few dozen repos
420 420 search.module = rhodecode.lib.index.whoosh
421 421 search.location = %(here)s/data/index
422 422
423 423 ########################################
424 424 ### CHANNELSTREAM CONFIG ####
425 425 ########################################
426 426 ## channelstream enables persistent connections and live notification
427 427 ## in the system. It's also used by the chat system
428 428 channelstream.enabled = false
429 429
430 430 ## server address for channelstream server on the backend
431 431 channelstream.server = 127.0.0.1:9800
432 432
433 433 ## location of the channelstream server from outside world
434 434 ## use ws:// for http or wss:// for https. This address needs to be handled
435 435 ## by external HTTP server such as Nginx or Apache
436 436 ## see nginx/apache configuration examples in our docs
437 437 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
438 438 channelstream.secret = secret
439 439 channelstream.history.location = %(here)s/channelstream_history
440 440
441 441 ## Internal application path that Javascript uses to connect into.
442 442 ## If you use proxy-prefix the prefix should be added before /_channelstream
443 443 channelstream.proxy_path = /_channelstream
444 444
445 445
446 446 ###################################
447 447 ## APPENLIGHT CONFIG ##
448 448 ###################################
449 449
450 450 ## Appenlight is tailored to work with RhodeCode, see
451 451 ## http://appenlight.com for details how to obtain an account
452 452
453 453 ## appenlight integration enabled
454 454 appenlight = false
455 455
456 456 appenlight.server_url = https://api.appenlight.com
457 457 appenlight.api_key = YOUR_API_KEY
458 458 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
459 459
460 460 # used for JS client
461 461 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
462 462
463 463 ## TWEAK AMOUNT OF INFO SENT HERE
464 464
465 465 ## enables 404 error logging (default False)
466 466 appenlight.report_404 = false
467 467
468 468 ## time in seconds after request is considered being slow (default 1)
469 469 appenlight.slow_request_time = 1
470 470
471 471 ## record slow requests in application
472 472 ## (needs to be enabled for slow datastore recording and time tracking)
473 473 appenlight.slow_requests = true
474 474
475 475 ## enable hooking to application loggers
476 476 appenlight.logging = true
477 477
478 478 ## minimum log level for log capture
479 479 appenlight.logging.level = WARNING
480 480
481 481 ## send logs only from erroneous/slow requests
482 482 ## (saves API quota for intensive logging)
483 483 appenlight.logging_on_error = false
484 484
485 485 ## list of additional keywords that should be grabbed from environ object
486 486 ## can be string with comma separated list of words in lowercase
487 487 ## (by default client will always send following info:
488 488 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
489 489 ## start with HTTP*). This list can be extended with additional keywords here
490 490 appenlight.environ_keys_whitelist =
491 491
492 492 ## list of keywords that should be blanked from request object
493 493 ## can be string with comma separated list of words in lowercase
494 494 ## (by default client will always blank keys that contain following words
495 495 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
496 496 ## this list can be extended with additional keywords set here
497 497 appenlight.request_keys_blacklist =
498 498
499 499 ## list of namespaces that should be ignored when gathering log entries
500 500 ## can be string with comma separated list of namespaces
501 501 ## (by default the client ignores own entries: appenlight_client.client)
502 502 appenlight.log_namespace_blacklist =
503 503
504 504
505 505 ################################################################################
506 506 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
507 507 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
508 508 ## execute malicious code after an exception is raised. ##
509 509 ################################################################################
510 510 #set debug = false
511 511
512 512
513 513 ##############
514 514 ## STYLING ##
515 515 ##############
516 516 debug_style = true
517 517
518 518 ###########################################
519 519 ### MAIN RHODECODE DATABASE CONFIG ###
520 520 ###########################################
521 521 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
522 522 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
523 523 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
524 524 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
525 525
526 526 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
527 527
528 528 # see sqlalchemy docs for other advanced settings
529 529
530 530 ## print the sql statements to output
531 531 sqlalchemy.db1.echo = false
532 532 ## recycle the connections after this amount of seconds
533 533 sqlalchemy.db1.pool_recycle = 3600
534 534 sqlalchemy.db1.convert_unicode = true
535 535
536 536 ## the number of connections to keep open inside the connection pool.
537 537 ## 0 indicates no limit
538 538 #sqlalchemy.db1.pool_size = 5
539 539
540 540 ## the number of connections to allow in connection pool "overflow", that is
541 541 ## connections that can be opened above and beyond the pool_size setting,
542 542 ## which defaults to five.
543 543 #sqlalchemy.db1.max_overflow = 10
544 544
545 545 ## Connection check ping, used to detect broken database connections
546 546 ## could be enabled to better handle 'MySQL has gone away' errors
547 547 #sqlalchemy.db1.ping_connection = true
548 548
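For reference, the `sqlalchemy.db1.*` keys above map onto standard SQLAlchemy engine arguments. A minimal sketch, assuming a plain `create_engine` call and a shortened SQLite path (RhodeCode builds the engine from the ini itself, so this is only an illustration):

```python
# Rough Python equivalent of the sqlalchemy.db1.* settings above; RhodeCode
# builds its engine from the ini, so this block is only an illustration.
from sqlalchemy import create_engine

engine = create_engine(
    'sqlite:///rhodecode.db',  # sqlalchemy.db1.url, with %(here)s shortened away
    echo=False,                # sqlalchemy.db1.echo
    pool_recycle=3600,         # sqlalchemy.db1.pool_recycle
    # pool_size=5,             # sqlalchemy.db1.pool_size (commented out above)
    # max_overflow=10,         # sqlalchemy.db1.max_overflow (commented out above)
)
```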
549 549 ##################
550 550 ### VCS CONFIG ###
551 551 ##################
552 552 vcs.server.enable = true
553 553 vcs.server = localhost:9900
554 554
555 555 ## Web server connectivity protocol, responsible for web based VCS operations
556 556 ## Available protocols are:
557 557 ## `http` - use http-rpc backend (default)
558 558 vcs.server.protocol = http
559 559
560 560 ## Push/Pull operations protocol, available options are:
561 561 ## `http` - use http-rpc backend (default)
562 562 ##
563 563 vcs.scm_app_implementation = http
564 564
565 565 ## Push/Pull operations hooks protocol, available options are:
566 566 ## `http` - use http-rpc backend (default)
567 567 vcs.hooks.protocol = http
568 568
569 569 ## Host on which this instance is listening for hooks. If vcsserver is in other location
570 570 ## this should be adjusted.
571 571 vcs.hooks.host = 127.0.0.1
572 572
573 573 vcs.server.log_level = debug
574 574 ## Start VCSServer with this instance as a subprocess, useful for development
575 575 vcs.start_server = false
576 576
577 577 ## List of enabled VCS backends, available options are:
578 578 ## `hg` - mercurial
579 579 ## `git` - git
580 580 ## `svn` - subversion
581 581 vcs.backends = hg, git, svn
582 582
583 583 vcs.connection_timeout = 3600
584 584 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
585 585 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
586 586 #vcs.svn.compatible_version = pre-1.8-compatible
587 587
588 588
589 589 ############################################################
590 590 ### Subversion proxy support (mod_dav_svn) ###
591 591 ### Maps RhodeCode repo groups into SVN paths for Apache ###
592 592 ############################################################
593 593 ## Enable or disable the config file generation.
594 594 svn.proxy.generate_config = false
595 595 ## Generate config file with `SVNListParentPath` set to `On`.
596 596 svn.proxy.list_parent_path = true
597 597 ## Set location and file name of generated config file.
598 598 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
599 599 ## alternative mod_dav config template. This needs to be a mako template
600 600 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
601 601 ## Used as a prefix to the `Location` block in the generated config file.
602 602 ## In most cases it should be set to `/`.
603 603 svn.proxy.location_root = /
604 604 ## Command to reload the mod dav svn configuration on change.
605 605 ## Example: `/etc/init.d/apache2 reload`
606 606 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
607 607 ## If the timeout expires before the reload command finishes, the command will
608 608 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
609 609 #svn.proxy.reload_timeout = 10
610 610
611 611 ############################################################
612 612 ### SSH Support Settings ###
613 613 ############################################################
614 614
615 615 ## Defines if a custom authorized_keys file should be created and written on
616 616 ## any change of user ssh keys. Setting this to false also disables the possibility
617 617 ## of adding SSH keys by users from web interface. Super admins can still
618 618 ## manage SSH Keys.
619 619 ssh.generate_authorized_keyfile = false
620 620
621 621 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
622 622 # ssh.authorized_keys_ssh_opts =
623 623
624 624 ## Path to the authorized_keys file where the generated entries are placed.
625 625 ## It is possible to have multiple key files specified in `sshd_config` e.g.
626 626 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
627 627 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
628 628
629 629 ## Command to execute the SSH wrapper. The binary is available in the
630 630 ## rhodecode installation directory.
631 631 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
632 632 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
633 633
634 634 ## Allow shell when executing the ssh-wrapper command
635 635 ssh.wrapper_cmd_allow_shell = false
636 636
637 637 ## Enables logging, and detailed output sent back to the client during SSH
638 638 ## operations. Useful for debugging, shouldn't be used in production.
639 639 ssh.enable_debug_logging = true
640 640
641 641 ## Paths to binary executables, by default they are just the names, but we can
642 642 ## override them if we want to use custom ones
643 643 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
644 644 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
645 645 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
646 646
647 647
648 648 ## Dummy marker to add new entries after.
649 649 ## Add any custom entries below. Please don't remove.
650 650 custom.conf = 1
651 651
652 652
653 653 ################################
654 654 ### LOGGING CONFIGURATION ####
655 655 ################################
656 656 [loggers]
657 657 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
658 658
659 659 [handlers]
660 660 keys = console, console_sql
661 661
662 662 [formatters]
663 663 keys = generic, color_formatter, color_formatter_sql
664 664
665 665 #############
666 666 ## LOGGERS ##
667 667 #############
668 668 [logger_root]
669 669 level = NOTSET
670 670 handlers = console
671 671
672 672 [logger_sqlalchemy]
673 673 level = INFO
674 674 handlers = console_sql
675 675 qualname = sqlalchemy.engine
676 676 propagate = 0
677 677
678 678 [logger_beaker]
679 679 level = DEBUG
680 680 handlers =
681 681 qualname = beaker.container
682 682 propagate = 1
683 683
684 684 [logger_rhodecode]
685 685 level = DEBUG
686 686 handlers =
687 687 qualname = rhodecode
688 688 propagate = 1
689 689
690 690 [logger_ssh_wrapper]
691 691 level = DEBUG
692 692 handlers =
693 693 qualname = ssh_wrapper
694 694 propagate = 1
695 695
696 696 [logger_celery]
697 697 level = DEBUG
698 698 handlers =
699 699 qualname = celery
700 700
701 701
702 702 ##############
703 703 ## HANDLERS ##
704 704 ##############
705 705
706 706 [handler_console]
707 707 class = StreamHandler
708 708 args = (sys.stderr, )
709 709 level = DEBUG
710 710 formatter = color_formatter
711 711
712 712 [handler_console_sql]
713 713 # "level = DEBUG" logs SQL queries and results.
714 714 # "level = INFO" logs SQL queries.
715 715 # "level = WARN" logs neither. (Recommended for production systems.)
716 716 class = StreamHandler
717 717 args = (sys.stderr, )
718 718 level = WARN
719 719 formatter = color_formatter_sql
720 720
721 721 ################
722 722 ## FORMATTERS ##
723 723 ################
724 724
725 725 [formatter_generic]
726 726 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
727 727 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
728 728 datefmt = %Y-%m-%d %H:%M:%S
729 729
730 730 [formatter_color_formatter]
731 731 class = rhodecode.lib.logging_formatter.ColorFormatter
732 732 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
733 733 datefmt = %Y-%m-%d %H:%M:%S
734 734
735 735 [formatter_color_formatter_sql]
736 736 class = rhodecode.lib.logging_formatter.ColorFormatterSql
737 737 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
738 738 datefmt = %Y-%m-%d %H:%M:%S
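The `[loggers]`, `[handlers]` and `[formatters]` sections above follow the stdlib `logging.config.fileConfig` layout, which is how paste-style ini files carry their logging setup. A small sketch of loading just that part, assuming the file is named `rhodecode.ini`; in normal operation `pserve`, `gunicorn --paste`, or `pyramid.paster.setup_logging` does this for you:

```python
# The [loggers]/[handlers]/[formatters] sections follow the stdlib fileConfig
# format, so they can be loaded on their own. The file name is an assumption;
# normally pserve / gunicorn --paste wires this up from the same ini.
import logging
from logging.config import fileConfig

fileConfig('rhodecode.ini', disable_existing_loggers=False)
logging.getLogger('rhodecode').debug('logging configured from the ini')
```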
@@ -1,707 +1,707 @@
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all email subjects with a given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address from which all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send an error email from this address
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 127.0.0.1
47 47 port = 5000
48 48
49 49 ##################################
50 50 ## WAITRESS WSGI SERVER ##
51 51 ## Recommended for Development ##
52 52 ##################################
53 53
54 54 #use = egg:waitress#main
55 55 ## number of worker threads
56 56 #threads = 5
57 57 ## MAX BODY SIZE 100GB
58 58 #max_request_body_size = 107374182400
59 59 ## Use poll instead of select, fixes file descriptor limit problems.
60 60 ## May not work on old windows systems.
61 61 #asyncore_use_poll = true
62 62
63 63
64 64 ##########################
65 65 ## GUNICORN WSGI SERVER ##
66 66 ##########################
67 67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68 68
69 69 use = egg:gunicorn#main
70 70 ## Sets the number of process workers. You must set `instance_id = *`
71 71 ## when this option is set to more than one worker, recommended
72 72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 73 ## The `instance_id = *` must be set in the [app:main] section below
74 74 workers = 2
75 75 ## number of threads for each of the workers, must be set to 1 for gevent
76 76 ## generally recommended to be at 1
77 77 #threads = 1
78 78 ## process name
79 79 proc_name = rhodecode
80 80 ## type of worker class, one of sync, gevent
81 81 ## for bigger setups it is recommended to use a worker class other than sync
82 82 worker_class = gevent
83 83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 84 #worker_connections = 10
85 85 ## max number of requests that worker will handle before being gracefully
86 86 ## restarted, could prevent memory leaks
87 87 max_requests = 1000
88 88 max_requests_jitter = 30
89 89 ## amount of time a worker can spend handling a request before it
90 90 ## gets killed and restarted. Set to 6hrs
91 91 timeout = 21600
92 92
93 93
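The comments above recommend sizing workers as (2 * NUMBER_OF_CPUS + 1), while this production file hard-codes `workers = 2`. A hypothetical gunicorn config file that computes the value instead (the file name and this approach are assumptions, not part of this commit); remember that `instance_id = *` must be set in `[app:main]` whenever more than one worker runs:

```python
# gunicorn_conf.py -- hypothetical alternative to the hard-coded `workers = 2`
# above, following the (2 * NUMBER_OF_CPUS + 1) rule of thumb from the comments.
# Run as: gunicorn -c gunicorn_conf.py --paste rhodecode.ini
import multiprocessing

workers = multiprocessing.cpu_count() * 2 + 1
worker_class = 'gevent'
threads = 1                  # must stay at 1 for gevent, as noted above
max_requests = 1000
max_requests_jitter = 30
timeout = 21600
proc_name = 'rhodecode'
```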
94 94 ## prefix middleware for RhodeCode.
95 95 ## recommended when using proxy setup.
96 96 ## allows serving RhodeCode under a URL prefix on the server.
97 97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 98 ## And set your prefix like: `prefix = /custom_prefix`
99 99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 100 ## to make your cookies only work on prefix url
101 101 [filter:proxy-prefix]
102 102 use = egg:PasteDeploy#prefix
103 103 prefix = /
104 104
105 105 [app:main]
106 106 use = egg:rhodecode-enterprise-ce
107 107
108 108 ## enable proxy prefix middleware, defined above
109 109 #filter-with = proxy-prefix
110 110
111 111 ## encryption key used to encrypt social plugin tokens,
112 112 ## remote_urls with credentials etc, if not set it defaults to
113 113 ## `beaker.session.secret`
114 114 #rhodecode.encrypted_values.secret =
115 115
116 116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 118 #rhodecode.encrypted_values.strict = false
119 119
120 120 ## return gzipped responses from Rhodecode (static files/application)
121 121 gzip_responses = false
122 122
123 123 ## autogenerate javascript routes file on startup
124 124 generate_js_files = false
125 125
126 126 ## Optional Languages
127 127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 128 lang = en
129 129
130 130 ## perform a full repository scan on each server start, this should be
131 131 ## set to false after first startup, to allow faster server restarts.
132 132 startup.import_repos = false
133 133
134 134 ## Uncomment and set this path to use archive download cache.
135 135 ## Once enabled, generated archives will be cached at this location
136 136 ## and served from the cache during subsequent requests for the same archive of
137 137 ## the repository.
138 138 #archive_cache_dir = /tmp/tarballcache
139 139
140 140 ## URL at which the application is running. This is used for bootstrapping
141 141 ## requests in context when no web request is available. Used in ishell, or
142 142 ## SSH calls. Set this for events to receive proper url for SSH calls.
143 143 app.base_url = http://rhodecode.local
144 144
145 145 ## change this to unique ID for security
146 146 app_instance_uuid = rc-production
147 147
148 148 ## cut off limit for large diffs (size in bytes). If overall diff size on
149 149 ## commit, or pull request exceeds this limit this diff will be displayed
150 150 ## partially. E.g 512000 == 512Kb
151 151 cut_off_limit_diff = 512000
152 152
153 153 ## cut off limit for large files inside diffs (size in bytes). Each individual
154 154 ## file inside diff which exceeds this limit will be displayed partially.
155 155 ## E.g 128000 == 128Kb
156 156 cut_off_limit_file = 128000
157 157
158 158 ## use cached version of the scm repo everywhere
159 159 vcs_full_cache = true
160 160
161 161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
162 162 ## Normally this is controlled by proper http flags sent from http server
163 163 force_https = false
164 164
165 165 ## use Strict-Transport-Security headers
166 166 use_htsts = false
167 167
168 168 ## git rev filter option, --all is the default filter, if you need to
169 169 ## hide all refs in changelog switch this to --branches --tags
170 170 git_rev_filter = --branches --tags
171 171
172 172 # Set to true if your repos are exposed using the dumb protocol
173 173 git_update_server_info = false
174 174
175 175 ## RSS/ATOM feed options
176 176 rss_cut_off_limit = 256000
177 177 rss_items_per_page = 10
178 178 rss_include_diff = false
179 179
180 180 ## gist URL alias, used to create nicer urls for gist. This should be an
181 181 ## url that does rewrites to _admin/gists/{gistid}.
182 182 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
183 183 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
184 184 gist_alias_url =
185 185
186 186 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
187 187 ## used for access.
188 188 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
189 189 ## came from the logged in user who owns this authentication token.
190 190 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
191 191 ## authentication token. Such a view would only be accessible when used together
192 192 ## with this authentication token
193 193 ##
194 194 ## list of all views can be found under `/_admin/permissions/auth_token_access`
195 195 ## The list should be "," separated and on a single line.
196 196 ##
197 197 ## Most common views to enable:
198 198 # RepoCommitsView:repo_commit_download
199 199 # RepoCommitsView:repo_commit_patch
200 200 # RepoCommitsView:repo_commit_raw
201 201 # RepoCommitsView:repo_commit_raw@TOKEN
202 202 # RepoFilesView:repo_files_diff
203 203 # RepoFilesView:repo_archivefile
204 204 # RepoFilesView:repo_file_raw
205 205 # GistView:*
206 206 api_access_controllers_whitelist =
207 207
208 208 ## default encoding used to convert from and to unicode
209 209 ## can also be a comma separated list of encodings in case of mixed encodings
210 210 default_encoding = UTF-8
211 211
212 212 ## instance-id prefix
213 213 ## a prefix key for this instance used for cache invalidation when running
214 214 ## multiple instances of rhodecode, make sure it's globally unique for
215 215 ## all running rhodecode instances. Leave empty if you don't use it
216 216 instance_id =
217 217
218 218 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
219 219 ## of an authentication plugin even if it is disabled by its settings.
220 220 ## This could be useful if you are unable to log in to the system due to broken
221 221 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
222 222 ## module to log in again and fix the settings.
223 223 ##
224 224 ## Available builtin plugin IDs (hash is part of the ID):
225 225 ## egg:rhodecode-enterprise-ce#rhodecode
226 226 ## egg:rhodecode-enterprise-ce#pam
227 227 ## egg:rhodecode-enterprise-ce#ldap
228 228 ## egg:rhodecode-enterprise-ce#jasig_cas
229 229 ## egg:rhodecode-enterprise-ce#headers
230 230 ## egg:rhodecode-enterprise-ce#crowd
231 231 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
232 232
233 233 ## alternative return HTTP header for failed authentication. Default HTTP
234 234 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble with
235 235 ## handling that, causing a series of failed authentication calls.
236 236 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
237 237 ## This will be served instead of the default 401 on bad authentication
238 238 auth_ret_code =
239 239
240 240 ## use special detection method when serving auth_ret_code, instead of serving
241 241 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
242 242 ## and then serve auth_ret_code to clients
243 243 auth_ret_code_detection = false
244 244
245 245 ## locking return code. When repository is locked return this HTTP code. 2XX
246 246 ## codes don't break the transactions while 4XX codes do
247 247 lock_ret_code = 423
248 248
249 249 ## allows changing the repository location in the settings page
250 250 allow_repo_location_change = true
251 251
252 252 ## allows setting up custom hooks in the settings page
253 253 allow_custom_hooks_settings = true
254 254
255 255 ## generated license token, go to the license page in RhodeCode settings to obtain
256 256 ## new token
257 257 license_token =
258 258
259 259 ## supervisor connection uri, for managing supervisor and logs.
260 260 supervisor.uri =
261 261 ## supervisord group name/id we only want this RC instance to handle
262 262 supervisor.group_id = prod
263 263
264 264 ## Display extended labs settings
265 265 labs_settings_active = true
266 266
267 267 ####################################
268 268 ### CELERY CONFIG ####
269 269 ####################################
270 270 ## run: /path/to/celery worker \
271 271 ## -E --beat --app rhodecode.lib.celerylib.loader \
272 272 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
273 273 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
274 274
275 275 use_celery = false
276 276
277 277 ## connection url to the message broker (default rabbitmq)
278 278 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
279 279
280 280 ## maximum tasks to execute before worker restart
281 281 celery.max_tasks_per_child = 100
282 282
283 283 ## tasks will never be sent to the queue, but executed locally instead.
284 284 celery.task_always_eager = false
285 285
286 286 #####################################
287 287 ### DOGPILE CACHE ####
288 288 #####################################
289 289 ## Default cache dir for caches. Putting this into a ramdisk
290 290 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
291 291 ## of space
292 292 cache_dir = /tmp/rcdev/data
293 293
294 294 ## cache settings for permission tree, auth TTL.
295 295 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
296 296 rc_cache.cache_perms.expiration_time = 300
297 297 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
298 298
299 299 ## redis backend with distributed locks
300 300 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
301 301 #rc_cache.cache_perms.expiration_time = 300
302 302 #rc_cache.cache_perms.arguments.host = localhost
303 303 #rc_cache.cache_perms.arguments.port = 6379
304 304 #rc_cache.cache_perms.arguments.db = 0
305 305 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
306 306 #rc_cache.cache_perms.arguments.distributed_lock = true
307 307
308 308
309 309 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
310 310 rc_cache.cache_repo.expiration_time = 2592000
311 311 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
312 312
313 313 ## redis backend with distributed locks
314 314 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
315 315 #rc_cache.cache_repo.expiration_time = 2592000
316 316 ## this needs to be greater than expiration_time
317 317 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
318 318 #rc_cache.cache_repo.arguments.host = localhost
319 319 #rc_cache.cache_repo.arguments.port = 6379
320 320 #rc_cache.cache_repo.arguments.db = 1
321 321 #rc_cache.cache_repo.arguments.distributed_lock = true
322 322
323 ## cache settings for SQL queries
324 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
325 rc_cache.sql_cache_short.expiration_time = 30
326
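The same `sql_cache_short` region is added to this production config. As a companion to the `get_or_create` sketch after the first file, here is the decorator form of the same idea, again with the stock dogpile.cache memory backend standing in for `dogpile.cache.rc.memory_lru` and a made-up query function:

```python
# Decorator form of the same sql_cache_short idea; the stock memory backend
# and the `get_setting` query are stand-ins, not RhodeCode internals.
from dogpile.cache import make_region

sql_cache_short = make_region().configure(
    'dogpile.cache.memory', expiration_time=30)

@sql_cache_short.cache_on_arguments()
def get_setting(name):
    # pretend this is a SELECT against a settings table
    return {'name': name, 'value': 'example'}

get_setting('title')             # first call populates the cache
get_setting('title')             # served from cache for up to ~30 seconds
get_setting.invalidate('title')  # drop the cached value explicitly
```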
323 327
324 328 ####################################
325 329 ### BEAKER CACHE ####
326 330 ####################################
327 331
328 332 ## locking and default file storage for Beaker. Putting this into a ramdisk
329 333 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
330 334 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
331 335 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
332 336
333 beaker.cache.regions = long_term, sql_cache_short
337 beaker.cache.regions = long_term
334 338
335 339 beaker.cache.long_term.type = memory
336 340 beaker.cache.long_term.expire = 172800
337 341 beaker.cache.long_term.key_length = 256
338 342
339 beaker.cache.sql_cache_short.type = memory
340 beaker.cache.sql_cache_short.expire = 10
341 beaker.cache.sql_cache_short.key_length = 256
342
343 343
344 344 ####################################
345 345 ### BEAKER SESSION ####
346 346 ####################################
347 347
348 348 ## .session.type is the type of storage used for the session, currently allowed
349 349 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
350 350 beaker.session.type = file
351 351 beaker.session.data_dir = %(here)s/data/sessions
352 352
353 353 ## db based session, fast, and allows easy management over logged in users
354 354 #beaker.session.type = ext:database
355 355 #beaker.session.table_name = db_session
356 356 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
357 357 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
358 358 #beaker.session.sa.pool_recycle = 3600
359 359 #beaker.session.sa.echo = false
360 360
361 361 beaker.session.key = rhodecode
362 362 beaker.session.secret = production-rc-uytcxaz
363 363 beaker.session.lock_dir = %(here)s/data/sessions/lock
364 364
365 365 ## Secure encrypted cookie. Requires AES and AES python libraries
366 366 ## you must disable beaker.session.secret to use this
367 367 #beaker.session.encrypt_key = key_for_encryption
368 368 #beaker.session.validate_key = validation_key
369 369
370 370 ## sets session as invalid (also logging out the user) if it has not been
371 371 ## accessed for the given amount of time in seconds
372 372 beaker.session.timeout = 2592000
373 373 beaker.session.httponly = true
374 374 ## Path to use for the cookie. Set to prefix if you use prefix middleware
375 375 #beaker.session.cookie_path = /custom_prefix
376 376
377 377 ## uncomment for https secure cookie
378 378 beaker.session.secure = false
379 379
380 380 ## auto save the session so you don't have to call .save()
381 381 beaker.session.auto = false
382 382
383 383 ## default cookie expiration time in seconds, set to `true` to set expire
384 384 ## at browser close
385 385 #beaker.session.cookie_expires = 3600
386 386
387 387 ###################################
388 388 ## SEARCH INDEXING CONFIGURATION ##
389 389 ###################################
390 390 ## Full text search indexer is available in rhodecode-tools under
391 391 ## `rhodecode-tools index` command
392 392
393 393 ## WHOOSH Backend, doesn't require additional services to run
394 394 ## it works well with a few dozen repos
395 395 search.module = rhodecode.lib.index.whoosh
396 396 search.location = %(here)s/data/index
397 397
398 398 ########################################
399 399 ### CHANNELSTREAM CONFIG ####
400 400 ########################################
401 401 ## channelstream enables persistent connections and live notification
402 402 ## in the system. It's also used by the chat system
403 403 channelstream.enabled = false
404 404
405 405 ## server address for channelstream server on the backend
406 406 channelstream.server = 127.0.0.1:9800
407 407
408 408 ## location of the channelstream server from outside world
409 409 ## use ws:// for http or wss:// for https. This address needs to be handled
410 410 ## by external HTTP server such as Nginx or Apache
411 411 ## see nginx/apache configuration examples in our docs
412 412 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
413 413 channelstream.secret = secret
414 414 channelstream.history.location = %(here)s/channelstream_history
415 415
416 416 ## Internal application path that Javascript uses to connect into.
417 417 ## If you use proxy-prefix the prefix should be added before /_channelstream
418 418 channelstream.proxy_path = /_channelstream
419 419
420 420
421 421 ###################################
422 422 ## APPENLIGHT CONFIG ##
423 423 ###################################
424 424
425 425 ## Appenlight is tailored to work with RhodeCode, see
426 426 ## http://appenlight.com for details how to obtain an account
427 427
428 428 ## appenlight integration enabled
429 429 appenlight = false
430 430
431 431 appenlight.server_url = https://api.appenlight.com
432 432 appenlight.api_key = YOUR_API_KEY
433 433 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
434 434
435 435 # used for JS client
436 436 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
437 437
438 438 ## TWEAK AMOUNT OF INFO SENT HERE
439 439
440 440 ## enables 404 error logging (default False)
441 441 appenlight.report_404 = false
442 442
443 443 ## time in seconds after request is considered being slow (default 1)
444 444 appenlight.slow_request_time = 1
445 445
446 446 ## record slow requests in application
447 447 ## (needs to be enabled for slow datastore recording and time tracking)
448 448 appenlight.slow_requests = true
449 449
450 450 ## enable hooking to application loggers
451 451 appenlight.logging = true
452 452
453 453 ## minimum log level for log capture
454 454 appenlight.logging.level = WARNING
455 455
456 456 ## send logs only from erroneous/slow requests
457 457 ## (saves API quota for intensive logging)
458 458 appenlight.logging_on_error = false
459 459
460 460 ## list of additional keywords that should be grabbed from environ object
461 461 ## can be string with comma separated list of words in lowercase
462 462 ## (by default client will always send following info:
463 463 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
464 464 ## start with HTTP*). This list can be extended with additional keywords here
465 465 appenlight.environ_keys_whitelist =
466 466
467 467 ## list of keywords that should be blanked from request object
468 468 ## can be string with comma separated list of words in lowercase
469 469 ## (by default client will always blank keys that contain following words
470 470 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
471 471 ## this list can be extended with additional keywords set here
472 472 appenlight.request_keys_blacklist =
473 473
474 474 ## list of namespaces that should be ignored when gathering log entries
475 475 ## can be string with comma separated list of namespaces
476 476 ## (by default the client ignores own entries: appenlight_client.client)
477 477 appenlight.log_namespace_blacklist =
478 478
479 479
480 480 ################################################################################
481 481 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
482 482 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
483 483 ## execute malicious code after an exception is raised. ##
484 484 ################################################################################
485 485 set debug = false
486 486
487 487
488 488 ###########################################
489 489 ### MAIN RHODECODE DATABASE CONFIG ###
490 490 ###########################################
491 491 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
492 492 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
493 493 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
494 494 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
495 495
496 496 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
497 497
498 498 # see sqlalchemy docs for other advanced settings
499 499
500 500 ## print the sql statements to output
501 501 sqlalchemy.db1.echo = false
502 502 ## recycle the connections after this amount of seconds
503 503 sqlalchemy.db1.pool_recycle = 3600
504 504 sqlalchemy.db1.convert_unicode = true
505 505
506 506 ## the number of connections to keep open inside the connection pool.
507 507 ## 0 indicates no limit
508 508 #sqlalchemy.db1.pool_size = 5
509 509
510 510 ## the number of connections to allow in connection pool "overflow", that is
511 511 ## connections that can be opened above and beyond the pool_size setting,
512 512 ## which defaults to five.
513 513 #sqlalchemy.db1.max_overflow = 10
514 514
515 515 ## Connection check ping, used to detect broken database connections
516 516 ## could be enabled to better handle 'MySQL has gone away' errors
517 517 #sqlalchemy.db1.ping_connection = true
518 518
519 519 ##################
520 520 ### VCS CONFIG ###
521 521 ##################
522 522 vcs.server.enable = true
523 523 vcs.server = localhost:9900
524 524
525 525 ## Web server connectivity protocol, responsible for web based VCS operations
526 526 ## Available protocols are:
527 527 ## `http` - use http-rpc backend (default)
528 528 vcs.server.protocol = http
529 529
530 530 ## Push/Pull operations protocol, available options are:
531 531 ## `http` - use http-rpc backend (default)
532 532 ##
533 533 vcs.scm_app_implementation = http
534 534
535 535 ## Push/Pull operations hooks protocol, available options are:
536 536 ## `http` - use http-rpc backend (default)
537 537 vcs.hooks.protocol = http
538 538 ## Host on which this instance is listening for hooks. If vcsserver is in other location
539 539 ## this should be adjusted.
540 540 vcs.hooks.host = 127.0.0.1
541 541
542 542 vcs.server.log_level = info
543 543 ## Start VCSServer with this instance as a subprocess, useful for development
544 544 vcs.start_server = false
545 545
546 546 ## List of enabled VCS backends, available options are:
547 547 ## `hg` - mercurial
548 548 ## `git` - git
549 549 ## `svn` - subversion
550 550 vcs.backends = hg, git, svn
551 551
552 552 vcs.connection_timeout = 3600
553 553 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
554 554 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
555 555 #vcs.svn.compatible_version = pre-1.8-compatible
556 556
557 557
558 558 ############################################################
559 559 ### Subversion proxy support (mod_dav_svn) ###
560 560 ### Maps RhodeCode repo groups into SVN paths for Apache ###
561 561 ############################################################
562 562 ## Enable or disable the config file generation.
563 563 svn.proxy.generate_config = false
564 564 ## Generate config file with `SVNListParentPath` set to `On`.
565 565 svn.proxy.list_parent_path = true
566 566 ## Set location and file name of generated config file.
567 567 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
568 568 ## alternative mod_dav config template. This needs to be a mako template
569 569 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
570 570 ## Used as a prefix to the `Location` block in the generated config file.
571 571 ## In most cases it should be set to `/`.
572 572 svn.proxy.location_root = /
573 573 ## Command to reload the mod_dav_svn configuration on change.
574 574 ## Example: `/etc/init.d/apache2 reload`
575 575 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
576 576 ## If the timeout expires before the reload command finishes, the command will
577 577 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
578 578 #svn.proxy.reload_timeout = 10
579 579
580 580 ############################################################
581 581 ### SSH Support Settings ###
582 582 ############################################################
583 583
584 584 ## Defines if a custom authorized_keys file should be created and written on
585 585 ## any change of user SSH keys. Setting this to false also disables the possibility
586 586 ## of users adding SSH keys from the web interface. Super admins can still
587 587 ## manage SSH keys.
588 588 ssh.generate_authorized_keyfile = false
589 589
590 590 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
591 591 # ssh.authorized_keys_ssh_opts =
592 592
593 593 ## Path to the authorized_keys file where the generated entries are placed.
594 594 ## It is possible to have multiple key files specified in `sshd_config` e.g.
595 595 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
596 596 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
597 597
598 598 ## Command to execute the SSH wrapper. The binary is available in the
599 599 ## rhodecode installation directory.
600 600 ## e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
601 601 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
602 602
603 603 ## Allow shell when executing the ssh-wrapper command
604 604 ssh.wrapper_cmd_allow_shell = false
605 605
606 606 ## Enables logging and detailed output sent back to the client during SSH
607 607 ## operations. Useful for debugging; shouldn't be used in production.
608 608 ssh.enable_debug_logging = false
609 609
610 610 ## Paths to the binary executables; by default these are just the binary names, but we can
611 611 ## override them if we want to use custom ones
612 612 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
613 613 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
614 614 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
615 615
616 616
617 617 ## Dummy marker to add new entries after.
618 618 ## Add any custom entries below. Please don't remove.
619 619 custom.conf = 1
620 620
621 621
622 622 ################################
623 623 ### LOGGING CONFIGURATION ####
624 624 ################################
625 625 [loggers]
626 626 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
627 627
628 628 [handlers]
629 629 keys = console, console_sql
630 630
631 631 [formatters]
632 632 keys = generic, color_formatter, color_formatter_sql
633 633
634 634 #############
635 635 ## LOGGERS ##
636 636 #############
637 637 [logger_root]
638 638 level = NOTSET
639 639 handlers = console
640 640
641 641 [logger_sqlalchemy]
642 642 level = INFO
643 643 handlers = console_sql
644 644 qualname = sqlalchemy.engine
645 645 propagate = 0
646 646
647 647 [logger_beaker]
648 648 level = DEBUG
649 649 handlers =
650 650 qualname = beaker.container
651 651 propagate = 1
652 652
653 653 [logger_rhodecode]
654 654 level = DEBUG
655 655 handlers =
656 656 qualname = rhodecode
657 657 propagate = 1
658 658
659 659 [logger_ssh_wrapper]
660 660 level = DEBUG
661 661 handlers =
662 662 qualname = ssh_wrapper
663 663 propagate = 1
664 664
665 665 [logger_celery]
666 666 level = DEBUG
667 667 handlers =
668 668 qualname = celery
669 669
670 670
671 671 ##############
672 672 ## HANDLERS ##
673 673 ##############
674 674
675 675 [handler_console]
676 676 class = StreamHandler
677 677 args = (sys.stderr, )
678 678 level = INFO
679 679 formatter = generic
680 680
681 681 [handler_console_sql]
682 682 # "level = DEBUG" logs SQL queries and results.
683 683 # "level = INFO" logs SQL queries.
684 684 # "level = WARN" logs neither. (Recommended for production systems.)
685 685 class = StreamHandler
686 686 args = (sys.stderr, )
687 687 level = WARN
688 688 formatter = generic
689 689
690 690 ################
691 691 ## FORMATTERS ##
692 692 ################
693 693
694 694 [formatter_generic]
695 695 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
696 696 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
697 697 datefmt = %Y-%m-%d %H:%M:%S
698 698
699 699 [formatter_color_formatter]
700 700 class = rhodecode.lib.logging_formatter.ColorFormatter
701 701 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
702 702 datefmt = %Y-%m-%d %H:%M:%S
703 703
704 704 [formatter_color_formatter_sql]
705 705 class = rhodecode.lib.logging_formatter.ColorFormatterSql
706 706 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
707 707 datefmt = %Y-%m-%d %H:%M:%S
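The [loggers], [handlers] and [formatters] sections above follow the standard library's logging.config.fileConfig format, which is what pserve / pyramid.paster.setup_logging load at startup. A minimal sketch of loading them by hand (it assumes the file is named rhodecode.ini and that the rhodecode package is importable, since the formatters reference rhodecode.lib.logging_formatter):

    import logging
    import logging.config

    # Reads only the logging sections of the ini; other sections are ignored.
    logging.config.fileConfig('rhodecode.ini', disable_existing_loggers=False)
    logging.getLogger('rhodecode').info('logging configured from the ini file')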
@@ -1,300 +1,299 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 from rhodecode.model.db import User, UserIpMap
24 24 from rhodecode.model.meta import Session
25 25 from rhodecode.model.permission import PermissionModel
26 26 from rhodecode.model.ssh_key import SshKeyModel
27 27 from rhodecode.tests import (
28 TestController, clear_all_caches, assert_session_flash)
28 TestController, clear_cache_regions, assert_session_flash)
29 29
30 30
31 31 def route_path(name, params=None, **kwargs):
32 32 import urllib
33 33 from rhodecode.apps._base import ADMIN_PREFIX
34 34
35 35 base_url = {
36 36 'edit_user_ips':
37 37 ADMIN_PREFIX + '/users/{user_id}/edit/ips',
38 38 'edit_user_ips_add':
39 39 ADMIN_PREFIX + '/users/{user_id}/edit/ips/new',
40 40 'edit_user_ips_delete':
41 41 ADMIN_PREFIX + '/users/{user_id}/edit/ips/delete',
42 42
43 43 'admin_permissions_application':
44 44 ADMIN_PREFIX + '/permissions/application',
45 45 'admin_permissions_application_update':
46 46 ADMIN_PREFIX + '/permissions/application/update',
47 47
48 48 'admin_permissions_global':
49 49 ADMIN_PREFIX + '/permissions/global',
50 50 'admin_permissions_global_update':
51 51 ADMIN_PREFIX + '/permissions/global/update',
52 52
53 53 'admin_permissions_object':
54 54 ADMIN_PREFIX + '/permissions/object',
55 55 'admin_permissions_object_update':
56 56 ADMIN_PREFIX + '/permissions/object/update',
57 57
58 58 'admin_permissions_ips':
59 59 ADMIN_PREFIX + '/permissions/ips',
60 60 'admin_permissions_overview':
61 61 ADMIN_PREFIX + '/permissions/overview',
62 62
63 63 'admin_permissions_ssh_keys':
64 64 ADMIN_PREFIX + '/permissions/ssh_keys',
65 65 'admin_permissions_ssh_keys_data':
66 66 ADMIN_PREFIX + '/permissions/ssh_keys/data',
67 67 'admin_permissions_ssh_keys_update':
68 68 ADMIN_PREFIX + '/permissions/ssh_keys/update'
69 69
70 70 }[name].format(**kwargs)
71 71
72 72 if params:
73 73 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
74 74 return base_url
75 75
76 76
77 77 class TestAdminPermissionsController(TestController):
78 78
79 79 @pytest.fixture(scope='class', autouse=True)
80 80 def prepare(self, request):
81 81 # cleanup and reset to default permissions after
82 82 @request.addfinalizer
83 83 def cleanup():
84 84 PermissionModel().create_default_user_permissions(
85 85 User.get_default_user(), force=True)
86 86
87 87 def test_index_application(self):
88 88 self.log_user()
89 89 self.app.get(route_path('admin_permissions_application'))
90 90
91 91 @pytest.mark.parametrize(
92 92 'anonymous, default_register, default_register_message, default_password_reset,'
93 93 'default_extern_activate, expect_error, expect_form_error', [
94 94 (True, 'hg.register.none', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
95 95 False, False),
96 96 (True, 'hg.register.manual_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.auto',
97 97 False, False),
98 98 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
99 99 False, False),
100 100 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
101 101 False, False),
102 102 (True, 'hg.register.XXX', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
103 103 False, True),
104 104 (True, '', '', 'hg.password_reset.enabled', '', True, False),
105 105 ])
106 106 def test_update_application_permissions(
107 107 self, anonymous, default_register, default_register_message, default_password_reset,
108 108 default_extern_activate, expect_error, expect_form_error):
109 109
110 110 self.log_user()
111 111
112 112 # TODO: anonymous access set here to False, breaks some other tests
113 113 params = {
114 114 'csrf_token': self.csrf_token,
115 115 'anonymous': anonymous,
116 116 'default_register': default_register,
117 117 'default_register_message': default_register_message,
118 118 'default_password_reset': default_password_reset,
119 119 'default_extern_activate': default_extern_activate,
120 120 }
121 121 response = self.app.post(route_path('admin_permissions_application_update'),
122 122 params=params)
123 123 if expect_form_error:
124 124 assert response.status_int == 200
125 125 response.mustcontain('Value must be one of')
126 126 else:
127 127 if expect_error:
128 128 msg = 'Error occurred during update of permissions'
129 129 else:
130 130 msg = 'Application permissions updated successfully'
131 131 assert_session_flash(response, msg)
132 132
133 133 def test_index_object(self):
134 134 self.log_user()
135 135 self.app.get(route_path('admin_permissions_object'))
136 136
137 137 @pytest.mark.parametrize(
138 138 'repo, repo_group, user_group, expect_error, expect_form_error', [
139 139 ('repository.none', 'group.none', 'usergroup.none', False, False),
140 140 ('repository.read', 'group.read', 'usergroup.read', False, False),
141 141 ('repository.write', 'group.write', 'usergroup.write',
142 142 False, False),
143 143 ('repository.admin', 'group.admin', 'usergroup.admin',
144 144 False, False),
145 145 ('repository.XXX', 'group.admin', 'usergroup.admin', False, True),
146 146 ('', '', '', True, False),
147 147 ])
148 148 def test_update_object_permissions(self, repo, repo_group, user_group,
149 149 expect_error, expect_form_error):
150 150 self.log_user()
151 151
152 152 params = {
153 153 'csrf_token': self.csrf_token,
154 154 'default_repo_perm': repo,
155 155 'overwrite_default_repo': False,
156 156 'default_group_perm': repo_group,
157 157 'overwrite_default_group': False,
158 158 'default_user_group_perm': user_group,
159 159 'overwrite_default_user_group': False,
160 160 }
161 161 response = self.app.post(route_path('admin_permissions_object_update'),
162 162 params=params)
163 163 if expect_form_error:
164 164 assert response.status_int == 200
165 165 response.mustcontain('Value must be one of')
166 166 else:
167 167 if expect_error:
168 168 msg = 'Error occurred during update of permissions'
169 169 else:
170 170 msg = 'Object permissions updated successfully'
171 171 assert_session_flash(response, msg)
172 172
173 173 def test_index_global(self):
174 174 self.log_user()
175 175 self.app.get(route_path('admin_permissions_global'))
176 176
177 177 @pytest.mark.parametrize(
178 178 'repo_create, repo_create_write, user_group_create, repo_group_create,'
179 179 'fork_create, inherit_default_permissions, expect_error,'
180 180 'expect_form_error', [
181 181 ('hg.create.none', 'hg.create.write_on_repogroup.false',
182 182 'hg.usergroup.create.false', 'hg.repogroup.create.false',
183 183 'hg.fork.none', 'hg.inherit_default_perms.false', False, False),
184 184 ('hg.create.repository', 'hg.create.write_on_repogroup.true',
185 185 'hg.usergroup.create.true', 'hg.repogroup.create.true',
186 186 'hg.fork.repository', 'hg.inherit_default_perms.false',
187 187 False, False),
188 188 ('hg.create.XXX', 'hg.create.write_on_repogroup.true',
189 189 'hg.usergroup.create.true', 'hg.repogroup.create.true',
190 190 'hg.fork.repository', 'hg.inherit_default_perms.false',
191 191 False, True),
192 192 ('', '', '', '', '', '', True, False),
193 193 ])
194 194 def test_update_global_permissions(
195 195 self, repo_create, repo_create_write, user_group_create,
196 196 repo_group_create, fork_create, inherit_default_permissions,
197 197 expect_error, expect_form_error):
198 198 self.log_user()
199 199
200 200 params = {
201 201 'csrf_token': self.csrf_token,
202 202 'default_repo_create': repo_create,
203 203 'default_repo_create_on_write': repo_create_write,
204 204 'default_user_group_create': user_group_create,
205 205 'default_repo_group_create': repo_group_create,
206 206 'default_fork_create': fork_create,
207 207 'default_inherit_default_permissions': inherit_default_permissions
208 208 }
209 209 response = self.app.post(route_path('admin_permissions_global_update'),
210 210 params=params)
211 211 if expect_form_error:
212 212 assert response.status_int == 200
213 213 response.mustcontain('Value must be one of')
214 214 else:
215 215 if expect_error:
216 216 msg = 'Error occurred during update of permissions'
217 217 else:
218 218 msg = 'Global permissions updated successfully'
219 219 assert_session_flash(response, msg)
220 220
221 221 def test_index_ips(self):
222 222 self.log_user()
223 223 response = self.app.get(route_path('admin_permissions_ips'))
224 # TODO: Test response...
225 224 response.mustcontain('All IP addresses are allowed')
226 225
227 226 def test_add_delete_ips(self):
227 clear_cache_regions(['sql_cache_short'])
228 228 self.log_user()
229 clear_all_caches()
230 229
231 230 # ADD
232 231 default_user_id = User.get_default_user().user_id
233 232 self.app.post(
234 233 route_path('edit_user_ips_add', user_id=default_user_id),
235 params={'new_ip': '127.0.0.0/24', 'csrf_token': self.csrf_token})
234 params={'new_ip': '0.0.0.0/24', 'csrf_token': self.csrf_token})
236 235
237 236 response = self.app.get(route_path('admin_permissions_ips'))
238 response.mustcontain('127.0.0.0/24')
239 response.mustcontain('127.0.0.0 - 127.0.0.255')
237 response.mustcontain('0.0.0.0/24')
238 response.mustcontain('0.0.0.0 - 0.0.0.255')
240 239
241 240 # DELETE
242 241 default_user_id = User.get_default_user().user_id
243 242 del_ip_id = UserIpMap.query().filter(UserIpMap.user_id ==
244 243 default_user_id).first().ip_id
245 244
246 245 response = self.app.post(
247 246 route_path('edit_user_ips_delete', user_id=default_user_id),
248 247 params={'del_ip_id': del_ip_id, 'csrf_token': self.csrf_token})
249 248
250 249 assert_session_flash(response, 'Removed ip address from user whitelist')
251 250
252 clear_all_caches()
251 clear_cache_regions(['sql_cache_short'])
253 252 response = self.app.get(route_path('admin_permissions_ips'))
254 253 response.mustcontain('All IP addresses are allowed')
255 response.mustcontain(no=['127.0.0.0/24'])
256 response.mustcontain(no=['127.0.0.0 - 127.0.0.255'])
254 response.mustcontain(no=['0.0.0.0/24'])
255 response.mustcontain(no=['0.0.0.0 - 0.0.0.255'])
257 256
258 257 def test_index_overview(self):
259 258 self.log_user()
260 259 self.app.get(route_path('admin_permissions_overview'))
261 260
262 261 def test_ssh_keys(self):
263 262 self.log_user()
264 263 self.app.get(route_path('admin_permissions_ssh_keys'), status=200)
265 264
266 265 def test_ssh_keys_data(self, user_util, xhr_header):
267 266 self.log_user()
268 267 response = self.app.get(route_path('admin_permissions_ssh_keys_data'),
269 268 extra_environ=xhr_header)
270 269 assert response.json == {u'data': [], u'draw': None,
271 270 u'recordsFiltered': 0, u'recordsTotal': 0}
272 271
273 272 dummy_user = user_util.create_user()
274 273 SshKeyModel().create(dummy_user, 'ab:cd:ef', 'KEYKEY', 'test_key')
275 274 Session().commit()
276 275 response = self.app.get(route_path('admin_permissions_ssh_keys_data'),
277 276 extra_environ=xhr_header)
278 277 assert response.json['data'][0]['fingerprint'] == 'ab:cd:ef'
279 278
280 279 def test_ssh_keys_update(self):
281 280 self.log_user()
282 281 response = self.app.post(
283 282 route_path('admin_permissions_ssh_keys_update'),
284 283 dict(csrf_token=self.csrf_token), status=302)
285 284
286 285 assert_session_flash(
287 286 response, 'Updated SSH keys file')
288 287
289 288 def test_ssh_keys_update_disabled(self):
290 289 self.log_user()
291 290
292 291 from rhodecode.apps.admin.views.permissions import AdminPermissionsView
293 292 with mock.patch.object(AdminPermissionsView, 'ssh_enabled',
294 293 return_value=False):
295 294 response = self.app.post(
296 295 route_path('admin_permissions_ssh_keys_update'),
297 296 dict(csrf_token=self.csrf_token), status=302)
298 297
299 298 assert_session_flash(
300 299 response, 'SSH key support is disabled in .ini file') No newline at end of file
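For context, the clear_cache_regions(['sql_cache_short']) calls above flush the dogpile region that caches short-lived SQL results, so the IP whitelist read back by the permissions page reflects the change rather than a cached value. A minimal dogpile.cache sketch of that kind of flush (illustrative; this is not the actual clear_cache_regions helper):

    from dogpile.cache import make_region

    # A stand-in region, configured like a short-lived SQL cache.
    region = make_region().configure(
        'dogpile.cache.memory_pickle', expiration_time=30)

    @region.cache_on_arguments()
    def default_user_ip_rules():
        return []  # placeholder for the SQL query behind the IP whitelist

    # Region-wide flush, roughly what clearing 'sql_cache_short' amounts to.
    region.invalidate(hard=True)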
@@ -1,121 +1,121 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib import helpers as h
24 24 from rhodecode.tests import (
25 TestController, clear_all_caches,
25 TestController, clear_cache_regions,
26 26 TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
27 27 from rhodecode.tests.fixture import Fixture
28 28 from rhodecode.tests.utils import AssertResponse
29 29
30 30 fixture = Fixture()
31 31
32 32
33 33 def route_path(name, params=None, **kwargs):
34 34 import urllib
35 35 from rhodecode.apps._base import ADMIN_PREFIX
36 36
37 37 base_url = {
38 38 'login': ADMIN_PREFIX + '/login',
39 39 'logout': ADMIN_PREFIX + '/logout',
40 40 'register': ADMIN_PREFIX + '/register',
41 41 'reset_password':
42 42 ADMIN_PREFIX + '/password_reset',
43 43 'reset_password_confirmation':
44 44 ADMIN_PREFIX + '/password_reset_confirmation',
45 45
46 46 'admin_permissions_application':
47 47 ADMIN_PREFIX + '/permissions/application',
48 48 'admin_permissions_application_update':
49 49 ADMIN_PREFIX + '/permissions/application/update',
50 50 }[name].format(**kwargs)
51 51
52 52 if params:
53 53 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
54 54 return base_url
55 55
56 56
57 57 class TestPasswordReset(TestController):
58 58
59 59 @pytest.mark.parametrize(
60 60 'pwd_reset_setting, show_link, show_reset', [
61 61 ('hg.password_reset.enabled', True, True),
62 62 ('hg.password_reset.hidden', False, True),
63 63 ('hg.password_reset.disabled', False, False),
64 64 ])
65 65 def test_password_reset_settings(
66 66 self, pwd_reset_setting, show_link, show_reset):
67 clear_all_caches()
67 clear_cache_regions()
68 68 self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
69 69 params = {
70 70 'csrf_token': self.csrf_token,
71 71 'anonymous': 'True',
72 72 'default_register': 'hg.register.auto_activate',
73 73 'default_register_message': '',
74 74 'default_password_reset': pwd_reset_setting,
75 75 'default_extern_activate': 'hg.extern_activate.auto',
76 76 }
77 77 resp = self.app.post(route_path('admin_permissions_application_update'), params=params)
78 78 self.logout_user()
79 79
80 80 login_page = self.app.get(route_path('login'))
81 81 asr_login = AssertResponse(login_page)
82 82 index_page = self.app.get(h.route_path('home'))
83 83 asr_index = AssertResponse(index_page)
84 84
85 85 if show_link:
86 86 asr_login.one_element_exists('a.pwd_reset')
87 87 asr_index.one_element_exists('a.pwd_reset')
88 88 else:
89 89 asr_login.no_element_exists('a.pwd_reset')
90 90 asr_index.no_element_exists('a.pwd_reset')
91 91
92 92 response = self.app.get(route_path('reset_password'))
93 93
94 94 assert_response = AssertResponse(response)
95 95 if show_reset:
96 96 response.mustcontain('Send password reset email')
97 97 assert_response.one_element_exists('#email')
98 98 assert_response.one_element_exists('#send')
99 99 else:
100 100 response.mustcontain('Password reset is disabled.')
101 101 assert_response.no_element_exists('#email')
102 102 assert_response.no_element_exists('#send')
103 103
104 104 def test_password_form_disabled(self):
105 105 self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
106 106 params = {
107 107 'csrf_token': self.csrf_token,
108 108 'anonymous': 'True',
109 109 'default_register': 'hg.register.auto_activate',
110 110 'default_register_message': '',
111 111 'default_password_reset': 'hg.password_reset.disabled',
112 112 'default_extern_activate': 'hg.extern_activate.auto',
113 113 }
114 114 self.app.post(route_path('admin_permissions_application_update'), params=params)
115 115 self.logout_user()
116 116
117 117 response = self.app.post(
118 118 route_path('reset_password'), {'email': 'lisa@rhodecode.com',}
119 119 )
120 120 response = response.follow()
121 121 response.mustcontain('Password reset is disabled.')
@@ -1,180 +1,179 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import colander
22 22 import formencode.htmlfill
23 23 import logging
24 24
25 25 from pyramid.httpexceptions import HTTPFound
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import BaseAppView
30 30 from rhodecode.authentication.base import get_authn_registry
31 31 from rhodecode.lib import helpers as h
32 32 from rhodecode.lib.auth import (
33 33 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
34 from rhodecode.lib.caches import clear_cache_manager
35 34 from rhodecode.model.forms import AuthSettingsForm
36 35 from rhodecode.model.meta import Session
37 36 from rhodecode.model.settings import SettingsModel
38 37
39 38 log = logging.getLogger(__name__)
40 39
41 40
42 41 class AuthnPluginViewBase(BaseAppView):
43 42
44 43 def load_default_context(self):
45 44 c = self._get_local_tmpl_context()
46 45 self.plugin = self.context.plugin
47 46 return c
48 47
49 48 @LoginRequired()
50 49 @HasPermissionAllDecorator('hg.admin')
51 50 def settings_get(self, defaults=None, errors=None):
52 51 """
53 52 View that displays the plugin settings as a form.
54 53 """
55 54 c = self.load_default_context()
56 55 defaults = defaults or {}
57 56 errors = errors or {}
58 57 schema = self.plugin.get_settings_schema()
59 58
60 59 # Compute default values for the form. Priority is:
61 60 # 1. Passed to this method 2. DB value 3. Schema default
62 61 for node in schema:
63 62 if node.name not in defaults:
64 63 defaults[node.name] = self.plugin.get_setting_by_name(
65 64 node.name, node.default)
66 65
67 66 template_context = {
68 67 'defaults': defaults,
69 68 'errors': errors,
70 69 'plugin': self.context.plugin,
71 70 'resource': self.context,
72 71 }
73 72
74 73 return self._get_template_context(c, **template_context)
75 74
76 75 @LoginRequired()
77 76 @HasPermissionAllDecorator('hg.admin')
78 77 @CSRFRequired()
79 78 def settings_post(self):
80 79 """
81 80 View that validates and stores the plugin settings.
82 81 """
83 82 _ = self.request.translate
84 83 self.load_default_context()
85 84 schema = self.plugin.get_settings_schema()
86 85 data = self.request.params
87 86
88 87 try:
89 88 valid_data = schema.deserialize(data)
90 89 except colander.Invalid as e:
91 90 # Display error message and display form again.
92 91 h.flash(
93 92 _('Errors exist when saving plugin settings. '
94 93 'Please check the form inputs.'),
95 94 category='error')
96 95 defaults = {key: data[key] for key in data if key in schema}
97 96 return self.settings_get(errors=e.asdict(), defaults=defaults)
98 97
99 98 # Store validated data.
100 99 for name, value in valid_data.items():
101 100 self.plugin.create_or_update_setting(name, value)
102 101 Session().commit()
103 102
104 103 # Display success message and redirect.
105 104 h.flash(_('Auth settings updated successfully.'), category='success')
106 105 redirect_to = self.request.resource_path(
107 106 self.context, route_name='auth_home')
108 107 return HTTPFound(redirect_to)
109 108
110 109
111 110 class AuthSettingsView(BaseAppView):
112 111 def load_default_context(self):
113 112 c = self._get_local_tmpl_context()
114 113 return c
115 114
116 115 @LoginRequired()
117 116 @HasPermissionAllDecorator('hg.admin')
118 117 def index(self, defaults=None, errors=None, prefix_error=False):
119 118 c = self.load_default_context()
120 119
121 120 defaults = defaults or {}
122 121 authn_registry = get_authn_registry(self.request.registry)
123 122 enabled_plugins = SettingsModel().get_auth_plugins()
124 123
125 124 # Create template context and render it.
126 125 template_context = {
127 126 'resource': self.context,
128 127 'available_plugins': authn_registry.get_plugins(),
129 128 'enabled_plugins': enabled_plugins,
130 129 }
131 130 html = render('rhodecode:templates/admin/auth/auth_settings.mako',
132 131 self._get_template_context(c, **template_context),
133 132 self.request)
134 133
135 134 # Create form default values and fill the form.
136 135 form_defaults = {
137 136 'auth_plugins': ',\n'.join(enabled_plugins)
138 137 }
139 138 form_defaults.update(defaults)
140 139 html = formencode.htmlfill.render(
141 140 html,
142 141 defaults=form_defaults,
143 142 errors=errors,
144 143 prefix_error=prefix_error,
145 144 encoding="UTF-8",
146 145 force_defaults=False)
147 146
148 147 return Response(html)
149 148
150 149 @LoginRequired()
151 150 @HasPermissionAllDecorator('hg.admin')
152 151 @CSRFRequired()
153 152 def auth_settings(self):
154 153 _ = self.request.translate
155 154 try:
156 155 form = AuthSettingsForm(self.request.translate)()
157 156 form_result = form.to_python(self.request.POST)
158 157 plugins = ','.join(form_result['auth_plugins'])
159 158 setting = SettingsModel().create_or_update_setting(
160 159 'auth_plugins', plugins)
161 160 Session().add(setting)
162 161 Session().commit()
163 162
164 163 h.flash(_('Auth settings updated successfully.'), category='success')
165 164 except formencode.Invalid as errors:
166 165 e = errors.error_dict or {}
167 166 h.flash(_('Errors exist when saving plugin setting. '
168 167 'Please check the form inputs.'), category='error')
169 168 return self.index(
170 169 defaults=errors.value,
171 170 errors=e,
172 171 prefix_error=False)
173 172 except Exception:
174 173 log.exception('Exception in auth_settings')
175 174 h.flash(_('Error occurred during update of auth settings.'),
176 175 category='error')
177 176
178 177 redirect_to = self.request.resource_path(
179 178 self.context, route_name='auth_home')
180 179 return HTTPFound(redirect_to)
@@ -1,483 +1,510 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import logging
23 23 import traceback
24 24 import collections
25 25 import tempfile
26 26
27 27 from paste.gzipper import make_gzip_middleware
28 28 from pyramid.wsgi import wsgiapp
29 29 from pyramid.authorization import ACLAuthorizationPolicy
30 30 from pyramid.config import Configurator
31 31 from pyramid.settings import asbool, aslist
32 32 from pyramid.httpexceptions import (
33 33 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
34 34 from pyramid.events import ApplicationCreated
35 35 from pyramid.renderers import render_to_response
36 36
37 37 from rhodecode.model import meta
38 38 from rhodecode.config import patches
39 39 from rhodecode.config import utils as config_utils
40 40 from rhodecode.config.environment import load_pyramid_environment
41 41
42 42 from rhodecode.lib.middleware.vcs import VCSMiddleware
43 43 from rhodecode.lib.request import Request
44 44 from rhodecode.lib.vcs import VCSCommunicationError
45 45 from rhodecode.lib.exceptions import VCSServerUnavailable
46 46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
47 47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 48 from rhodecode.lib.celerylib.loader import configure_celery
49 49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50 50 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
51 51 from rhodecode.subscribers import (
52 52 scan_repositories_if_enabled, write_js_routes_if_enabled,
53 53 write_metadata_if_needed, inject_app_settings)
54 54
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 def is_http_error(response):
60 60 # error which should have traceback
61 61 return response.status_code > 499
62 62
63 63
64 64 def make_pyramid_app(global_config, **settings):
65 65 """
66 66 Constructs the WSGI application based on Pyramid.
67 67
68 68 Specials:
69 69
70 70 * The application can also be integrated like a plugin via the call to
71 71 `includeme`. This is accompanied with the other utility functions which
72 72 are called. Changing this should be done with great care to not break
73 73 cases when these fragments are assembled from another place.
74 74
75 75 """
76 76
77 77 # Allows to use format style "{ENV_NAME}" placeholders in the configuration. It
78 78 # will be replaced by the value of the environment variable "NAME" in this case.
79 79 environ = {
80 80 'ENV_{}'.format(key): value for key, value in os.environ.items()}
81 81
82 82 global_config = _substitute_values(global_config, environ)
83 83 settings = _substitute_values(settings, environ)
84 84
85 85 sanitize_settings_and_apply_defaults(settings)
86 86
87 87 config = Configurator(settings=settings)
88 88
89 89 # Apply compatibility patches
90 90 patches.inspect_getargspec()
91 91
92 92 load_pyramid_environment(global_config, settings)
93 93
94 94 # Static file view comes first
95 95 includeme_first(config)
96 96
97 97 includeme(config)
98 98
99 99 pyramid_app = config.make_wsgi_app()
100 100 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
101 101 pyramid_app.config = config
102 102
103 103 config.configure_celery(global_config['__file__'])
104 104 # creating the app uses a connection - return it after we are done
105 105 meta.Session.remove()
106 106
107 107 log.info('Pyramid app %s created and configured.', pyramid_app)
108 108 return pyramid_app
109 109
110 110
111 111 def not_found_view(request):
112 112 """
113 113 This creates the view which should be registered as not-found-view to
114 114 pyramid.
115 115 """
116 116
117 117 if not getattr(request, 'vcs_call', None):
118 118 # handle like regular case with our error_handler
119 119 return error_handler(HTTPNotFound(), request)
120 120
121 121 # handle not found view as a vcs call
122 122 settings = request.registry.settings
123 123 ae_client = getattr(request, 'ae_client', None)
124 124 vcs_app = VCSMiddleware(
125 125 HTTPNotFound(), request.registry, settings,
126 126 appenlight_client=ae_client)
127 127
128 128 return wsgiapp(vcs_app)(None, request)
129 129
130 130
131 131 def error_handler(exception, request):
132 132 import rhodecode
133 133 from rhodecode.lib import helpers
134 134
135 135 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
136 136
137 137 base_response = HTTPInternalServerError()
138 138 # prefer original exception for the response since it may have headers set
139 139 if isinstance(exception, HTTPException):
140 140 base_response = exception
141 141 elif isinstance(exception, VCSCommunicationError):
142 142 base_response = VCSServerUnavailable()
143 143
144 144 if is_http_error(base_response):
145 145 log.exception(
146 146 'error occurred handling this request for path: %s', request.path)
147 147
148 148 error_explanation = base_response.explanation or str(base_response)
149 149 if base_response.status_code == 404:
150 150 error_explanation += " Or you don't have permission to access it."
151 151 c = AttributeDict()
152 152 c.error_message = base_response.status
153 153 c.error_explanation = error_explanation
154 154 c.visual = AttributeDict()
155 155
156 156 c.visual.rhodecode_support_url = (
157 157 request.registry.settings.get('rhodecode_support_url') or
158 158 request.route_url('rhodecode_support')
159 159 )
160 160 c.redirect_time = 0
161 161 c.rhodecode_name = rhodecode_title
162 162 if not c.rhodecode_name:
163 163 c.rhodecode_name = 'Rhodecode'
164 164
165 165 c.causes = []
166 166 if is_http_error(base_response):
167 167 c.causes.append('Server is overloaded.')
168 168 c.causes.append('Server database connection is lost.')
169 169 c.causes.append('Server expected unhandled error.')
170 170
171 171 if hasattr(base_response, 'causes'):
172 172 c.causes = base_response.causes
173 173
174 174 c.messages = helpers.flash.pop_messages(request=request)
175 175 c.traceback = traceback.format_exc()
176 176 response = render_to_response(
177 177 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
178 178 response=base_response)
179 179
180 180 return response
181 181
182 182
183 183 def includeme_first(config):
184 184 # redirect automatic browser favicon.ico requests to correct place
185 185 def favicon_redirect(context, request):
186 186 return HTTPFound(
187 187 request.static_path('rhodecode:public/images/favicon.ico'))
188 188
189 189 config.add_view(favicon_redirect, route_name='favicon')
190 190 config.add_route('favicon', '/favicon.ico')
191 191
192 192 def robots_redirect(context, request):
193 193 return HTTPFound(
194 194 request.static_path('rhodecode:public/robots.txt'))
195 195
196 196 config.add_view(robots_redirect, route_name='robots')
197 197 config.add_route('robots', '/robots.txt')
198 198
199 199 config.add_static_view(
200 200 '_static/deform', 'deform:static')
201 201 config.add_static_view(
202 202 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
203 203
204 204
205 205 def includeme(config):
206 206 settings = config.registry.settings
207 207 config.set_request_factory(Request)
208 208
209 209 # plugin information
210 210 config.registry.rhodecode_plugins = collections.OrderedDict()
211 211
212 212 config.add_directive(
213 213 'register_rhodecode_plugin', register_rhodecode_plugin)
214 214
215 215 config.add_directive('configure_celery', configure_celery)
216 216
217 217 if asbool(settings.get('appenlight', 'false')):
218 218 config.include('appenlight_client.ext.pyramid_tween')
219 219
220 220 # Includes which are required. The application would fail without them.
221 221 config.include('pyramid_mako')
222 222 config.include('pyramid_beaker')
223 223 config.include('rhodecode.lib.caches')
224 224 config.include('rhodecode.lib.rc_cache')
225 225
226 226 config.include('rhodecode.authentication')
227 227 config.include('rhodecode.integrations')
228 228
229 229 # apps
230 230 config.include('rhodecode.apps._base')
231 231 config.include('rhodecode.apps.ops')
232 232
233 233 config.include('rhodecode.apps.admin')
234 234 config.include('rhodecode.apps.channelstream')
235 235 config.include('rhodecode.apps.login')
236 236 config.include('rhodecode.apps.home')
237 237 config.include('rhodecode.apps.journal')
238 238 config.include('rhodecode.apps.repository')
239 239 config.include('rhodecode.apps.repo_group')
240 240 config.include('rhodecode.apps.user_group')
241 241 config.include('rhodecode.apps.search')
242 242 config.include('rhodecode.apps.user_profile')
243 243 config.include('rhodecode.apps.user_group_profile')
244 244 config.include('rhodecode.apps.my_account')
245 245 config.include('rhodecode.apps.svn_support')
246 246 config.include('rhodecode.apps.ssh_support')
247 247 config.include('rhodecode.apps.gist')
248 248
249 249 config.include('rhodecode.apps.debug_style')
250 250 config.include('rhodecode.tweens')
251 251 config.include('rhodecode.api')
252 252
253 253 config.add_route(
254 254 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
255 255
256 256 config.add_translation_dirs('rhodecode:i18n/')
257 257 settings['default_locale_name'] = settings.get('lang', 'en')
258 258
259 259 # Add subscribers.
260 260 config.add_subscriber(inject_app_settings, ApplicationCreated)
261 261 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
262 262 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
263 263 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
264 264
265 265 # events
266 266 # TODO(marcink): this should be done when pyramid migration is finished
267 267 # config.add_subscriber(
268 268 # 'rhodecode.integrations.integrations_event_handler',
269 269 # 'rhodecode.events.RhodecodeEvent')
270 270
271 271 # request custom methods
272 272 config.add_request_method(
273 273 'rhodecode.lib.partial_renderer.get_partial_renderer',
274 274 'get_partial_renderer')
275 275
276 276 # Set the authorization policy.
277 277 authz_policy = ACLAuthorizationPolicy()
278 278 config.set_authorization_policy(authz_policy)
279 279
280 280 # Set the default renderer for HTML templates to mako.
281 281 config.add_mako_renderer('.html')
282 282
283 283 config.add_renderer(
284 284 name='json_ext',
285 285 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
286 286
287 287 # include RhodeCode plugins
288 288 includes = aslist(settings.get('rhodecode.includes', []))
289 289 for inc in includes:
290 290 config.include(inc)
291 291
292 292 # custom not found view, if our pyramid app doesn't know how to handle
293 293 # the request pass it to potential VCS handling ap
294 294 config.add_notfound_view(not_found_view)
295 295 if not settings.get('debugtoolbar.enabled', False):
296 296 # disabled debugtoolbar handle all exceptions via the error_handlers
297 297 config.add_view(error_handler, context=Exception)
298 298
299 299 # all errors including 403/404/50X
300 300 config.add_view(error_handler, context=HTTPError)
301 301
302 302
303 303 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
304 304 """
305 305 Apply outer WSGI middlewares around the application.
306 306 """
307 307 settings = config.registry.settings
308 308
309 309 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
310 310 pyramid_app = HttpsFixup(pyramid_app, settings)
311 311
312 312 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
313 313 pyramid_app, settings)
314 314 config.registry.ae_client = _ae_client
315 315
316 316 if settings['gzip_responses']:
317 317 pyramid_app = make_gzip_middleware(
318 318 pyramid_app, settings, compress_level=1)
319 319
320 320 # this should be the outermost middleware in the wsgi stack since
321 321 # middlewares like Routes make database calls
322 322 def pyramid_app_with_cleanup(environ, start_response):
323 323 try:
324 324 return pyramid_app(environ, start_response)
325 325 finally:
326 326 # Dispose current database session and rollback uncommitted
327 327 # transactions.
328 328 meta.Session.remove()
329 329
330 330 # In a single-threaded server, on a non-sqlite db, we should have
331 331 # '0 Current Checked out connections' at the end of a request,
332 332 # if not, then something, somewhere is leaving a connection open
333 333 pool = meta.Base.metadata.bind.engine.pool
334 334 log.debug('sa pool status: %s', pool.status())
335 335
336 336 return pyramid_app_with_cleanup
337 337
338 338
339 339 def sanitize_settings_and_apply_defaults(settings):
340 340 """
341 341 Applies settings defaults and does all type conversion.
342 342
343 343 We would move all settings parsing and preparation into this place, so that
344 344 we have only one place left which deals with this part. The remaining parts
345 345 of the application would start to rely fully on well prepared settings.
346 346
347 347 This piece would later be split up per topic to avoid a big fat monster
348 348 function.
349 349 """
350 350
351 351 settings.setdefault('rhodecode.edition', 'Community Edition')
352 352
353 353 if 'mako.default_filters' not in settings:
354 354 # set custom default filters if we don't have it defined
355 355 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
356 356 settings['mako.default_filters'] = 'h_filter'
357 357
358 358 if 'mako.directories' not in settings:
359 359 mako_directories = settings.setdefault('mako.directories', [
360 360 # Base templates of the original application
361 361 'rhodecode:templates',
362 362 ])
363 363 log.debug(
364 364 "Using the following Mako template directories: %s",
365 365 mako_directories)
366 366
367 367 # Default includes, possible to change as a user
368 368 pyramid_includes = settings.setdefault('pyramid.includes', [
369 369 'rhodecode.lib.middleware.request_wrapper',
370 370 ])
371 371 log.debug(
372 372 "Using the following pyramid.includes: %s",
373 373 pyramid_includes)
374 374
375 375 # TODO: johbo: Re-think this, usually the call to config.include
376 376 # should allow to pass in a prefix.
377 377 settings.setdefault('rhodecode.api.url', '/_admin/api')
378 378
379 379 # Sanitize generic settings.
380 380 _list_setting(settings, 'default_encoding', 'UTF-8')
381 381 _bool_setting(settings, 'is_test', 'false')
382 382 _bool_setting(settings, 'gzip_responses', 'false')
383 383
384 384 # Call split out functions that sanitize settings for each topic.
385 385 _sanitize_appenlight_settings(settings)
386 386 _sanitize_vcs_settings(settings)
387 387 _sanitize_cache_settings(settings)
388 388
389 389 # configure instance id
390 390 config_utils.set_instance_id(settings)
391 391
392 392 return settings
393 393
394 394
395 395 def _sanitize_appenlight_settings(settings):
396 396 _bool_setting(settings, 'appenlight', 'false')
397 397
398 398
399 399 def _sanitize_vcs_settings(settings):
400 400 """
401 401 Applies settings defaults and does type conversion for all VCS related
402 402 settings.
403 403 """
404 404 _string_setting(settings, 'vcs.svn.compatible_version', '')
405 405 _string_setting(settings, 'git_rev_filter', '--all')
406 406 _string_setting(settings, 'vcs.hooks.protocol', 'http')
407 407 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
408 408 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
409 409 _string_setting(settings, 'vcs.server', '')
410 410 _string_setting(settings, 'vcs.server.log_level', 'debug')
411 411 _string_setting(settings, 'vcs.server.protocol', 'http')
412 412 _bool_setting(settings, 'startup.import_repos', 'false')
413 413 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
414 414 _bool_setting(settings, 'vcs.server.enable', 'true')
415 415 _bool_setting(settings, 'vcs.start_server', 'false')
416 416 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
417 417 _int_setting(settings, 'vcs.connection_timeout', 3600)
418 418
419 419 # Support legacy values of vcs.scm_app_implementation. Legacy
420 420 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
421 421 # which is now mapped to 'http'.
422 422 scm_app_impl = settings['vcs.scm_app_implementation']
423 423 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
424 424 settings['vcs.scm_app_implementation'] = 'http'
425 425
426 426
427 427 def _sanitize_cache_settings(settings):
428 428 _string_setting(settings, 'cache_dir',
429 429 os.path.join(tempfile.gettempdir(), 'rc_cache'))
430 # cache_perms
431 _string_setting(
432 settings,
433 'rc_cache.cache_perms.backend',
434 'dogpile.cache.rc.file_namespace')
435 _int_setting(
436 settings,
437 'rc_cache.cache_perms.expiration_time',
438 60)
439 _string_setting(
440 settings,
441 'rc_cache.cache_perms.arguments.filename',
442 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
430 443
431 _string_setting(settings, 'rc_cache.cache_perms.backend',
432 'dogpile.cache.rc.file_namespace')
433 _int_setting(settings, 'rc_cache.cache_perms.expiration_time',
434 60)
435 _string_setting(settings, 'rc_cache.cache_perms.arguments.filename',
436 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
444 # cache_repo
445 _string_setting(
446 settings,
447 'rc_cache.cache_repo.backend',
448 'dogpile.cache.rc.file_namespace')
449 _int_setting(
450 settings,
451 'rc_cache.cache_repo.expiration_time',
452 60)
453 _string_setting(
454 settings,
455 'rc_cache.cache_repo.arguments.filename',
456 os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
437 457
438 _string_setting(settings, 'rc_cache.cache_repo.backend',
439 'dogpile.cache.rc.file_namespace')
440 _int_setting(settings, 'rc_cache.cache_repo.expiration_time',
441 60)
442 _string_setting(settings, 'rc_cache.cache_repo.arguments.filename',
443 os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
458 # sql_cache_short
459 _string_setting(
460 settings,
461 'rc_cache.sql_cache_short.backend',
462 'dogpile.cache.rc.memory_lru')
463 _int_setting(
464 settings,
465 'rc_cache.sql_cache_short.expiration_time',
466 30)
467 _int_setting(
468 settings,
469 'rc_cache.sql_cache_short.max_size',
470 10000)
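The rc_cache.* keys above describe dogpile.cache regions; the dogpile.cache.rc.* backend names look like RhodeCode-specific wrappers (e.g. memory_lru presumably adds the max_size bound that the stock memory backend lacks). A sketch of what the sql_cache_short settings amount to, using a stock backend in place of dogpile.cache.rc.memory_lru (illustrative, not the actual rc_cache wiring):

    from dogpile.cache import make_region

    sql_cache_short = make_region().configure(
        'dogpile.cache.memory_pickle',  # stand-in for dogpile.cache.rc.memory_lru
        expiration_time=30,             # rc_cache.sql_cache_short.expiration_time
    )

    @sql_cache_short.cache_on_arguments()
    def get_user_by_id(user_id):
        # placeholder for an expensive SQL lookup, cached for 30 seconds
        return {'user_id': user_id}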
444 471
445 472
446 473 def _int_setting(settings, name, default):
447 474 settings[name] = int(settings.get(name, default))
448 475
449 476
450 477 def _bool_setting(settings, name, default):
451 478 input_val = settings.get(name, default)
452 479 if isinstance(input_val, unicode):
453 480 input_val = input_val.encode('utf8')
454 481 settings[name] = asbool(input_val)
455 482
456 483
457 484 def _list_setting(settings, name, default):
458 485 raw_value = settings.get(name, default)
459 486
460 487 old_separator = ','
461 488 if old_separator in raw_value:
462 489 # If we get a comma separated list, pass it to our own function.
463 490 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
464 491 else:
465 492 # Otherwise we assume it uses pyramids space/newline separation.
466 493 settings[name] = aslist(raw_value)
467 494
468 495
469 496 def _string_setting(settings, name, default, lower=True):
470 497 value = settings.get(name, default)
471 498 if lower:
472 499 value = value.lower()
473 500 settings[name] = value
474 501
475 502
476 503 def _substitute_values(mapping, substitutions):
477 504 result = {
478 505 # Note: Cannot use regular replacements, since they would clash
479 506 # with the implementation of ConfigParser. Using "format" instead.
480 507 key: value.format(**substitutions)
481 508 for key, value in mapping.items()
482 509 }
483 510 return result
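As a small illustration of the {ENV_NAME} substitution that make_pyramid_app feeds into _substitute_values (all values made up):

    settings = {
        'sqlalchemy.db1.url':
            'postgresql://postgres:{ENV_RC_DB_PASS}@localhost/rhodecode',
    }
    environ = {'ENV_RC_DB_PASS': 's3cret'}

    print(_substitute_values(settings, environ)['sqlalchemy.db1.url'])
    # -> postgresql://postgres:s3cret@localhost/rhodecode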
@@ -1,226 +1,188 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import functools
21 21
22 22 import beaker
23 23 import logging
24 24 import threading
25 25
26 from beaker.cache import _cache_decorate, cache_regions, region_invalidate
26 from beaker.cache import _cache_decorate, region_invalidate
27 27 from sqlalchemy.exc import IntegrityError
28 28
29 29 from rhodecode.lib.utils import safe_str, sha1
30 30 from rhodecode.model.db import Session, CacheKey
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 DEFAULT_CACHE_MANAGER_CONFIG = {
36 36 'type': 'memorylru_base',
37 37 'max_items': 10240,
38 38 'key_length': 256,
39 39 'enabled': True
40 40 }
41 41
42 42
43 43 def get_default_cache_settings(settings):
44 44 cache_settings = {}
45 45 for key in settings.keys():
46 46 for prefix in ['beaker.cache.', 'cache.']:
47 47 if key.startswith(prefix):
48 48 name = key.split(prefix)[1].strip()
49 49 cache_settings[name] = settings[key].strip()
50 50 return cache_settings
51 51
52 52
53 53 # set cache regions for beaker so celery can utilise it
54 54 def configure_caches(settings, default_region_settings=None):
55 55 cache_settings = {'regions': None}
56 56 # main cache settings used as default ...
57 57 cache_settings.update(get_default_cache_settings(settings))
58 58 default_region_settings = default_region_settings or \
59 59 {'type': DEFAULT_CACHE_MANAGER_CONFIG['type']}
60 60 if cache_settings['regions']:
61 61 for region in cache_settings['regions'].split(','):
62 62 region = region.strip()
63 63 region_settings = default_region_settings.copy()
64 64 for key, value in cache_settings.items():
65 65 if key.startswith(region):
66 66 region_settings[key.split(region + '.')[-1]] = value
67 67 log.debug('Configuring cache region `%s` with settings %s',
68 68 region, region_settings)
69 69 configure_cache_region(
70 70 region, region_settings, cache_settings)
71 71
72 72
73 73 def configure_cache_region(
74 74 region_name, region_settings, default_cache_kw, default_expire=60):
75 75 default_type = default_cache_kw.get('type', 'memory')
76 76 default_lock_dir = default_cache_kw.get('lock_dir')
77 77 default_data_dir = default_cache_kw.get('data_dir')
78 78
79 79 region_settings['lock_dir'] = region_settings.get('lock_dir', default_lock_dir)
80 80 region_settings['data_dir'] = region_settings.get('data_dir', default_data_dir)
81 81 region_settings['type'] = region_settings.get('type', default_type)
82 82 region_settings['expire'] = int(region_settings.get('expire', default_expire))
83 83
84 84 beaker.cache.cache_regions[region_name] = region_settings
85 85
86 86
87 def get_cache_manager(region_name, cache_name, custom_ttl=None):
88 """
89 Creates a Beaker cache manager. Such instance can be used like that::
90
91 _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
92 cache_manager = caches.get_cache_manager('some_namespace_name', _namespace)
93 _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
94 def heavy_compute():
95 ...
96 result = cache_manager.get(_cache_key, createfunc=heavy_compute)
97
98 :param region_name: region from ini file
99 :param cache_name: custom cache name, usually prefix+repo_name. eg
100 file_switcher_repo1
101 :param custom_ttl: override .ini file timeout on this cache
102 :return: instance of cache manager
103 """
104
105 cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
106 if custom_ttl:
107 log.debug('Updating region %s with custom ttl: %s',
108 region_name, custom_ttl)
109 cache_config.update({'expire': custom_ttl})
110
111 return beaker.cache.Cache._get_cache(cache_name, cache_config)
112
113
114 def clear_cache_manager(cache_manager):
115 """
116 namespace = 'foobar'
117 cache_manager = get_cache_manager('some_namespace_name', namespace)
118 clear_cache_manager(cache_manager)
119 """
120
121 log.debug('Clearing all values for cache manager %s', cache_manager)
122 cache_manager.clear()
123
124
125 87 def compute_key_from_params(*args):
126 88 """
127 89 Helper to compute key from given params to be used in cache manager
128 90 """
129 91 return sha1("_".join(map(safe_str, args)))
130 92
131 93
132 94 def get_repo_namespace_key(prefix, repo_name):
133 95 return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))
134 96
135 97
136 98 class ActiveRegionCache(object):
137 99 def __init__(self, context):
138 100 self.context = context
139 101
140 102 def invalidate(self, *args, **kwargs):
141 103 return False
142 104
143 105 def compute(self):
144 106 log.debug('Context cache: getting obj %s from cache', self.context)
145 107 return self.context.compute_func(self.context.cache_key)
146 108
147 109
148 110 class FreshRegionCache(ActiveRegionCache):
149 111 def invalidate(self):
150 112 log.debug('Context cache: invalidating cache for %s', self.context)
151 113 region_invalidate(
152 114 self.context.compute_func, None, self.context.cache_key)
153 115 return True
154 116
155 117
156 118 class InvalidationContext(object):
157 119 def __repr__(self):
158 120 return '<InvalidationContext:{}[{}]>'.format(
159 121 safe_str(self.repo_name), safe_str(self.cache_type))
160 122
161 123 def __init__(self, compute_func, repo_name, cache_type,
162 124 raise_exception=False, thread_scoped=False):
163 125 self.compute_func = compute_func
164 126 self.repo_name = repo_name
165 127 self.cache_type = cache_type
166 128 self.cache_key = compute_key_from_params(
167 129 repo_name, cache_type)
168 130 self.raise_exception = raise_exception
169 131
170 132 # Append the thread id to the cache key if this invalidation context
171 133 # should be scoped to the current thread.
172 134 if thread_scoped:
173 135 thread_id = threading.current_thread().ident
174 136 self.cache_key = '{cache_key}_{thread_id}'.format(
175 137 cache_key=self.cache_key, thread_id=thread_id)
176 138
177 139 def get_cache_obj(self):
178 140 cache_key = CacheKey.get_cache_key(
179 141 self.repo_name, self.cache_type)
180 142 cache_obj = CacheKey.get_active_cache(cache_key)
181 143 if not cache_obj:
182 144 cache_obj = CacheKey(cache_key, self.repo_name)
183 145 return cache_obj
184 146
185 147 def __enter__(self):
186 148 """
187 149 Test if the current object is valid, and return a CacheRegion helper
188 150 that handles invalidation and computation
189 151 """
190 152
191 153 self.cache_obj = self.get_cache_obj()
192 154 if self.cache_obj.cache_active:
193 155 # our cache obj exists and is marked as still valid (its cache is
194 156 # not outdated), so we return the ActiveRegionCache invalidator
195 157 self.skip_cache_active_change = True
196 158 return ActiveRegionCache(self)
197 159
198 160 # the key either does not exist or is set to False, so we return
199 161 # the real invalidator which re-computes the value. We additionally set
200 162 # the flag to actually update the Database objects
201 163 self.skip_cache_active_change = False
202 164 return FreshRegionCache(self)
203 165
204 166 def __exit__(self, exc_type, exc_val, exc_tb):
205 167
206 168 if self.skip_cache_active_change:
207 169 return
208 170
209 171 try:
210 172 self.cache_obj.cache_active = True
211 173 Session().add(self.cache_obj)
212 174 Session().commit()
213 175 except IntegrityError:
214 176 # if we catch an IntegrityError, it means we already inserted this object;
215 177 # the assumption is that this is really an edge race-condition case and
216 178 # it's safe to skip it
217 179 Session().rollback()
218 180 except Exception:
219 181 log.exception('Failed to commit on cache key update')
220 182 Session().rollback()
221 183 if self.raise_exception:
222 184 raise
223 185
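A hedged sketch of how ``InvalidationContext`` is typically driven (the ``_render_readme`` function, the ``readme`` cache type and the ``long_term`` beaker region are illustrative assumptions; the real callers live elsewhere in the code base)::

    from beaker.cache import cache_region

    @cache_region('long_term')
    def _render_readme(cache_key):
        # expensive work; beaker stores the result under cache_key
        return 'rendered readme'

    context = InvalidationContext(
        compute_func=_render_readme, repo_name='repo1', cache_type='readme')

    with context as cache_context:
        # FreshRegionCache.invalidate() drops the stale beaker value and
        # returns True; ActiveRegionCache.invalidate() is a no-op (False)
        cache_context.invalidate()
        # compute() calls compute_func(cache_key); the beaker decorator makes
        # repeated calls cheap while the value is still cached
        result = cache_context.compute()

On ``__exit__`` the CacheKey row is marked active again, unless the value was already valid on entry.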
224 186
225 187 def includeme(config):
226 188 configure_caches(config.registry.settings)
@@ -1,325 +1,298 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 """
22 caching_query.py
21 """caching_query.py
23 22
24 Represent persistence structures which allow the usage of
25 Beaker caching with SQLAlchemy.
23 Represent functions and classes
24 which allow the usage of Dogpile caching with SQLAlchemy.
25 Introduces a query option called FromCache.
26 26
27 27 The new concepts introduced here are:
28 28
29 29 * CachingQuery - a Query subclass that caches and
30 retrieves results in/from Beaker.
30 retrieves results in/from dogpile.cache.
31 31 * FromCache - a query option that establishes caching
32 32 parameters on a Query
33 33 * RelationshipCache - a variant of FromCache which is specific
34 34 to a query invoked during a lazy load.
35 35 * _params_from_query - extracts value parameters from
36 36 a Query.
37 37
38 38 The rest of what's here are standard SQLAlchemy and
39 Beaker constructs.
39 dogpile.cache constructs.
40 40
41 41 """
42 import beaker
43 from beaker.exceptions import BeakerException
44
45 42 from sqlalchemy.orm.interfaces import MapperOption
46 43 from sqlalchemy.orm.query import Query
47 44 from sqlalchemy.sql import visitors
45 from dogpile.cache.api import NO_VALUE
48 46
49 47 from rhodecode.lib.utils2 import safe_str
50 48
51 49
52 50 class CachingQuery(Query):
53 """A Query subclass which optionally loads full results from a Beaker
51 """A Query subclass which optionally loads full results from a dogpile
54 52 cache region.
55 53
56 The CachingQuery stores additional state that allows it to consult
57 a Beaker cache before accessing the database:
58
59 * A "region", which is a cache region argument passed to a
60 Beaker CacheManager, specifies a particular cache configuration
61 (including backend implementation, expiration times, etc.)
62 * A "namespace", which is a qualifying name that identifies a
63 group of keys within the cache. A query that filters on a name
64 might use the name "by_name", a query that filters on a date range
65 to a joined table might use the name "related_date_range".
66
67 When the above state is present, a Beaker cache is retrieved.
68
69 The "namespace" name is first concatenated with
70 a string composed of the individual entities and columns the Query
71 requests, i.e. such as ``Query(User.id, User.name)``.
72
73 The Beaker cache is then loaded from the cache manager based
74 on the region and composed namespace. The key within the cache
75 itself is then constructed against the bind parameters specified
76 by this query, which are usually literals defined in the
77 WHERE clause.
54 The CachingQuery optionally stores additional state that allows it to consult
55 a dogpile.cache cache before accessing the database, in the form
56 of a FromCache or RelationshipCache object. Each of these objects
57 refers to the name of a :class:`dogpile.cache.Region` that's been configured
58 and stored in a lookup dictionary. When such an object has associated
59 itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region`
60 is used to locate a cached result. If none is present, then the
61 Query is invoked normally, the results being cached.
78 62
79 63 The FromCache and RelationshipCache mapper options below represent
80 64 the "public" method of configuring this state upon the CachingQuery.
81 65
82 66 """
67 def _get_region(self):
68 from rhodecode.lib.rc_cache import region_meta
69 return region_meta.dogpile_cache_regions
83 70
84 def __init__(self, manager, *args, **kw):
85 self.cache_manager = manager
71 def __init__(self, regions, *args, **kw):
72 self.cache_regions = regions or self._get_region()
86 73 Query.__init__(self, *args, **kw)
87 74
88 75 def __iter__(self):
89 """override __iter__ to pull results from Beaker
76 """override __iter__ to pull results from dogpile
90 77 if particular attributes have been configured.
91 78
92 79 Note that this approach does *not* detach the loaded objects from
93 80 the current session. If the cache backend is an in-process cache
94 81 (like "memory") and lives beyond the scope of the current session's
95 82 transaction, those objects may be expired. The method here can be
96 83 modified to first expunge() each loaded item from the current
97 84 session before returning the list of items, so that the items
98 85 in the cache are not the same ones in the current Session.
99 86
100 87 """
101 if hasattr(self, '_cache_parameters'):
88 super_ = super(CachingQuery, self)
89
90 if hasattr(self, '_cache_region'):
91 return self.get_value(createfunc=lambda: list(super_.__iter__()))
92 else:
93 return super_.__iter__()
94
95 def _execute_and_instances(self, context):
96 """override _execute_and_instances to pull results from dogpile
97 if the query is invoked directly from an external context.
98
99 This method is necessary in order to maintain compatibility
100 with the "baked query" system now used by default in some
101 relationship loader scenarios. Note also the
102 RelationshipCache._generate_cache_key method which enables
103 the baked query to be used within lazy loads.
102 104
103 def caching_query():
104 return list(Query.__iter__(self))
105 .. versionadded:: 1.2.7
106 """
107 super_ = super(CachingQuery, self)
105 108
106 return self.get_value(createfunc=caching_query)
109 if context.query is not self and hasattr(self, '_cache_region'):
110 # special logic called when the Query._execute_and_instances()
111 # method is called directly from the baked query
112 return self.get_value(
113 createfunc=lambda: list(
114 super_._execute_and_instances(context)
115 )
116 )
107 117 else:
108 return Query.__iter__(self)
118 return super_._execute_and_instances(context)
119
120 def _get_cache_plus_key(self):
121 """Return a cache region plus key."""
122 dogpile_region = self.cache_regions[self._cache_region.region]
123 if self._cache_region.cache_key:
124 key = self._cache_region.cache_key
125 else:
126 key = _key_from_query(self)
127 return dogpile_region, key
109 128
110 129 def invalidate(self):
111 """Invalidate the value represented by this Query."""
130 """Invalidate the cache value represented by this Query."""
112 131
113 cache, cache_key = _get_cache_parameters(self)
114 cache.remove(cache_key)
132 dogpile_region, cache_key = self._get_cache_plus_key()
133 dogpile_region.delete(cache_key)
115 134
116 def get_value(self, merge=True, createfunc=None):
135 def get_value(self, merge=True, createfunc=None,
136 expiration_time=None, ignore_expiration=False):
117 137 """Return the value from the cache for this query.
118 138
119 139 Raise KeyError if no value present and no
120 140 createfunc specified.
121 141
122 142 """
123 cache, cache_key = _get_cache_parameters(self)
124 ret = cache.get_value(cache_key, createfunc=createfunc)
143 dogpile_region, cache_key = self._get_cache_plus_key()
144
145 # ignore_expiration means, if the value is in the cache
146 # but is expired, return it anyway. This doesn't make sense
147 # with createfunc, which says, if the value is expired, generate
148 # a new value.
149 assert not ignore_expiration or not createfunc, \
150 "Can't ignore expiration and also provide createfunc"
151
152 if ignore_expiration or not createfunc:
153 cached_value = dogpile_region.get(cache_key,
154 expiration_time=expiration_time,
155 ignore_expiration=ignore_expiration)
156 else:
157 cached_value = dogpile_region.get_or_create(
158 cache_key,
159 createfunc,
160 expiration_time=expiration_time
161 )
162 if cached_value is NO_VALUE:
163 raise KeyError(cache_key)
125 164 if merge:
126 ret = self.merge_result(ret, load=False)
127 return ret
165 cached_value = self.merge_result(cached_value, load=False)
166 return cached_value
128 167
129 168 def set_value(self, value):
130 169 """Set the value in the cache for this query."""
131 170
132 cache, cache_key = _get_cache_parameters(self)
133 cache.put(cache_key, value)
171 dogpile_region, cache_key = self._get_cache_plus_key()
172 dogpile_region.set(cache_key, value)
134 173
135 174
136 def query_callable(manager, query_cls=CachingQuery):
175 def query_callable(regions=None, query_cls=CachingQuery):
137 176 def query(*arg, **kw):
138 return query_cls(manager, *arg, **kw)
177 return query_cls(regions, *arg, **kw)
139 178 return query
140 179
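A sketch of how this factory is usually wired into a session (assuming a dogpile region registered under the ``sql_cache_short`` name; RhodeCode's real wiring happens in its model/bootstrap code)::

    from dogpile.cache import make_region
    from sqlalchemy.orm import scoped_session, sessionmaker

    regions = {
        'sql_cache_short': make_region().configure(
            'dogpile.cache.memory', expiration_time=30),
    }

    # every Query produced by this Session is a CachingQuery bound to `regions`
    Session = scoped_session(sessionmaker(query_cls=query_callable(regions)))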
141 180
142 def get_cache_region(name, region):
143 if region not in beaker.cache.cache_regions:
144 raise BeakerException('Cache region `%s` not configured '
145 'Check if proper cache settings are in the .ini files' % region)
146 kw = beaker.cache.cache_regions[region]
147 return beaker.cache.Cache._get_cache(name, kw)
181 def _key_from_query(query, qualifier=None):
182 """Given a Query, create a cache key.
148 183
149
150 def _get_cache_parameters(query):
151 """For a query with cache_region and cache_namespace configured,
152 return the correspoinding Cache instance and cache key, based
153 on this query's current criterion and parameter values.
184 There are many approaches to this; here we use the simplest,
185 which is to create an md5 hash of the text of the SQL statement,
186 combined with stringified versions of all the bound parameters
187 within it. There's a bit of a performance hit with
188 compiling out "query.statement" here; other approaches include
189 setting up an explicit cache key with a particular Query,
190 then combining that with the bound parameter values.
154 191
155 192 """
156 if not hasattr(query, '_cache_parameters'):
157 raise ValueError("This Query does not have caching "
158 "parameters configured.")
159 193
160 region, namespace, cache_key = query._cache_parameters
161
162 namespace = _namespace_from_query(namespace, query)
163
164 if cache_key is None:
165 # cache key - the value arguments from this query's parameters.
166 args = [safe_str(x) for x in _params_from_query(query)]
167 args.extend(filter(lambda k: k not in ['None', None, u'None'],
168 [str(query._limit), str(query._offset)]))
169
170 cache_key = " ".join(args)
171
172 if cache_key is None:
173 raise Exception('Cache key cannot be None')
194 stmt = query.with_labels().statement
195 compiled = stmt.compile()
196 params = compiled.params
174 197
175 # get cache
176 #cache = query.cache_manager.get_cache_region(namespace, region)
177 cache = get_cache_region(namespace, region)
178 # optional - hash the cache_key too for consistent length
179 # import uuid
180 # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
181
182 return cache, cache_key
183
184
185 def _namespace_from_query(namespace, query):
186 # cache namespace - the token handed in by the
187 # option + class we're querying against
188 namespace = " ".join([namespace] + [str(x) for x in query._entities])
189
190 # memcached wants this
191 namespace = namespace.replace(' ', '_')
192
193 return namespace
194
195
196 def _set_cache_parameters(query, region, namespace, cache_key):
197
198 if hasattr(query, '_cache_parameters'):
199 region, namespace, cache_key = query._cache_parameters
200 raise ValueError("This query is already configured "
201 "for region %r namespace %r" %
202 (region, namespace))
203 query._cache_parameters = region, namespace, cache_key
198 # here we return the key as a long string. our "key mangler"
199 # set up with the region will boil it down to an md5.
200 return " ".join(
201 [safe_str(compiled)] +
202 [safe_str(params[k]) for k in sorted(params)])
204 203
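The long string built here is expected to be shortened by a region-level "key mangler"; a minimal mangler of that kind, adapted from the standard SQLAlchemy dogpile example (not necessarily the exact one RhodeCode configures), could look like::

    import hashlib

    from dogpile.cache import make_region

    def md5_key_mangler(key):
        # distill the long statement+params string (a byte string, as produced
        # by safe_str above) into a fixed-length md5 hex digest
        return hashlib.md5(key).hexdigest()

    region = make_region(key_mangler=md5_key_mangler).configure(
        'dogpile.cache.memory', expiration_time=30)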
205 204
206 205 class FromCache(MapperOption):
207 206 """Specifies that a Query should load results from a cache."""
208 207
209 208 propagate_to_loaders = False
210 209
211 def __init__(self, region, namespace, cache_key=None):
210 def __init__(self, region="sql_cache_short", cache_key=None):
212 211 """Construct a new FromCache.
213 212
214 213 :param region: the cache region. Should be a
215 region configured in the Beaker CacheManager.
216
217 :param namespace: the cache namespace. Should
218 be a name uniquely describing the target Query's
219 lexical structure.
214 region configured in the dictionary of dogpile
215 regions.
220 216
221 217 :param cache_key: optional. A string cache key
222 218 that will serve as the key to the query. Use this
223 219 if your query has a huge amount of parameters (such
224 220 as when using in_()) which correspond more simply to
225 221 some other identifier.
226 222
227 223 """
228 224 self.region = region
229 self.namespace = namespace
230 225 self.cache_key = cache_key
231 226
232 227 def process_query(self, query):
233 228 """Process a Query during normal loading operation."""
234
235 _set_cache_parameters(query, self.region, self.namespace,
236 self.cache_key)
229 query._cache_region = self
237 230
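Typical use of the option, mirroring how the models later in this changeset attach it to their queries (the ``User`` model and the explicit cache key are illustrative)::

    query = Session().query(User).filter(User.username == 'admin')
    # route the result through the 'sql_cache_short' dogpile region; omitting
    # cache_key falls back to _key_from_query() above
    user = query.options(
        FromCache("sql_cache_short", "get_user_by_name_admin")).scalar()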
238 231
239 232 class RelationshipCache(MapperOption):
240 233 """Specifies that a Query as called within a "lazy load"
241 234 should load results from a cache."""
242 235
243 236 propagate_to_loaders = True
244 237
245 def __init__(self, region, namespace, attribute):
238 def __init__(self, attribute, region="sql_cache_short", cache_key=None):
246 239 """Construct a new RelationshipCache.
247 240
248 :param region: the cache region. Should be a
249 region configured in the Beaker CacheManager.
250
251 :param namespace: the cache namespace. Should
252 be a name uniquely describing the target Query's
253 lexical structure.
254
255 241 :param attribute: A Class.attribute which
256 242 indicates a particular class relationship() whose
257 243 lazy loader should be pulled from the cache.
258 244
245 :param region: name of the cache region.
246
247 :param cache_key: optional. A string cache key
248 that will serve as the key to the query, bypassing
249 the usual means of forming a key from the Query itself.
250
259 251 """
260 252 self.region = region
261 self.namespace = namespace
253 self.cache_key = cache_key
262 254 self._relationship_options = {
263 255 (attribute.property.parent.class_, attribute.property.key): self
264 256 }
265 257
258 def _generate_cache_key(self, path):
259 """Indicate to the lazy-loader strategy that a "baked" query
260 may be used by returning ``None``.
261
262 If this method is omitted, the default implementation of
263 :class:`.MapperOption._generate_cache_key` takes place, which
264 returns ``False`` to disable the "baked" query from being used.
265
266 .. versionadded:: 1.2.7
267
268 """
269 return None
270
266 271 def process_query_conditionally(self, query):
267 272 """Process a Query that is used within a lazy loader.
268 273
269 274 (the process_query_conditionally() method is a SQLAlchemy
270 275 hook invoked only within lazyload.)
271 276
272 277 """
273 278 if query._current_path:
274 mapper, key = query._current_path[-2:]
279 mapper, prop = query._current_path[-2:]
280 key = prop.key
275 281
276 282 for cls in mapper.class_.__mro__:
277 283 if (cls, key) in self._relationship_options:
278 relationship_option = \
279 self._relationship_options[(cls, key)]
280 _set_cache_parameters(
281 query,
282 relationship_option.region,
283 relationship_option.namespace,
284 None)
284 relationship_option = self._relationship_options[(cls, key)]
285 query._cache_region = relationship_option
286 break
285 287
286 288 def and_(self, option):
287 289 """Chain another RelationshipCache option to this one.
288 290
289 291 While many RelationshipCache objects can be specified on a single
290 292 Query separately, chaining them together allows for a more efficient
291 293 lookup during load.
292 294
293 295 """
294 296 self._relationship_options.update(option._relationship_options)
295 297 return self
296 298
297
298 def _params_from_query(query):
299 """Pull the bind parameter values from a query.
300
301 This takes into account any scalar attribute bindparam set up.
302
303 E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
304 would return [5, 7].
305
306 """
307 v = []
308 def visit_bindparam(bind):
309
310 if bind.key in query._params:
311 value = query._params[bind.key]
312 elif bind.callable:
313 # lazyloader may dig a callable in here, intended
314 # to late-evaluate params after autoflush is called.
315 # convert to a scalar value.
316 value = bind.callable()
317 else:
318 value = bind.value
319
320 v.append(value)
321 if query._criterion is not None:
322 visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam})
323 for f in query._from_obj:
324 visitors.traverse(f, {}, {'bindparam':visit_bindparam})
325 return v
@@ -1,4527 +1,4527 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # noqa
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from beaker.cache import cache_region
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 52
53 53 from pyramid.threadlocal import get_current_request
54 54
55 55 from rhodecode.translation import _
56 56 from rhodecode.lib.vcs import get_vcs_instance
57 57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
58 58 from rhodecode.lib.utils2 import (
59 59 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
60 60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 61 glob2re, StrictAttributeDict, cleaned_uri)
62 62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
63 63 JsonRaw
64 64 from rhodecode.lib.ext_json import json
65 65 from rhodecode.lib.caching_query import FromCache
66 66 from rhodecode.lib.encrypt import AESCipher
67 67
68 68 from rhodecode.model.meta import Base, Session
69 69
70 70 URL_SEP = '/'
71 71 log = logging.getLogger(__name__)
72 72
73 73 # =============================================================================
74 74 # BASE CLASSES
75 75 # =============================================================================
76 76
77 77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
78 78 # beaker.session.secret if first is not set.
79 79 # and initialized at environment.py
80 80 ENCRYPTION_KEY = None
81 81
82 82 # used to sort permissions by types, '#' used here is not allowed to be in
83 83 # usernames, and it's very early in sorted string.printable table.
84 84 PERMISSION_TYPE_SORT = {
85 85 'admin': '####',
86 86 'write': '###',
87 87 'read': '##',
88 88 'none': '#',
89 89 }
90 90
91 91
92 92 def display_user_sort(obj):
93 93 """
94 94 Sort function used to sort permissions in .permissions() function of
95 95 Repository, RepoGroup, UserGroup. Also it puts the default user in front
96 96 of all other resources
97 97 """
98 98
99 99 if obj.username == User.DEFAULT_USER:
100 100 return '#####'
101 101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
102 102 return prefix + obj.username
103 103
104 104
105 105 def display_user_group_sort(obj):
106 106 """
107 107 Sort function used to sort permissions in .permissions() function of
108 108 Repository, RepoGroup, UserGroup. Also it put the default user in front
109 109 of all other resources
110 110 """
111 111
112 112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
113 113 return prefix + obj.users_group_name
114 114
115 115
116 116 def _hash_key(k):
117 117 return sha1_safe(k)
118 118
119 119
120 120 def in_filter_generator(qry, items, limit=500):
121 121 """
122 122 Splits a large IN() clause into multiple IN() clauses combined with OR
123 123 e.g.::
124 124 cnt = Repository.query().filter(
125 125 or_(
126 126 *in_filter_generator(Repository.repo_id, range(100000))
127 127 )).count()
128 128 """
129 129 if not items:
130 130 # empty list will cause empty query which might cause security issues
131 131 # this can lead to hidden unpleasant results
132 132 items = [-1]
133 133
134 134 parts = []
135 135 for chunk in xrange(0, len(items), limit):
136 136 parts.append(
137 137 qry.in_(items[chunk: chunk + limit])
138 138 )
139 139
140 140 return parts
141 141
142 142
143 143 base_table_args = {
144 144 'extend_existing': True,
145 145 'mysql_engine': 'InnoDB',
146 146 'mysql_charset': 'utf8',
147 147 'sqlite_autoincrement': True
148 148 }
149 149
150 150
151 151 class EncryptedTextValue(TypeDecorator):
152 152 """
153 153 Special column for encrypted long text data, use like::
154 154
155 155 value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
156 156
157 157 This column is intelligent: if the value is in unencrypted form it returns
158 158 the unencrypted form, but on save it always encrypts
159 159 """
160 160 impl = Text
161 161
162 162 def process_bind_param(self, value, dialect):
163 163 if not value:
164 164 return value
165 165 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
166 166 # protect against double encryption if someone manually starts
167 167 # doing it
168 168 raise ValueError('value needs to be in unencrypted format, ie. '
169 169 'not starting with enc$aes')
170 170 return 'enc$aes_hmac$%s' % AESCipher(
171 171 ENCRYPTION_KEY, hmac=True).encrypt(value)
172 172
173 173 def process_result_value(self, value, dialect):
174 174 import rhodecode
175 175
176 176 if not value:
177 177 return value
178 178
179 179 parts = value.split('$', 3)
180 180 if not len(parts) == 3:
181 181 # probably not encrypted values
182 182 return value
183 183 else:
184 184 if parts[0] != 'enc':
185 185 # parts ok but without our header ?
186 186 return value
187 187 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
188 188 'rhodecode.encrypted_values.strict') or True)
189 189 # at that stage we know it's our encryption
190 190 if parts[1] == 'aes':
191 191 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
192 192 elif parts[1] == 'aes_hmac':
193 193 decrypted_data = AESCipher(
194 194 ENCRYPTION_KEY, hmac=True,
195 195 strict_verification=enc_strict_mode).decrypt(parts[2])
196 196 else:
197 197 raise ValueError(
198 198 'Encryption type part is wrong, must be `aes` '
199 199 'or `aes_hmac`, got `%s` instead' % (parts[1]))
200 200 return decrypted_data
201 201
202 202
203 203 class BaseModel(object):
204 204 """
205 205 Base Model for all classes
206 206 """
207 207
208 208 @classmethod
209 209 def _get_keys(cls):
210 210 """return column names for this model """
211 211 return class_mapper(cls).c.keys()
212 212
213 213 def get_dict(self):
214 214 """
215 215 return dict with keys and values corresponding
216 216 to this model data """
217 217
218 218 d = {}
219 219 for k in self._get_keys():
220 220 d[k] = getattr(self, k)
221 221
222 222 # also use __json__() if present to get additional fields
223 223 _json_attr = getattr(self, '__json__', None)
224 224 if _json_attr:
225 225 # update with attributes from __json__
226 226 if callable(_json_attr):
227 227 _json_attr = _json_attr()
228 228 for k, val in _json_attr.iteritems():
229 229 d[k] = val
230 230 return d
231 231
232 232 def get_appstruct(self):
233 233 """return list with keys and values tuples corresponding
234 234 to this model data """
235 235
236 236 lst = []
237 237 for k in self._get_keys():
238 238 lst.append((k, getattr(self, k),))
239 239 return lst
240 240
241 241 def populate_obj(self, populate_dict):
242 242 """populate model with data from given populate_dict"""
243 243
244 244 for k in self._get_keys():
245 245 if k in populate_dict:
246 246 setattr(self, k, populate_dict[k])
247 247
248 248 @classmethod
249 249 def query(cls):
250 250 return Session().query(cls)
251 251
252 252 @classmethod
253 253 def get(cls, id_):
254 254 if id_:
255 255 return cls.query().get(id_)
256 256
257 257 @classmethod
258 258 def get_or_404(cls, id_):
259 259 from pyramid.httpexceptions import HTTPNotFound
260 260
261 261 try:
262 262 id_ = int(id_)
263 263 except (TypeError, ValueError):
264 264 raise HTTPNotFound()
265 265
266 266 res = cls.query().get(id_)
267 267 if not res:
268 268 raise HTTPNotFound()
269 269 return res
270 270
271 271 @classmethod
272 272 def getAll(cls):
273 273 # deprecated and left for backward compatibility
274 274 return cls.get_all()
275 275
276 276 @classmethod
277 277 def get_all(cls):
278 278 return cls.query().all()
279 279
280 280 @classmethod
281 281 def delete(cls, id_):
282 282 obj = cls.query().get(id_)
283 283 Session().delete(obj)
284 284
285 285 @classmethod
286 286 def identity_cache(cls, session, attr_name, value):
287 287 exist_in_session = []
288 288 for (item_cls, pkey), instance in session.identity_map.items():
289 289 if cls == item_cls and getattr(instance, attr_name) == value:
290 290 exist_in_session.append(instance)
291 291 if exist_in_session:
292 292 if len(exist_in_session) == 1:
293 293 return exist_in_session[0]
294 294 log.exception(
295 295 'multiple objects with attr %s and '
296 296 'value %s found with same name: %r',
297 297 attr_name, value, exist_in_session)
298 298
299 299 def __repr__(self):
300 300 if hasattr(self, '__unicode__'):
301 301 # python repr needs to return str
302 302 try:
303 303 return safe_str(self.__unicode__())
304 304 except UnicodeDecodeError:
305 305 pass
306 306 return '<DB:%s>' % (self.__class__.__name__)
307 307
308 308
309 309 class RhodeCodeSetting(Base, BaseModel):
310 310 __tablename__ = 'rhodecode_settings'
311 311 __table_args__ = (
312 312 UniqueConstraint('app_settings_name'),
313 313 base_table_args
314 314 )
315 315
316 316 SETTINGS_TYPES = {
317 317 'str': safe_str,
318 318 'int': safe_int,
319 319 'unicode': safe_unicode,
320 320 'bool': str2bool,
321 321 'list': functools.partial(aslist, sep=',')
322 322 }
323 323 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
324 324 GLOBAL_CONF_KEY = 'app_settings'
325 325
326 326 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
327 327 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
328 328 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
329 329 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
330 330
331 331 def __init__(self, key='', val='', type='unicode'):
332 332 self.app_settings_name = key
333 333 self.app_settings_type = type
334 334 self.app_settings_value = val
335 335
336 336 @validates('_app_settings_value')
337 337 def validate_settings_value(self, key, val):
338 338 assert type(val) == unicode
339 339 return val
340 340
341 341 @hybrid_property
342 342 def app_settings_value(self):
343 343 v = self._app_settings_value
344 344 _type = self.app_settings_type
345 345 if _type:
346 346 _type = self.app_settings_type.split('.')[0]
347 347 # decode the encrypted value
348 348 if 'encrypted' in self.app_settings_type:
349 349 cipher = EncryptedTextValue()
350 350 v = safe_unicode(cipher.process_result_value(v, None))
351 351
352 352 converter = self.SETTINGS_TYPES.get(_type) or \
353 353 self.SETTINGS_TYPES['unicode']
354 354 return converter(v)
355 355
356 356 @app_settings_value.setter
357 357 def app_settings_value(self, val):
358 358 """
359 359 Setter that will always make sure we use unicode in app_settings_value
360 360
361 361 :param val:
362 362 """
363 363 val = safe_unicode(val)
364 364 # encode the encrypted value
365 365 if 'encrypted' in self.app_settings_type:
366 366 cipher = EncryptedTextValue()
367 367 val = safe_unicode(cipher.process_bind_param(val, None))
368 368 self._app_settings_value = val
369 369
370 370 @hybrid_property
371 371 def app_settings_type(self):
372 372 return self._app_settings_type
373 373
374 374 @app_settings_type.setter
375 375 def app_settings_type(self, val):
376 376 if val.split('.')[0] not in self.SETTINGS_TYPES:
377 377 raise Exception('type must be one of %s got %s'
378 378 % (self.SETTINGS_TYPES.keys(), val))
379 379 self._app_settings_type = val
380 380
381 381 def __unicode__(self):
382 382 return u"<%s('%s:%s[%s]')>" % (
383 383 self.__class__.__name__,
384 384 self.app_settings_name, self.app_settings_value,
385 385 self.app_settings_type
386 386 )
387 387
388 388
389 389 class RhodeCodeUi(Base, BaseModel):
390 390 __tablename__ = 'rhodecode_ui'
391 391 __table_args__ = (
392 392 UniqueConstraint('ui_key'),
393 393 base_table_args
394 394 )
395 395
396 396 HOOK_REPO_SIZE = 'changegroup.repo_size'
397 397 # HG
398 398 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
399 399 HOOK_PULL = 'outgoing.pull_logger'
400 400 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
401 401 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
402 402 HOOK_PUSH = 'changegroup.push_logger'
403 403 HOOK_PUSH_KEY = 'pushkey.key_push'
404 404
405 405 # TODO: johbo: Unify way how hooks are configured for git and hg,
406 406 # git part is currently hardcoded.
407 407
408 408 # SVN PATTERNS
409 409 SVN_BRANCH_ID = 'vcs_svn_branch'
410 410 SVN_TAG_ID = 'vcs_svn_tag'
411 411
412 412 ui_id = Column(
413 413 "ui_id", Integer(), nullable=False, unique=True, default=None,
414 414 primary_key=True)
415 415 ui_section = Column(
416 416 "ui_section", String(255), nullable=True, unique=None, default=None)
417 417 ui_key = Column(
418 418 "ui_key", String(255), nullable=True, unique=None, default=None)
419 419 ui_value = Column(
420 420 "ui_value", String(255), nullable=True, unique=None, default=None)
421 421 ui_active = Column(
422 422 "ui_active", Boolean(), nullable=True, unique=None, default=True)
423 423
424 424 def __repr__(self):
425 425 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
426 426 self.ui_key, self.ui_value)
427 427
428 428
429 429 class RepoRhodeCodeSetting(Base, BaseModel):
430 430 __tablename__ = 'repo_rhodecode_settings'
431 431 __table_args__ = (
432 432 UniqueConstraint(
433 433 'app_settings_name', 'repository_id',
434 434 name='uq_repo_rhodecode_setting_name_repo_id'),
435 435 base_table_args
436 436 )
437 437
438 438 repository_id = Column(
439 439 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
440 440 nullable=False)
441 441 app_settings_id = Column(
442 442 "app_settings_id", Integer(), nullable=False, unique=True,
443 443 default=None, primary_key=True)
444 444 app_settings_name = Column(
445 445 "app_settings_name", String(255), nullable=True, unique=None,
446 446 default=None)
447 447 _app_settings_value = Column(
448 448 "app_settings_value", String(4096), nullable=True, unique=None,
449 449 default=None)
450 450 _app_settings_type = Column(
451 451 "app_settings_type", String(255), nullable=True, unique=None,
452 452 default=None)
453 453
454 454 repository = relationship('Repository')
455 455
456 456 def __init__(self, repository_id, key='', val='', type='unicode'):
457 457 self.repository_id = repository_id
458 458 self.app_settings_name = key
459 459 self.app_settings_type = type
460 460 self.app_settings_value = val
461 461
462 462 @validates('_app_settings_value')
463 463 def validate_settings_value(self, key, val):
464 464 assert type(val) == unicode
465 465 return val
466 466
467 467 @hybrid_property
468 468 def app_settings_value(self):
469 469 v = self._app_settings_value
470 470 type_ = self.app_settings_type
471 471 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
472 472 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
473 473 return converter(v)
474 474
475 475 @app_settings_value.setter
476 476 def app_settings_value(self, val):
477 477 """
478 478 Setter that will always make sure we use unicode in app_settings_value
479 479
480 480 :param val:
481 481 """
482 482 self._app_settings_value = safe_unicode(val)
483 483
484 484 @hybrid_property
485 485 def app_settings_type(self):
486 486 return self._app_settings_type
487 487
488 488 @app_settings_type.setter
489 489 def app_settings_type(self, val):
490 490 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
491 491 if val not in SETTINGS_TYPES:
492 492 raise Exception('type must be one of %s got %s'
493 493 % (SETTINGS_TYPES.keys(), val))
494 494 self._app_settings_type = val
495 495
496 496 def __unicode__(self):
497 497 return u"<%s('%s:%s:%s[%s]')>" % (
498 498 self.__class__.__name__, self.repository.repo_name,
499 499 self.app_settings_name, self.app_settings_value,
500 500 self.app_settings_type
501 501 )
502 502
503 503
504 504 class RepoRhodeCodeUi(Base, BaseModel):
505 505 __tablename__ = 'repo_rhodecode_ui'
506 506 __table_args__ = (
507 507 UniqueConstraint(
508 508 'repository_id', 'ui_section', 'ui_key',
509 509 name='uq_repo_rhodecode_ui_repository_id_section_key'),
510 510 base_table_args
511 511 )
512 512
513 513 repository_id = Column(
514 514 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
515 515 nullable=False)
516 516 ui_id = Column(
517 517 "ui_id", Integer(), nullable=False, unique=True, default=None,
518 518 primary_key=True)
519 519 ui_section = Column(
520 520 "ui_section", String(255), nullable=True, unique=None, default=None)
521 521 ui_key = Column(
522 522 "ui_key", String(255), nullable=True, unique=None, default=None)
523 523 ui_value = Column(
524 524 "ui_value", String(255), nullable=True, unique=None, default=None)
525 525 ui_active = Column(
526 526 "ui_active", Boolean(), nullable=True, unique=None, default=True)
527 527
528 528 repository = relationship('Repository')
529 529
530 530 def __repr__(self):
531 531 return '<%s[%s:%s]%s=>%s]>' % (
532 532 self.__class__.__name__, self.repository.repo_name,
533 533 self.ui_section, self.ui_key, self.ui_value)
534 534
535 535
536 536 class User(Base, BaseModel):
537 537 __tablename__ = 'users'
538 538 __table_args__ = (
539 539 UniqueConstraint('username'), UniqueConstraint('email'),
540 540 Index('u_username_idx', 'username'),
541 541 Index('u_email_idx', 'email'),
542 542 base_table_args
543 543 )
544 544
545 545 DEFAULT_USER = 'default'
546 546 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
547 547 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
548 548
549 549 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
550 550 username = Column("username", String(255), nullable=True, unique=None, default=None)
551 551 password = Column("password", String(255), nullable=True, unique=None, default=None)
552 552 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
553 553 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
554 554 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
555 555 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
556 556 _email = Column("email", String(255), nullable=True, unique=None, default=None)
557 557 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
558 558 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
559 559
560 560 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
561 561 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
562 562 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
563 563 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
564 564 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
565 565 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
566 566
567 567 user_log = relationship('UserLog')
568 568 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
569 569
570 570 repositories = relationship('Repository')
571 571 repository_groups = relationship('RepoGroup')
572 572 user_groups = relationship('UserGroup')
573 573
574 574 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
575 575 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
576 576
577 577 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
578 578 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
579 579 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
580 580
581 581 group_member = relationship('UserGroupMember', cascade='all')
582 582
583 583 notifications = relationship('UserNotification', cascade='all')
584 584 # notifications assigned to this user
585 585 user_created_notifications = relationship('Notification', cascade='all')
586 586 # comments created by this user
587 587 user_comments = relationship('ChangesetComment', cascade='all')
588 588 # user profile extra info
589 589 user_emails = relationship('UserEmailMap', cascade='all')
590 590 user_ip_map = relationship('UserIpMap', cascade='all')
591 591 user_auth_tokens = relationship('UserApiKeys', cascade='all')
592 592 user_ssh_keys = relationship('UserSshKeys', cascade='all')
593 593
594 594 # gists
595 595 user_gists = relationship('Gist', cascade='all')
596 596 # user pull requests
597 597 user_pull_requests = relationship('PullRequest', cascade='all')
598 598 # external identities
599 599 extenal_identities = relationship(
600 600 'ExternalIdentity',
601 601 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
602 602 cascade='all')
603 603 # review rules
604 604 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
605 605
606 606 def __unicode__(self):
607 607 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
608 608 self.user_id, self.username)
609 609
610 610 @hybrid_property
611 611 def email(self):
612 612 return self._email
613 613
614 614 @email.setter
615 615 def email(self, val):
616 616 self._email = val.lower() if val else None
617 617
618 618 @hybrid_property
619 619 def first_name(self):
620 620 from rhodecode.lib import helpers as h
621 621 if self.name:
622 622 return h.escape(self.name)
623 623 return self.name
624 624
625 625 @hybrid_property
626 626 def last_name(self):
627 627 from rhodecode.lib import helpers as h
628 628 if self.lastname:
629 629 return h.escape(self.lastname)
630 630 return self.lastname
631 631
632 632 @hybrid_property
633 633 def api_key(self):
634 634 """
635 635 Fetch, if one exists, an auth-token with role ALL connected to this user
636 636 """
637 637 user_auth_token = UserApiKeys.query()\
638 638 .filter(UserApiKeys.user_id == self.user_id)\
639 639 .filter(or_(UserApiKeys.expires == -1,
640 640 UserApiKeys.expires >= time.time()))\
641 641 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
642 642 if user_auth_token:
643 643 user_auth_token = user_auth_token.api_key
644 644
645 645 return user_auth_token
646 646
647 647 @api_key.setter
648 648 def api_key(self, val):
649 649 # don't allow setting the API key; this is deprecated for now
650 650 self._api_key = None
651 651
652 652 @property
653 653 def reviewer_pull_requests(self):
654 654 return PullRequestReviewers.query() \
655 655 .options(joinedload(PullRequestReviewers.pull_request)) \
656 656 .filter(PullRequestReviewers.user_id == self.user_id) \
657 657 .all()
658 658
659 659 @property
660 660 def firstname(self):
661 661 # alias for future
662 662 return self.name
663 663
664 664 @property
665 665 def emails(self):
666 666 other = UserEmailMap.query()\
667 667 .filter(UserEmailMap.user == self) \
668 668 .order_by(UserEmailMap.email_id.asc()) \
669 669 .all()
670 670 return [self.email] + [x.email for x in other]
671 671
672 672 @property
673 673 def auth_tokens(self):
674 674 auth_tokens = self.get_auth_tokens()
675 675 return [x.api_key for x in auth_tokens]
676 676
677 677 def get_auth_tokens(self):
678 678 return UserApiKeys.query()\
679 679 .filter(UserApiKeys.user == self)\
680 680 .order_by(UserApiKeys.user_api_key_id.asc())\
681 681 .all()
682 682
683 683 @LazyProperty
684 684 def feed_token(self):
685 685 return self.get_feed_token()
686 686
687 687 def get_feed_token(self, cache=True):
688 688 feed_tokens = UserApiKeys.query()\
689 689 .filter(UserApiKeys.user == self)\
690 690 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
691 691 if cache:
692 692 feed_tokens = feed_tokens.options(
693 FromCache("long_term", "get_user_feed_token_%s" % self.user_id))
693 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
694 694
695 695 feed_tokens = feed_tokens.all()
696 696 if feed_tokens:
697 697 return feed_tokens[0].api_key
698 698 return 'NO_FEED_TOKEN_AVAILABLE'
699 699
700 700 @classmethod
701 701 def get(cls, user_id, cache=False):
702 702 if not user_id:
703 703 return
704 704
705 705 user = cls.query()
706 706 if cache:
707 707 user = user.options(
708 708 FromCache("sql_cache_short", "get_users_%s" % user_id))
709 709 return user.get(user_id)
710 710
711 711 @classmethod
712 712 def extra_valid_auth_tokens(cls, user, role=None):
713 713 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
714 714 .filter(or_(UserApiKeys.expires == -1,
715 715 UserApiKeys.expires >= time.time()))
716 716 if role:
717 717 tokens = tokens.filter(or_(UserApiKeys.role == role,
718 718 UserApiKeys.role == UserApiKeys.ROLE_ALL))
719 719 return tokens.all()
720 720
721 721 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
722 722 from rhodecode.lib import auth
723 723
724 724 log.debug('Trying to authenticate user: %s via auth-token, '
725 725 'and roles: %s', self, roles)
726 726
727 727 if not auth_token:
728 728 return False
729 729
730 730 crypto_backend = auth.crypto_backend()
731 731
732 732 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
733 733 tokens_q = UserApiKeys.query()\
734 734 .filter(UserApiKeys.user_id == self.user_id)\
735 735 .filter(or_(UserApiKeys.expires == -1,
736 736 UserApiKeys.expires >= time.time()))
737 737
738 738 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
739 739
740 740 plain_tokens = []
741 741 hash_tokens = []
742 742
743 743 for token in tokens_q.all():
744 744 # verify scope first
745 745 if token.repo_id:
746 746 # token has a scope, we need to verify it
747 747 if scope_repo_id != token.repo_id:
748 748 log.debug(
749 749 'Scope mismatch: token has a set repo scope: %s, '
750 750 'and calling scope is:%s, skipping further checks',
751 751 token.repo, scope_repo_id)
752 752 # token has a scope, and it doesn't match, skip token
753 753 continue
754 754
755 755 if token.api_key.startswith(crypto_backend.ENC_PREF):
756 756 hash_tokens.append(token.api_key)
757 757 else:
758 758 plain_tokens.append(token.api_key)
759 759
760 760 is_plain_match = auth_token in plain_tokens
761 761 if is_plain_match:
762 762 return True
763 763
764 764 for hashed in hash_tokens:
765 765 # TODO(marcink): this is expensive to calculate, but most secure
766 766 match = crypto_backend.hash_check(auth_token, hashed)
767 767 if match:
768 768 return True
769 769
770 770 return False
771 771
772 772 @property
773 773 def ip_addresses(self):
774 774 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
775 775 return [x.ip_addr for x in ret]
776 776
777 777 @property
778 778 def username_and_name(self):
779 779 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
780 780
781 781 @property
782 782 def username_or_name_or_email(self):
783 783 full_name = self.full_name if self.full_name != ' ' else None
784 784 return self.username or full_name or self.email
785 785
786 786 @property
787 787 def full_name(self):
788 788 return '%s %s' % (self.first_name, self.last_name)
789 789
790 790 @property
791 791 def full_name_or_username(self):
792 792 return ('%s %s' % (self.first_name, self.last_name)
793 793 if (self.first_name and self.last_name) else self.username)
794 794
795 795 @property
796 796 def full_contact(self):
797 797 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
798 798
799 799 @property
800 800 def short_contact(self):
801 801 return '%s %s' % (self.first_name, self.last_name)
802 802
803 803 @property
804 804 def is_admin(self):
805 805 return self.admin
806 806
807 807 def AuthUser(self, **kwargs):
808 808 """
809 809 Returns instance of AuthUser for this user
810 810 """
811 811 from rhodecode.lib.auth import AuthUser
812 812 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
813 813
814 814 @hybrid_property
815 815 def user_data(self):
816 816 if not self._user_data:
817 817 return {}
818 818
819 819 try:
820 820 return json.loads(self._user_data)
821 821 except TypeError:
822 822 return {}
823 823
824 824 @user_data.setter
825 825 def user_data(self, val):
826 826 if not isinstance(val, dict):
827 827 raise Exception('user_data must be dict, got %s' % type(val))
828 828 try:
829 829 self._user_data = json.dumps(val)
830 830 except Exception:
831 831 log.error(traceback.format_exc())
832 832
833 833 @classmethod
834 834 def get_by_username(cls, username, case_insensitive=False,
835 835 cache=False, identity_cache=False):
836 836 session = Session()
837 837
838 838 if case_insensitive:
839 839 q = cls.query().filter(
840 840 func.lower(cls.username) == func.lower(username))
841 841 else:
842 842 q = cls.query().filter(cls.username == username)
843 843
844 844 if cache:
845 845 if identity_cache:
846 846 val = cls.identity_cache(session, 'username', username)
847 847 if val:
848 848 return val
849 849 else:
850 850 cache_key = "get_user_by_name_%s" % _hash_key(username)
851 851 q = q.options(
852 852 FromCache("sql_cache_short", cache_key))
853 853
854 854 return q.scalar()
855 855
856 856 @classmethod
857 857 def get_by_auth_token(cls, auth_token, cache=False):
858 858 q = UserApiKeys.query()\
859 859 .filter(UserApiKeys.api_key == auth_token)\
860 860 .filter(or_(UserApiKeys.expires == -1,
861 861 UserApiKeys.expires >= time.time()))
862 862 if cache:
863 863 q = q.options(
864 864 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
865 865
866 866 match = q.first()
867 867 if match:
868 868 return match.user
869 869
870 870 @classmethod
871 871 def get_by_email(cls, email, case_insensitive=False, cache=False):
872 872
873 873 if case_insensitive:
874 874 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
875 875
876 876 else:
877 877 q = cls.query().filter(cls.email == email)
878 878
879 879 email_key = _hash_key(email)
880 880 if cache:
881 881 q = q.options(
882 882 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
883 883
884 884 ret = q.scalar()
885 885 if ret is None:
886 886 q = UserEmailMap.query()
887 887 # try fetching in alternate email map
888 888 if case_insensitive:
889 889 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
890 890 else:
891 891 q = q.filter(UserEmailMap.email == email)
892 892 q = q.options(joinedload(UserEmailMap.user))
893 893 if cache:
894 894 q = q.options(
895 895 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
896 896 ret = getattr(q.scalar(), 'user', None)
897 897
898 898 return ret
899 899
900 900 @classmethod
901 901 def get_from_cs_author(cls, author):
902 902 """
903 903 Tries to get User objects out of commit author string
904 904
905 905 :param author:
906 906 """
907 907 from rhodecode.lib.helpers import email, author_name
908 908 # Valid email in the attribute passed, see if they're in the system
909 909 _email = email(author)
910 910 if _email:
911 911 user = cls.get_by_email(_email, case_insensitive=True)
912 912 if user:
913 913 return user
914 914 # Maybe we can match by username?
915 915 _author = author_name(author)
916 916 user = cls.get_by_username(_author, case_insensitive=True)
917 917 if user:
918 918 return user
919 919
920 920 def update_userdata(self, **kwargs):
921 921 usr = self
922 922 old = usr.user_data
923 923 old.update(**kwargs)
924 924 usr.user_data = old
925 925 Session().add(usr)
926 926 log.debug('updated userdata with %s', kwargs)
927 927
928 928 def update_lastlogin(self):
929 929 """Update user lastlogin"""
930 930 self.last_login = datetime.datetime.now()
931 931 Session().add(self)
932 932 log.debug('updated user %s lastlogin', self.username)
933 933
934 934 def update_lastactivity(self):
935 935 """Update user lastactivity"""
936 936 self.last_activity = datetime.datetime.now()
937 937 Session().add(self)
938 938 log.debug('updated user `%s` last activity', self.username)
939 939
940 940 def update_password(self, new_password):
941 941 from rhodecode.lib.auth import get_crypt_password
942 942
943 943 self.password = get_crypt_password(new_password)
944 944 Session().add(self)
945 945
946 946 @classmethod
947 947 def get_first_super_admin(cls):
948 948 user = User.query().filter(User.admin == true()).first()
949 949 if user is None:
950 950 raise Exception('FATAL: Missing administrative account!')
951 951 return user
952 952
953 953 @classmethod
954 954 def get_all_super_admins(cls):
955 955 """
956 956 Returns all admin accounts sorted by username
957 957 """
958 958 return User.query().filter(User.admin == true())\
959 959 .order_by(User.username.asc()).all()
960 960
961 961 @classmethod
962 962 def get_default_user(cls, cache=False, refresh=False):
963 963 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
964 964 if user is None:
965 965 raise Exception('FATAL: Missing default account!')
966 966 if refresh:
967 967 # The default user might be based on outdated state which
968 968 # has been loaded from the cache.
969 969 # A call to refresh() ensures that the
970 970 # latest state from the database is used.
971 971 Session().refresh(user)
972 972 return user
973 973
974 974 def _get_default_perms(self, user, suffix=''):
975 975 from rhodecode.model.permission import PermissionModel
976 976 return PermissionModel().get_default_perms(user.user_perms, suffix)
977 977
978 978 def get_default_perms(self, suffix=''):
979 979 return self._get_default_perms(self, suffix)
980 980
981 981 def get_api_data(self, include_secrets=False, details='full'):
982 982 """
983 983 Common function for generating user related data for API
984 984
985 985 :param include_secrets: By default secrets in the API data will be replaced
986 986 by a placeholder value to prevent exposing this data by accident. In case
987 987 this data shall be exposed, set this flag to ``True``.
988 988
989 989 :param details: details can be 'basic|full' basic gives only a subset of
990 990 the available user information that includes user_id, name and emails.
991 991 """
992 992 user = self
993 993 user_data = self.user_data
994 994 data = {
995 995 'user_id': user.user_id,
996 996 'username': user.username,
997 997 'firstname': user.name,
998 998 'lastname': user.lastname,
999 999 'email': user.email,
1000 1000 'emails': user.emails,
1001 1001 }
1002 1002 if details == 'basic':
1003 1003 return data
1004 1004
1005 1005 auth_token_length = 40
1006 1006 auth_token_replacement = '*' * auth_token_length
1007 1007
1008 1008 extras = {
1009 1009 'auth_tokens': [auth_token_replacement],
1010 1010 'active': user.active,
1011 1011 'admin': user.admin,
1012 1012 'extern_type': user.extern_type,
1013 1013 'extern_name': user.extern_name,
1014 1014 'last_login': user.last_login,
1015 1015 'last_activity': user.last_activity,
1016 1016 'ip_addresses': user.ip_addresses,
1017 1017 'language': user_data.get('language')
1018 1018 }
1019 1019 data.update(extras)
1020 1020
1021 1021 if include_secrets:
1022 1022 data['auth_tokens'] = user.auth_tokens
1023 1023 return data
1024 1024
1025 1025 def __json__(self):
1026 1026 data = {
1027 1027 'full_name': self.full_name,
1028 1028 'full_name_or_username': self.full_name_or_username,
1029 1029 'short_contact': self.short_contact,
1030 1030 'full_contact': self.full_contact,
1031 1031 }
1032 1032 data.update(self.get_api_data())
1033 1033 return data
1034 1034
1035 1035
1036 1036 class UserApiKeys(Base, BaseModel):
1037 1037 __tablename__ = 'user_api_keys'
1038 1038 __table_args__ = (
1039 1039 Index('uak_api_key_idx', 'api_key', unique=True),
1040 1040 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1041 1041 base_table_args
1042 1042 )
1043 1043 __mapper_args__ = {}
1044 1044
1045 1045 # ApiKey role
1046 1046 ROLE_ALL = 'token_role_all'
1047 1047 ROLE_HTTP = 'token_role_http'
1048 1048 ROLE_VCS = 'token_role_vcs'
1049 1049 ROLE_API = 'token_role_api'
1050 1050 ROLE_FEED = 'token_role_feed'
1051 1051 ROLE_PASSWORD_RESET = 'token_password_reset'
1052 1052
1053 1053 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1054 1054
1055 1055 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1056 1056 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1057 1057 api_key = Column("api_key", String(255), nullable=False, unique=True)
1058 1058 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1059 1059 expires = Column('expires', Float(53), nullable=False)
1060 1060 role = Column('role', String(255), nullable=True)
1061 1061 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1062 1062
1063 1063 # scope columns
1064 1064 repo_id = Column(
1065 1065 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1066 1066 nullable=True, unique=None, default=None)
1067 1067 repo = relationship('Repository', lazy='joined')
1068 1068
1069 1069 repo_group_id = Column(
1070 1070 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1071 1071 nullable=True, unique=None, default=None)
1072 1072 repo_group = relationship('RepoGroup', lazy='joined')
1073 1073
1074 1074 user = relationship('User', lazy='joined')
1075 1075
1076 1076 def __unicode__(self):
1077 1077 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1078 1078
1079 1079 def __json__(self):
1080 1080 data = {
1081 1081 'auth_token': self.api_key,
1082 1082 'role': self.role,
1083 1083 'scope': self.scope_humanized,
1084 1084 'expired': self.expired
1085 1085 }
1086 1086 return data
1087 1087
1088 1088 def get_api_data(self, include_secrets=False):
1089 1089 data = self.__json__()
1090 1090 if include_secrets:
1091 1091 return data
1092 1092 else:
1093 1093 data['auth_token'] = self.token_obfuscated
1094 1094 return data
1095 1095
1096 1096 @hybrid_property
1097 1097 def description_safe(self):
1098 1098 from rhodecode.lib import helpers as h
1099 1099 return h.escape(self.description)
1100 1100
1101 1101 @property
1102 1102 def expired(self):
1103 1103 if self.expires == -1:
1104 1104 return False
1105 1105 return time.time() > self.expires
1106 1106
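# Illustrative usage sketch (editor's note, not part of the original source).
# `expires` holds a unix timestamp, with -1 meaning "never expires"; a lookup
# by token value (the variable `plain_token` is hypothetical) could be:
#
#     token = UserApiKeys.query().filter(UserApiKeys.api_key == plain_token).scalar()
#     if token and not token.expired:
#         pass  # token is non-expiring (-1) or still within its lifetime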
1107 1107 @classmethod
1108 1108 def _get_role_name(cls, role):
1109 1109 return {
1110 1110 cls.ROLE_ALL: _('all'),
1111 1111 cls.ROLE_HTTP: _('http/web interface'),
1112 1112 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1113 1113 cls.ROLE_API: _('api calls'),
1114 1114 cls.ROLE_FEED: _('feed access'),
1115 1115 }.get(role, role)
1116 1116
1117 1117 @property
1118 1118 def role_humanized(self):
1119 1119 return self._get_role_name(self.role)
1120 1120
1121 1121 def _get_scope(self):
1122 1122 if self.repo:
1123 1123 return repr(self.repo)
1124 1124 if self.repo_group:
1125 1125 return repr(self.repo_group) + ' (recursive)'
1126 1126 return 'global'
1127 1127
1128 1128 @property
1129 1129 def scope_humanized(self):
1130 1130 return self._get_scope()
1131 1131
1132 1132 @property
1133 1133 def token_obfuscated(self):
1134 1134 if self.api_key:
1135 1135 return self.api_key[:4] + "****"
1136 1136
1137 1137
1138 1138 class UserEmailMap(Base, BaseModel):
1139 1139 __tablename__ = 'user_email_map'
1140 1140 __table_args__ = (
1141 1141 Index('uem_email_idx', 'email'),
1142 1142 UniqueConstraint('email'),
1143 1143 base_table_args
1144 1144 )
1145 1145 __mapper_args__ = {}
1146 1146
1147 1147 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1148 1148 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1149 1149 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1150 1150 user = relationship('User', lazy='joined')
1151 1151
1152 1152 @validates('_email')
1153 1153 def validate_email(self, key, email):
1154 1154 # check that this email is not already the user's main email
1155 1155 main_email = Session().query(User).filter(User.email == email).scalar()
1156 1156 if main_email is not None:
1157 1157 raise AttributeError('email %s is present in user table' % email)
1158 1158 return email
1159 1159
1160 1160 @hybrid_property
1161 1161 def email(self):
1162 1162 return self._email
1163 1163
1164 1164 @email.setter
1165 1165 def email(self, val):
1166 1166 self._email = val.lower() if val else None
1167 1167
1168 1168
1169 1169 class UserIpMap(Base, BaseModel):
1170 1170 __tablename__ = 'user_ip_map'
1171 1171 __table_args__ = (
1172 1172 UniqueConstraint('user_id', 'ip_addr'),
1173 1173 base_table_args
1174 1174 )
1175 1175 __mapper_args__ = {}
1176 1176
1177 1177 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1178 1178 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1179 1179 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1180 1180 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1181 1181 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1182 1182 user = relationship('User', lazy='joined')
1183 1183
1184 1184 @hybrid_property
1185 1185 def description_safe(self):
1186 1186 from rhodecode.lib import helpers as h
1187 1187 return h.escape(self.description)
1188 1188
1189 1189 @classmethod
1190 1190 def _get_ip_range(cls, ip_addr):
1191 1191 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1192 1192 return [str(net.network_address), str(net.broadcast_address)]
1193 1193
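# Illustrative usage sketch (editor's note, not part of the original source).
# `_get_ip_range` expands an entry into its first and last address:
#
#     UserIpMap._get_ip_range('192.168.1.0/24')  # -> ['192.168.1.0', '192.168.1.255']
#     UserIpMap._get_ip_range('10.0.0.1')        # -> ['10.0.0.1', '10.0.0.1']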
1194 1194 def __json__(self):
1195 1195 return {
1196 1196 'ip_addr': self.ip_addr,
1197 1197 'ip_range': self._get_ip_range(self.ip_addr),
1198 1198 }
1199 1199
1200 1200 def __unicode__(self):
1201 1201 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1202 1202 self.user_id, self.ip_addr)
1203 1203
1204 1204
1205 1205 class UserSshKeys(Base, BaseModel):
1206 1206 __tablename__ = 'user_ssh_keys'
1207 1207 __table_args__ = (
1208 1208 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1209 1209
1210 1210 UniqueConstraint('ssh_key_fingerprint'),
1211 1211
1212 1212 base_table_args
1213 1213 )
1214 1214 __mapper_args__ = {}
1215 1215
1216 1216 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1217 1217 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1218 1218 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1219 1219
1220 1220 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1221 1221
1222 1222 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1223 1223 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1224 1224 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1225 1225
1226 1226 user = relationship('User', lazy='joined')
1227 1227
1228 1228 def __json__(self):
1229 1229 data = {
1230 1230 'ssh_fingerprint': self.ssh_key_fingerprint,
1231 1231 'description': self.description,
1232 1232 'created_on': self.created_on
1233 1233 }
1234 1234 return data
1235 1235
1236 1236 def get_api_data(self):
1237 1237 data = self.__json__()
1238 1238 return data
1239 1239
1240 1240
1241 1241 class UserLog(Base, BaseModel):
1242 1242 __tablename__ = 'user_logs'
1243 1243 __table_args__ = (
1244 1244 base_table_args,
1245 1245 )
1246 1246
1247 1247 VERSION_1 = 'v1'
1248 1248 VERSION_2 = 'v2'
1249 1249 VERSIONS = [VERSION_1, VERSION_2]
1250 1250
1251 1251 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1252 1252 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1253 1253 username = Column("username", String(255), nullable=True, unique=None, default=None)
1254 1254 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1255 1255 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1256 1256 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1257 1257 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1258 1258 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1259 1259
1260 1260 version = Column("version", String(255), nullable=True, default=VERSION_1)
1261 1261 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1262 1262 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1263 1263
1264 1264 def __unicode__(self):
1265 1265 return u"<%s('id:%s:%s')>" % (
1266 1266 self.__class__.__name__, self.repository_name, self.action)
1267 1267
1268 1268 def __json__(self):
1269 1269 return {
1270 1270 'user_id': self.user_id,
1271 1271 'username': self.username,
1272 1272 'repository_id': self.repository_id,
1273 1273 'repository_name': self.repository_name,
1274 1274 'user_ip': self.user_ip,
1275 1275 'action_date': self.action_date,
1276 1276 'action': self.action,
1277 1277 }
1278 1278
1279 1279 @hybrid_property
1280 1280 def entry_id(self):
1281 1281 return self.user_log_id
1282 1282
1283 1283 @property
1284 1284 def action_as_day(self):
1285 1285 return datetime.date(*self.action_date.timetuple()[:3])
1286 1286
1287 1287 user = relationship('User')
1288 1288 repository = relationship('Repository', cascade='')
1289 1289
1290 1290
1291 1291 class UserGroup(Base, BaseModel):
1292 1292 __tablename__ = 'users_groups'
1293 1293 __table_args__ = (
1294 1294 base_table_args,
1295 1295 )
1296 1296
1297 1297 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1298 1298 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1299 1299 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1300 1300 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1301 1301 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1302 1302 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1303 1303 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1304 1304 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1305 1305
1306 1306 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1307 1307 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1308 1308 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1309 1309 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1310 1310 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1311 1311 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1312 1312
1313 1313 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1314 1314 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1315 1315
1316 1316 @classmethod
1317 1317 def _load_group_data(cls, column):
1318 1318 if not column:
1319 1319 return {}
1320 1320
1321 1321 try:
1322 1322 return json.loads(column) or {}
1323 1323 except TypeError:
1324 1324 return {}
1325 1325
1326 1326 @hybrid_property
1327 1327 def description_safe(self):
1328 1328 from rhodecode.lib import helpers as h
1329 1329 return h.escape(self.user_group_description)
1330 1330
1331 1331 @hybrid_property
1332 1332 def group_data(self):
1333 1333 return self._load_group_data(self._group_data)
1334 1334
1335 1335 @group_data.expression
1336 1336 def group_data(self, **kwargs):
1337 1337 return self._group_data
1338 1338
1339 1339 @group_data.setter
1340 1340 def group_data(self, val):
1341 1341 try:
1342 1342 self._group_data = json.dumps(val)
1343 1343 except Exception:
1344 1344 log.error(traceback.format_exc())
1345 1345
1346 1346 @classmethod
1347 1347 def _load_sync(cls, group_data):
1348 1348 if group_data:
1349 1349 return group_data.get('extern_type')
1350 1350
1351 1351 @property
1352 1352 def sync(self):
1353 1353 return self._load_sync(self.group_data)
1354 1354
1355 1355 def __unicode__(self):
1356 1356 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1357 1357 self.users_group_id,
1358 1358 self.users_group_name)
1359 1359
1360 1360 @classmethod
1361 1361 def get_by_group_name(cls, group_name, cache=False,
1362 1362 case_insensitive=False):
1363 1363 if case_insensitive:
1364 1364 q = cls.query().filter(func.lower(cls.users_group_name) ==
1365 1365 func.lower(group_name))
1366 1366
1367 1367 else:
1368 1368 q = cls.query().filter(cls.users_group_name == group_name)
1369 1369 if cache:
1370 1370 q = q.options(
1371 1371 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1372 1372 return q.scalar()
1373 1373
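# Illustrative usage sketch (editor's note, not part of the original source).
# With cache=True the lookup above is served from the dogpile-backed
# "sql_cache_short" region instead of re-querying the database each time
# (the group name 'devops' is hypothetical):
#
#     ug = UserGroup.get_by_group_name('devops', cache=True)
#     ug_again = UserGroup.get_by_group_name('devops', cache=True)  # cached result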
1374 1374 @classmethod
1375 1375 def get(cls, user_group_id, cache=False):
1376 1376 if not user_group_id:
1377 1377 return
1378 1378
1379 1379 user_group = cls.query()
1380 1380 if cache:
1381 1381 user_group = user_group.options(
1382 1382 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1383 1383 return user_group.get(user_group_id)
1384 1384
1385 1385 def permissions(self, with_admins=True, with_owner=True):
1386 1386 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1387 1387 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1388 1388 joinedload(UserUserGroupToPerm.user),
1389 1389 joinedload(UserUserGroupToPerm.permission),)
1390 1390
1391 1391 # get owners, admins and their permissions. We rewrite the SQLAlchemy
1392 1392 # objects into plain AttributeDicts because the SQLAlchemy session holds
1393 1393 # a global reference, so changing one object would propagate to all
1394 1394 # others. Without this, if an admin is also the owner, setting admin_row
1395 1395 # on one row would leak into the other object as well
1396 1396 perm_rows = []
1397 1397 for _usr in q.all():
1398 1398 usr = AttributeDict(_usr.user.get_dict())
1399 1399 usr.permission = _usr.permission.permission_name
1400 1400 perm_rows.append(usr)
1401 1401
1402 1402 # sort the perm rows: 'default' user first, then by permission level
1403 1403 # (admin, write, read, none), and alphabetically within each of those
1404 1404 # groups
1405 1405 perm_rows = sorted(perm_rows, key=display_user_sort)
1406 1406
1407 1407 _admin_perm = 'usergroup.admin'
1408 1408 owner_row = []
1409 1409 if with_owner:
1410 1410 usr = AttributeDict(self.user.get_dict())
1411 1411 usr.owner_row = True
1412 1412 usr.permission = _admin_perm
1413 1413 owner_row.append(usr)
1414 1414
1415 1415 super_admin_rows = []
1416 1416 if with_admins:
1417 1417 for usr in User.get_all_super_admins():
1418 1418 # if this admin is also owner, don't double the record
1419 1419 if usr.user_id == owner_row[0].user_id:
1420 1420 owner_row[0].admin_row = True
1421 1421 else:
1422 1422 usr = AttributeDict(usr.get_dict())
1423 1423 usr.admin_row = True
1424 1424 usr.permission = _admin_perm
1425 1425 super_admin_rows.append(usr)
1426 1426
1427 1427 return super_admin_rows + owner_row + perm_rows
1428 1428
1429 1429 def permission_user_groups(self):
1430 1430 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1431 1431 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1432 1432 joinedload(UserGroupUserGroupToPerm.target_user_group),
1433 1433 joinedload(UserGroupUserGroupToPerm.permission),)
1434 1434
1435 1435 perm_rows = []
1436 1436 for _user_group in q.all():
1437 1437 usr = AttributeDict(_user_group.user_group.get_dict())
1438 1438 usr.permission = _user_group.permission.permission_name
1439 1439 perm_rows.append(usr)
1440 1440
1441 1441 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1442 1442 return perm_rows
1443 1443
1444 1444 def _get_default_perms(self, user_group, suffix=''):
1445 1445 from rhodecode.model.permission import PermissionModel
1446 1446 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1447 1447
1448 1448 def get_default_perms(self, suffix=''):
1449 1449 return self._get_default_perms(self, suffix)
1450 1450
1451 1451 def get_api_data(self, with_group_members=True, include_secrets=False):
1452 1452 """
1453 1453 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1454 1454 basically forwarded.
1455 1455
1456 1456 """
1457 1457 user_group = self
1458 1458 data = {
1459 1459 'users_group_id': user_group.users_group_id,
1460 1460 'group_name': user_group.users_group_name,
1461 1461 'group_description': user_group.user_group_description,
1462 1462 'active': user_group.users_group_active,
1463 1463 'owner': user_group.user.username,
1464 1464 'sync': user_group.sync,
1465 1465 'owner_email': user_group.user.email,
1466 1466 }
1467 1467
1468 1468 if with_group_members:
1469 1469 users = []
1470 1470 for user in user_group.members:
1471 1471 user = user.user
1472 1472 users.append(user.get_api_data(include_secrets=include_secrets))
1473 1473 data['users'] = users
1474 1474
1475 1475 return data
1476 1476
1477 1477
1478 1478 class UserGroupMember(Base, BaseModel):
1479 1479 __tablename__ = 'users_groups_members'
1480 1480 __table_args__ = (
1481 1481 base_table_args,
1482 1482 )
1483 1483
1484 1484 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1485 1485 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1486 1486 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1487 1487
1488 1488 user = relationship('User', lazy='joined')
1489 1489 users_group = relationship('UserGroup')
1490 1490
1491 1491 def __init__(self, gr_id='', u_id=''):
1492 1492 self.users_group_id = gr_id
1493 1493 self.user_id = u_id
1494 1494
1495 1495
1496 1496 class RepositoryField(Base, BaseModel):
1497 1497 __tablename__ = 'repositories_fields'
1498 1498 __table_args__ = (
1499 1499 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1500 1500 base_table_args,
1501 1501 )
1502 1502
1503 1503 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1504 1504
1505 1505 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1506 1506 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1507 1507 field_key = Column("field_key", String(250))
1508 1508 field_label = Column("field_label", String(1024), nullable=False)
1509 1509 field_value = Column("field_value", String(10000), nullable=False)
1510 1510 field_desc = Column("field_desc", String(1024), nullable=False)
1511 1511 field_type = Column("field_type", String(255), nullable=False, unique=None)
1512 1512 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1513 1513
1514 1514 repository = relationship('Repository')
1515 1515
1516 1516 @property
1517 1517 def field_key_prefixed(self):
1518 1518 return 'ex_%s' % self.field_key
1519 1519
1520 1520 @classmethod
1521 1521 def un_prefix_key(cls, key):
1522 1522 if key.startswith(cls.PREFIX):
1523 1523 return key[len(cls.PREFIX):]
1524 1524 return key
1525 1525
1526 1526 @classmethod
1527 1527 def get_by_key_name(cls, key, repo):
1528 1528 row = cls.query()\
1529 1529 .filter(cls.repository == repo)\
1530 1530 .filter(cls.field_key == key).scalar()
1531 1531 return row
1532 1532
1533 1533
1534 1534 class Repository(Base, BaseModel):
1535 1535 __tablename__ = 'repositories'
1536 1536 __table_args__ = (
1537 1537 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1538 1538 base_table_args,
1539 1539 )
1540 1540 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1541 1541 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1542 1542 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1543 1543
1544 1544 STATE_CREATED = 'repo_state_created'
1545 1545 STATE_PENDING = 'repo_state_pending'
1546 1546 STATE_ERROR = 'repo_state_error'
1547 1547
1548 1548 LOCK_AUTOMATIC = 'lock_auto'
1549 1549 LOCK_API = 'lock_api'
1550 1550 LOCK_WEB = 'lock_web'
1551 1551 LOCK_PULL = 'lock_pull'
1552 1552
1553 1553 NAME_SEP = URL_SEP
1554 1554
1555 1555 repo_id = Column(
1556 1556 "repo_id", Integer(), nullable=False, unique=True, default=None,
1557 1557 primary_key=True)
1558 1558 _repo_name = Column(
1559 1559 "repo_name", Text(), nullable=False, default=None)
1560 1560 _repo_name_hash = Column(
1561 1561 "repo_name_hash", String(255), nullable=False, unique=True)
1562 1562 repo_state = Column("repo_state", String(255), nullable=True)
1563 1563
1564 1564 clone_uri = Column(
1565 1565 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1566 1566 default=None)
1567 1567 push_uri = Column(
1568 1568 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1569 1569 default=None)
1570 1570 repo_type = Column(
1571 1571 "repo_type", String(255), nullable=False, unique=False, default=None)
1572 1572 user_id = Column(
1573 1573 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1574 1574 unique=False, default=None)
1575 1575 private = Column(
1576 1576 "private", Boolean(), nullable=True, unique=None, default=None)
1577 1577 enable_statistics = Column(
1578 1578 "statistics", Boolean(), nullable=True, unique=None, default=True)
1579 1579 enable_downloads = Column(
1580 1580 "downloads", Boolean(), nullable=True, unique=None, default=True)
1581 1581 description = Column(
1582 1582 "description", String(10000), nullable=True, unique=None, default=None)
1583 1583 created_on = Column(
1584 1584 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1585 1585 default=datetime.datetime.now)
1586 1586 updated_on = Column(
1587 1587 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1588 1588 default=datetime.datetime.now)
1589 1589 _landing_revision = Column(
1590 1590 "landing_revision", String(255), nullable=False, unique=False,
1591 1591 default=None)
1592 1592 enable_locking = Column(
1593 1593 "enable_locking", Boolean(), nullable=False, unique=None,
1594 1594 default=False)
1595 1595 _locked = Column(
1596 1596 "locked", String(255), nullable=True, unique=False, default=None)
1597 1597 _changeset_cache = Column(
1598 1598 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1599 1599
1600 1600 fork_id = Column(
1601 1601 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1602 1602 nullable=True, unique=False, default=None)
1603 1603 group_id = Column(
1604 1604 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1605 1605 unique=False, default=None)
1606 1606
1607 1607 user = relationship('User', lazy='joined')
1608 1608 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1609 1609 group = relationship('RepoGroup', lazy='joined')
1610 1610 repo_to_perm = relationship(
1611 1611 'UserRepoToPerm', cascade='all',
1612 1612 order_by='UserRepoToPerm.repo_to_perm_id')
1613 1613 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1614 1614 stats = relationship('Statistics', cascade='all', uselist=False)
1615 1615
1616 1616 followers = relationship(
1617 1617 'UserFollowing',
1618 1618 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1619 1619 cascade='all')
1620 1620 extra_fields = relationship(
1621 1621 'RepositoryField', cascade="all, delete, delete-orphan")
1622 1622 logs = relationship('UserLog')
1623 1623 comments = relationship(
1624 1624 'ChangesetComment', cascade="all, delete, delete-orphan")
1625 1625 pull_requests_source = relationship(
1626 1626 'PullRequest',
1627 1627 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1628 1628 cascade="all, delete, delete-orphan")
1629 1629 pull_requests_target = relationship(
1630 1630 'PullRequest',
1631 1631 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1632 1632 cascade="all, delete, delete-orphan")
1633 1633 ui = relationship('RepoRhodeCodeUi', cascade="all")
1634 1634 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1635 1635 integrations = relationship('Integration',
1636 1636 cascade="all, delete, delete-orphan")
1637 1637
1638 1638 scoped_tokens = relationship('UserApiKeys', cascade="all")
1639 1639
1640 1640 def __unicode__(self):
1641 1641 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1642 1642 safe_unicode(self.repo_name))
1643 1643
1644 1644 @hybrid_property
1645 1645 def description_safe(self):
1646 1646 from rhodecode.lib import helpers as h
1647 1647 return h.escape(self.description)
1648 1648
1649 1649 @hybrid_property
1650 1650 def landing_rev(self):
1651 1651 # always should return [rev_type, rev]
1652 1652 if self._landing_revision:
1653 1653 _rev_info = self._landing_revision.split(':')
1654 1654 if len(_rev_info) < 2:
1655 1655 _rev_info.insert(0, 'rev')
1656 1656 return [_rev_info[0], _rev_info[1]]
1657 1657 return [None, None]
1658 1658
1659 1659 @landing_rev.setter
1660 1660 def landing_rev(self, val):
1661 1661 if ':' not in val:
1662 1662 raise ValueError('value must be delimited with `:` and consist '
1663 1663 'of <rev_type>:<rev>, got %s instead' % val)
1664 1664 self._landing_revision = val
1665 1665
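# Illustrative usage sketch (editor's note, not part of the original source).
# The landing revision is stored as one '<rev_type>:<rev>' string and read
# back as a pair, on some Repository instance `repo` (hypothetical):
#
#     repo.landing_rev = 'branch:default'
#     repo.landing_rev              # -> ['branch', 'default']
#     repo.landing_rev = 'default'  # raises ValueError, ':' separator required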
1666 1666 @hybrid_property
1667 1667 def locked(self):
1668 1668 if self._locked:
1669 1669 user_id, timelocked, reason = self._locked.split(':')
1670 1670 lock_values = int(user_id), timelocked, reason
1671 1671 else:
1672 1672 lock_values = [None, None, None]
1673 1673 return lock_values
1674 1674
1675 1675 @locked.setter
1676 1676 def locked(self, val):
1677 1677 if val and isinstance(val, (list, tuple)):
1678 1678 self._locked = ':'.join(map(str, val))
1679 1679 else:
1680 1680 self._locked = None
1681 1681
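# Illustrative usage sketch (editor's note, not part of the original source).
# The lock is persisted as a single 'user_id:timestamp:reason' string and the
# property round-trips it, on some Repository instance `repo` (hypothetical):
#
#     repo.locked = [2, time.time(), Repository.LOCK_API]
#     user_id, locked_at, reason = repo.locked  # -> (2, '<timestamp string>', 'lock_api')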
1682 1682 @hybrid_property
1683 1683 def changeset_cache(self):
1684 1684 from rhodecode.lib.vcs.backends.base import EmptyCommit
1685 1685 dummy = EmptyCommit().__json__()
1686 1686 if not self._changeset_cache:
1687 1687 return dummy
1688 1688 try:
1689 1689 return json.loads(self._changeset_cache)
1690 1690 except TypeError:
1691 1691 return dummy
1692 1692 except Exception:
1693 1693 log.error(traceback.format_exc())
1694 1694 return dummy
1695 1695
1696 1696 @changeset_cache.setter
1697 1697 def changeset_cache(self, val):
1698 1698 try:
1699 1699 self._changeset_cache = json.dumps(val)
1700 1700 except Exception:
1701 1701 log.error(traceback.format_exc())
1702 1702
1703 1703 @hybrid_property
1704 1704 def repo_name(self):
1705 1705 return self._repo_name
1706 1706
1707 1707 @repo_name.setter
1708 1708 def repo_name(self, value):
1709 1709 self._repo_name = value
1710 1710 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1711 1711
1712 1712 @classmethod
1713 1713 def normalize_repo_name(cls, repo_name):
1714 1714 """
1715 1715 Normalizes an OS-specific repo_name to the format stored internally in
1716 1716 the database, using URL_SEP
1717 1717
1718 1718 :param cls:
1719 1719 :param repo_name:
1720 1720 """
1721 1721 return cls.NAME_SEP.join(repo_name.split(os.sep))
1722 1722
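# Illustrative usage sketch (editor's note, not part of the original source).
# On a platform where os.sep is '\\', an on-disk name is normalized to the
# NAME_SEP (URL_SEP) form stored in the database:
#
#     Repository.normalize_repo_name('group\\subgroup\\repo')  # -> 'group/subgroup/repo'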
1723 1723 @classmethod
1724 1724 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1725 1725 session = Session()
1726 1726 q = session.query(cls).filter(cls.repo_name == repo_name)
1727 1727
1728 1728 if cache:
1729 1729 if identity_cache:
1730 1730 val = cls.identity_cache(session, 'repo_name', repo_name)
1731 1731 if val:
1732 1732 return val
1733 1733 else:
1734 1734 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1735 1735 q = q.options(
1736 1736 FromCache("sql_cache_short", cache_key))
1737 1737
1738 1738 return q.scalar()
1739 1739
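# Illustrative usage sketch (editor's note, not part of the original source).
# The two cache flags above are independent: identity_cache reuses an object
# already loaded in the SQLAlchemy identity map, while plain cache=True goes
# through the "sql_cache_short" query cache (the repo name is hypothetical):
#
#     repo = Repository.get_by_repo_name('group/repo', cache=True)
#     repo = Repository.get_by_repo_name('group/repo', cache=True, identity_cache=True)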
1740 1740 @classmethod
1741 1741 def get_by_id_or_repo_name(cls, repoid):
1742 1742 if isinstance(repoid, (int, long)):
1743 1743 try:
1744 1744 repo = cls.get(repoid)
1745 1745 except ValueError:
1746 1746 repo = None
1747 1747 else:
1748 1748 repo = cls.get_by_repo_name(repoid)
1749 1749 return repo
1750 1750
1751 1751 @classmethod
1752 1752 def get_by_full_path(cls, repo_full_path):
1753 1753 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1754 1754 repo_name = cls.normalize_repo_name(repo_name)
1755 1755 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1756 1756
1757 1757 @classmethod
1758 1758 def get_repo_forks(cls, repo_id):
1759 1759 return cls.query().filter(Repository.fork_id == repo_id)
1760 1760
1761 1761 @classmethod
1762 1762 def base_path(cls):
1763 1763 """
1764 1764 Returns the base path where all repos are stored
1765 1765
1766 1766 :param cls:
1767 1767 """
1768 1768 q = Session().query(RhodeCodeUi)\
1769 1769 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1770 1770 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1771 1771 return q.one().ui_value
1772 1772
1773 1773 @classmethod
1774 1774 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1775 1775 case_insensitive=True):
1776 1776 q = Repository.query()
1777 1777
1778 1778 if not isinstance(user_id, Optional):
1779 1779 q = q.filter(Repository.user_id == user_id)
1780 1780
1781 1781 if not isinstance(group_id, Optional):
1782 1782 q = q.filter(Repository.group_id == group_id)
1783 1783
1784 1784 if case_insensitive:
1785 1785 q = q.order_by(func.lower(Repository.repo_name))
1786 1786 else:
1787 1787 q = q.order_by(Repository.repo_name)
1788 1788 return q.all()
1789 1789
1790 1790 @property
1791 1791 def forks(self):
1792 1792 """
1793 1793 Return forks of this repo
1794 1794 """
1795 1795 return Repository.get_repo_forks(self.repo_id)
1796 1796
1797 1797 @property
1798 1798 def parent(self):
1799 1799 """
1800 1800 Returns fork parent
1801 1801 """
1802 1802 return self.fork
1803 1803
1804 1804 @property
1805 1805 def just_name(self):
1806 1806 return self.repo_name.split(self.NAME_SEP)[-1]
1807 1807
1808 1808 @property
1809 1809 def groups_with_parents(self):
1810 1810 groups = []
1811 1811 if self.group is None:
1812 1812 return groups
1813 1813
1814 1814 cur_gr = self.group
1815 1815 groups.insert(0, cur_gr)
1816 1816 while 1:
1817 1817 gr = getattr(cur_gr, 'parent_group', None)
1818 1818 cur_gr = cur_gr.parent_group
1819 1819 if gr is None:
1820 1820 break
1821 1821 groups.insert(0, gr)
1822 1822
1823 1823 return groups
1824 1824
1825 1825 @property
1826 1826 def groups_and_repo(self):
1827 1827 return self.groups_with_parents, self
1828 1828
1829 1829 @LazyProperty
1830 1830 def repo_path(self):
1831 1831 """
1832 1832 Returns the full base path for this repository, i.e. where it actually
1833 1833 exists on the filesystem
1834 1834 """
1835 1835 q = Session().query(RhodeCodeUi).filter(
1836 1836 RhodeCodeUi.ui_key == self.NAME_SEP)
1837 1837 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1838 1838 return q.one().ui_value
1839 1839
1840 1840 @property
1841 1841 def repo_full_path(self):
1842 1842 p = [self.repo_path]
1843 1843 # we need to split the name by / since this is how we store the
1844 1844 # names in the database, but that eventually needs to be converted
1845 1845 # into a valid system path
1846 1846 p += self.repo_name.split(self.NAME_SEP)
1847 1847 return os.path.join(*map(safe_unicode, p))
1848 1848
1849 1849 @property
1850 1850 def cache_keys(self):
1851 1851 """
1852 1852 Returns associated cache keys for that repo
1853 1853 """
1854 1854 return CacheKey.query()\
1855 1855 .filter(CacheKey.cache_args == self.repo_name)\
1856 1856 .order_by(CacheKey.cache_key)\
1857 1857 .all()
1858 1858
1859 1859 @property
1860 1860 def cached_diffs_relative_dir(self):
1861 1861 """
1862 1862 Return the cached diffs path relative to the repository store, safe
1863 1863 for display to users who should not learn the absolute store
1864 1864 path
1865 1865 """
1866 1866 return os.path.join(
1867 1867 os.path.dirname(self.repo_name),
1868 1868 self.cached_diffs_dir.split(os.path.sep)[-1])
1869 1869
1870 1870 @property
1871 1871 def cached_diffs_dir(self):
1872 1872 path = self.repo_full_path
1873 1873 return os.path.join(
1874 1874 os.path.dirname(path),
1875 1875 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1876 1876
1877 1877 def cached_diffs(self):
1878 1878 diff_cache_dir = self.cached_diffs_dir
1879 1879 if os.path.isdir(diff_cache_dir):
1880 1880 return os.listdir(diff_cache_dir)
1881 1881 return []
1882 1882
1883 1883 def shadow_repos(self):
1884 1884 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1885 1885 return [
1886 1886 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1887 1887 if x.startswith(shadow_repos_pattern)]
1888 1888
1889 1889 def get_new_name(self, repo_name):
1890 1890 """
1891 1891 returns the new full repository name based on the assigned group and the new name
1892 1892 
1893 1893 :param repo_name:
1894 1894 """
1895 1895 path_prefix = self.group.full_path_splitted if self.group else []
1896 1896 return self.NAME_SEP.join(path_prefix + [repo_name])
1897 1897
1898 1898 @property
1899 1899 def _config(self):
1900 1900 """
1901 1901 Returns db based config object.
1902 1902 """
1903 1903 from rhodecode.lib.utils import make_db_config
1904 1904 return make_db_config(clear_session=False, repo=self)
1905 1905
1906 1906 def permissions(self, with_admins=True, with_owner=True):
1907 1907 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1908 1908 q = q.options(joinedload(UserRepoToPerm.repository),
1909 1909 joinedload(UserRepoToPerm.user),
1910 1910 joinedload(UserRepoToPerm.permission),)
1911 1911
1912 1912 # get owners, admins and their permissions. We rewrite the SQLAlchemy
1913 1913 # objects into plain AttributeDicts because the SQLAlchemy session holds
1914 1914 # a global reference, so changing one object would propagate to all
1915 1915 # others. Without this, if an admin is also the owner, setting admin_row
1916 1916 # on one row would leak into the other object as well
1917 1917 perm_rows = []
1918 1918 for _usr in q.all():
1919 1919 usr = AttributeDict(_usr.user.get_dict())
1920 1920 usr.permission = _usr.permission.permission_name
1921 1921 perm_rows.append(usr)
1922 1922
1923 1923 # sort the perm rows: 'default' user first, then by permission level
1924 1924 # (admin, write, read, none), and alphabetically within each of those
1925 1925 # groups
1926 1926 perm_rows = sorted(perm_rows, key=display_user_sort)
1927 1927
1928 1928 _admin_perm = 'repository.admin'
1929 1929 owner_row = []
1930 1930 if with_owner:
1931 1931 usr = AttributeDict(self.user.get_dict())
1932 1932 usr.owner_row = True
1933 1933 usr.permission = _admin_perm
1934 1934 owner_row.append(usr)
1935 1935
1936 1936 super_admin_rows = []
1937 1937 if with_admins:
1938 1938 for usr in User.get_all_super_admins():
1939 1939 # if this admin is also owner, don't double the record
1940 1940 if usr.user_id == owner_row[0].user_id:
1941 1941 owner_row[0].admin_row = True
1942 1942 else:
1943 1943 usr = AttributeDict(usr.get_dict())
1944 1944 usr.admin_row = True
1945 1945 usr.permission = _admin_perm
1946 1946 super_admin_rows.append(usr)
1947 1947
1948 1948 return super_admin_rows + owner_row + perm_rows
1949 1949
1950 1950 def permission_user_groups(self):
1951 1951 q = UserGroupRepoToPerm.query().filter(
1952 1952 UserGroupRepoToPerm.repository == self)
1953 1953 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1954 1954 joinedload(UserGroupRepoToPerm.users_group),
1955 1955 joinedload(UserGroupRepoToPerm.permission),)
1956 1956
1957 1957 perm_rows = []
1958 1958 for _user_group in q.all():
1959 1959 usr = AttributeDict(_user_group.users_group.get_dict())
1960 1960 usr.permission = _user_group.permission.permission_name
1961 1961 perm_rows.append(usr)
1962 1962
1963 1963 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1964 1964 return perm_rows
1965 1965
1966 1966 def get_api_data(self, include_secrets=False):
1967 1967 """
1968 1968 Common function for generating repo api data
1969 1969
1970 1970 :param include_secrets: See :meth:`User.get_api_data`.
1971 1971
1972 1972 """
1973 1973 # TODO: mikhail: there is an anti-pattern here; we probably need to
1974 1974 # move these methods to the model level.
1975 1975 from rhodecode.model.settings import SettingsModel
1976 1976 from rhodecode.model.repo import RepoModel
1977 1977
1978 1978 repo = self
1979 1979 _user_id, _time, _reason = self.locked
1980 1980
1981 1981 data = {
1982 1982 'repo_id': repo.repo_id,
1983 1983 'repo_name': repo.repo_name,
1984 1984 'repo_type': repo.repo_type,
1985 1985 'clone_uri': repo.clone_uri or '',
1986 1986 'push_uri': repo.push_uri or '',
1987 1987 'url': RepoModel().get_url(self),
1988 1988 'private': repo.private,
1989 1989 'created_on': repo.created_on,
1990 1990 'description': repo.description_safe,
1991 1991 'landing_rev': repo.landing_rev,
1992 1992 'owner': repo.user.username,
1993 1993 'fork_of': repo.fork.repo_name if repo.fork else None,
1994 1994 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1995 1995 'enable_statistics': repo.enable_statistics,
1996 1996 'enable_locking': repo.enable_locking,
1997 1997 'enable_downloads': repo.enable_downloads,
1998 1998 'last_changeset': repo.changeset_cache,
1999 1999 'locked_by': User.get(_user_id).get_api_data(
2000 2000 include_secrets=include_secrets) if _user_id else None,
2001 2001 'locked_date': time_to_datetime(_time) if _time else None,
2002 2002 'lock_reason': _reason if _reason else None,
2003 2003 }
2004 2004
2005 2005 # TODO: mikhail: should be per-repo settings here
2006 2006 rc_config = SettingsModel().get_all_settings()
2007 2007 repository_fields = str2bool(
2008 2008 rc_config.get('rhodecode_repository_fields'))
2009 2009 if repository_fields:
2010 2010 for f in self.extra_fields:
2011 2011 data[f.field_key_prefixed] = f.field_value
2012 2012
2013 2013 return data
2014 2014
2015 2015 @classmethod
2016 2016 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2017 2017 if not lock_time:
2018 2018 lock_time = time.time()
2019 2019 if not lock_reason:
2020 2020 lock_reason = cls.LOCK_AUTOMATIC
2021 2021 repo.locked = [user_id, lock_time, lock_reason]
2022 2022 Session().add(repo)
2023 2023 Session().commit()
2024 2024
2025 2025 @classmethod
2026 2026 def unlock(cls, repo):
2027 2027 repo.locked = None
2028 2028 Session().add(repo)
2029 2029 Session().commit()
2030 2030
2031 2031 @classmethod
2032 2032 def getlock(cls, repo):
2033 2033 return repo.locked
2034 2034
2035 2035 def is_user_lock(self, user_id):
2036 2036 if self.locked[0]:
2037 2037 lock_user_id = safe_int(self.locked[0])
2038 2038 user_id = safe_int(user_id)
2039 2039 # both are ints, and they are equal
2040 2040 return all([lock_user_id, user_id]) and lock_user_id == user_id
2041 2041
2042 2042 return False
2043 2043
2044 2044 def get_locking_state(self, action, user_id, only_when_enabled=True):
2045 2045 """
2046 2046 Checks locking on this repository. If locking is enabled and a lock is
2047 2047 present, returns a tuple of make_lock, locked, locked_by.
2048 2048 make_lock has 3 states: None (do nothing), True (make a lock) and
2049 2049 False (release the lock). This value is later propagated to hooks, which
2050 2050 do the actual locking. Think of it as a signal telling the hooks what to do.
2051 2051
2052 2052 """
2053 2053 # TODO: johbo: This is part of the business logic and should be moved
2054 2054 # into the RepositoryModel.
2055 2055
2056 2056 if action not in ('push', 'pull'):
2057 2057 raise ValueError("Invalid action value: %s" % repr(action))
2058 2058
2059 2059 # defines if locked error should be thrown to user
2060 2060 currently_locked = False
2061 2061 # defines if new lock should be made, tri-state
2062 2062 make_lock = None
2063 2063 repo = self
2064 2064 user = User.get(user_id)
2065 2065
2066 2066 lock_info = repo.locked
2067 2067
2068 2068 if repo and (repo.enable_locking or not only_when_enabled):
2069 2069 if action == 'push':
2070 2070 # check if it's already locked; if it is, compare users
2071 2071 locked_by_user_id = lock_info[0]
2072 2072 if user.user_id == locked_by_user_id:
2073 2073 log.debug(
2074 2074 'Got `push` action from user %s, now unlocking', user)
2075 2075 # unlock if we have push from user who locked
2076 2076 make_lock = False
2077 2077 else:
2078 2078 # we're not the user who made the lock, reject with the status
2079 2079 # code defined in settings (default is 423 HTTP Locked)
2080 2080 log.debug('Repo %s is currently locked by %s', repo, user)
2081 2081 currently_locked = True
2082 2082 elif action == 'pull':
2083 2083 # [0] user [1] date
2084 2084 if lock_info[0] and lock_info[1]:
2085 2085 log.debug('Repo %s is currently locked by %s', repo, user)
2086 2086 currently_locked = True
2087 2087 else:
2088 2088 log.debug('Setting lock on repo %s by %s', repo, user)
2089 2089 make_lock = True
2090 2090
2091 2091 else:
2092 2092 log.debug('Repository %s does not have locking enabled', repo)
2093 2093
2094 2094 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2095 2095 make_lock, currently_locked, lock_info)
2096 2096
2097 2097 from rhodecode.lib.auth import HasRepoPermissionAny
2098 2098 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2099 2099 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2100 2100 # if we don't have at least write permission we cannot make a lock
2101 2101 log.debug('lock state reset back to FALSE due to lack '
2102 2102 'of at least write permission')
2103 2103 make_lock = False
2104 2104
2105 2105 return make_lock, currently_locked, lock_info
2106 2106
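# Illustrative usage sketch (editor's note, not part of the original source).
# A hook-side caller could consume the tri-state result like this (the
# `repo` and `user_id` variables are hypothetical):
#
#     make_lock, currently_locked, lock_info = repo.get_locking_state('push', user_id)
#     if make_lock:
#         Repository.lock(repo, user_id)
#     elif make_lock is False:
#         Repository.unlock(repo)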
2107 2107 @property
2108 2108 def last_db_change(self):
2109 2109 return self.updated_on
2110 2110
2111 2111 @property
2112 2112 def clone_uri_hidden(self):
2113 2113 clone_uri = self.clone_uri
2114 2114 if clone_uri:
2115 2115 import urlobject
2116 2116 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2117 2117 if url_obj.password:
2118 2118 clone_uri = url_obj.with_password('*****')
2119 2119 return clone_uri
2120 2120
2121 2121 @property
2122 2122 def push_uri_hidden(self):
2123 2123 push_uri = self.push_uri
2124 2124 if push_uri:
2125 2125 import urlobject
2126 2126 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2127 2127 if url_obj.password:
2128 2128 push_uri = url_obj.with_password('*****')
2129 2129 return push_uri
2130 2130
2131 2131 def clone_url(self, **override):
2132 2132 from rhodecode.model.settings import SettingsModel
2133 2133
2134 2134 uri_tmpl = None
2135 2135 if 'with_id' in override:
2136 2136 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2137 2137 del override['with_id']
2138 2138
2139 2139 if 'uri_tmpl' in override:
2140 2140 uri_tmpl = override['uri_tmpl']
2141 2141 del override['uri_tmpl']
2142 2142
2143 2143 ssh = False
2144 2144 if 'ssh' in override:
2145 2145 ssh = True
2146 2146 del override['ssh']
2147 2147
2148 2148 # the uri template was not overridden via **override kwargs
2149 2149 if not uri_tmpl:
2150 2150 rc_config = SettingsModel().get_all_settings(cache=True)
2151 2151 if ssh:
2152 2152 uri_tmpl = rc_config.get(
2153 2153 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2154 2154 else:
2155 2155 uri_tmpl = rc_config.get(
2156 2156 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2157 2157
2158 2158 request = get_current_request()
2159 2159 return get_clone_url(request=request,
2160 2160 uri_tmpl=uri_tmpl,
2161 2161 repo_name=self.repo_name,
2162 2162 repo_id=self.repo_id, **override)
2163 2163
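# Illustrative usage sketch (editor's note, not part of the original source).
# `clone_url` renders one of the templates defined at the top of this class,
# or the template configured in settings; overrides select the id-based or
# ssh form (the `repo` variable is hypothetical):
#
#     repo.clone_url()              # rhodecode_clone_uri_tmpl or DEFAULT_CLONE_URI
#     repo.clone_url(with_id=True)  # DEFAULT_CLONE_URI_ID, ends with /_<repoid>
#     repo.clone_url(ssh=True)      # rhodecode_clone_uri_ssh_tmpl or DEFAULT_CLONE_URI_SSH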
2164 2164 def set_state(self, state):
2165 2165 self.repo_state = state
2166 2166 Session().add(self)
2167 2167 #==========================================================================
2168 2168 # SCM PROPERTIES
2169 2169 #==========================================================================
2170 2170
2171 2171 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2172 2172 return get_commit_safe(
2173 2173 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2174 2174
2175 2175 def get_changeset(self, rev=None, pre_load=None):
2176 2176 warnings.warn("Use get_commit", DeprecationWarning)
2177 2177 commit_id = None
2178 2178 commit_idx = None
2179 2179 if isinstance(rev, basestring):
2180 2180 commit_id = rev
2181 2181 else:
2182 2182 commit_idx = rev
2183 2183 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2184 2184 pre_load=pre_load)
2185 2185
2186 2186 def get_landing_commit(self):
2187 2187 """
2188 2188 Returns landing commit, or if that doesn't exist returns the tip
2189 2189 """
2190 2190 _rev_type, _rev = self.landing_rev
2191 2191 commit = self.get_commit(_rev)
2192 2192 if isinstance(commit, EmptyCommit):
2193 2193 return self.get_commit()
2194 2194 return commit
2195 2195
2196 2196 def update_commit_cache(self, cs_cache=None, config=None):
2197 2197 """
2198 2198 Update cache of last changeset for repository, keys should be::
2199 2199
2200 2200 short_id
2201 2201 raw_id
2202 2202 revision
2203 2203 parents
2204 2204 message
2205 2205 date
2206 2206 author
2207 2207
2208 2208 :param cs_cache:
2209 2209 """
2210 2210 from rhodecode.lib.vcs.backends.base import BaseChangeset
2211 2211 if cs_cache is None:
2212 2212 # use no-cache version here
2213 2213 scm_repo = self.scm_instance(cache=False, config=config)
2214 2214 if scm_repo:
2215 2215 cs_cache = scm_repo.get_commit(
2216 2216 pre_load=["author", "date", "message", "parents"])
2217 2217 else:
2218 2218 cs_cache = EmptyCommit()
2219 2219
2220 2220 if isinstance(cs_cache, BaseChangeset):
2221 2221 cs_cache = cs_cache.__json__()
2222 2222
2223 2223 def is_outdated(new_cs_cache):
2224 2224 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2225 2225 new_cs_cache['revision'] != self.changeset_cache['revision']):
2226 2226 return True
2227 2227 return False
2228 2228
2229 2229 # check if we maybe already have the latest cached revision
2230 2230 if is_outdated(cs_cache) or not self.changeset_cache:
2231 2231 _default = datetime.datetime.utcnow()
2232 2232 last_change = cs_cache.get('date') or _default
2233 2233 if self.updated_on and self.updated_on > last_change:
2234 2234 # we check if last update is newer than the new value
2235 2235 # if yes, we use the current timestamp instead. Otherwise an old
2236 2236 # commit pushed 1 year ago would set the last update 1 year back.
2237 2237 last_change = _default
2238 2238 log.debug('updated repo %s with new cs cache %s',
2239 2239 self.repo_name, cs_cache)
2240 2240 self.updated_on = last_change
2241 2241 self.changeset_cache = cs_cache
2242 2242 Session().add(self)
2243 2243 Session().commit()
2244 2244 else:
2245 2245 log.debug('Skipping update_commit_cache for repo:`%s` '
2246 2246 'commit already with latest changes', self.repo_name)
2247 2247
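# Illustrative usage sketch (editor's note, not part of the original source).
# This is typically invoked after a push so the cached "last changeset" stays
# current; with no arguments it reads the tip from a non-cached scm instance:
#
#     repo.update_commit_cache()  # refreshes changeset_cache and updated_on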
2248 2248 @property
2249 2249 def tip(self):
2250 2250 return self.get_commit('tip')
2251 2251
2252 2252 @property
2253 2253 def author(self):
2254 2254 return self.tip.author
2255 2255
2256 2256 @property
2257 2257 def last_change(self):
2258 2258 return self.scm_instance().last_change
2259 2259
2260 2260 def get_comments(self, revisions=None):
2261 2261 """
2262 2262 Returns comments for this repository grouped by revisions
2263 2263
2264 2264 :param revisions: filter query by revisions only
2265 2265 """
2266 2266 cmts = ChangesetComment.query()\
2267 2267 .filter(ChangesetComment.repo == self)
2268 2268 if revisions:
2269 2269 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2270 2270 grouped = collections.defaultdict(list)
2271 2271 for cmt in cmts.all():
2272 2272 grouped[cmt.revision].append(cmt)
2273 2273 return grouped
2274 2274
2275 2275 def statuses(self, revisions=None):
2276 2276 """
2277 2277 Returns statuses for this repository
2278 2278
2279 2279 :param revisions: list of revisions to get statuses for
2280 2280 """
2281 2281 statuses = ChangesetStatus.query()\
2282 2282 .filter(ChangesetStatus.repo == self)\
2283 2283 .filter(ChangesetStatus.version == 0)
2284 2284
2285 2285 if revisions:
2286 2286 # Try doing the filtering in chunks to avoid hitting limits
2287 2287 size = 500
2288 2288 status_results = []
2289 2289 for chunk in xrange(0, len(revisions), size):
2290 2290 status_results += statuses.filter(
2291 2291 ChangesetStatus.revision.in_(
2292 2292 revisions[chunk: chunk+size])
2293 2293 ).all()
2294 2294 else:
2295 2295 status_results = statuses.all()
2296 2296
2297 2297 grouped = {}
2298 2298
2299 2299 # maybe we have an open pull request without a status yet?
2300 2300 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2301 2301 status_lbl = ChangesetStatus.get_status_lbl(stat)
2302 2302 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2303 2303 for rev in pr.revisions:
2304 2304 pr_id = pr.pull_request_id
2305 2305 pr_repo = pr.target_repo.repo_name
2306 2306 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2307 2307
2308 2308 for stat in status_results:
2309 2309 pr_id = pr_repo = None
2310 2310 if stat.pull_request:
2311 2311 pr_id = stat.pull_request.pull_request_id
2312 2312 pr_repo = stat.pull_request.target_repo.repo_name
2313 2313 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2314 2314 pr_id, pr_repo]
2315 2315 return grouped
2316 2316
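# Illustrative usage sketch (editor's note, not part of the original source).
# `statuses()` maps a revision to [status, label, pull_request_id,
# target_repo_name]; looking up one commit (the id is hypothetical):
#
#     grouped = repo.statuses(revisions=['deadbeefcafe'])
#     status, label, pr_id, pr_repo = grouped.get('deadbeefcafe', [None] * 4)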
2317 2317 # ==========================================================================
2318 2318 # SCM CACHE INSTANCE
2319 2319 # ==========================================================================
2320 2320
2321 2321 def scm_instance(self, **kwargs):
2322 2322 import rhodecode
2323 2323
2324 2324 # Passing a config will not hit the cache; currently this is only
2325 2325 # used for repo2dbmapper
2326 2326 config = kwargs.pop('config', None)
2327 2327 cache = kwargs.pop('cache', None)
2328 2328 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2329 2329 # if cache is NOT defined, use the global default; otherwise we have
2330 2330 # full control over the cache behaviour
2331 2331 if cache is None and full_cache and not config:
2332 2332 return self._get_instance_cached()
2333 2333 return self._get_instance(cache=bool(cache), config=config)
2334 2334
2335 2335 def _get_instance_cached(self):
2336 2336 @cache_region('long_term')
2337 2337 def _get_repo(cache_key):
2338 2338 return self._get_instance()
2339 2339
2340 2340 invalidator_context = CacheKey.repo_context_cache(
2341 2341 _get_repo, self.repo_name, None, thread_scoped=True)
2342 2342
2343 2343 with invalidator_context as context:
2344 2344 context.invalidate()
2345 2345 repo = context.compute()
2346 2346
2347 2347 return repo
2348 2348
2349 2349 def _get_instance(self, cache=True, config=None):
2350 2350 config = config or self._config
2351 2351 custom_wire = {
2352 2352 'cache': cache # controls the vcs.remote cache
2353 2353 }
2354 2354 repo = get_vcs_instance(
2355 2355 repo_path=safe_str(self.repo_full_path),
2356 2356 config=config,
2357 2357 with_wire=custom_wire,
2358 2358 create=False,
2359 2359 _vcs_alias=self.repo_type)
2360 2360
2361 2361 return repo
2362 2362
2363 2363 def __json__(self):
2364 2364 return {'landing_rev': self.landing_rev}
2365 2365
2366 2366 def get_dict(self):
2367 2367
2368 2368 # Since we transformed `repo_name` to a hybrid property, we need to
2369 2369 # keep compatibility with the code which uses `repo_name` field.
2370 2370
2371 2371 result = super(Repository, self).get_dict()
2372 2372 result['repo_name'] = result.pop('_repo_name', None)
2373 2373 return result
2374 2374
2375 2375
2376 2376 class RepoGroup(Base, BaseModel):
2377 2377 __tablename__ = 'groups'
2378 2378 __table_args__ = (
2379 2379 UniqueConstraint('group_name', 'group_parent_id'),
2380 2380 CheckConstraint('group_id != group_parent_id'),
2381 2381 base_table_args,
2382 2382 )
2383 2383 __mapper_args__ = {'order_by': 'group_name'}
2384 2384
2385 2385 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2386 2386
2387 2387 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2388 2388 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2389 2389 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2390 2390 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2391 2391 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2392 2392 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2393 2393 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2394 2394 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2395 2395 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2396 2396
2397 2397 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2398 2398 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2399 2399 parent_group = relationship('RepoGroup', remote_side=group_id)
2400 2400 user = relationship('User')
2401 2401 integrations = relationship('Integration',
2402 2402 cascade="all, delete, delete-orphan")
2403 2403
2404 2404 def __init__(self, group_name='', parent_group=None):
2405 2405 self.group_name = group_name
2406 2406 self.parent_group = parent_group
2407 2407
2408 2408 def __unicode__(self):
2409 2409 return u"<%s('id:%s:%s')>" % (
2410 2410 self.__class__.__name__, self.group_id, self.group_name)
2411 2411
2412 2412 @hybrid_property
2413 2413 def description_safe(self):
2414 2414 from rhodecode.lib import helpers as h
2415 2415 return h.escape(self.group_description)
2416 2416
2417 2417 @classmethod
2418 2418 def _generate_choice(cls, repo_group):
2419 2419 from webhelpers.html import literal as _literal
2420 2420 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2421 2421 return repo_group.group_id, _name(repo_group.full_path_splitted)
2422 2422
2423 2423 @classmethod
2424 2424 def groups_choices(cls, groups=None, show_empty_group=True):
2425 2425 if not groups:
2426 2426 groups = cls.query().all()
2427 2427
2428 2428 repo_groups = []
2429 2429 if show_empty_group:
2430 2430 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2431 2431
2432 2432 repo_groups.extend([cls._generate_choice(x) for x in groups])
2433 2433
2434 2434 repo_groups = sorted(
2435 2435 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2436 2436 return repo_groups
2437 2437
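# Illustrative usage sketch (editor's note, not part of the original source).
# `groups_choices` yields (id, label) pairs suitable for a select2 widget;
# for groups named 'infra' and 'infra/backups' (ids and names hypothetical):
#
#     RepoGroup.groups_choices()
#     # -> [(-1, u'-- No parent --'), (3, u'infra'), (7, u'infra/backups')]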
2438 2438 @classmethod
2439 2439 def url_sep(cls):
2440 2440 return URL_SEP
2441 2441
2442 2442 @classmethod
2443 2443 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2444 2444 if case_insensitive:
2445 2445 gr = cls.query().filter(func.lower(cls.group_name)
2446 2446 == func.lower(group_name))
2447 2447 else:
2448 2448 gr = cls.query().filter(cls.group_name == group_name)
2449 2449 if cache:
2450 2450 name_key = _hash_key(group_name)
2451 2451 gr = gr.options(
2452 2452 FromCache("sql_cache_short", "get_group_%s" % name_key))
2453 2453 return gr.scalar()
2454 2454
2455 2455 @classmethod
2456 2456 def get_user_personal_repo_group(cls, user_id):
2457 2457 user = User.get(user_id)
2458 2458 if user.username == User.DEFAULT_USER:
2459 2459 return None
2460 2460
2461 2461 return cls.query()\
2462 2462 .filter(cls.personal == true()) \
2463 2463 .filter(cls.user == user).scalar()
2464 2464
2465 2465 @classmethod
2466 2466 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2467 2467 case_insensitive=True):
2468 2468 q = RepoGroup.query()
2469 2469
2470 2470 if not isinstance(user_id, Optional):
2471 2471 q = q.filter(RepoGroup.user_id == user_id)
2472 2472
2473 2473 if not isinstance(group_id, Optional):
2474 2474 q = q.filter(RepoGroup.group_parent_id == group_id)
2475 2475
2476 2476 if case_insensitive:
2477 2477 q = q.order_by(func.lower(RepoGroup.group_name))
2478 2478 else:
2479 2479 q = q.order_by(RepoGroup.group_name)
2480 2480 return q.all()
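# Illustrative sketch: the Optional() defaults act as "not provided"
# sentinels, so any explicitly passed value (including None) enables the
# corresponding filter. The user id below is hypothetical.
#
#   all_groups = RepoGroup.get_all_repo_groups()
#   owned = RepoGroup.get_all_repo_groups(user_id=2)
#   # passing group_id=None explicitly filters for top-level groups
#   top_level_owned = RepoGroup.get_all_repo_groups(user_id=2, group_id=None)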
2481 2481
2482 2482 @property
2483 2483 def parents(self):
2484 2484 parents_recursion_limit = 10
2485 2485 groups = []
2486 2486 if self.parent_group is None:
2487 2487 return groups
2488 2488 cur_gr = self.parent_group
2489 2489 groups.insert(0, cur_gr)
2490 2490 cnt = 0
2491 2491 while 1:
2492 2492 cnt += 1
2493 2493 gr = getattr(cur_gr, 'parent_group', None)
2494 2494 cur_gr = cur_gr.parent_group
2495 2495 if gr is None:
2496 2496 break
2497 2497 if cnt == parents_recursion_limit:
2498 2498 # this will prevent accidental infinite loops
2499 2499 log.error('more than %s parents found for group %s, stopping '
2500 2500 'recursive parent fetching', parents_recursion_limit, self)
2501 2501 break
2502 2502
2503 2503 groups.insert(0, gr)
2504 2504 return groups
2505 2505
2506 2506 @property
2507 2507 def last_db_change(self):
2508 2508 return self.updated_on
2509 2509
2510 2510 @property
2511 2511 def children(self):
2512 2512 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2513 2513
2514 2514 @property
2515 2515 def name(self):
2516 2516 return self.group_name.split(RepoGroup.url_sep())[-1]
2517 2517
2518 2518 @property
2519 2519 def full_path(self):
2520 2520 return self.group_name
2521 2521
2522 2522 @property
2523 2523 def full_path_splitted(self):
2524 2524 return self.group_name.split(RepoGroup.url_sep())
2525 2525
2526 2526 @property
2527 2527 def repositories(self):
2528 2528 return Repository.query()\
2529 2529 .filter(Repository.group == self)\
2530 2530 .order_by(Repository.repo_name)
2531 2531
2532 2532 @property
2533 2533 def repositories_recursive_count(self):
2534 2534 cnt = self.repositories.count()
2535 2535
2536 2536 def children_count(group):
2537 2537 cnt = 0
2538 2538 for child in group.children:
2539 2539 cnt += child.repositories.count()
2540 2540 cnt += children_count(child)
2541 2541 return cnt
2542 2542
2543 2543 return cnt + children_count(self)
2544 2544
2545 2545 def _recursive_objects(self, include_repos=True):
2546 2546 all_ = []
2547 2547
2548 2548 def _get_members(root_gr):
2549 2549 if include_repos:
2550 2550 for r in root_gr.repositories:
2551 2551 all_.append(r)
2552 2552 childs = root_gr.children.all()
2553 2553 if childs:
2554 2554 for gr in childs:
2555 2555 all_.append(gr)
2556 2556 _get_members(gr)
2557 2557
2558 2558 _get_members(self)
2559 2559 return [self] + all_
2560 2560
2561 2561 def recursive_groups_and_repos(self):
2562 2562 """
2563 2563 Recursively return all groups, with the repositories in those groups
2564 2564 """
2565 2565 return self._recursive_objects()
2566 2566
2567 2567 def recursive_groups(self):
2568 2568 """
2569 2569 Returns all child groups of this group, including children of children
2570 2570 """
2571 2571 return self._recursive_objects(include_repos=False)
2572 2572
2573 2573 def get_new_name(self, group_name):
2574 2574 """
2575 2575 Returns the new full group name, based on the parent group and the new name
2576 2576
2577 2577 :param group_name:
2578 2578 """
2579 2579 path_prefix = (self.parent_group.full_path_splitted if
2580 2580 self.parent_group else [])
2581 2581 return RepoGroup.url_sep().join(path_prefix + [group_name])
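# Illustrative sketch: renaming a nested group keeps the parent path. The
# paths are hypothetical and url_sep() is assumed to return '/'.
#
#   # given a group 'projects/backend' whose parent group is 'projects'
#   group.get_new_name('services')   # -> u'projects/services'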
2582 2582
2583 2583 def permissions(self, with_admins=True, with_owner=True):
2584 2584 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2585 2585 q = q.options(joinedload(UserRepoGroupToPerm.group),
2586 2586 joinedload(UserRepoGroupToPerm.user),
2587 2587 joinedload(UserRepoGroupToPerm.permission),)
2588 2588
2589 2589 # get owners, admins and their permissions. We re-write the
2590 2590 # sqlalchemy objects into named-tuple-like AttributeDicts because the
2591 2591 # sqlalchemy session holds a global reference and changing one object
2592 2592 # would propagate to all others. Without this, if an admin is also the
2593 2593 # owner, a change to admin_row would affect both objects.
2594 2594 perm_rows = []
2595 2595 for _usr in q.all():
2596 2596 usr = AttributeDict(_usr.user.get_dict())
2597 2597 usr.permission = _usr.permission.permission_name
2598 2598 perm_rows.append(usr)
2599 2599
2600 2600 # sort the perm rows with the 'default' user first, then by
2601 2601 # admin, write, read, none permission, and alphabetically
2602 2602 # within each permission group
2603 2603 perm_rows = sorted(perm_rows, key=display_user_sort)
2604 2604
2605 2605 _admin_perm = 'group.admin'
2606 2606 owner_row = []
2607 2607 if with_owner:
2608 2608 usr = AttributeDict(self.user.get_dict())
2609 2609 usr.owner_row = True
2610 2610 usr.permission = _admin_perm
2611 2611 owner_row.append(usr)
2612 2612
2613 2613 super_admin_rows = []
2614 2614 if with_admins:
2615 2615 for usr in User.get_all_super_admins():
2616 2616 # if this admin is also owner, don't double the record
2617 2617 if owner_row and usr.user_id == owner_row[0].user_id:
2618 2618 owner_row[0].admin_row = True
2619 2619 else:
2620 2620 usr = AttributeDict(usr.get_dict())
2621 2621 usr.admin_row = True
2622 2622 usr.permission = _admin_perm
2623 2623 super_admin_rows.append(usr)
2624 2624
2625 2625 return super_admin_rows + owner_row + perm_rows
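# Illustrative sketch: iterating the combined permission rows. Attribute
# names are assumed to come from User.get_dict() plus the extra flags set
# above; the owner_row/admin_row flags only exist on some rows.
#
#   for row in repo_group.permissions(with_admins=True, with_owner=True):
#       print(row.username, row.permission,
#             getattr(row, 'owner_row', False), getattr(row, 'admin_row', False))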
2626 2626
2627 2627 def permission_user_groups(self):
2628 2628 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2629 2629 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2630 2630 joinedload(UserGroupRepoGroupToPerm.users_group),
2631 2631 joinedload(UserGroupRepoGroupToPerm.permission),)
2632 2632
2633 2633 perm_rows = []
2634 2634 for _user_group in q.all():
2635 2635 usr = AttributeDict(_user_group.users_group.get_dict())
2636 2636 usr.permission = _user_group.permission.permission_name
2637 2637 perm_rows.append(usr)
2638 2638
2639 2639 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2640 2640 return perm_rows
2641 2641
2642 2642 def get_api_data(self):
2643 2643 """
2644 2644 Common function for generating api data
2645 2645
2646 2646 """
2647 2647 group = self
2648 2648 data = {
2649 2649 'group_id': group.group_id,
2650 2650 'group_name': group.group_name,
2651 2651 'group_description': group.description_safe,
2652 2652 'parent_group': group.parent_group.group_name if group.parent_group else None,
2653 2653 'repositories': [x.repo_name for x in group.repositories],
2654 2654 'owner': group.user.username,
2655 2655 }
2656 2656 return data
2657 2657
2658 2658
2659 2659 class Permission(Base, BaseModel):
2660 2660 __tablename__ = 'permissions'
2661 2661 __table_args__ = (
2662 2662 Index('p_perm_name_idx', 'permission_name'),
2663 2663 base_table_args,
2664 2664 )
2665 2665
2666 2666 PERMS = [
2667 2667 ('hg.admin', _('RhodeCode Super Administrator')),
2668 2668
2669 2669 ('repository.none', _('Repository no access')),
2670 2670 ('repository.read', _('Repository read access')),
2671 2671 ('repository.write', _('Repository write access')),
2672 2672 ('repository.admin', _('Repository admin access')),
2673 2673
2674 2674 ('group.none', _('Repository group no access')),
2675 2675 ('group.read', _('Repository group read access')),
2676 2676 ('group.write', _('Repository group write access')),
2677 2677 ('group.admin', _('Repository group admin access')),
2678 2678
2679 2679 ('usergroup.none', _('User group no access')),
2680 2680 ('usergroup.read', _('User group read access')),
2681 2681 ('usergroup.write', _('User group write access')),
2682 2682 ('usergroup.admin', _('User group admin access')),
2683 2683
2684 2684 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2685 2685 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2686 2686
2687 2687 ('hg.usergroup.create.false', _('User Group creation disabled')),
2688 2688 ('hg.usergroup.create.true', _('User Group creation enabled')),
2689 2689
2690 2690 ('hg.create.none', _('Repository creation disabled')),
2691 2691 ('hg.create.repository', _('Repository creation enabled')),
2692 2692 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2693 2693 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2694 2694
2695 2695 ('hg.fork.none', _('Repository forking disabled')),
2696 2696 ('hg.fork.repository', _('Repository forking enabled')),
2697 2697
2698 2698 ('hg.register.none', _('Registration disabled')),
2699 2699 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2700 2700 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2701 2701
2702 2702 ('hg.password_reset.enabled', _('Password reset enabled')),
2703 2703 ('hg.password_reset.hidden', _('Password reset hidden')),
2704 2704 ('hg.password_reset.disabled', _('Password reset disabled')),
2705 2705
2706 2706 ('hg.extern_activate.manual', _('Manual activation of external account')),
2707 2707 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2708 2708
2709 2709 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2710 2710 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2711 2711 ]
2712 2712
2713 2713 # definition of system default permissions for DEFAULT user
2714 2714 DEFAULT_USER_PERMISSIONS = [
2715 2715 'repository.read',
2716 2716 'group.read',
2717 2717 'usergroup.read',
2718 2718 'hg.create.repository',
2719 2719 'hg.repogroup.create.false',
2720 2720 'hg.usergroup.create.false',
2721 2721 'hg.create.write_on_repogroup.true',
2722 2722 'hg.fork.repository',
2723 2723 'hg.register.manual_activate',
2724 2724 'hg.password_reset.enabled',
2725 2725 'hg.extern_activate.auto',
2726 2726 'hg.inherit_default_perms.true',
2727 2727 ]
2728 2728
2729 2729 # Weight defines which permissions are more important.
2730 2730 # The higher the number, the more important the permission.
2732 2732 PERM_WEIGHTS = {
2733 2733 'repository.none': 0,
2734 2734 'repository.read': 1,
2735 2735 'repository.write': 3,
2736 2736 'repository.admin': 4,
2737 2737
2738 2738 'group.none': 0,
2739 2739 'group.read': 1,
2740 2740 'group.write': 3,
2741 2741 'group.admin': 4,
2742 2742
2743 2743 'usergroup.none': 0,
2744 2744 'usergroup.read': 1,
2745 2745 'usergroup.write': 3,
2746 2746 'usergroup.admin': 4,
2747 2747
2748 2748 'hg.repogroup.create.false': 0,
2749 2749 'hg.repogroup.create.true': 1,
2750 2750
2751 2751 'hg.usergroup.create.false': 0,
2752 2752 'hg.usergroup.create.true': 1,
2753 2753
2754 2754 'hg.fork.none': 0,
2755 2755 'hg.fork.repository': 1,
2756 2756 'hg.create.none': 0,
2757 2757 'hg.create.repository': 1
2758 2758 }
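# Illustrative sketch (assumption): when several permissions apply to the
# same object, the one with the highest weight wins.
#
#   candidates = ['repository.read', 'repository.write']
#   strongest = max(candidates, key=Permission.PERM_WEIGHTS.get)
#   # -> 'repository.write'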
2759 2759
2760 2760 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2761 2761 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2762 2762 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2763 2763
2764 2764 def __unicode__(self):
2765 2765 return u"<%s('%s:%s')>" % (
2766 2766 self.__class__.__name__, self.permission_id, self.permission_name
2767 2767 )
2768 2768
2769 2769 @classmethod
2770 2770 def get_by_key(cls, key):
2771 2771 return cls.query().filter(cls.permission_name == key).scalar()
2772 2772
2773 2773 @classmethod
2774 2774 def get_default_repo_perms(cls, user_id, repo_id=None):
2775 2775 q = Session().query(UserRepoToPerm, Repository, Permission)\
2776 2776 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2777 2777 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2778 2778 .filter(UserRepoToPerm.user_id == user_id)
2779 2779 if repo_id:
2780 2780 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2781 2781 return q.all()
2782 2782
2783 2783 @classmethod
2784 2784 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2785 2785 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2786 2786 .join(
2787 2787 Permission,
2788 2788 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2789 2789 .join(
2790 2790 Repository,
2791 2791 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2792 2792 .join(
2793 2793 UserGroup,
2794 2794 UserGroupRepoToPerm.users_group_id ==
2795 2795 UserGroup.users_group_id)\
2796 2796 .join(
2797 2797 UserGroupMember,
2798 2798 UserGroupRepoToPerm.users_group_id ==
2799 2799 UserGroupMember.users_group_id)\
2800 2800 .filter(
2801 2801 UserGroupMember.user_id == user_id,
2802 2802 UserGroup.users_group_active == true())
2803 2803 if repo_id:
2804 2804 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2805 2805 return q.all()
2806 2806
2807 2807 @classmethod
2808 2808 def get_default_group_perms(cls, user_id, repo_group_id=None):
2809 2809 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2810 2810 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2811 2811 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2812 2812 .filter(UserRepoGroupToPerm.user_id == user_id)
2813 2813 if repo_group_id:
2814 2814 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2815 2815 return q.all()
2816 2816
2817 2817 @classmethod
2818 2818 def get_default_group_perms_from_user_group(
2819 2819 cls, user_id, repo_group_id=None):
2820 2820 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2821 2821 .join(
2822 2822 Permission,
2823 2823 UserGroupRepoGroupToPerm.permission_id ==
2824 2824 Permission.permission_id)\
2825 2825 .join(
2826 2826 RepoGroup,
2827 2827 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2828 2828 .join(
2829 2829 UserGroup,
2830 2830 UserGroupRepoGroupToPerm.users_group_id ==
2831 2831 UserGroup.users_group_id)\
2832 2832 .join(
2833 2833 UserGroupMember,
2834 2834 UserGroupRepoGroupToPerm.users_group_id ==
2835 2835 UserGroupMember.users_group_id)\
2836 2836 .filter(
2837 2837 UserGroupMember.user_id == user_id,
2838 2838 UserGroup.users_group_active == true())
2839 2839 if repo_group_id:
2840 2840 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2841 2841 return q.all()
2842 2842
2843 2843 @classmethod
2844 2844 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2845 2845 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2846 2846 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2847 2847 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2848 2848 .filter(UserUserGroupToPerm.user_id == user_id)
2849 2849 if user_group_id:
2850 2850 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2851 2851 return q.all()
2852 2852
2853 2853 @classmethod
2854 2854 def get_default_user_group_perms_from_user_group(
2855 2855 cls, user_id, user_group_id=None):
2856 2856 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2857 2857 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2858 2858 .join(
2859 2859 Permission,
2860 2860 UserGroupUserGroupToPerm.permission_id ==
2861 2861 Permission.permission_id)\
2862 2862 .join(
2863 2863 TargetUserGroup,
2864 2864 UserGroupUserGroupToPerm.target_user_group_id ==
2865 2865 TargetUserGroup.users_group_id)\
2866 2866 .join(
2867 2867 UserGroup,
2868 2868 UserGroupUserGroupToPerm.user_group_id ==
2869 2869 UserGroup.users_group_id)\
2870 2870 .join(
2871 2871 UserGroupMember,
2872 2872 UserGroupUserGroupToPerm.user_group_id ==
2873 2873 UserGroupMember.users_group_id)\
2874 2874 .filter(
2875 2875 UserGroupMember.user_id == user_id,
2876 2876 UserGroup.users_group_active == true())
2877 2877 if user_group_id:
2878 2878 q = q.filter(
2879 2879 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2880 2880
2881 2881 return q.all()
2882 2882
2883 2883
2884 2884 class UserRepoToPerm(Base, BaseModel):
2885 2885 __tablename__ = 'repo_to_perm'
2886 2886 __table_args__ = (
2887 2887 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2888 2888 base_table_args
2889 2889 )
2890 2890
2891 2891 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2892 2892 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2893 2893 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2894 2894 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2895 2895
2896 2896 user = relationship('User')
2897 2897 repository = relationship('Repository')
2898 2898 permission = relationship('Permission')
2899 2899
2900 2900 @classmethod
2901 2901 def create(cls, user, repository, permission):
2902 2902 n = cls()
2903 2903 n.user = user
2904 2904 n.repository = repository
2905 2905 n.permission = permission
2906 2906 Session().add(n)
2907 2907 return n
2908 2908
2909 2909 def __unicode__(self):
2910 2910 return u'<%s => %s >' % (self.user, self.repository)
2911 2911
2912 2912
2913 2913 class UserUserGroupToPerm(Base, BaseModel):
2914 2914 __tablename__ = 'user_user_group_to_perm'
2915 2915 __table_args__ = (
2916 2916 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2917 2917 base_table_args
2918 2918 )
2919 2919
2920 2920 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2921 2921 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2922 2922 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2923 2923 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2924 2924
2925 2925 user = relationship('User')
2926 2926 user_group = relationship('UserGroup')
2927 2927 permission = relationship('Permission')
2928 2928
2929 2929 @classmethod
2930 2930 def create(cls, user, user_group, permission):
2931 2931 n = cls()
2932 2932 n.user = user
2933 2933 n.user_group = user_group
2934 2934 n.permission = permission
2935 2935 Session().add(n)
2936 2936 return n
2937 2937
2938 2938 def __unicode__(self):
2939 2939 return u'<%s => %s >' % (self.user, self.user_group)
2940 2940
2941 2941
2942 2942 class UserToPerm(Base, BaseModel):
2943 2943 __tablename__ = 'user_to_perm'
2944 2944 __table_args__ = (
2945 2945 UniqueConstraint('user_id', 'permission_id'),
2946 2946 base_table_args
2947 2947 )
2948 2948
2949 2949 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2950 2950 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2951 2951 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2952 2952
2953 2953 user = relationship('User')
2954 2954 permission = relationship('Permission', lazy='joined')
2955 2955
2956 2956 def __unicode__(self):
2957 2957 return u'<%s => %s >' % (self.user, self.permission)
2958 2958
2959 2959
2960 2960 class UserGroupRepoToPerm(Base, BaseModel):
2961 2961 __tablename__ = 'users_group_repo_to_perm'
2962 2962 __table_args__ = (
2963 2963 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2964 2964 base_table_args
2965 2965 )
2966 2966
2967 2967 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2968 2968 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2969 2969 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2970 2970 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2971 2971
2972 2972 users_group = relationship('UserGroup')
2973 2973 permission = relationship('Permission')
2974 2974 repository = relationship('Repository')
2975 2975
2976 2976 @classmethod
2977 2977 def create(cls, users_group, repository, permission):
2978 2978 n = cls()
2979 2979 n.users_group = users_group
2980 2980 n.repository = repository
2981 2981 n.permission = permission
2982 2982 Session().add(n)
2983 2983 return n
2984 2984
2985 2985 def __unicode__(self):
2986 2986 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2987 2987
2988 2988
2989 2989 class UserGroupUserGroupToPerm(Base, BaseModel):
2990 2990 __tablename__ = 'user_group_user_group_to_perm'
2991 2991 __table_args__ = (
2992 2992 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2993 2993 CheckConstraint('target_user_group_id != user_group_id'),
2994 2994 base_table_args
2995 2995 )
2996 2996
2997 2997 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2998 2998 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2999 2999 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3000 3000 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3001 3001
3002 3002 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3003 3003 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3004 3004 permission = relationship('Permission')
3005 3005
3006 3006 @classmethod
3007 3007 def create(cls, target_user_group, user_group, permission):
3008 3008 n = cls()
3009 3009 n.target_user_group = target_user_group
3010 3010 n.user_group = user_group
3011 3011 n.permission = permission
3012 3012 Session().add(n)
3013 3013 return n
3014 3014
3015 3015 def __unicode__(self):
3016 3016 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3017 3017
3018 3018
3019 3019 class UserGroupToPerm(Base, BaseModel):
3020 3020 __tablename__ = 'users_group_to_perm'
3021 3021 __table_args__ = (
3022 3022 UniqueConstraint('users_group_id', 'permission_id',),
3023 3023 base_table_args
3024 3024 )
3025 3025
3026 3026 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3027 3027 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3028 3028 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3029 3029
3030 3030 users_group = relationship('UserGroup')
3031 3031 permission = relationship('Permission')
3032 3032
3033 3033
3034 3034 class UserRepoGroupToPerm(Base, BaseModel):
3035 3035 __tablename__ = 'user_repo_group_to_perm'
3036 3036 __table_args__ = (
3037 3037 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3038 3038 base_table_args
3039 3039 )
3040 3040
3041 3041 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3042 3042 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3043 3043 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3044 3044 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3045 3045
3046 3046 user = relationship('User')
3047 3047 group = relationship('RepoGroup')
3048 3048 permission = relationship('Permission')
3049 3049
3050 3050 @classmethod
3051 3051 def create(cls, user, repository_group, permission):
3052 3052 n = cls()
3053 3053 n.user = user
3054 3054 n.group = repository_group
3055 3055 n.permission = permission
3056 3056 Session().add(n)
3057 3057 return n
3058 3058
3059 3059
3060 3060 class UserGroupRepoGroupToPerm(Base, BaseModel):
3061 3061 __tablename__ = 'users_group_repo_group_to_perm'
3062 3062 __table_args__ = (
3063 3063 UniqueConstraint('users_group_id', 'group_id'),
3064 3064 base_table_args
3065 3065 )
3066 3066
3067 3067 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3068 3068 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3069 3069 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3070 3070 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3071 3071
3072 3072 users_group = relationship('UserGroup')
3073 3073 permission = relationship('Permission')
3074 3074 group = relationship('RepoGroup')
3075 3075
3076 3076 @classmethod
3077 3077 def create(cls, user_group, repository_group, permission):
3078 3078 n = cls()
3079 3079 n.users_group = user_group
3080 3080 n.group = repository_group
3081 3081 n.permission = permission
3082 3082 Session().add(n)
3083 3083 return n
3084 3084
3085 3085 def __unicode__(self):
3086 3086 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3087 3087
3088 3088
3089 3089 class Statistics(Base, BaseModel):
3090 3090 __tablename__ = 'statistics'
3091 3091 __table_args__ = (
3092 3092 base_table_args
3093 3093 )
3094 3094
3095 3095 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3096 3096 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3097 3097 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3098 3098 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3099 3099 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3100 3100 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3101 3101
3102 3102 repository = relationship('Repository', single_parent=True)
3103 3103
3104 3104
3105 3105 class UserFollowing(Base, BaseModel):
3106 3106 __tablename__ = 'user_followings'
3107 3107 __table_args__ = (
3108 3108 UniqueConstraint('user_id', 'follows_repository_id'),
3109 3109 UniqueConstraint('user_id', 'follows_user_id'),
3110 3110 base_table_args
3111 3111 )
3112 3112
3113 3113 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3114 3114 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3115 3115 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3116 3116 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3117 3117 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3118 3118
3119 3119 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3120 3120
3121 3121 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3122 3122 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3123 3123
3124 3124 @classmethod
3125 3125 def get_repo_followers(cls, repo_id):
3126 3126 return cls.query().filter(cls.follows_repo_id == repo_id)
3127 3127
3128 3128
3129 3129 class CacheKey(Base, BaseModel):
3130 3130 __tablename__ = 'cache_invalidation'
3131 3131 __table_args__ = (
3132 3132 UniqueConstraint('cache_key'),
3133 3133 Index('key_idx', 'cache_key'),
3134 3134 base_table_args,
3135 3135 )
3136 3136
3137 3137 CACHE_TYPE_ATOM = 'ATOM'
3138 3138 CACHE_TYPE_RSS = 'RSS'
3139 3139 CACHE_TYPE_README = 'README'
3140 3140
3141 3141 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3142 3142 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3143 3143 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3144 3144 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3145 3145
3146 3146 def __init__(self, cache_key, cache_args=''):
3147 3147 self.cache_key = cache_key
3148 3148 self.cache_args = cache_args
3149 3149 self.cache_active = False
3150 3150
3151 3151 def __unicode__(self):
3152 3152 return u"<%s('%s:%s[%s]')>" % (
3153 3153 self.__class__.__name__,
3154 3154 self.cache_id, self.cache_key, self.cache_active)
3155 3155
3156 3156 def _cache_key_partition(self):
3157 3157 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3158 3158 return prefix, repo_name, suffix
3159 3159
3160 3160 def get_prefix(self):
3161 3161 """
3162 3162 Try to extract the prefix from an existing cache key. The key could
3163 3163 consist of prefix, repo_name and suffix.
3164 3164 """
3165 3165 # this returns prefix, repo_name, suffix
3166 3166 return self._cache_key_partition()[0]
3167 3167
3168 3168 def get_suffix(self):
3169 3169 """
3170 3170 Get the suffix that might have been used in _get_cache_key to
3171 3171 generate self.cache_key. Only used for informational purposes
3172 3172 in repo_edit.mako.
3173 3173 """
3174 3174 # prefix, repo_name, suffix
3175 3175 return self._cache_key_partition()[2]
3176 3176
3177 3177 @classmethod
3178 3178 def delete_all_cache(cls):
3179 3179 """
3180 3180 Delete all cache keys from database.
3181 3181 Should only be run when all instances are down and all entries
3182 3182 thus stale.
3183 3183 """
3184 3184 cls.query().delete()
3185 3185 Session().commit()
3186 3186
3187 3187 @classmethod
3188 3188 def get_cache_key(cls, repo_name, cache_type):
3189 3189 """
3191 3191 Generate a cache key for this process of a RhodeCode instance.
3192 3192 The prefix will most likely be the process id, or an explicitly
3193 3193 set instance_id from the .ini file.
3194 3194 """
3195 3195 import rhodecode
3196 3196 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3197 3197
3198 3198 repo_as_unicode = safe_unicode(repo_name)
3199 3199 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3200 3200 if cache_type else repo_as_unicode
3201 3201
3202 3202 return u'{}{}'.format(prefix, key)
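# Illustrative sketch: the generated key is '<prefix><repo_name>_<cache_type>'.
# The instance_id and repository name below are hypothetical.
#
#   # with instance_id = 'prod1' set in the .ini file
#   CacheKey.get_cache_key('projects/backend', CacheKey.CACHE_TYPE_README)
#   # -> u'prod1projects/backend_README'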
3203 3203
3204 3204 @classmethod
3205 3205 def set_invalidate(cls, repo_name, delete=False):
3206 3206 """
3207 3207 Mark all caches of a repo as invalid in the database.
3208 3208 """
3209 3209
3210 3210 try:
3211 3211 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3212 3212 if delete:
3213 3213 log.debug('cache objects deleted for repo %s',
3214 3214 safe_str(repo_name))
3215 3215 qry.delete()
3216 3216 else:
3217 3217 log.debug('cache objects marked as invalid for repo %s',
3218 3218 safe_str(repo_name))
3219 3219 qry.update({"cache_active": False})
3220 3220
3221 3221 Session().commit()
3222 3222 except Exception:
3223 3223 log.exception(
3224 3224 'Cache key invalidation failed for repository %s',
3225 3225 safe_str(repo_name))
3226 3226 Session().rollback()
3227 3227
3228 3228 @classmethod
3229 3229 def get_active_cache(cls, cache_key):
3230 3230 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3231 3231 if inv_obj:
3232 3232 return inv_obj
3233 3233 return None
3234 3234
3235 3235 @classmethod
3236 3236 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3237 3237 thread_scoped=False):
3238 3238 """
3239 3239 @cache_region('long_term')
3240 3240 def _heavy_calculation(cache_key):
3241 3241 return 'result'
3242 3242
3243 3243 cache_context = CacheKey.repo_context_cache(
3244 3244 _heavy_calculation, repo_name, cache_type)
3245 3245
3246 3246 with cache_context as context:
3247 3247 context.invalidate()
3248 3248 computed = context.compute()
3249 3249
3250 3250 assert computed == 'result'
3251 3251 """
3252 3252 from rhodecode.lib import caches
3253 3253 return caches.InvalidationContext(
3254 3254 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3255 3255
3256 3256
3257 3257 class ChangesetComment(Base, BaseModel):
3258 3258 __tablename__ = 'changeset_comments'
3259 3259 __table_args__ = (
3260 3260 Index('cc_revision_idx', 'revision'),
3261 3261 base_table_args,
3262 3262 )
3263 3263
3264 3264 COMMENT_OUTDATED = u'comment_outdated'
3265 3265 COMMENT_TYPE_NOTE = u'note'
3266 3266 COMMENT_TYPE_TODO = u'todo'
3267 3267 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3268 3268
3269 3269 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3270 3270 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3271 3271 revision = Column('revision', String(40), nullable=True)
3272 3272 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3273 3273 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3274 3274 line_no = Column('line_no', Unicode(10), nullable=True)
3275 3275 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3276 3276 f_path = Column('f_path', Unicode(1000), nullable=True)
3277 3277 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3278 3278 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3279 3279 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3280 3280 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3281 3281 renderer = Column('renderer', Unicode(64), nullable=True)
3282 3282 display_state = Column('display_state', Unicode(128), nullable=True)
3283 3283
3284 3284 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3285 3285 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3286 3286 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3287 3287 author = relationship('User', lazy='joined')
3288 3288 repo = relationship('Repository')
3289 3289 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3290 3290 pull_request = relationship('PullRequest', lazy='joined')
3291 3291 pull_request_version = relationship('PullRequestVersion')
3292 3292
3293 3293 @classmethod
3294 3294 def get_users(cls, revision=None, pull_request_id=None):
3295 3295 """
3296 3296 Returns the users associated with this ChangesetComment, i.e. those
3297 3297 who actually commented
3298 3298
3299 3299 :param cls:
3300 3300 :param revision:
3301 3301 """
3302 3302 q = Session().query(User)\
3303 3303 .join(ChangesetComment.author)
3304 3304 if revision:
3305 3305 q = q.filter(cls.revision == revision)
3306 3306 elif pull_request_id:
3307 3307 q = q.filter(cls.pull_request_id == pull_request_id)
3308 3308 return q.all()
3309 3309
3310 3310 @classmethod
3311 3311 def get_index_from_version(cls, pr_version, versions):
3312 3312 num_versions = [x.pull_request_version_id for x in versions]
3313 3313 try:
3314 3314 return num_versions.index(pr_version) + 1
3315 3315 except (IndexError, ValueError):
3316 3316 return
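# Illustrative sketch: versions are mapped to a 1-based index, and an
# unknown version yields None. The ids below are hypothetical.
#
#   # given versions whose pull_request_version_id values are 10, 11, 12
#   ChangesetComment.get_index_from_version(11, versions)   # -> 2
#   ChangesetComment.get_index_from_version(99, versions)   # -> None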
3317 3317
3318 3318 @property
3319 3319 def outdated(self):
3320 3320 return self.display_state == self.COMMENT_OUTDATED
3321 3321
3322 3322 def outdated_at_version(self, version):
3323 3323 """
3324 3324 Checks if comment is outdated for given pull request version
3325 3325 """
3326 3326 return self.outdated and self.pull_request_version_id != version
3327 3327
3328 3328 def older_than_version(self, version):
3329 3329 """
3330 3330 Checks if the comment was made on an earlier version than the given one
3331 3331 """
3332 3332 if version is None:
3333 3333 return self.pull_request_version_id is not None
3334 3334
3335 3335 return self.pull_request_version_id < version
3336 3336
3337 3337 @property
3338 3338 def resolved(self):
3339 3339 return self.resolved_by[0] if self.resolved_by else None
3340 3340
3341 3341 @property
3342 3342 def is_todo(self):
3343 3343 return self.comment_type == self.COMMENT_TYPE_TODO
3344 3344
3345 3345 @property
3346 3346 def is_inline(self):
3347 3347 return self.line_no and self.f_path
3348 3348
3349 3349 def get_index_version(self, versions):
3350 3350 return self.get_index_from_version(
3351 3351 self.pull_request_version_id, versions)
3352 3352
3353 3353 def __repr__(self):
3354 3354 if self.comment_id:
3355 3355 return '<DB:Comment #%s>' % self.comment_id
3356 3356 else:
3357 3357 return '<DB:Comment at %#x>' % id(self)
3358 3358
3359 3359 def get_api_data(self):
3360 3360 comment = self
3361 3361 data = {
3362 3362 'comment_id': comment.comment_id,
3363 3363 'comment_type': comment.comment_type,
3364 3364 'comment_text': comment.text,
3365 3365 'comment_status': comment.status_change,
3366 3366 'comment_f_path': comment.f_path,
3367 3367 'comment_lineno': comment.line_no,
3368 3368 'comment_author': comment.author,
3369 3369 'comment_created_on': comment.created_on
3370 3370 }
3371 3371 return data
3372 3372
3373 3373 def __json__(self):
3374 3374 data = dict()
3375 3375 data.update(self.get_api_data())
3376 3376 return data
3377 3377
3378 3378
3379 3379 class ChangesetStatus(Base, BaseModel):
3380 3380 __tablename__ = 'changeset_statuses'
3381 3381 __table_args__ = (
3382 3382 Index('cs_revision_idx', 'revision'),
3383 3383 Index('cs_version_idx', 'version'),
3384 3384 UniqueConstraint('repo_id', 'revision', 'version'),
3385 3385 base_table_args
3386 3386 )
3387 3387
3388 3388 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3389 3389 STATUS_APPROVED = 'approved'
3390 3390 STATUS_REJECTED = 'rejected'
3391 3391 STATUS_UNDER_REVIEW = 'under_review'
3392 3392
3393 3393 STATUSES = [
3394 3394 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3395 3395 (STATUS_APPROVED, _("Approved")),
3396 3396 (STATUS_REJECTED, _("Rejected")),
3397 3397 (STATUS_UNDER_REVIEW, _("Under Review")),
3398 3398 ]
3399 3399
3400 3400 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3401 3401 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3402 3402 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3403 3403 revision = Column('revision', String(40), nullable=False)
3404 3404 status = Column('status', String(128), nullable=False, default=DEFAULT)
3405 3405 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3406 3406 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3407 3407 version = Column('version', Integer(), nullable=False, default=0)
3408 3408 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3409 3409
3410 3410 author = relationship('User', lazy='joined')
3411 3411 repo = relationship('Repository')
3412 3412 comment = relationship('ChangesetComment', lazy='joined')
3413 3413 pull_request = relationship('PullRequest', lazy='joined')
3414 3414
3415 3415 def __unicode__(self):
3416 3416 return u"<%s('%s[v%s]:%s')>" % (
3417 3417 self.__class__.__name__,
3418 3418 self.status, self.version, self.author
3419 3419 )
3420 3420
3421 3421 @classmethod
3422 3422 def get_status_lbl(cls, value):
3423 3423 return dict(cls.STATUSES).get(value)
3424 3424
3425 3425 @property
3426 3426 def status_lbl(self):
3427 3427 return ChangesetStatus.get_status_lbl(self.status)
3428 3428
3429 3429 def get_api_data(self):
3430 3430 status = self
3431 3431 data = {
3432 3432 'status_id': status.changeset_status_id,
3433 3433 'status': status.status,
3434 3434 }
3435 3435 return data
3436 3436
3437 3437 def __json__(self):
3438 3438 data = dict()
3439 3439 data.update(self.get_api_data())
3440 3440 return data
3441 3441
3442 3442
3443 3443 class _PullRequestBase(BaseModel):
3444 3444 """
3445 3445 Common attributes of pull request and version entries.
3446 3446 """
3447 3447
3448 3448 # .status values
3449 3449 STATUS_NEW = u'new'
3450 3450 STATUS_OPEN = u'open'
3451 3451 STATUS_CLOSED = u'closed'
3452 3452
3453 3453 title = Column('title', Unicode(255), nullable=True)
3454 3454 description = Column(
3455 3455 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3456 3456 nullable=True)
3457 3457 # new/open/closed status of pull request (not approve/reject/etc)
3458 3458 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3459 3459 created_on = Column(
3460 3460 'created_on', DateTime(timezone=False), nullable=False,
3461 3461 default=datetime.datetime.now)
3462 3462 updated_on = Column(
3463 3463 'updated_on', DateTime(timezone=False), nullable=False,
3464 3464 default=datetime.datetime.now)
3465 3465
3466 3466 @declared_attr
3467 3467 def user_id(cls):
3468 3468 return Column(
3469 3469 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3470 3470 unique=None)
3471 3471
3472 3472 # 500 revisions max
3473 3473 _revisions = Column(
3474 3474 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3475 3475
3476 3476 @declared_attr
3477 3477 def source_repo_id(cls):
3478 3478 # TODO: dan: rename column to source_repo_id
3479 3479 return Column(
3480 3480 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3481 3481 nullable=False)
3482 3482
3483 3483 source_ref = Column('org_ref', Unicode(255), nullable=False)
3484 3484
3485 3485 @declared_attr
3486 3486 def target_repo_id(cls):
3487 3487 # TODO: dan: rename column to target_repo_id
3488 3488 return Column(
3489 3489 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3490 3490 nullable=False)
3491 3491
3492 3492 target_ref = Column('other_ref', Unicode(255), nullable=False)
3493 3493 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3494 3494
3495 3495 # TODO: dan: rename column to last_merge_source_rev
3496 3496 _last_merge_source_rev = Column(
3497 3497 'last_merge_org_rev', String(40), nullable=True)
3498 3498 # TODO: dan: rename column to last_merge_target_rev
3499 3499 _last_merge_target_rev = Column(
3500 3500 'last_merge_other_rev', String(40), nullable=True)
3501 3501 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3502 3502 merge_rev = Column('merge_rev', String(40), nullable=True)
3503 3503
3504 3504 reviewer_data = Column(
3505 3505 'reviewer_data_json', MutationObj.as_mutable(
3506 3506 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3507 3507
3508 3508 @property
3509 3509 def reviewer_data_json(self):
3510 3510 return json.dumps(self.reviewer_data)
3511 3511
3512 3512 @hybrid_property
3513 3513 def description_safe(self):
3514 3514 from rhodecode.lib import helpers as h
3515 3515 return h.escape(self.description)
3516 3516
3517 3517 @hybrid_property
3518 3518 def revisions(self):
3519 3519 return self._revisions.split(':') if self._revisions else []
3520 3520
3521 3521 @revisions.setter
3522 3522 def revisions(self, val):
3523 3523 self._revisions = ':'.join(val)
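# Illustrative sketch: revisions are stored as a single colon-joined
# string but exposed as a list. Commit ids below are shortened and
# hypothetical.
#
#   pr.revisions = ['aaa111', 'bbb222', 'ccc333']
#   pr._revisions   # -> 'aaa111:bbb222:ccc333'
#   pr.revisions    # -> ['aaa111', 'bbb222', 'ccc333']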
3524 3524
3525 3525 @hybrid_property
3526 3526 def last_merge_status(self):
3527 3527 return safe_int(self._last_merge_status)
3528 3528
3529 3529 @last_merge_status.setter
3530 3530 def last_merge_status(self, val):
3531 3531 self._last_merge_status = val
3532 3532
3533 3533 @declared_attr
3534 3534 def author(cls):
3535 3535 return relationship('User', lazy='joined')
3536 3536
3537 3537 @declared_attr
3538 3538 def source_repo(cls):
3539 3539 return relationship(
3540 3540 'Repository',
3541 3541 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3542 3542
3543 3543 @property
3544 3544 def source_ref_parts(self):
3545 3545 return self.unicode_to_reference(self.source_ref)
3546 3546
3547 3547 @declared_attr
3548 3548 def target_repo(cls):
3549 3549 return relationship(
3550 3550 'Repository',
3551 3551 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3552 3552
3553 3553 @property
3554 3554 def target_ref_parts(self):
3555 3555 return self.unicode_to_reference(self.target_ref)
3556 3556
3557 3557 @property
3558 3558 def shadow_merge_ref(self):
3559 3559 return self.unicode_to_reference(self._shadow_merge_ref)
3560 3560
3561 3561 @shadow_merge_ref.setter
3562 3562 def shadow_merge_ref(self, ref):
3563 3563 self._shadow_merge_ref = self.reference_to_unicode(ref)
3564 3564
3565 3565 def unicode_to_reference(self, raw):
3566 3566 """
3567 3567 Convert a unicode (or string) value to a reference object.
3568 3568 If the value evaluates to False, return None.
3569 3569 """
3570 3570 if raw:
3571 3571 refs = raw.split(':')
3572 3572 return Reference(*refs)
3573 3573 else:
3574 3574 return None
3575 3575
3576 3576 def reference_to_unicode(self, ref):
3577 3577 """
3578 3578 Convert a reference object to unicode.
3579 3579 If the reference is None, return None.
3580 3580 """
3581 3581 if ref:
3582 3582 return u':'.join(ref)
3583 3583 else:
3584 3584 return None
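# Illustrative sketch (assumption): a Reference is a (type, name, commit_id)
# tuple, so the round trip between the stored unicode form and the object
# looks roughly like this; the values are hypothetical.
#
#   ref = pr.unicode_to_reference(u'branch:default:abcdef0123456789')
#   # -> Reference(type=u'branch', name=u'default', commit_id=u'abcdef0123456789')
#   pr.reference_to_unicode(ref)
#   # -> u'branch:default:abcdef0123456789'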
3585 3585
3586 3586 def get_api_data(self, with_merge_state=True):
3587 3587 from rhodecode.model.pull_request import PullRequestModel
3588 3588
3589 3589 pull_request = self
3590 3590 if with_merge_state:
3591 3591 merge_status = PullRequestModel().merge_status(pull_request)
3592 3592 merge_state = {
3593 3593 'status': merge_status[0],
3594 3594 'message': safe_unicode(merge_status[1]),
3595 3595 }
3596 3596 else:
3597 3597 merge_state = {'status': 'not_available',
3598 3598 'message': 'not_available'}
3599 3599
3600 3600 merge_data = {
3601 3601 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3602 3602 'reference': (
3603 3603 pull_request.shadow_merge_ref._asdict()
3604 3604 if pull_request.shadow_merge_ref else None),
3605 3605 }
3606 3606
3607 3607 data = {
3608 3608 'pull_request_id': pull_request.pull_request_id,
3609 3609 'url': PullRequestModel().get_url(pull_request),
3610 3610 'title': pull_request.title,
3611 3611 'description': pull_request.description,
3612 3612 'status': pull_request.status,
3613 3613 'created_on': pull_request.created_on,
3614 3614 'updated_on': pull_request.updated_on,
3615 3615 'commit_ids': pull_request.revisions,
3616 3616 'review_status': pull_request.calculated_review_status(),
3617 3617 'mergeable': merge_state,
3618 3618 'source': {
3619 3619 'clone_url': pull_request.source_repo.clone_url(),
3620 3620 'repository': pull_request.source_repo.repo_name,
3621 3621 'reference': {
3622 3622 'name': pull_request.source_ref_parts.name,
3623 3623 'type': pull_request.source_ref_parts.type,
3624 3624 'commit_id': pull_request.source_ref_parts.commit_id,
3625 3625 },
3626 3626 },
3627 3627 'target': {
3628 3628 'clone_url': pull_request.target_repo.clone_url(),
3629 3629 'repository': pull_request.target_repo.repo_name,
3630 3630 'reference': {
3631 3631 'name': pull_request.target_ref_parts.name,
3632 3632 'type': pull_request.target_ref_parts.type,
3633 3633 'commit_id': pull_request.target_ref_parts.commit_id,
3634 3634 },
3635 3635 },
3636 3636 'merge': merge_data,
3637 3637 'author': pull_request.author.get_api_data(include_secrets=False,
3638 3638 details='basic'),
3639 3639 'reviewers': [
3640 3640 {
3641 3641 'user': reviewer.get_api_data(include_secrets=False,
3642 3642 details='basic'),
3643 3643 'reasons': reasons,
3644 3644 'review_status': st[0][1].status if st else 'not_reviewed',
3645 3645 }
3646 3646 for obj, reviewer, reasons, mandatory, st in
3647 3647 pull_request.reviewers_statuses()
3648 3648 ]
3649 3649 }
3650 3650
3651 3651 return data
3652 3652
3653 3653
3654 3654 class PullRequest(Base, _PullRequestBase):
3655 3655 __tablename__ = 'pull_requests'
3656 3656 __table_args__ = (
3657 3657 base_table_args,
3658 3658 )
3659 3659
3660 3660 pull_request_id = Column(
3661 3661 'pull_request_id', Integer(), nullable=False, primary_key=True)
3662 3662
3663 3663 def __repr__(self):
3664 3664 if self.pull_request_id:
3665 3665 return '<DB:PullRequest #%s>' % self.pull_request_id
3666 3666 else:
3667 3667 return '<DB:PullRequest at %#x>' % id(self)
3668 3668
3669 3669 reviewers = relationship('PullRequestReviewers',
3670 3670 cascade="all, delete, delete-orphan")
3671 3671 statuses = relationship('ChangesetStatus',
3672 3672 cascade="all, delete, delete-orphan")
3673 3673 comments = relationship('ChangesetComment',
3674 3674 cascade="all, delete, delete-orphan")
3675 3675 versions = relationship('PullRequestVersion',
3676 3676 cascade="all, delete, delete-orphan",
3677 3677 lazy='dynamic')
3678 3678
3679 3679 @classmethod
3680 3680 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3681 3681 internal_methods=None):
3682 3682
3683 3683 class PullRequestDisplay(object):
3684 3684 """
3685 3685 Special object wrapper for showing PullRequest data via Versions.
3686 3686 It mimics the PR object as closely as possible. This is a read-only
3687 3687 object, intended just for display.
3688 3688 """
3689 3689
3690 3690 def __init__(self, attrs, internal=None):
3691 3691 self.attrs = attrs
3692 3692 # internal attributes have priority over the ones given via attrs
3693 3693 self.internal = internal or ['versions']
3694 3694
3695 3695 def __getattr__(self, item):
3696 3696 if item in self.internal:
3697 3697 return getattr(self, item)
3698 3698 try:
3699 3699 return self.attrs[item]
3700 3700 except KeyError:
3701 3701 raise AttributeError(
3702 3702 '%s object has no attribute %s' % (self, item))
3703 3703
3704 3704 def __repr__(self):
3705 3705 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3706 3706
3707 3707 def versions(self):
3708 3708 return pull_request_obj.versions.order_by(
3709 3709 PullRequestVersion.pull_request_version_id).all()
3710 3710
3711 3711 def is_closed(self):
3712 3712 return pull_request_obj.is_closed()
3713 3713
3714 3714 @property
3715 3715 def pull_request_version_id(self):
3716 3716 return getattr(pull_request_obj, 'pull_request_version_id', None)
3717 3717
3718 3718 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3719 3719
3720 3720 attrs.author = StrictAttributeDict(
3721 3721 pull_request_obj.author.get_api_data())
3722 3722 if pull_request_obj.target_repo:
3723 3723 attrs.target_repo = StrictAttributeDict(
3724 3724 pull_request_obj.target_repo.get_api_data())
3725 3725 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3726 3726
3727 3727 if pull_request_obj.source_repo:
3728 3728 attrs.source_repo = StrictAttributeDict(
3729 3729 pull_request_obj.source_repo.get_api_data())
3730 3730 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3731 3731
3732 3732 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3733 3733 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3734 3734 attrs.revisions = pull_request_obj.revisions
3735 3735
3736 3736 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3737 3737 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3738 3738 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3739 3739
3740 3740 return PullRequestDisplay(attrs, internal=internal_methods)
3741 3741
3742 3742 def is_closed(self):
3743 3743 return self.status == self.STATUS_CLOSED
3744 3744
3745 3745 def __json__(self):
3746 3746 return {
3747 3747 'revisions': self.revisions,
3748 3748 }
3749 3749
3750 3750 def calculated_review_status(self):
3751 3751 from rhodecode.model.changeset_status import ChangesetStatusModel
3752 3752 return ChangesetStatusModel().calculated_review_status(self)
3753 3753
3754 3754 def reviewers_statuses(self):
3755 3755 from rhodecode.model.changeset_status import ChangesetStatusModel
3756 3756 return ChangesetStatusModel().reviewers_statuses(self)
3757 3757
3758 3758 @property
3759 3759 def workspace_id(self):
3760 3760 from rhodecode.model.pull_request import PullRequestModel
3761 3761 return PullRequestModel()._workspace_id(self)
3762 3762
3763 3763 def get_shadow_repo(self):
3764 3764 workspace_id = self.workspace_id
3765 3765 vcs_obj = self.target_repo.scm_instance()
3766 3766 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3767 3767 self.target_repo.repo_id, workspace_id)
3768 3768 if os.path.isdir(shadow_repository_path):
3769 3769 return vcs_obj._get_shadow_instance(shadow_repository_path)
3770 3770
3771 3771
3772 3772 class PullRequestVersion(Base, _PullRequestBase):
3773 3773 __tablename__ = 'pull_request_versions'
3774 3774 __table_args__ = (
3775 3775 base_table_args,
3776 3776 )
3777 3777
3778 3778 pull_request_version_id = Column(
3779 3779 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3780 3780 pull_request_id = Column(
3781 3781 'pull_request_id', Integer(),
3782 3782 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3783 3783 pull_request = relationship('PullRequest')
3784 3784
3785 3785 def __repr__(self):
3786 3786 if self.pull_request_version_id:
3787 3787 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3788 3788 else:
3789 3789 return '<DB:PullRequestVersion at %#x>' % id(self)
3790 3790
3791 3791 @property
3792 3792 def reviewers(self):
3793 3793 return self.pull_request.reviewers
3794 3794
3795 3795 @property
3796 3796 def versions(self):
3797 3797 return self.pull_request.versions
3798 3798
3799 3799 def is_closed(self):
3800 3800 # calculate from original
3801 3801 return self.pull_request.status == self.STATUS_CLOSED
3802 3802
3803 3803 def calculated_review_status(self):
3804 3804 return self.pull_request.calculated_review_status()
3805 3805
3806 3806 def reviewers_statuses(self):
3807 3807 return self.pull_request.reviewers_statuses()
3808 3808
3809 3809
3810 3810 class PullRequestReviewers(Base, BaseModel):
3811 3811 __tablename__ = 'pull_request_reviewers'
3812 3812 __table_args__ = (
3813 3813 base_table_args,
3814 3814 )
3815 3815
3816 3816 @hybrid_property
3817 3817 def reasons(self):
3818 3818 if not self._reasons:
3819 3819 return []
3820 3820 return self._reasons
3821 3821
3822 3822 @reasons.setter
3823 3823 def reasons(self, val):
3824 3824 val = val or []
3825 3825 if any(not isinstance(x, basestring) for x in val):
3826 3826 raise Exception('invalid reasons type, must be list of strings')
3827 3827 self._reasons = val
3828 3828
3829 3829 pull_requests_reviewers_id = Column(
3830 3830 'pull_requests_reviewers_id', Integer(), nullable=False,
3831 3831 primary_key=True)
3832 3832 pull_request_id = Column(
3833 3833 "pull_request_id", Integer(),
3834 3834 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3835 3835 user_id = Column(
3836 3836 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3837 3837 _reasons = Column(
3838 3838 'reason', MutationList.as_mutable(
3839 3839 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3840 3840
3841 3841 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3842 3842 user = relationship('User')
3843 3843 pull_request = relationship('PullRequest')
3844 3844
3845 3845 rule_data = Column(
3846 3846 'rule_data_json',
3847 3847 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3848 3848
3849 3849 def rule_user_group_data(self):
3850 3850 """
3851 3851 Returns the voting user group rule data for this reviewer
3852 3852 """
3853 3853
3854 3854 if self.rule_data and 'vote_rule' in self.rule_data:
3855 3855 user_group_data = {}
3856 3856 if 'rule_user_group_entry_id' in self.rule_data:
3857 3857 # means a group with voting rules!
3858 3858 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3859 3859 user_group_data['name'] = self.rule_data['rule_name']
3860 3860 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3861 3861
3862 3862 return user_group_data
3863 3863
3864 3864 def __unicode__(self):
3865 3865 return u"<%s('id:%s')>" % (self.__class__.__name__,
3866 3866 self.pull_requests_reviewers_id)
3867 3867
3868 3868
3869 3869 class Notification(Base, BaseModel):
3870 3870 __tablename__ = 'notifications'
3871 3871 __table_args__ = (
3872 3872 Index('notification_type_idx', 'type'),
3873 3873 base_table_args,
3874 3874 )
3875 3875
3876 3876 TYPE_CHANGESET_COMMENT = u'cs_comment'
3877 3877 TYPE_MESSAGE = u'message'
3878 3878 TYPE_MENTION = u'mention'
3879 3879 TYPE_REGISTRATION = u'registration'
3880 3880 TYPE_PULL_REQUEST = u'pull_request'
3881 3881 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3882 3882
3883 3883 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3884 3884 subject = Column('subject', Unicode(512), nullable=True)
3885 3885 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3886 3886 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3887 3887 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3888 3888 type_ = Column('type', Unicode(255))
3889 3889
3890 3890 created_by_user = relationship('User')
3891 3891 notifications_to_users = relationship('UserNotification', lazy='joined',
3892 3892 cascade="all, delete, delete-orphan")
3893 3893
3894 3894 @property
3895 3895 def recipients(self):
3896 3896 return [x.user for x in UserNotification.query()\
3897 3897 .filter(UserNotification.notification == self)\
3898 3898 .order_by(UserNotification.user_id.asc()).all()]
3899 3899
3900 3900 @classmethod
3901 3901 def create(cls, created_by, subject, body, recipients, type_=None):
3902 3902 if type_ is None:
3903 3903 type_ = Notification.TYPE_MESSAGE
3904 3904
3905 3905 notification = cls()
3906 3906 notification.created_by_user = created_by
3907 3907 notification.subject = subject
3908 3908 notification.body = body
3909 3909 notification.type_ = type_
3910 3910 notification.created_on = datetime.datetime.now()
3911 3911
3912 3912 for u in recipients:
3913 3913 assoc = UserNotification()
3914 3914 assoc.notification = notification
3915 3915
3916 3916             # if created_by is among the recipients, mark their notification
3917 3917 # as read
3918 3918 if u.user_id == created_by.user_id:
3919 3919 assoc.read = True
3920 3920
3921 3921 u.notifications.append(assoc)
3922 3922 Session().add(notification)
3923 3923
3924 3924 return notification
3925 3925
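For illustration, a minimal usage sketch of Notification.create() as defined above. The `admin` and `user` names are hypothetical User instances obtained elsewhere; they are not defined in this changeset:

    # `admin` and `user` are hypothetical, pre-loaded User instances
    notification = Notification.create(
        created_by=admin,
        subject='New pull request',
        body='Please review pull request !42',
        recipients=[admin, user],
        type_=Notification.TYPE_PULL_REQUEST)
    # admin is also a recipient, so admin's copy of the notification is
    # created already marked as read
    Session().commit()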
3926 3926
3927 3927 class UserNotification(Base, BaseModel):
3928 3928 __tablename__ = 'user_to_notification'
3929 3929 __table_args__ = (
3930 3930 UniqueConstraint('user_id', 'notification_id'),
3931 3931 base_table_args
3932 3932 )
3933 3933
3934 3934 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3935 3935 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3936 3936 read = Column('read', Boolean, default=False)
3937 3937 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3938 3938
3939 3939 user = relationship('User', lazy="joined")
3940 3940 notification = relationship('Notification', lazy="joined",
3941 3941 order_by=lambda: Notification.created_on.desc(),)
3942 3942
3943 3943 def mark_as_read(self):
3944 3944 self.read = True
3945 3945 Session().add(self)
3946 3946
3947 3947
3948 3948 class Gist(Base, BaseModel):
3949 3949 __tablename__ = 'gists'
3950 3950 __table_args__ = (
3951 3951 Index('g_gist_access_id_idx', 'gist_access_id'),
3952 3952 Index('g_created_on_idx', 'created_on'),
3953 3953 base_table_args
3954 3954 )
3955 3955
3956 3956 GIST_PUBLIC = u'public'
3957 3957 GIST_PRIVATE = u'private'
3958 3958 DEFAULT_FILENAME = u'gistfile1.txt'
3959 3959
3960 3960 ACL_LEVEL_PUBLIC = u'acl_public'
3961 3961 ACL_LEVEL_PRIVATE = u'acl_private'
3962 3962
3963 3963 gist_id = Column('gist_id', Integer(), primary_key=True)
3964 3964 gist_access_id = Column('gist_access_id', Unicode(250))
3965 3965 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3966 3966 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3967 3967 gist_expires = Column('gist_expires', Float(53), nullable=False)
3968 3968 gist_type = Column('gist_type', Unicode(128), nullable=False)
3969 3969 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3970 3970 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3971 3971 acl_level = Column('acl_level', Unicode(128), nullable=True)
3972 3972
3973 3973 owner = relationship('User')
3974 3974
3975 3975 def __repr__(self):
3976 3976 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3977 3977
3978 3978 @hybrid_property
3979 3979 def description_safe(self):
3980 3980 from rhodecode.lib import helpers as h
3981 3981 return h.escape(self.gist_description)
3982 3982
3983 3983 @classmethod
3984 3984 def get_or_404(cls, id_):
3985 3985 from pyramid.httpexceptions import HTTPNotFound
3986 3986
3987 3987 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3988 3988 if not res:
3989 3989 raise HTTPNotFound()
3990 3990 return res
3991 3991
3992 3992 @classmethod
3993 3993 def get_by_access_id(cls, gist_access_id):
3994 3994 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3995 3995
3996 3996 def gist_url(self):
3997 3997 from rhodecode.model.gist import GistModel
3998 3998 return GistModel().get_url(self)
3999 3999
4000 4000 @classmethod
4001 4001 def base_path(cls):
4002 4002 """
4003 4003         Returns the base path where all gists are stored
4004 4004
4005 4005 :param cls:
4006 4006 """
4007 4007 from rhodecode.model.gist import GIST_STORE_LOC
4008 4008 q = Session().query(RhodeCodeUi)\
4009 4009 .filter(RhodeCodeUi.ui_key == URL_SEP)
4010 4010 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4011 4011 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4012 4012
4013 4013 def get_api_data(self):
4014 4014 """
4015 4015 Common function for generating gist related data for API
4016 4016 """
4017 4017 gist = self
4018 4018 data = {
4019 4019 'gist_id': gist.gist_id,
4020 4020 'type': gist.gist_type,
4021 4021 'access_id': gist.gist_access_id,
4022 4022 'description': gist.gist_description,
4023 4023 'url': gist.gist_url(),
4024 4024 'expires': gist.gist_expires,
4025 4025 'created_on': gist.created_on,
4026 4026 'modified_at': gist.modified_at,
4027 4027 'content': None,
4028 4028 'acl_level': gist.acl_level,
4029 4029 }
4030 4030 return data
4031 4031
4032 4032 def __json__(self):
4033 4033 data = dict(
4034 4034 )
4035 4035 data.update(self.get_api_data())
4036 4036 return data
4037 4037 # SCM functions
4038 4038
4039 4039 def scm_instance(self, **kwargs):
4040 4040 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4041 4041 return get_vcs_instance(
4042 4042 repo_path=safe_str(full_repo_path), create=False)
4043 4043
4044 4044
4045 4045 class ExternalIdentity(Base, BaseModel):
4046 4046 __tablename__ = 'external_identities'
4047 4047 __table_args__ = (
4048 4048 Index('local_user_id_idx', 'local_user_id'),
4049 4049 Index('external_id_idx', 'external_id'),
4050 4050 base_table_args
4051 4051 )
4052 4052
4053 4053 external_id = Column('external_id', Unicode(255), default=u'',
4054 4054 primary_key=True)
4055 4055 external_username = Column('external_username', Unicode(1024), default=u'')
4056 4056 local_user_id = Column('local_user_id', Integer(),
4057 4057 ForeignKey('users.user_id'), primary_key=True)
4058 4058 provider_name = Column('provider_name', Unicode(255), default=u'',
4059 4059 primary_key=True)
4060 4060 access_token = Column('access_token', String(1024), default=u'')
4061 4061 alt_token = Column('alt_token', String(1024), default=u'')
4062 4062 token_secret = Column('token_secret', String(1024), default=u'')
4063 4063
4064 4064 @classmethod
4065 4065 def by_external_id_and_provider(cls, external_id, provider_name,
4066 4066 local_user_id=None):
4067 4067 """
4068 4068 Returns ExternalIdentity instance based on search params
4069 4069
4070 4070 :param external_id:
4071 4071 :param provider_name:
4072 4072 :return: ExternalIdentity
4073 4073 """
4074 4074 query = cls.query()
4075 4075 query = query.filter(cls.external_id == external_id)
4076 4076 query = query.filter(cls.provider_name == provider_name)
4077 4077 if local_user_id:
4078 4078 query = query.filter(cls.local_user_id == local_user_id)
4079 4079 return query.first()
4080 4080
4081 4081 @classmethod
4082 4082 def user_by_external_id_and_provider(cls, external_id, provider_name):
4083 4083 """
4084 4084 Returns User instance based on search params
4085 4085
4086 4086 :param external_id:
4087 4087 :param provider_name:
4088 4088 :return: User
4089 4089 """
4090 4090 query = User.query()
4091 4091 query = query.filter(cls.external_id == external_id)
4092 4092 query = query.filter(cls.provider_name == provider_name)
4093 4093 query = query.filter(User.user_id == cls.local_user_id)
4094 4094 return query.first()
4095 4095
4096 4096 @classmethod
4097 4097 def by_local_user_id(cls, local_user_id):
4098 4098 """
4099 4099 Returns all tokens for user
4100 4100
4101 4101 :param local_user_id:
4102 4102 :return: ExternalIdentity
4103 4103 """
4104 4104 query = cls.query()
4105 4105 query = query.filter(cls.local_user_id == local_user_id)
4106 4106 return query
4107 4107
4108 4108
4109 4109 class Integration(Base, BaseModel):
4110 4110 __tablename__ = 'integrations'
4111 4111 __table_args__ = (
4112 4112 base_table_args
4113 4113 )
4114 4114
4115 4115 integration_id = Column('integration_id', Integer(), primary_key=True)
4116 4116 integration_type = Column('integration_type', String(255))
4117 4117 enabled = Column('enabled', Boolean(), nullable=False)
4118 4118 name = Column('name', String(255), nullable=False)
4119 4119 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4120 4120 default=False)
4121 4121
4122 4122 settings = Column(
4123 4123 'settings_json', MutationObj.as_mutable(
4124 4124 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4125 4125 repo_id = Column(
4126 4126 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4127 4127 nullable=True, unique=None, default=None)
4128 4128 repo = relationship('Repository', lazy='joined')
4129 4129
4130 4130 repo_group_id = Column(
4131 4131 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4132 4132 nullable=True, unique=None, default=None)
4133 4133 repo_group = relationship('RepoGroup', lazy='joined')
4134 4134
4135 4135 @property
4136 4136 def scope(self):
4137 4137 if self.repo:
4138 4138 return repr(self.repo)
4139 4139 if self.repo_group:
4140 4140 if self.child_repos_only:
4141 4141 return repr(self.repo_group) + ' (child repos only)'
4142 4142 else:
4143 4143 return repr(self.repo_group) + ' (recursive)'
4144 4144 if self.child_repos_only:
4145 4145 return 'root_repos'
4146 4146 return 'global'
4147 4147
4148 4148 def __repr__(self):
4149 4149 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4150 4150
4151 4151
4152 4152 class RepoReviewRuleUser(Base, BaseModel):
4153 4153 __tablename__ = 'repo_review_rules_users'
4154 4154 __table_args__ = (
4155 4155 base_table_args
4156 4156 )
4157 4157
4158 4158 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4159 4159 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4160 4160 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4161 4161 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4162 4162 user = relationship('User')
4163 4163
4164 4164 def rule_data(self):
4165 4165 return {
4166 4166 'mandatory': self.mandatory
4167 4167 }
4168 4168
4169 4169
4170 4170 class RepoReviewRuleUserGroup(Base, BaseModel):
4171 4171 __tablename__ = 'repo_review_rules_users_groups'
4172 4172 __table_args__ = (
4173 4173 base_table_args
4174 4174 )
4175 4175
4176 4176 VOTE_RULE_ALL = -1
4177 4177
4178 4178 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4179 4179 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4180 4180 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4181 4181 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4182 4182 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4183 4183 users_group = relationship('UserGroup')
4184 4184
4185 4185 def rule_data(self):
4186 4186 return {
4187 4187 'mandatory': self.mandatory,
4188 4188 'vote_rule': self.vote_rule
4189 4189 }
4190 4190
4191 4191 @property
4192 4192 def vote_rule_label(self):
4193 4193 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4194 4194 return 'all must vote'
4195 4195 else:
4196 4196 return 'min. vote {}'.format(self.vote_rule)
4197 4197
4198 4198
4199 4199 class RepoReviewRule(Base, BaseModel):
4200 4200 __tablename__ = 'repo_review_rules'
4201 4201 __table_args__ = (
4202 4202 base_table_args
4203 4203 )
4204 4204
4205 4205 repo_review_rule_id = Column(
4206 4206 'repo_review_rule_id', Integer(), primary_key=True)
4207 4207 repo_id = Column(
4208 4208 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4209 4209 repo = relationship('Repository', backref='review_rules')
4210 4210
4211 4211 review_rule_name = Column('review_rule_name', String(255))
4212 4212 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4213 4213 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4214 4214 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4215 4215
4216 4216 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4217 4217 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4218 4218 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4219 4219 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4220 4220
4221 4221 rule_users = relationship('RepoReviewRuleUser')
4222 4222 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4223 4223
4224 4224 def _validate_pattern(self, value):
4225 4225 re.compile('^' + glob2re(value) + '$')
4226 4226
4227 4227 @hybrid_property
4228 4228 def source_branch_pattern(self):
4229 4229 return self._branch_pattern or '*'
4230 4230
4231 4231 @source_branch_pattern.setter
4232 4232 def source_branch_pattern(self, value):
4233 4233 self._validate_pattern(value)
4234 4234 self._branch_pattern = value or '*'
4235 4235
4236 4236 @hybrid_property
4237 4237 def target_branch_pattern(self):
4238 4238 return self._target_branch_pattern or '*'
4239 4239
4240 4240 @target_branch_pattern.setter
4241 4241 def target_branch_pattern(self, value):
4242 4242 self._validate_pattern(value)
4243 4243 self._target_branch_pattern = value or '*'
4244 4244
4245 4245 @hybrid_property
4246 4246 def file_pattern(self):
4247 4247 return self._file_pattern or '*'
4248 4248
4249 4249 @file_pattern.setter
4250 4250 def file_pattern(self, value):
4251 4251 self._validate_pattern(value)
4252 4252 self._file_pattern = value or '*'
4253 4253
4254 4254 def matches(self, source_branch, target_branch, files_changed):
4255 4255 """
4256 4256 Check if this review rule matches a branch/files in a pull request
4257 4257
4258 4258 :param source_branch: source branch name for the commit
4259 4259 :param target_branch: target branch name for the commit
4260 4260 :param files_changed: list of file paths changed in the pull request
4261 4261 """
4262 4262
4263 4263 source_branch = source_branch or ''
4264 4264 target_branch = target_branch or ''
4265 4265 files_changed = files_changed or []
4266 4266
4267 4267 branch_matches = True
4268 4268 if source_branch or target_branch:
4269 4269 if self.source_branch_pattern == '*':
4270 4270 source_branch_match = True
4271 4271 else:
4272 4272 if self.source_branch_pattern.startswith('re:'):
4273 4273 source_pattern = self.source_branch_pattern[3:]
4274 4274 else:
4275 4275 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4276 4276 source_branch_regex = re.compile(source_pattern)
4277 4277 source_branch_match = bool(source_branch_regex.search(source_branch))
4278 4278 if self.target_branch_pattern == '*':
4279 4279 target_branch_match = True
4280 4280 else:
4281 4281 if self.target_branch_pattern.startswith('re:'):
4282 4282 target_pattern = self.target_branch_pattern[3:]
4283 4283 else:
4284 4284 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4285 4285 target_branch_regex = re.compile(target_pattern)
4286 4286 target_branch_match = bool(target_branch_regex.search(target_branch))
4287 4287
4288 4288 branch_matches = source_branch_match and target_branch_match
4289 4289
4290 4290 files_matches = True
4291 4291 if self.file_pattern != '*':
4292 4292 files_matches = False
4293 4293 if self.file_pattern.startswith('re:'):
4294 4294 file_pattern = self.file_pattern[3:]
4295 4295 else:
4296 4296 file_pattern = glob2re(self.file_pattern)
4297 4297 file_regex = re.compile(file_pattern)
4298 4298 for filename in files_changed:
4299 4299 if file_regex.search(filename):
4300 4300 files_matches = True
4301 4301 break
4302 4302
4303 4303 return branch_matches and files_matches
4304 4304
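As matches() above shows, a pattern of '*' matches everything, a pattern prefixed with 're:' is treated as a raw regular expression, and anything else is converted from a glob via the project's glob2re() helper. A minimal standalone sketch of those semantics; fnmatch.translate() is used here only as a stand-in for glob2re(), which is not shown in this changeset:

    import fnmatch
    import re

    def branch_matches(pattern, branch):
        # '*' matches everything, 're:' marks a raw regex, anything else is a glob
        if pattern == '*':
            return True
        if pattern.startswith('re:'):
            regex = re.compile(pattern[3:])
        else:
            # fnmatch.translate() already anchors the resulting pattern
            regex = re.compile(fnmatch.translate(pattern))
        return bool(regex.search(branch))

    assert branch_matches('re:^feature/.*', 'feature/dogpile-cache')
    assert branch_matches('release-*', 'release-1.2')
    assert not branch_matches('release-*', 'hotfix-1.2')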
4305 4305 @property
4306 4306 def review_users(self):
4307 4307         """ Returns the users to which this rule applies """
4308 4308
4309 4309 users = collections.OrderedDict()
4310 4310
4311 4311 for rule_user in self.rule_users:
4312 4312 if rule_user.user.active:
4313 4313 if rule_user.user not in users:
4314 4314 users[rule_user.user.username] = {
4315 4315 'user': rule_user.user,
4316 4316 'source': 'user',
4317 4317 'source_data': {},
4318 4318 'data': rule_user.rule_data()
4319 4319 }
4320 4320
4321 4321 for rule_user_group in self.rule_user_groups:
4322 4322 source_data = {
4323 4323 'user_group_id': rule_user_group.users_group.users_group_id,
4324 4324 'name': rule_user_group.users_group.users_group_name,
4325 4325 'members': len(rule_user_group.users_group.members)
4326 4326 }
4327 4327 for member in rule_user_group.users_group.members:
4328 4328 if member.user.active:
4329 4329 key = member.user.username
4330 4330 if key in users:
4331 4331                     # skip this member as we already have them;
4332 4332                     # this prevents duplicates in multiple groups from
4333 4333                     # overriding the "first" matched users
4334 4334 continue
4335 4335
4336 4336 users[key] = {
4337 4337 'user': member.user,
4338 4338 'source': 'user_group',
4339 4339 'source_data': source_data,
4340 4340 'data': rule_user_group.rule_data()
4341 4341 }
4342 4342
4343 4343 return users
4344 4344
4345 4345 def user_group_vote_rule(self):
4346 4346 rules = []
4347 4347 if self.rule_user_groups:
4348 4348 for user_group in self.rule_user_groups:
4349 4349 rules.append(user_group)
4350 4350 return rules
4351 4351
4352 4352 def __repr__(self):
4353 4353 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4354 4354 self.repo_review_rule_id, self.repo)
4355 4355
4356 4356
4357 4357 class ScheduleEntry(Base, BaseModel):
4358 4358 __tablename__ = 'schedule_entries'
4359 4359 __table_args__ = (
4360 4360 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4361 4361 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4362 4362 base_table_args,
4363 4363 )
4364 4364
4365 4365 schedule_types = ['crontab', 'timedelta', 'integer']
4366 4366 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4367 4367
4368 4368 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4369 4369 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4370 4370 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4371 4371
4372 4372 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4373 4373 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4374 4374
4375 4375 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4376 4376 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4377 4377
4378 4378 # task
4379 4379 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4380 4380 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4381 4381 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4382 4382 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4383 4383
4384 4384 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4385 4385 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4386 4386
4387 4387 @hybrid_property
4388 4388 def schedule_type(self):
4389 4389 return self._schedule_type
4390 4390
4391 4391 @schedule_type.setter
4392 4392 def schedule_type(self, val):
4393 4393 if val not in self.schedule_types:
4394 4394             raise ValueError('Value must be one of `{}` and got `{}`'.format(
4395 4395                 self.schedule_types, val))
4396 4396
4397 4397 self._schedule_type = val
4398 4398
4399 4399 @classmethod
4400 4400 def get_uid(cls, obj):
4401 4401 args = obj.task_args
4402 4402 kwargs = obj.task_kwargs
4403 4403 if isinstance(args, JsonRaw):
4404 4404 try:
4405 4405 args = json.loads(args)
4406 4406 except ValueError:
4407 4407 args = tuple()
4408 4408
4409 4409 if isinstance(kwargs, JsonRaw):
4410 4410 try:
4411 4411 kwargs = json.loads(kwargs)
4412 4412 except ValueError:
4413 4413 kwargs = dict()
4414 4414
4415 4415 dot_notation = obj.task_dot_notation
4416 4416 val = '.'.join(map(safe_str, [
4417 4417 sorted(dot_notation), args, sorted(kwargs.items())]))
4418 4418 return hashlib.sha1(val).hexdigest()
4419 4419
4420 4420 @classmethod
4421 4421 def get_by_schedule_name(cls, schedule_name):
4422 4422 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4423 4423
4424 4424 @classmethod
4425 4425 def get_by_schedule_id(cls, schedule_id):
4426 4426 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4427 4427
4428 4428 @property
4429 4429 def task(self):
4430 4430 return self.task_dot_notation
4431 4431
4432 4432 @property
4433 4433 def schedule(self):
4434 4434 from rhodecode.lib.celerylib.utils import raw_2_schedule
4435 4435 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4436 4436 return schedule
4437 4437
4438 4438 @property
4439 4439 def args(self):
4440 4440 try:
4441 4441 return list(self.task_args or [])
4442 4442 except ValueError:
4443 4443 return list()
4444 4444
4445 4445 @property
4446 4446 def kwargs(self):
4447 4447 try:
4448 4448 return dict(self.task_kwargs or {})
4449 4449 except ValueError:
4450 4450 return dict()
4451 4451
4452 4452 def _as_raw(self, val):
4453 4453 if hasattr(val, 'de_coerce'):
4454 4454 val = val.de_coerce()
4455 4455 if val:
4456 4456 val = json.dumps(val)
4457 4457
4458 4458 return val
4459 4459
4460 4460 @property
4461 4461 def schedule_definition_raw(self):
4462 4462 return self._as_raw(self.schedule_definition)
4463 4463
4464 4464 @property
4465 4465 def args_raw(self):
4466 4466 return self._as_raw(self.task_args)
4467 4467
4468 4468 @property
4469 4469 def kwargs_raw(self):
4470 4470 return self._as_raw(self.task_kwargs)
4471 4471
4472 4472 def __repr__(self):
4473 4473 return '<DB:ScheduleEntry({}:{})>'.format(
4474 4474 self.schedule_entry_id, self.schedule_name)
4475 4475
4476 4476
4477 4477 @event.listens_for(ScheduleEntry, 'before_update')
4478 4478 def update_task_uid(mapper, connection, target):
4479 4479 target.task_uid = ScheduleEntry.get_uid(target)
4480 4480
4481 4481
4482 4482 @event.listens_for(ScheduleEntry, 'before_insert')
4483 4483 def set_task_uid(mapper, connection, target):
4484 4484 target.task_uid = ScheduleEntry.get_uid(target)
4485 4485
4486 4486
4487 4487 class DbMigrateVersion(Base, BaseModel):
4488 4488 __tablename__ = 'db_migrate_version'
4489 4489 __table_args__ = (
4490 4490 base_table_args,
4491 4491 )
4492 4492
4493 4493 repository_id = Column('repository_id', String(250), primary_key=True)
4494 4494 repository_path = Column('repository_path', Text)
4495 4495 version = Column('version', Integer)
4496 4496
4497 4497
4498 4498 class DbSession(Base, BaseModel):
4499 4499 __tablename__ = 'db_session'
4500 4500 __table_args__ = (
4501 4501 base_table_args,
4502 4502 )
4503 4503
4504 4504 def __repr__(self):
4505 4505 return '<DB:DbSession({})>'.format(self.id)
4506 4506
4507 4507 id = Column('id', Integer())
4508 4508 namespace = Column('namespace', String(255), primary_key=True)
4509 4509 accessed = Column('accessed', DateTime, nullable=False)
4510 4510 created = Column('created', DateTime, nullable=False)
4511 4511 data = Column('data', PickleType, nullable=False)
4512 4512
4513 4513
4514 4514 class BeakerCache(Base, BaseModel):
4515 4515 __tablename__ = 'beaker_cache'
4516 4516 __table_args__ = (
4517 4517 base_table_args,
4518 4518 )
4519 4519
4520 4520 def __repr__(self):
4521 4521         return '<DB:BeakerCache({})>'.format(self.id)
4522 4522
4523 4523 id = Column('id', Integer())
4524 4524 namespace = Column('namespace', String(255), primary_key=True)
4525 4525 accessed = Column('accessed', DateTime, nullable=False)
4526 4526 created = Column('created', DateTime, nullable=False)
4527 4527 data = Column('data', PickleType, nullable=False)
@@ -1,50 +1,45 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SQLAlchemy Metadata and Session object
23 23 """
24 24
25 25 from sqlalchemy.ext.declarative import declarative_base
26 26 from sqlalchemy.orm import scoped_session, sessionmaker
27 from beaker import cache
28 27
29 28 from rhodecode.lib import caching_query
30 29
31
32 # Beaker CacheManager. A home base for cache configurations.
33 cache_manager = cache.CacheManager()
30 __all__ = ['Base', 'Session']
34 31
35 __all__ = ['Base', 'Session']
36 #
37 # SQLAlchemy session manager. Updated by model.init_model()
38 #
32 # scoped_session. Apply our custom CachingQuery class to it,
33 # using a callable that will associate the dictionary
34 # of regions with the Query.
35 # to use the cache, add this option to a query:
36 # .options(FromCache("sqlalchemy_cache_type", "cachekey"))
39 37 Session = scoped_session(
40 38 sessionmaker(
41 query_cls=caching_query.query_callable(cache_manager),
39 query_cls=caching_query.query_callable(),
42 40 expire_on_commit=True,
43 41 )
44 42 )
45 43
46 44 # The declarative Base
47 45 Base = declarative_base()
48
49 #to use cache use this in query
50 #.options(FromCache("sqlalchemy_cache_type", "cachekey"))
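As the comment above notes, caching is now opted into per query via the FromCache option rather than through a Beaker cache manager. A minimal sketch of such a query, mirroring the Gist.base_path() lookup from db.py earlier in this changeset; the import path of FromCache is assumed here, since it is not shown in these hunks:

    from rhodecode.lib.caching_query import FromCache  # assumed import path
    from rhodecode.model.meta import Session
    from rhodecode.model.db import RhodeCodeUi

    q = Session().query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/')
    # the first call populates the 'sql_cache_short' dogpile region under the
    # given key; later calls with the same key are answered from the cache
    q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
    repos_root = q.one().ui_value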
@@ -1,830 +1,829 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import hashlib
23 23 import logging
24 import time
24 25 from collections import namedtuple
25 26 from functools import wraps
26 27 import bleach
27 28
28 from rhodecode.lib import caches
29 from rhodecode.lib import caches, rc_cache
29 30 from rhodecode.lib.utils2 import (
30 31 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
31 32 from rhodecode.lib.vcs.backends import base
32 33 from rhodecode.model import BaseModel
33 34 from rhodecode.model.db import (
34 35 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
35 36 from rhodecode.model.meta import Session
36 37
37 38
38 39 log = logging.getLogger(__name__)
39 40
40 41
41 42 UiSetting = namedtuple(
42 43 'UiSetting', ['section', 'key', 'value', 'active'])
43 44
44 45 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
45 46
46 47
47 48 class SettingNotFound(Exception):
48 49 def __init__(self, setting_id):
49 50 msg = 'Setting `{}` is not found'.format(setting_id)
50 51 super(SettingNotFound, self).__init__(msg)
51 52
52 53
53 54 class SettingsModel(BaseModel):
54 55 BUILTIN_HOOKS = (
55 56 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
56 57 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
57 58 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
58 59 RhodeCodeUi.HOOK_PUSH_KEY,)
59 60 HOOKS_SECTION = 'hooks'
60 61
61 62 def __init__(self, sa=None, repo=None):
62 63 self.repo = repo
63 64 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
64 65 self.SettingsDbModel = (
65 66 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
66 67 super(SettingsModel, self).__init__(sa)
67 68
68 69 def get_ui_by_key(self, key):
69 70 q = self.UiDbModel.query()
70 71 q = q.filter(self.UiDbModel.ui_key == key)
71 72 q = self._filter_by_repo(RepoRhodeCodeUi, q)
72 73 return q.scalar()
73 74
74 75 def get_ui_by_section(self, section):
75 76 q = self.UiDbModel.query()
76 77 q = q.filter(self.UiDbModel.ui_section == section)
77 78 q = self._filter_by_repo(RepoRhodeCodeUi, q)
78 79 return q.all()
79 80
80 81 def get_ui_by_section_and_key(self, section, key):
81 82 q = self.UiDbModel.query()
82 83 q = q.filter(self.UiDbModel.ui_section == section)
83 84 q = q.filter(self.UiDbModel.ui_key == key)
84 85 q = self._filter_by_repo(RepoRhodeCodeUi, q)
85 86 return q.scalar()
86 87
87 88 def get_ui(self, section=None, key=None):
88 89 q = self.UiDbModel.query()
89 90 q = self._filter_by_repo(RepoRhodeCodeUi, q)
90 91
91 92 if section:
92 93 q = q.filter(self.UiDbModel.ui_section == section)
93 94 if key:
94 95 q = q.filter(self.UiDbModel.ui_key == key)
95 96
96 97 # TODO: mikhail: add caching
97 98 result = [
98 99 UiSetting(
99 100 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
100 101 value=safe_str(r.ui_value), active=r.ui_active
101 102 )
102 103 for r in q.all()
103 104 ]
104 105 return result
105 106
106 107 def get_builtin_hooks(self):
107 108 q = self.UiDbModel.query()
108 109 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
109 110 return self._get_hooks(q)
110 111
111 112 def get_custom_hooks(self):
112 113 q = self.UiDbModel.query()
113 114 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
114 115 return self._get_hooks(q)
115 116
116 117 def create_ui_section_value(self, section, val, key=None, active=True):
117 118 new_ui = self.UiDbModel()
118 119 new_ui.ui_section = section
119 120 new_ui.ui_value = val
120 121 new_ui.ui_active = active
121 122
122 123 if self.repo:
123 124 repo = self._get_repo(self.repo)
124 125 repository_id = repo.repo_id
125 126 new_ui.repository_id = repository_id
126 127
127 128 if not key:
128 129 # keys are unique so they need appended info
129 130 if self.repo:
130 131 key = hashlib.sha1(
131 132 '{}{}{}'.format(section, val, repository_id)).hexdigest()
132 133 else:
133 134 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
134 135
135 136 new_ui.ui_key = key
136 137
137 138 Session().add(new_ui)
138 139 return new_ui
139 140
140 141 def create_or_update_hook(self, key, value):
141 142 ui = (
142 143 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
143 144 self.UiDbModel())
144 145 ui.ui_section = self.HOOKS_SECTION
145 146 ui.ui_active = True
146 147 ui.ui_key = key
147 148 ui.ui_value = value
148 149
149 150 if self.repo:
150 151 repo = self._get_repo(self.repo)
151 152 repository_id = repo.repo_id
152 153 ui.repository_id = repository_id
153 154
154 155 Session().add(ui)
155 156 return ui
156 157
157 158 def delete_ui(self, id_):
158 159 ui = self.UiDbModel.get(id_)
159 160 if not ui:
160 161 raise SettingNotFound(id_)
161 162 Session().delete(ui)
162 163
163 164 def get_setting_by_name(self, name):
164 165 q = self._get_settings_query()
165 166 q = q.filter(self.SettingsDbModel.app_settings_name == name)
166 167 return q.scalar()
167 168
168 169 def create_or_update_setting(
169 170 self, name, val=Optional(''), type_=Optional('unicode')):
170 171 """
171 172         Creates or updates a RhodeCode setting. If an update is triggered, it will
172 173         only update parameters that are explicitly set; Optional instances will
173 174         be skipped
174 175
175 176 :param name:
176 177 :param val:
177 178 :param type_:
178 179 :return:
179 180 """
180 181
181 182 res = self.get_setting_by_name(name)
182 183 repo = self._get_repo(self.repo) if self.repo else None
183 184
184 185 if not res:
185 186 val = Optional.extract(val)
186 187 type_ = Optional.extract(type_)
187 188
188 189 args = (
189 190 (repo.repo_id, name, val, type_)
190 191 if repo else (name, val, type_))
191 192 res = self.SettingsDbModel(*args)
192 193
193 194 else:
194 195 if self.repo:
195 196 res.repository_id = repo.repo_id
196 197
197 198 res.app_settings_name = name
198 199 if not isinstance(type_, Optional):
199 200 # update if set
200 201 res.app_settings_type = type_
201 202 if not isinstance(val, Optional):
202 203 # update if set
203 204 res.app_settings_value = val
204 205
205 206 Session().add(res)
206 207 return res
207 208
208 209 def invalidate_settings_cache(self):
209 namespace = 'rhodecode_settings'
210 cache_manager = caches.get_cache_manager('sql_cache_short', namespace)
211 caches.clear_cache_manager(cache_manager)
210         # NOTE:(marcink) we flush the whole sql_cache_short region, because it
211         # reads various other settings as well. It is a bit too broad, but those
212         # caches are very short-lived anyway, so this is the safest approach.
213 region = rc_cache.get_or_create_region('sql_cache_short')
214 region.invalidate()
212 215
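A short sketch of what the call above relies on; get_or_create_region() is used exactly as in the method body, and the comment describes standard dogpile invalidation semantics:

    from rhodecode.lib import rc_cache

    region = rc_cache.get_or_create_region('sql_cache_short')
    # invalidate() does not remove entries from the backend; it marks every
    # existing entry in the region as stale, so the next cached call
    # recomputes the value and overwrites the old one
    region.invalidate()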
213 216 def get_all_settings(self, cache=False):
217 region = rc_cache.get_or_create_region('sql_cache_short')
214 218
215 def _compute():
219 @region.cache_on_arguments(should_cache_fn=lambda v: cache)
220 def _get_all_settings(name, key):
216 221 q = self._get_settings_query()
217 222 if not q:
218 223 raise Exception('Could not get application settings !')
219 224
220 225 settings = {
221 226 'rhodecode_' + result.app_settings_name: result.app_settings_value
222 227 for result in q
223 228 }
224 229 return settings
225 230
226 if cache:
227 log.debug('Fetching app settings using cache')
228 repo = self._get_repo(self.repo) if self.repo else None
229 namespace = 'rhodecode_settings'
230 cache_manager = caches.get_cache_manager(
231 'sql_cache_short', namespace)
232 _cache_key = (
233 "get_repo_{}_settings".format(repo.repo_id)
234 if repo else "get_app_settings")
231 repo = self._get_repo(self.repo) if self.repo else None
232 key = "settings_repo.{}".format(repo.repo_id) if repo else "settings_app"
233 start = time.time()
234 result = _get_all_settings('rhodecode_settings', key)
235 total = time.time() - start
236 log.debug('Fetching app settings for key: %s took: %.3fs', key, total)
235 237
236 return cache_manager.get(_cache_key, createfunc=_compute)
237
238 else:
239 return _compute()
238 return result
240 239
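The decorator used above is dogpile's cache_on_arguments; its should_cache_fn callable decides, per computed value, whether the freshly computed result is written to the cache (it does not affect reads of values that are already cached), which is how the `cache=False` path above avoids storing results while keeping a single code path. A standalone sketch of that behaviour with an in-memory region; it is illustrative only and does not reuse RhodeCode's region configuration:

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.memory', expiration_time=30)

    use_cache = True

    @region.cache_on_arguments(should_cache_fn=lambda value: use_cache)
    def get_settings(name, key):
        # stands in for the SQL query performed by _get_all_settings()
        return {'rhodecode_title': 'example'}

    get_settings('rhodecode_settings', 'settings_app')  # computed, then stored
    get_settings('rhodecode_settings', 'settings_app')  # served from the cache
    region.invalidate()  # same call as in invalidate_settings_cache()
    get_settings('rhodecode_settings', 'settings_app')  # recomputed afterwards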
241 240 def get_auth_settings(self):
242 241 q = self._get_settings_query()
243 242 q = q.filter(
244 243 self.SettingsDbModel.app_settings_name.startswith('auth_'))
245 244 rows = q.all()
246 245 auth_settings = {
247 246 row.app_settings_name: row.app_settings_value for row in rows}
248 247 return auth_settings
249 248
250 249 def get_auth_plugins(self):
251 250 auth_plugins = self.get_setting_by_name("auth_plugins")
252 251 return auth_plugins.app_settings_value
253 252
254 253 def get_default_repo_settings(self, strip_prefix=False):
255 254 q = self._get_settings_query()
256 255 q = q.filter(
257 256 self.SettingsDbModel.app_settings_name.startswith('default_'))
258 257 rows = q.all()
259 258
260 259 result = {}
261 260 for row in rows:
262 261 key = row.app_settings_name
263 262 if strip_prefix:
264 263 key = remove_prefix(key, prefix='default_')
265 264 result.update({key: row.app_settings_value})
266 265 return result
267 266
268 267 def get_repo(self):
269 268 repo = self._get_repo(self.repo)
270 269 if not repo:
271 270 raise Exception(
272 271 'Repository `{}` cannot be found inside the database'.format(
273 272 self.repo))
274 273 return repo
275 274
276 275 def _filter_by_repo(self, model, query):
277 276 if self.repo:
278 277 repo = self.get_repo()
279 278 query = query.filter(model.repository_id == repo.repo_id)
280 279 return query
281 280
282 281 def _get_hooks(self, query):
283 282 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
284 283 query = self._filter_by_repo(RepoRhodeCodeUi, query)
285 284 return query.all()
286 285
287 286 def _get_settings_query(self):
288 287 q = self.SettingsDbModel.query()
289 288 return self._filter_by_repo(RepoRhodeCodeSetting, q)
290 289
291 290 def list_enabled_social_plugins(self, settings):
292 291 enabled = []
293 292 for plug in SOCIAL_PLUGINS_LIST:
294 293 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
295 294 )):
296 295 enabled.append(plug)
297 296 return enabled
298 297
299 298
300 299 def assert_repo_settings(func):
301 300 @wraps(func)
302 301 def _wrapper(self, *args, **kwargs):
303 302 if not self.repo_settings:
304 303 raise Exception('Repository is not specified')
305 304 return func(self, *args, **kwargs)
306 305 return _wrapper
307 306
308 307
309 308 class IssueTrackerSettingsModel(object):
310 309 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
311 310 SETTINGS_PREFIX = 'issuetracker_'
312 311
313 312 def __init__(self, sa=None, repo=None):
314 313 self.global_settings = SettingsModel(sa=sa)
315 314 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
316 315
317 316 @property
318 317 def inherit_global_settings(self):
319 318 if not self.repo_settings:
320 319 return True
321 320 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
322 321 return setting.app_settings_value if setting else True
323 322
324 323 @inherit_global_settings.setter
325 324 def inherit_global_settings(self, value):
326 325 if self.repo_settings:
327 326 settings = self.repo_settings.create_or_update_setting(
328 327 self.INHERIT_SETTINGS, value, type_='bool')
329 328 Session().add(settings)
330 329
331 330 def _get_keyname(self, key, uid, prefix=''):
332 331 return '{0}{1}{2}_{3}'.format(
333 332 prefix, self.SETTINGS_PREFIX, key, uid)
334 333
335 334 def _make_dict_for_settings(self, qs):
336 335 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
337 336
338 337 issuetracker_entries = {}
339 338 # create keys
340 339 for k, v in qs.items():
341 340 if k.startswith(prefix_match):
342 341 uid = k[len(prefix_match):]
343 342 issuetracker_entries[uid] = None
344 343
345 344 # populate
346 345 for uid in issuetracker_entries:
347 346 issuetracker_entries[uid] = AttributeDict({
348 347 'pat': qs.get(
349 348 self._get_keyname('pat', uid, 'rhodecode_')),
350 349 'url': bleach.clean(
351 350 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
352 351 'pref': bleach.clean(
353 352 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
354 353 'desc': qs.get(
355 354 self._get_keyname('desc', uid, 'rhodecode_')),
356 355 })
357 356 return issuetracker_entries
358 357
359 358 def get_global_settings(self, cache=False):
360 359 """
361 360 Returns list of global issue tracker settings
362 361 """
363 362 defaults = self.global_settings.get_all_settings(cache=cache)
364 363 settings = self._make_dict_for_settings(defaults)
365 364 return settings
366 365
367 366 def get_repo_settings(self, cache=False):
368 367 """
369 368 Returns list of issue tracker settings per repository
370 369 """
371 370 if not self.repo_settings:
372 371 raise Exception('Repository is not specified')
373 372 all_settings = self.repo_settings.get_all_settings(cache=cache)
374 373 settings = self._make_dict_for_settings(all_settings)
375 374 return settings
376 375
377 376 def get_settings(self, cache=False):
378 377 if self.inherit_global_settings:
379 378 return self.get_global_settings(cache=cache)
380 379 else:
381 380 return self.get_repo_settings(cache=cache)
382 381
383 382 def delete_entries(self, uid):
384 383 if self.repo_settings:
385 384 all_patterns = self.get_repo_settings()
386 385 settings_model = self.repo_settings
387 386 else:
388 387 all_patterns = self.get_global_settings()
389 388 settings_model = self.global_settings
390 389 entries = all_patterns.get(uid, [])
391 390
392 391 for del_key in entries:
393 392 setting_name = self._get_keyname(del_key, uid)
394 393 entry = settings_model.get_setting_by_name(setting_name)
395 394 if entry:
396 395 Session().delete(entry)
397 396
398 397 Session().commit()
399 398
400 399 def create_or_update_setting(
401 400 self, name, val=Optional(''), type_=Optional('unicode')):
402 401 if self.repo_settings:
403 402 setting = self.repo_settings.create_or_update_setting(
404 403 name, val, type_)
405 404 else:
406 405 setting = self.global_settings.create_or_update_setting(
407 406 name, val, type_)
408 407 return setting
409 408
410 409
411 410 class VcsSettingsModel(object):
412 411
413 412 INHERIT_SETTINGS = 'inherit_vcs_settings'
414 413 GENERAL_SETTINGS = (
415 414 'use_outdated_comments',
416 415 'pr_merge_enabled',
417 416 'hg_use_rebase_for_merging',
418 417 'hg_close_branch_before_merging',
419 418 'git_use_rebase_for_merging',
420 419 'git_close_branch_before_merging',
421 420 'diff_cache',
422 421 )
423 422
424 423 HOOKS_SETTINGS = (
425 424 ('hooks', 'changegroup.repo_size'),
426 425 ('hooks', 'changegroup.push_logger'),
427 426 ('hooks', 'outgoing.pull_logger'),)
428 427 HG_SETTINGS = (
429 428 ('extensions', 'largefiles'),
430 429 ('phases', 'publish'),
431 430 ('extensions', 'evolve'),)
432 431 GIT_SETTINGS = (
433 432 ('vcs_git_lfs', 'enabled'),)
434 433 GLOBAL_HG_SETTINGS = (
435 434 ('extensions', 'largefiles'),
436 435 ('largefiles', 'usercache'),
437 436 ('phases', 'publish'),
438 437 ('extensions', 'hgsubversion'),
439 438 ('extensions', 'evolve'),)
440 439 GLOBAL_GIT_SETTINGS = (
441 440 ('vcs_git_lfs', 'enabled'),
442 441 ('vcs_git_lfs', 'store_location'))
443 442
444 443 GLOBAL_SVN_SETTINGS = (
445 444 ('vcs_svn_proxy', 'http_requests_enabled'),
446 445 ('vcs_svn_proxy', 'http_server_url'))
447 446
448 447 SVN_BRANCH_SECTION = 'vcs_svn_branch'
449 448 SVN_TAG_SECTION = 'vcs_svn_tag'
450 449 SSL_SETTING = ('web', 'push_ssl')
451 450 PATH_SETTING = ('paths', '/')
452 451
453 452 def __init__(self, sa=None, repo=None):
454 453 self.global_settings = SettingsModel(sa=sa)
455 454 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
456 455 self._ui_settings = (
457 456 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
458 457 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
459 458
460 459 @property
461 460 @assert_repo_settings
462 461 def inherit_global_settings(self):
463 462 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
464 463 return setting.app_settings_value if setting else True
465 464
466 465 @inherit_global_settings.setter
467 466 @assert_repo_settings
468 467 def inherit_global_settings(self, value):
469 468 self.repo_settings.create_or_update_setting(
470 469 self.INHERIT_SETTINGS, value, type_='bool')
471 470
472 471 def get_global_svn_branch_patterns(self):
473 472 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
474 473
475 474 @assert_repo_settings
476 475 def get_repo_svn_branch_patterns(self):
477 476 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
478 477
479 478 def get_global_svn_tag_patterns(self):
480 479 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
481 480
482 481 @assert_repo_settings
483 482 def get_repo_svn_tag_patterns(self):
484 483 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
485 484
486 485 def get_global_settings(self):
487 486 return self._collect_all_settings(global_=True)
488 487
489 488 @assert_repo_settings
490 489 def get_repo_settings(self):
491 490 return self._collect_all_settings(global_=False)
492 491
493 492 @assert_repo_settings
494 493 def create_or_update_repo_settings(
495 494 self, data, inherit_global_settings=False):
496 495 from rhodecode.model.scm import ScmModel
497 496
498 497 self.inherit_global_settings = inherit_global_settings
499 498
500 499 repo = self.repo_settings.get_repo()
501 500 if not inherit_global_settings:
502 501 if repo.repo_type == 'svn':
503 502 self.create_repo_svn_settings(data)
504 503 else:
505 504 self.create_or_update_repo_hook_settings(data)
506 505 self.create_or_update_repo_pr_settings(data)
507 506
508 507 if repo.repo_type == 'hg':
509 508 self.create_or_update_repo_hg_settings(data)
510 509
511 510 if repo.repo_type == 'git':
512 511 self.create_or_update_repo_git_settings(data)
513 512
514 513 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
515 514
516 515 @assert_repo_settings
517 516 def create_or_update_repo_hook_settings(self, data):
518 517 for section, key in self.HOOKS_SETTINGS:
519 518 data_key = self._get_form_ui_key(section, key)
520 519 if data_key not in data:
521 520 raise ValueError(
522 521 'The given data does not contain {} key'.format(data_key))
523 522
524 523 active = data.get(data_key)
525 524 repo_setting = self.repo_settings.get_ui_by_section_and_key(
526 525 section, key)
527 526 if not repo_setting:
528 527 global_setting = self.global_settings.\
529 528 get_ui_by_section_and_key(section, key)
530 529 self.repo_settings.create_ui_section_value(
531 530 section, global_setting.ui_value, key=key, active=active)
532 531 else:
533 532 repo_setting.ui_active = active
534 533 Session().add(repo_setting)
535 534
536 535 def update_global_hook_settings(self, data):
537 536 for section, key in self.HOOKS_SETTINGS:
538 537 data_key = self._get_form_ui_key(section, key)
539 538 if data_key not in data:
540 539 raise ValueError(
541 540 'The given data does not contain {} key'.format(data_key))
542 541 active = data.get(data_key)
543 542 repo_setting = self.global_settings.get_ui_by_section_and_key(
544 543 section, key)
545 544 repo_setting.ui_active = active
546 545 Session().add(repo_setting)
547 546
548 547 @assert_repo_settings
549 548 def create_or_update_repo_pr_settings(self, data):
550 549 return self._create_or_update_general_settings(
551 550 self.repo_settings, data)
552 551
553 552 def create_or_update_global_pr_settings(self, data):
554 553 return self._create_or_update_general_settings(
555 554 self.global_settings, data)
556 555
557 556 @assert_repo_settings
558 557 def create_repo_svn_settings(self, data):
559 558 return self._create_svn_settings(self.repo_settings, data)
560 559
561 560 @assert_repo_settings
562 561 def create_or_update_repo_hg_settings(self, data):
563 562 largefiles, phases, evolve = \
564 563 self.HG_SETTINGS
565 564 largefiles_key, phases_key, evolve_key = \
566 565 self._get_settings_keys(self.HG_SETTINGS, data)
567 566
568 567 self._create_or_update_ui(
569 568 self.repo_settings, *largefiles, value='',
570 569 active=data[largefiles_key])
571 570 self._create_or_update_ui(
572 571 self.repo_settings, *evolve, value='',
573 572 active=data[evolve_key])
574 573 self._create_or_update_ui(
575 574 self.repo_settings, *phases, value=safe_str(data[phases_key]))
576 575
577 576
578 577 def create_or_update_global_hg_settings(self, data):
579 578 largefiles, largefiles_store, phases, hgsubversion, evolve \
580 579 = self.GLOBAL_HG_SETTINGS
581 580 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
582 581 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS, data)
583 582
584 583 self._create_or_update_ui(
585 584 self.global_settings, *largefiles, value='',
586 585 active=data[largefiles_key])
587 586 self._create_or_update_ui(
588 587 self.global_settings, *largefiles_store,
589 588 value=data[largefiles_store_key])
590 589 self._create_or_update_ui(
591 590 self.global_settings, *phases, value=safe_str(data[phases_key]))
592 591 self._create_or_update_ui(
593 592 self.global_settings, *hgsubversion, active=data[subversion_key])
594 593 self._create_or_update_ui(
595 594 self.global_settings, *evolve, value='',
596 595 active=data[evolve_key])
597 596
598 597 def create_or_update_repo_git_settings(self, data):
599 598         # NOTE(marcink): the trailing comma makes the unpacking work properly
600 599 lfs_enabled, \
601 600 = self.GIT_SETTINGS
602 601
603 602 lfs_enabled_key, \
604 603 = self._get_settings_keys(self.GIT_SETTINGS, data)
605 604
606 605 self._create_or_update_ui(
607 606 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
608 607 active=data[lfs_enabled_key])
609 608
610 609 def create_or_update_global_git_settings(self, data):
611 610 lfs_enabled, lfs_store_location \
612 611 = self.GLOBAL_GIT_SETTINGS
613 612 lfs_enabled_key, lfs_store_location_key \
614 613 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
615 614
616 615 self._create_or_update_ui(
617 616 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
618 617 active=data[lfs_enabled_key])
619 618 self._create_or_update_ui(
620 619 self.global_settings, *lfs_store_location,
621 620 value=data[lfs_store_location_key])
622 621
623 622 def create_or_update_global_svn_settings(self, data):
624 623 # branch/tags patterns
625 624 self._create_svn_settings(self.global_settings, data)
626 625
627 626 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
628 627 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
629 628 self.GLOBAL_SVN_SETTINGS, data)
630 629
631 630 self._create_or_update_ui(
632 631 self.global_settings, *http_requests_enabled,
633 632 value=safe_str(data[http_requests_enabled_key]))
634 633 self._create_or_update_ui(
635 634 self.global_settings, *http_server_url,
636 635 value=data[http_server_url_key])
637 636
638 637 def update_global_ssl_setting(self, value):
639 638 self._create_or_update_ui(
640 639 self.global_settings, *self.SSL_SETTING, value=value)
641 640
642 641 def update_global_path_setting(self, value):
643 642 self._create_or_update_ui(
644 643 self.global_settings, *self.PATH_SETTING, value=value)
645 644
646 645 @assert_repo_settings
647 646 def delete_repo_svn_pattern(self, id_):
648 647 ui = self.repo_settings.UiDbModel.get(id_)
649 648 if ui and ui.repository.repo_name == self.repo_settings.repo:
650 649 # only delete if it's the same repo as initialized settings
651 650 self.repo_settings.delete_ui(id_)
652 651 else:
653 652             # raise an error as if we could not find this option
654 653 self.repo_settings.delete_ui(-1)
655 654
656 655 def delete_global_svn_pattern(self, id_):
657 656 self.global_settings.delete_ui(id_)
658 657
659 658 @assert_repo_settings
660 659 def get_repo_ui_settings(self, section=None, key=None):
661 660 global_uis = self.global_settings.get_ui(section, key)
662 661 repo_uis = self.repo_settings.get_ui(section, key)
663 662 filtered_repo_uis = self._filter_ui_settings(repo_uis)
664 663 filtered_repo_uis_keys = [
665 664 (s.section, s.key) for s in filtered_repo_uis]
666 665
667 666 def _is_global_ui_filtered(ui):
668 667 return (
669 668 (ui.section, ui.key) in filtered_repo_uis_keys
670 669 or ui.section in self._svn_sections)
671 670
672 671 filtered_global_uis = [
673 672 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
674 673
675 674 return filtered_global_uis + filtered_repo_uis
676 675
677 676 def get_global_ui_settings(self, section=None, key=None):
678 677 return self.global_settings.get_ui(section, key)
679 678
680 679 def get_ui_settings_as_config_obj(self, section=None, key=None):
681 680 config = base.Config()
682 681
683 682 ui_settings = self.get_ui_settings(section=section, key=key)
684 683
685 684 for entry in ui_settings:
686 685 config.set(entry.section, entry.key, entry.value)
687 686
688 687 return config
689 688
690 689 def get_ui_settings(self, section=None, key=None):
691 690 if not self.repo_settings or self.inherit_global_settings:
692 691 return self.get_global_ui_settings(section, key)
693 692 else:
694 693 return self.get_repo_ui_settings(section, key)
695 694
696 695 def get_svn_patterns(self, section=None):
697 696 if not self.repo_settings:
698 697 return self.get_global_ui_settings(section)
699 698 else:
700 699 return self.get_repo_ui_settings(section)
701 700
702 701 @assert_repo_settings
703 702 def get_repo_general_settings(self):
704 703 global_settings = self.global_settings.get_all_settings()
705 704 repo_settings = self.repo_settings.get_all_settings()
706 705 filtered_repo_settings = self._filter_general_settings(repo_settings)
707 706 global_settings.update(filtered_repo_settings)
708 707 return global_settings
709 708
710 709 def get_global_general_settings(self):
711 710 return self.global_settings.get_all_settings()
712 711
713 712 def get_general_settings(self):
714 713 if not self.repo_settings or self.inherit_global_settings:
715 714 return self.get_global_general_settings()
716 715 else:
717 716 return self.get_repo_general_settings()
718 717
719 718 def get_repos_location(self):
720 719 return self.global_settings.get_ui_by_key('/').ui_value
721 720
722 721 def _filter_ui_settings(self, settings):
723 722 filtered_settings = [
724 723 s for s in settings if self._should_keep_setting(s)]
725 724 return filtered_settings
726 725
727 726 def _should_keep_setting(self, setting):
728 727 keep = (
729 728 (setting.section, setting.key) in self._ui_settings or
730 729 setting.section in self._svn_sections)
731 730 return keep
732 731
733 732 def _filter_general_settings(self, settings):
734 733 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
735 734 return {
736 735 k: settings[k]
737 736 for k in settings if k in keys}
738 737
739 738 def _collect_all_settings(self, global_=False):
740 739 settings = self.global_settings if global_ else self.repo_settings
741 740 result = {}
742 741
743 742 for section, key in self._ui_settings:
744 743 ui = settings.get_ui_by_section_and_key(section, key)
745 744 result_key = self._get_form_ui_key(section, key)
746 745
747 746 if ui:
748 747 if section in ('hooks', 'extensions'):
749 748 result[result_key] = ui.ui_active
750 749 elif result_key in ['vcs_git_lfs_enabled']:
751 750 result[result_key] = ui.ui_active
752 751 else:
753 752 result[result_key] = ui.ui_value
754 753
755 754 for name in self.GENERAL_SETTINGS:
756 755 setting = settings.get_setting_by_name(name)
757 756 if setting:
758 757 result_key = 'rhodecode_{}'.format(name)
759 758 result[result_key] = setting.app_settings_value
760 759
761 760 return result
762 761
763 762 def _get_form_ui_key(self, section, key):
764 763 return '{section}_{key}'.format(
765 764 section=section, key=key.replace('.', '_'))
766 765
767 766 def _create_or_update_ui(
768 767 self, settings, section, key, value=None, active=None):
769 768 ui = settings.get_ui_by_section_and_key(section, key)
770 769 if not ui:
771 770 active = True if active is None else active
772 771 settings.create_ui_section_value(
773 772 section, value, key=key, active=active)
774 773 else:
775 774 if active is not None:
776 775 ui.ui_active = active
777 776 if value is not None:
778 777 ui.ui_value = value
779 778 Session().add(ui)
780 779
781 780 def _create_svn_settings(self, settings, data):
782 781 svn_settings = {
783 782 'new_svn_branch': self.SVN_BRANCH_SECTION,
784 783 'new_svn_tag': self.SVN_TAG_SECTION
785 784 }
786 785 for key in svn_settings:
787 786 if data.get(key):
788 787 settings.create_ui_section_value(svn_settings[key], data[key])
789 788
790 789 def _create_or_update_general_settings(self, settings, data):
791 790 for name in self.GENERAL_SETTINGS:
792 791 data_key = 'rhodecode_{}'.format(name)
793 792 if data_key not in data:
794 793 raise ValueError(
795 794 'The given data does not contain {} key'.format(data_key))
796 795 setting = settings.create_or_update_setting(
797 796 name, data[data_key], 'bool')
798 797 Session().add(setting)
799 798
800 799 def _get_settings_keys(self, settings, data):
801 800 data_keys = [self._get_form_ui_key(*s) for s in settings]
802 801 for data_key in data_keys:
803 802 if data_key not in data:
804 803 raise ValueError(
805 804 'The given data does not contain {} key'.format(data_key))
806 805 return data_keys
807 806
808 807 def create_largeobjects_dirs_if_needed(self, repo_store_path):
809 808 """
810 809 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
811 810 ensures that the largefiles and LFS store directories exist in the repo store.
812 811 """
813 812
814 813 from rhodecode.lib.vcs.backends.hg import largefiles_store
815 814 from rhodecode.lib.vcs.backends.git import lfs_store
816 815
817 816 paths = [
818 817 largefiles_store(repo_store_path),
819 818 lfs_store(repo_store_path)]
820 819
821 820 for path in paths:
822 821 if os.path.isdir(path):
823 822 continue
824 823 if os.path.isfile(path):
825 824 continue
826 825 # not a file nor dir, we try to create it
827 826 try:
828 827 os.makedirs(path)
829 828 except Exception:
830 829 log.warning('Failed to create largefiles dir:%s', path)
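A minimal wiring sketch for the helper above, not part of this changeset: it assumes a `VcsSettingsModel()` instance and a hypothetical `repo_store_path` settings key, and uses Pyramid's standard event-subscription API (the subscriber only takes effect if the module is picked up by `config.scan()`).

# illustrative sketch only -- not part of this changeset
from pyramid.events import ApplicationCreated, subscriber
from rhodecode.model.settings import VcsSettingsModel

@subscriber(ApplicationCreated)
def prepare_largeobject_dirs(event):
    # 'repo_store_path' is a hypothetical settings key used for illustration
    repo_store_path = event.app.registry.settings.get('repo_store_path', '/tmp/repos')
    VcsSettingsModel().create_largeobjects_dirs_if_needed(repo_store_path)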
@@ -1,243 +1,245 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import time
23 23 import logging
24 24 import datetime
25 25 import hashlib
26 26 import tempfile
27 27 from os.path import join as jn
28 28
29 29 from tempfile import _RandomNameSequence
30 30
31 31 import pytest
32 32
33 33 from rhodecode.model.db import User
34 34 from rhodecode.lib import auth
35 35 from rhodecode.lib import helpers as h
36 36 from rhodecode.lib.helpers import flash, link_to
37 37 from rhodecode.lib.utils2 import safe_str
38 38
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42 __all__ = [
43 43 'get_new_dir', 'TestController',
44 'link_to', 'clear_all_caches',
44 'link_to', 'clear_cache_regions',
45 45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
46 46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
47 47 'NEW_HG_REPO', 'NEW_GIT_REPO',
48 48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
49 49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
50 50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
51 51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
52 52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
53 53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
54 54 ]
55 55
56 56
57 57 # SOME GLOBALS FOR TESTS
58 58 TEST_DIR = tempfile.gettempdir()
59 59
60 60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_%s' % _RandomNameSequence().next())
61 61 TEST_USER_ADMIN_LOGIN = 'test_admin'
62 62 TEST_USER_ADMIN_PASS = 'test12'
63 63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
64 64
65 65 TEST_USER_REGULAR_LOGIN = 'test_regular'
66 66 TEST_USER_REGULAR_PASS = 'test12'
67 67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
68 68
69 69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
70 70 TEST_USER_REGULAR2_PASS = 'test12'
71 71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
72 72
73 73 HG_REPO = 'vcs_test_hg'
74 74 GIT_REPO = 'vcs_test_git'
75 75 SVN_REPO = 'vcs_test_svn'
76 76
77 77 NEW_HG_REPO = 'vcs_test_hg_new'
78 78 NEW_GIT_REPO = 'vcs_test_git_new'
79 79
80 80 HG_FORK = 'vcs_test_hg_fork'
81 81 GIT_FORK = 'vcs_test_git_fork'
82 82
83 83 ## VCS
84 84 SCM_TESTS = ['hg', 'git']
85 85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
86 86
87 87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
88 88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
89 89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
90 90
91 91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
92 92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
93 93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
94 94
95 95 TEST_REPO_PREFIX = 'vcs-test'
96 96
97 97
98 def clear_all_caches():
99 from beaker.cache import cache_managers
100 for _cache in cache_managers.values():
101 _cache.clear()
98 def clear_cache_regions(regions=None):
99 # dogpile
100 from rhodecode.lib.rc_cache import region_meta
101 for region_name, region in region_meta.dogpile_cache_regions.items():
102 if not regions or region_name in regions:
103 region.invalidate()
102 104
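A quick usage sketch for the new helper (hypothetical calls, not part of this changeset):

# invalidate only the dogpile region used for short-lived SQL query caching
clear_cache_regions(['sql_cache_short'])

# or invalidate every region registered in region_meta.dogpile_cache_regions
clear_cache_regions()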
103 105
104 106 def get_new_dir(title):
105 107 """
106 108 Always returns a new directory path.
107 109 """
108 110 from rhodecode.tests.vcs.utils import get_normalized_path
109 111 name_parts = [TEST_REPO_PREFIX]
110 112 if title:
111 113 name_parts.append(title)
112 114 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
113 115 name_parts.append(hex_str)
114 116 name = '-'.join(name_parts)
115 117 path = os.path.join(TEST_DIR, name)
116 118 return get_normalized_path(path)
117 119
118 120
119 121 def repo_id_generator(name):
120 122 numeric_hash = 0
121 123 for char in name:
122 124 numeric_hash += (ord(char))
123 125 return numeric_hash
124 126
125 127
126 128 @pytest.mark.usefixtures('app', 'index_location')
127 129 class TestController(object):
128 130
129 131 maxDiff = None
130 132
131 133 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
132 134 password=TEST_USER_ADMIN_PASS):
133 135 self._logged_username = username
134 136 self._session = login_user_session(self.app, username, password)
135 137 self.csrf_token = auth.get_csrf_token(self._session)
136 138
137 139 return self._session['rhodecode_user']
138 140
139 141 def logout_user(self):
140 142 logout_user_session(self.app, auth.get_csrf_token(self._session))
141 143 self.csrf_token = None
142 144 self._logged_username = None
143 145 self._session = None
144 146
145 147 def _get_logged_user(self):
146 148 return User.get_by_username(self._logged_username)
147 149
148 150
149 151 def login_user_session(
150 152 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
151 153
152 154 response = app.post(
153 155 h.route_path('login'),
154 156 {'username': username, 'password': password})
155 157 if 'invalid user name' in response.body:
156 158 pytest.fail('could not login using %s %s' % (username, password))
157 159
158 160 assert response.status == '302 Found'
159 161 response = response.follow()
160 162 assert response.status == '200 OK'
161 163
162 164 session = response.get_session_from_response()
163 165 assert 'rhodecode_user' in session
164 166 rc_user = session['rhodecode_user']
165 167 assert rc_user.get('username') == username
166 168 assert rc_user.get('is_authenticated')
167 169
168 170 return session
169 171
170 172
171 173 def logout_user_session(app, csrf_token):
172 174 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
173 175
174 176
175 177 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
176 178 password=TEST_USER_ADMIN_PASS):
177 179 return login_user_session(app, username, password)['rhodecode_user']
178 180
179 181
180 182 def assert_session_flash(response, msg=None, category=None, no_=None):
181 183 """
182 184 Assert on a flash message in the current session.
183 185
184 186 :param response: Response from the given call; it will contain flash
185 187 messages or a session bound with them.
186 188 :param msg: The expected message. Will be evaluated if a
187 189 :class:`LazyString` is passed in.
188 190 :param category: Optional. If passed, the message category will be
189 191 checked as well.
190 192 :param no_: Optional. If passed, the message will be checked to NOT
191 193 be in the flash session
192 194 """
193 195 if msg is None and no_ is None:
194 196 raise ValueError("Parameter msg or no_ is required.")
195 197
196 198 if msg and no_:
197 199 raise ValueError("Please specify either msg or no_, but not both")
198 200
199 201 session = response.get_session_from_response()
200 202 messages = flash.pop_messages(session=session)
201 203 msg = _eval_if_lazy(msg)
202 204
203 205 if no_:
204 206 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
205 207 else:
206 208 error_msg = 'unable to find message `%s` in empty flash list' % msg
207 209 assert messages, error_msg
208 210 message = messages[0]
209 211
210 212 message_text = _eval_if_lazy(message.message) or ''
211 213
212 214 if no_:
213 215 if no_ in message_text:
214 216 msg = u'msg `%s` found in session flash.' % (no_,)
215 217 pytest.fail(safe_str(msg))
216 218 else:
217 219 if msg not in message_text:
218 220 fail_msg = u'msg `%s` not found in session ' \
219 221 u'flash: got `%s` (type:%s) instead' % (
220 222 msg, message_text, type(message_text))
221 223
222 224 pytest.fail(safe_str(fail_msg))
223 225 if category:
224 226 assert category == message.category
225 227
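Hedged usage examples for the assertion helper above (the flash messages are made up for illustration):

assert_session_flash(response, msg='Repository created')            # message must be present
assert_session_flash(response, no_='Error occurred')                # message must be absent
assert_session_flash(response, msg='Created', category='success')   # also check the category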
226 228
227 229 def _eval_if_lazy(value):
228 230 return value.eval() if hasattr(value, 'eval') else value
229 231
230 232
231 233 def no_newline_id_generator(test_name):
232 234 """
233 235 Generates a test name without space or newline characters. Used for
234 236 nicer output of test progress.
235 237 """
236 238 org_name = test_name
237 239 test_name = test_name\
238 240 .replace('\n', '_N') \
239 241 .replace('\r', '_N') \
240 242 .replace('\t', '_T') \
241 243 .replace(' ', '_S')
242 244
243 245 return test_name or 'test-with-empty-name'
@@ -1,446 +1,445 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import multiprocessing
23 23 import os
24 24
25 25 import mock
26 26 import py
27 27 import pytest
28 28
29 29 from rhodecode.lib import caching_query
30 30 from rhodecode.lib import utils
31 31 from rhodecode.lib.utils2 import md5
32 32 from rhodecode.model import settings
33 33 from rhodecode.model import db
34 34 from rhodecode.model import meta
35 35 from rhodecode.model.repo import RepoModel
36 36 from rhodecode.model.repo_group import RepoGroupModel
37 37 from rhodecode.model.scm import ScmModel
38 38 from rhodecode.model.settings import UiSetting, SettingsModel
39 39 from rhodecode.tests.fixture import Fixture
40 40
41 41
42 42 fixture = Fixture()
43 43
44 44
45 45 def extract_hooks(config):
46 46 """Return a dictionary with the hook entries of the given config."""
47 47 hooks = {}
48 48 config_items = config.serialize()
49 49 for section, name, value in config_items:
50 50 if section != 'hooks':
51 51 continue
52 52 hooks[name] = value
53 53
54 54 return hooks
55 55
56 56
57 57 def disable_hooks(request, hooks):
58 58 """Disables the given hooks from the UI settings."""
59 59 session = meta.Session()
60 60
61 61 model = SettingsModel()
62 62 for hook_key in hooks:
63 63 sett = model.get_ui_by_key(hook_key)
64 64 sett.ui_active = False
65 65 session.add(sett)
66 66
67 67 # Invalidate cache
68 68 ui_settings = session.query(db.RhodeCodeUi).options(
69 69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
70 70 ui_settings.invalidate()
71 71
72 72 ui_settings = session.query(db.RhodeCodeUi).options(
73 caching_query.FromCache(
74 'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
73 caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
75 74 ui_settings.invalidate()
76 75
77 76 @request.addfinalizer
78 77 def rollback():
79 78 session.rollback()
80 79
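For context, a minimal sketch of the dogpile-backed `FromCache` pattern used in `disable_hooks()` above; illustrative only, reusing the region and cache keys already shown:

# query results are stored in the 'sql_cache_short' dogpile region under the given key
cached_q = meta.Session().query(db.RhodeCodeUi).options(
    caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))

rows = cached_q.all()         # first call hits the database and populates the region
rows_cached = cached_q.all()  # subsequent calls are served from the region
cached_q.invalidate()         # drops the cached payload so the next call hits the DB again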
81 80
82 81 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
83 82 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
84 83 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
85 84 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
86 85 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
87 86 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
88 87 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
89 88
90 89 HG_HOOKS = frozenset(
91 90 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
92 91 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
93 92
94 93
95 94 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
96 95 ([], HG_HOOKS),
97 96 (HG_HOOKS, []),
98 97
99 98 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
100 99
101 100 # When a pull/push hook is disabled, its pre-pull/push counterpart should
102 101 # be disabled too.
103 102 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
104 103 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
105 104 HOOK_PUSH_KEY]),
106 105 ])
107 106 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
108 107 expected_hooks):
109 108 disable_hooks(request, disabled_hooks)
110 109
111 110 config = utils.make_db_config()
112 111 hooks = extract_hooks(config)
113 112
114 113 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
115 114
116 115
117 116 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
118 117 ([], ['pull', 'push']),
119 118 ([HOOK_PUSH], ['pull']),
120 119 ([HOOK_PULL], ['push']),
121 120 ([HOOK_PULL, HOOK_PUSH], []),
122 121 ])
123 122 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
124 123 hook_keys = (HOOK_PUSH, HOOK_PULL)
125 124 ui_settings = [
126 125 ('hooks', key, 'some value', key not in disabled_hooks)
127 126 for key in hook_keys]
128 127
129 128 result = utils.get_enabled_hook_classes(ui_settings)
130 129 assert sorted(result) == expected_hooks
131 130
132 131
133 132 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
134 133 _stub_git_repo(tmpdir.ensure('repo', dir=True))
135 134 repos = list(utils.get_filesystem_repos(str(tmpdir)))
136 135 assert repos == [('repo', ('git', tmpdir.join('repo')))]
137 136
138 137
139 138 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
140 139 tmpdir.ensure('not-a-repo', dir=True)
141 140 repos = list(utils.get_filesystem_repos(str(tmpdir)))
142 141 assert repos == []
143 142
144 143
145 144 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
146 145 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
147 146 repos = list(utils.get_filesystem_repos(str(tmpdir)))
148 147 assert repos == []
149 148
150 149
151 150 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
152 151 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
153 152 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
154 153 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
155 154
156 155
157 156 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
158 157 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
159 158 repos = list(utils.get_filesystem_repos(str(tmpdir)))
160 159 assert repos == []
161 160
162 161
163 162 def test_get_filesystem_repos_skips_files(tmpdir):
164 163 tmpdir.ensure('test-file')
165 164 repos = list(utils.get_filesystem_repos(str(tmpdir)))
166 165 assert repos == []
167 166
168 167
169 168 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
170 169 removed_repo_name = 'rm__00000000_000000_000000__.stub'
171 170 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
172 171 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
173 172 repos = list(utils.get_filesystem_repos(str(tmpdir)))
174 173 assert repos == []
175 174
176 175
177 176 def _stub_git_repo(repo_path):
178 177 """
179 178 Make `repo_path` look like a Git repository.
180 179 """
181 180 repo_path.ensure('.git', dir=True)
182 181
183 182
184 183 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
185 184 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
186 185 tmpdir.ensure('test-file')
187 186 dirpaths = utils._get_dirpaths(str_class(tmpdir))
188 187 assert dirpaths == ['test-file']
189 188
190 189
191 190 def test_get_dirpaths_returns_all_paths_bytes(
192 191 tmpdir, platform_encodes_filenames):
193 192 if platform_encodes_filenames:
194 193 pytest.skip("This platform seems to encode filenames.")
195 194 tmpdir.ensure('repo-a-umlaut-\xe4')
196 195 dirpaths = utils._get_dirpaths(str(tmpdir))
197 196 assert dirpaths == ['repo-a-umlaut-\xe4']
198 197
199 198
200 199 def test_get_dirpaths_skips_paths_it_cannot_decode(
201 200 tmpdir, platform_encodes_filenames):
202 201 if platform_encodes_filenames:
203 202 pytest.skip("This platform seems to encode filenames.")
204 203 path_with_latin1 = 'repo-a-umlaut-\xe4'
205 204 tmpdir.ensure(path_with_latin1)
206 205 dirpaths = utils._get_dirpaths(unicode(tmpdir))
207 206 assert dirpaths == []
208 207
209 208
210 209 @pytest.fixture(scope='session')
211 210 def platform_encodes_filenames():
212 211 """
213 212 Boolean indicator if the current platform changes filename encodings.
214 213 """
215 214 path_with_latin1 = 'repo-a-umlaut-\xe4'
216 215 tmpdir = py.path.local.mkdtemp()
217 216 tmpdir.ensure(path_with_latin1)
218 217 read_path = tmpdir.listdir()[0].basename
219 218 tmpdir.remove()
220 219 return path_with_latin1 != read_path
221 220
222 221
223 222
224 223
225 224 def test_repo2db_mapper_groups(repo_groups):
226 225 session = meta.Session()
227 226 zombie_group, parent_group, child_group = repo_groups
228 227 zombie_path = os.path.join(
229 228 RepoGroupModel().repos_path, zombie_group.full_path)
230 229 os.rmdir(zombie_path)
231 230
232 231 # Avoid removing test repos when calling repo2db_mapper
233 232 repo_list = {
234 233 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
235 234 }
236 235 utils.repo2db_mapper(repo_list, remove_obsolete=True)
237 236
238 237 groups_in_db = session.query(db.RepoGroup).all()
239 238 assert child_group in groups_in_db
240 239 assert parent_group in groups_in_db
241 240 assert zombie_path not in groups_in_db
242 241
243 242
244 243 def test_repo2db_mapper_enables_largefiles(backend):
245 244 repo = backend.create_repo()
246 245 repo_list = {repo.repo_name: 'test'}
247 246 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
248 247 utils.repo2db_mapper(repo_list, remove_obsolete=False)
249 248 _, kwargs = scm_mock.call_args
250 249 assert kwargs['config'].get('extensions', 'largefiles') == ''
251 250
252 251
253 252 @pytest.mark.backends("git", "svn")
254 253 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
255 254 repo = backend.create_repo()
256 255 repo_list = {repo.repo_name: 'test'}
257 256 utils.repo2db_mapper(repo_list, remove_obsolete=False)
258 257
259 258
260 259 @pytest.mark.backends("git", "svn")
261 260 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
262 261 repo = backend.create_repo()
263 262 RepoModel().delete(repo, fs_remove=False)
264 263 meta.Session().commit()
265 264 repo_list = {repo.repo_name: repo.scm_instance()}
266 265 utils.repo2db_mapper(repo_list, remove_obsolete=False)
267 266
268 267
269 268 class TestPasswordChanged(object):
270 269 def setup(self):
271 270 self.session = {
272 271 'rhodecode_user': {
273 272 'password': '0cc175b9c0f1b6a831c399e269772661'
274 273 }
275 274 }
276 275 self.auth_user = mock.Mock()
277 276 self.auth_user.username = 'test'
278 277 self.auth_user.password = 'abc123'
279 278
280 279 def test_returns_false_for_default_user(self):
281 280 self.auth_user.username = db.User.DEFAULT_USER
282 281 result = utils.password_changed(self.auth_user, self.session)
283 282 assert result is False
284 283
285 284 def test_returns_false_if_password_was_not_changed(self):
286 285 self.session['rhodecode_user']['password'] = md5(
287 286 self.auth_user.password)
288 287 result = utils.password_changed(self.auth_user, self.session)
289 288 assert result is False
290 289
291 290 def test_returns_true_if_password_was_changed(self):
292 291 result = utils.password_changed(self.auth_user, self.session)
293 292 assert result is True
294 293
295 294 def test_returns_true_if_auth_user_password_is_empty(self):
296 295 self.auth_user.password = None
297 296 result = utils.password_changed(self.auth_user, self.session)
298 297 assert result is True
299 298
300 299 def test_returns_true_if_session_password_is_empty(self):
301 300 self.session['rhodecode_user'].pop('password')
302 301 result = utils.password_changed(self.auth_user, self.session)
303 302 assert result is True
304 303
305 304
306 305 class TestReadOpensourceLicenses(object):
307 306 def test_success(self):
308 307 utils._license_cache = None
309 308 json_data = '''
310 309 {
311 310 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
312 311 "python2.7-Markdown-2.6.2": {
313 312 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
314 313 }
315 314 }
316 315 '''
317 316 resource_string_patch = mock.patch.object(
318 317 utils.pkg_resources, 'resource_string', return_value=json_data)
319 318 with resource_string_patch:
320 319 result = utils.read_opensource_licenses()
321 320 assert result == json.loads(json_data)
322 321
323 322 def test_caching(self):
324 323 utils._license_cache = {
325 324 "python2.7-pytest-2.7.1": {
326 325 "UNKNOWN": None
327 326 },
328 327 "python2.7-Markdown-2.6.2": {
329 328 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
330 329 }
331 330 }
332 331 resource_patch = mock.patch.object(
333 332 utils.pkg_resources, 'resource_string', side_effect=Exception)
334 333 json_patch = mock.patch.object(
335 334 utils.json, 'loads', side_effect=Exception)
336 335
337 336 with resource_patch as resource_mock, json_patch as json_mock:
338 337 result = utils.read_opensource_licenses()
339 338
340 339 assert resource_mock.call_count == 0
341 340 assert json_mock.call_count == 0
342 341 assert result == utils._license_cache
343 342
344 343 def test_licenses_file_contains_no_unknown_licenses(self):
345 344 utils._license_cache = None
346 345 result = utils.read_opensource_licenses()
347 346 license_names = []
348 347 for licenses in result.values():
349 348 license_names.extend(licenses.keys())
350 349 assert 'UNKNOWN' not in license_names
351 350
352 351
353 352 class TestMakeDbConfig(object):
354 353 def test_data_from_config_data_from_db_returned(self):
355 354 test_data = [
356 355 ('section1', 'option1', 'value1'),
357 356 ('section2', 'option2', 'value2'),
358 357 ('section3', 'option3', 'value3'),
359 358 ]
360 359 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
361 360 config_mock.return_value = test_data
362 361 kwargs = {'clear_session': False, 'repo': 'test_repo'}
363 362 result = utils.make_db_config(**kwargs)
364 363 config_mock.assert_called_once_with(**kwargs)
365 364 for section, option, expected_value in test_data:
366 365 value = result.get(section, option)
367 366 assert value == expected_value
368 367
369 368
370 369 class TestConfigDataFromDb(object):
371 370 def test_config_data_from_db_returns_active_settings(self):
372 371 test_data = [
373 372 UiSetting('section1', 'option1', 'value1', True),
374 373 UiSetting('section2', 'option2', 'value2', True),
375 374 UiSetting('section3', 'option3', 'value3', False),
376 375 ]
377 376 repo_name = 'test_repo'
378 377
379 378 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
380 379 hooks_patch = mock.patch.object(
381 380 utils, 'get_enabled_hook_classes',
382 381 return_value=['pull', 'push', 'repo_size'])
383 382 with model_patch as model_mock, hooks_patch:
384 383 instance_mock = mock.Mock()
385 384 model_mock.return_value = instance_mock
386 385 instance_mock.get_ui_settings.return_value = test_data
387 386 result = utils.config_data_from_db(
388 387 clear_session=False, repo=repo_name)
389 388
390 389 self._assert_repo_name_passed(model_mock, repo_name)
391 390
392 391 expected_result = [
393 392 ('section1', 'option1', 'value1'),
394 393 ('section2', 'option2', 'value2'),
395 394 ]
396 395 assert result == expected_result
397 396
398 397 def _assert_repo_name_passed(self, model_mock, repo_name):
399 398 assert model_mock.call_count == 1
400 399 call_args, call_kwargs = model_mock.call_args
401 400 assert call_kwargs['repo'] == repo_name
402 401
403 402
404 403 class TestIsDirWritable(object):
405 404 def test_returns_false_when_not_writable(self):
406 405 with mock.patch('__builtin__.open', side_effect=OSError):
407 406 assert not utils._is_dir_writable('/stub-path')
408 407
409 408 def test_returns_true_when_writable(self, tmpdir):
410 409 assert utils._is_dir_writable(str(tmpdir))
411 410
412 411 def test_is_safe_against_race_conditions(self, tmpdir):
413 412 workers = multiprocessing.Pool()
414 413 directories = [str(tmpdir)] * 10
415 414 workers.map(utils._is_dir_writable, directories)
416 415
417 416
418 417 class TestGetEnabledHooks(object):
419 418 def test_only_active_hooks_are_enabled(self):
420 419 ui_settings = [
421 420 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
422 421 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
423 422 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
424 423 ]
425 424 result = utils.get_enabled_hook_classes(ui_settings)
426 425 assert result == ['push', 'repo_size']
427 426
428 427 def test_all_hooks_are_enabled(self):
429 428 ui_settings = [
430 429 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
431 430 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
432 431 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
433 432 ]
434 433 result = utils.get_enabled_hook_classes(ui_settings)
435 434 assert result == ['push', 'repo_size', 'pull']
436 435
437 436 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
438 437 ui_settings = []
439 438 result = utils.get_enabled_hook_classes(ui_settings)
440 439 assert result == []
441 440
442 441
443 442 def test_obfuscate_url_pw():
444 443 from rhodecode.lib.utils2 import obfuscate_url_pw
445 444 engine = u'/home/repos/malmö'
446 445 assert obfuscate_url_pw(engine) No newline at end of file
@@ -1,671 +1,674 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send an error email from this address
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 0.0.0.0
47 47 port = 5000
48 48
49 49 ##########################
50 50 ## GUNICORN WSGI SERVER ##
51 51 ##########################
52 52 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
53 53
54 54 use = egg:gunicorn#main
55 55 ## Sets the number of process workers. You must set `instance_id = *`
56 56 ## when this option is set to more than one worker, recommended
57 57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
58 58 ## The `instance_id = *` must be set in the [app:main] section below
59 59 #workers = 2
60 60 ## number of threads for each of the worker, must be set to 1 for gevent
61 61 ## generally recommended to be at 1
62 62 #threads = 1
63 63 ## process name
64 64 #proc_name = rhodecode
65 65 ## type of worker class, one of sync, gevent
66 66 ## for bigger setups it is recommended to use a worker class other than sync
67 67 #worker_class = sync
68 68 ## The maximum number of simultaneous clients. Valid only for Gevent
69 69 #worker_connections = 10
70 70 ## max number of requests that worker will handle before being gracefully
71 71 ## restarted, could prevent memory leaks
72 72 #max_requests = 1000
73 73 #max_requests_jitter = 30
74 74 ## amount of time a worker can spend with handling a request before it
75 75 ## gets killed and restarted. Set to 6hrs
76 76 #timeout = 21600
77 77
78 78 ## prefix middleware for RhodeCode.
79 79 ## recommended when using proxy setup.
80 80 ## allows to set RhodeCode under a prefix in server.
81 81 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
82 82 ## And set your prefix like: `prefix = /custom_prefix`
83 83 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
84 84 ## to make your cookies only work on prefix url
85 85 [filter:proxy-prefix]
86 86 use = egg:PasteDeploy#prefix
87 87 prefix = /
88 88
89 89 [app:main]
90 90 is_test = True
91 91 use = egg:rhodecode-enterprise-ce
92 92
93 93 ## enable proxy prefix middleware, defined above
94 94 #filter-with = proxy-prefix
95 95
96 96
97 97 ## RHODECODE PLUGINS ##
98 98 rhodecode.includes = rhodecode.api
99 99
100 100 # api prefix url
101 101 rhodecode.api.url = /_admin/api
102 102
103 103
104 104 ## END RHODECODE PLUGINS ##
105 105
106 106 ## encryption key used to encrypt social plugin tokens,
107 107 ## remote_urls with credentials etc, if not set it defaults to
108 108 ## `beaker.session.secret`
109 109 #rhodecode.encrypted_values.secret =
110 110
111 111 ## decryption strict mode (enabled by default). It controls if decryption raises
112 112 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 113 #rhodecode.encrypted_values.strict = false
114 114
115 115 ## return gzipped responses from Rhodecode (static files/application)
116 116 gzip_responses = false
117 117
118 118 ## autogenerate javascript routes file on startup
119 119 generate_js_files = false
120 120
121 121 ## Optional Languages
122 122 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
123 123 lang = en
124 124
125 125 ## perform a full repository scan on each server start, this should be
126 126 ## set to false after first startup, to allow faster server restarts.
127 127 startup.import_repos = true
128 128
129 129 ## Uncomment and set this path to use archive download cache.
130 130 ## Once enabled, generated archives will be cached at this location
131 131 ## and served from the cache during subsequent requests for the same archive of
132 132 ## the repository.
133 133 #archive_cache_dir = /tmp/tarballcache
134 134
135 135 ## URL at which the application is running. This is used for bootstrapping
136 136 ## requests in context when no web request is available. Used in ishell, or
137 137 ## SSH calls. Set this for events to receive proper url for SSH calls.
138 138 app.base_url = http://rhodecode.local
139 139
140 140 ## change this to unique ID for security
141 141 app_instance_uuid = rc-production
142 142
143 143 ## cut off limit for large diffs (size in bytes)
144 144 cut_off_limit_diff = 1024000
145 145 cut_off_limit_file = 256000
146 146
147 147 ## use cache version of scm repo everywhere
148 148 vcs_full_cache = false
149 149
150 150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 151 ## Normally this is controlled by proper http flags sent from http server
152 152 force_https = false
153 153
154 154 ## use Strict-Transport-Security headers
155 155 use_htsts = false
156 156
157 157 ## git rev filter option, --all is the default filter, if you need to
158 158 ## hide all refs in changelog switch this to --branches --tags
159 159 git_rev_filter = --all
160 160
161 161 # Set to true if your repos are exposed using the dumb protocol
162 162 git_update_server_info = false
163 163
164 164 ## RSS/ATOM feed options
165 165 rss_cut_off_limit = 256000
166 166 rss_items_per_page = 10
167 167 rss_include_diff = false
168 168
169 169 ## gist URL alias, used to create nicer urls for gist. This should be an
170 170 ## url that does rewrites to _admin/gists/{gistid}.
171 171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
172 172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
173 173 gist_alias_url =
174 174
175 175 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
176 176 ## used for access.
177 177 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
178 178 ## came from the logged-in user who owns this authentication token.
179 179 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
180 180 ## authentication token. Such a view would only be accessible when used together
181 181 ## with this authentication token
182 182 ##
183 183 ## list of all views can be found under `/_admin/permissions/auth_token_access`
184 184 ## The list should be "," separated and on a single line.
185 185 ##
186 186 ## Most common views to enable:
187 187 # RepoCommitsView:repo_commit_download
188 188 # RepoCommitsView:repo_commit_patch
189 189 # RepoCommitsView:repo_commit_raw
190 190 # RepoCommitsView:repo_commit_raw@TOKEN
191 191 # RepoFilesView:repo_files_diff
192 192 # RepoFilesView:repo_archivefile
193 193 # RepoFilesView:repo_file_raw
194 194 # GistView:*
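A hedged example of a populated whitelist, reusing view names already listed above (the chosen values are illustrative only):

## e.g. api_access_controllers_whitelist = RepoFilesView:repo_file_raw, GistView:*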
195 195 api_access_controllers_whitelist =
196 196
197 197 ## default encoding used to convert from and to unicode
198 198 ## can be also a comma separated list of encoding in case of mixed encodings
199 199 default_encoding = UTF-8
200 200
201 201 ## instance-id prefix
202 202 ## a prefix key for this instance used for cache invalidation when running
203 203 ## multiple instances of rhodecode, make sure it's globally unique for
204 204 ## all running rhodecode instances. Leave empty if you don't use it
205 205 instance_id =
206 206
207 207 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
208 208 ## of an authentication plugin even if it is disabled by its settings.
209 209 ## This could be useful if you are unable to log in to the system due to broken
210 210 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
211 211 ## module to log in again and fix the settings.
212 212 ##
213 213 ## Available builtin plugin IDs (hash is part of the ID):
214 214 ## egg:rhodecode-enterprise-ce#rhodecode
215 215 ## egg:rhodecode-enterprise-ce#pam
216 216 ## egg:rhodecode-enterprise-ce#ldap
217 217 ## egg:rhodecode-enterprise-ce#jasig_cas
218 218 ## egg:rhodecode-enterprise-ce#headers
219 219 ## egg:rhodecode-enterprise-ce#crowd
220 220 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
221 221
222 222 ## alternative return HTTP header for failed authentication. Default HTTP
223 223 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
224 224 ## handling that, causing a series of failed authentication calls.
225 225 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
226 226 ## This will be served instead of the default 401 on bad authentication
227 227 auth_ret_code =
228 228
229 229 ## use special detection method when serving auth_ret_code, instead of serving
230 230 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
231 231 ## and then serve auth_ret_code to clients
232 232 auth_ret_code_detection = false
233 233
234 234 ## locking return code. When repository is locked return this HTTP code. 2XX
235 235 ## codes don't break the transactions while 4XX codes do
236 236 lock_ret_code = 423
237 237
238 238 ## allows to change the repository location in settings page
239 239 allow_repo_location_change = true
240 240
241 241 ## allows to setup custom hooks in settings page
242 242 allow_custom_hooks_settings = true
243 243
244 244 ## generated license token, goto license page in RhodeCode settings to obtain
245 245 ## new token
246 246 license_token = abra-cada-bra1-rce3
247 247
248 248 ## supervisor connection uri, for managing supervisor and logs.
249 249 supervisor.uri =
250 250 ## supervisord group name/id we only want this RC instance to handle
251 251 supervisor.group_id = dev
252 252
253 253 ## Display extended labs settings
254 254 labs_settings_active = true
255 255
256 256 ####################################
257 257 ### CELERY CONFIG ####
258 258 ####################################
259 259 use_celery = false
260 260 broker.host = localhost
261 261 broker.vhost = rabbitmqhost
262 262 broker.port = 5672
263 263 broker.user = rabbitmq
264 264 broker.password = qweqwe
265 265
266 266 celery.imports = rhodecode.lib.celerylib.tasks
267 267
268 268 celery.result.backend = amqp
269 269 celery.result.dburi = amqp://
270 270 celery.result.serialier = json
271 271
272 272 #celery.send.task.error.emails = true
273 273 #celery.amqp.task.result.expires = 18000
274 274
275 275 celeryd.concurrency = 2
276 276 #celeryd.log.file = celeryd.log
277 277 celeryd.log.level = debug
278 278 celeryd.max.tasks.per.child = 1
279 279
280 280 ## tasks will never be sent to the queue, but executed locally instead.
281 281 celery.always.eager = false
282 282
283 283 ####################################
284 284 ### BEAKER CACHE ####
285 285 ####################################
286 286 # default cache dir for templates. Putting this into a ramdisk
287 287 ## can boost performance, eg. %(here)s/data_ramdisk
288 288 cache_dir = %(here)s/data
289 289
290 290 ## locking and default file storage for Beaker. Putting this into a ramdisk
291 291 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
292 292 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
293 293 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
294 294
295 beaker.cache.regions = long_term, sql_cache_short
295 beaker.cache.regions = long_term
296 296
297 297 beaker.cache.long_term.type = memory
298 298 beaker.cache.long_term.expire = 36000
299 299 beaker.cache.long_term.key_length = 256
300 300
301 beaker.cache.sql_cache_short.type = memory
302 beaker.cache.sql_cache_short.expire = 1
303 beaker.cache.sql_cache_short.key_length = 256
304 301
305 302 #####################################
306 303 ### DOGPILE CACHE ####
307 304 #####################################
308 305
309 306 ## permission tree cache settings
310 307 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
311 308 rc_cache.cache_perms.expiration_time = 0
312 309 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
313 310
311
312 ## cache settings for SQL queries
313 rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
314 rc_cache.sql_cache_short.expiration_time = 0
315
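A minimal sketch of how `rc_cache.*` keys like the ones above map onto a dogpile region, using the stock `dogpile.cache` API; the RhodeCode-specific backends (e.g. `dogpile.cache.rc.memory_lru`) are registered by RhodeCode itself, so a built-in backend is substituted for illustration, and `expensive_db_call` is a hypothetical stand-in for a real query:

# illustrative only -- not part of this changeset
from dogpile.cache import make_region

ini_settings = {
    'rc_cache.sql_cache_short.backend': 'dogpile.cache.memory',  # stock backend for the sketch
    'rc_cache.sql_cache_short.expiration_time': 30,
}

region = make_region(name='sql_cache_short')
region.configure_from_config(ini_settings, 'rc_cache.sql_cache_short.')

@region.cache_on_arguments()
def heavy_lookup(key):
    return expensive_db_call(key)  # hypothetical helper standing in for a real query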
316
314 317 ####################################
315 318 ### BEAKER SESSION ####
316 319 ####################################
317 320
318 321 ## .session.type is type of storage options for the session, current allowed
319 322 ## types are file, ext:memcached, ext:database, and memory (default).
320 323 beaker.session.type = file
321 324 beaker.session.data_dir = %(here)s/rc/data/sessions/data
322 325
323 326 ## db based session, fast, and allows easy management over logged in users
324 327 #beaker.session.type = ext:database
325 328 #beaker.session.table_name = db_session
326 329 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
327 330 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
328 331 #beaker.session.sa.pool_recycle = 3600
329 332 #beaker.session.sa.echo = false
330 333
331 334 beaker.session.key = rhodecode
332 335 beaker.session.secret = test-rc-uytcxaz
333 336 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
334 337
335 338 ## Secure encrypted cookie. Requires AES and AES python libraries
336 339 ## you must disable beaker.session.secret to use this
337 340 #beaker.session.encrypt_key = key_for_encryption
338 341 #beaker.session.validate_key = validation_key
339 342
340 343 ## sets session as invalid (also logging out the user) if it has not been
341 344 ## accessed for given amount of time in seconds
342 345 beaker.session.timeout = 2592000
343 346 beaker.session.httponly = true
344 347 ## Path to use for the cookie. Set to prefix if you use prefix middleware
345 348 #beaker.session.cookie_path = /custom_prefix
346 349
347 350 ## uncomment for https secure cookie
348 351 beaker.session.secure = false
349 352
350 353 ## auto save the session so that calling .save() is not required
351 354 beaker.session.auto = false
352 355
353 356 ## default cookie expiration time in seconds, set to `true` to set expire
354 357 ## at browser close
355 358 #beaker.session.cookie_expires = 3600
356 359
357 360 ###################################
358 361 ## SEARCH INDEXING CONFIGURATION ##
359 362 ###################################
360 363 ## Full text search indexer is available in rhodecode-tools under
361 364 ## `rhodecode-tools index` command
362 365
363 366 ## WHOOSH Backend, doesn't require additional services to run
364 367 ## it works well with a few dozen repos
365 368 search.module = rhodecode.lib.index.whoosh
366 369 search.location = %(here)s/data/index
367 370
368 371 ########################################
369 372 ### CHANNELSTREAM CONFIG ####
370 373 ########################################
371 374 ## channelstream enables persistent connections and live notification
372 375 ## in the system. It's also used by the chat system
373 376
374 377 channelstream.enabled = false
375 378
376 379 ## server address for channelstream server on the backend
377 380 channelstream.server = 127.0.0.1:9800
378 381 ## location of the channelstream server from outside world
379 382 ## use ws:// for http or wss:// for https. This address needs to be handled
380 383 ## by external HTTP server such as Nginx or Apache
381 384 ## see nginx/apache configuration examples in our docs
382 385 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
383 386 channelstream.secret = secret
384 387 channelstream.history.location = %(here)s/channelstream_history
385 388
386 389 ## Internal application path that Javascript uses to connect into.
387 390 ## If you use proxy-prefix the prefix should be added before /_channelstream
388 391 channelstream.proxy_path = /_channelstream
389 392
390 393
391 394 ###################################
392 395 ## APPENLIGHT CONFIG ##
393 396 ###################################
394 397
395 398 ## Appenlight is tailored to work with RhodeCode, see
396 399 ## http://appenlight.com for details how to obtain an account
397 400
398 401 ## appenlight integration enabled
399 402 appenlight = false
400 403
401 404 appenlight.server_url = https://api.appenlight.com
402 405 appenlight.api_key = YOUR_API_KEY
403 406 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
404 407
405 408 # used for JS client
406 409 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
407 410
408 411 ## TWEAK AMOUNT OF INFO SENT HERE
409 412
410 413 ## enables 404 error logging (default False)
411 414 appenlight.report_404 = false
412 415
413 416 ## time in seconds after request is considered being slow (default 1)
414 417 appenlight.slow_request_time = 1
415 418
416 419 ## record slow requests in application
417 420 ## (needs to be enabled for slow datastore recording and time tracking)
418 421 appenlight.slow_requests = true
419 422
420 423 ## enable hooking to application loggers
421 424 appenlight.logging = true
422 425
423 426 ## minimum log level for log capture
424 427 appenlight.logging.level = WARNING
425 428
426 429 ## send logs only from erroneous/slow requests
427 430 ## (saves API quota for intensive logging)
428 431 appenlight.logging_on_error = false
429 432
430 433 ## list of additional keywords that should be grabbed from environ object
431 434 ## can be string with comma separated list of words in lowercase
432 435 ## (by default client will always send following info:
433 436 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
434 437 ## start with HTTP*); this list can be extended with additional keywords here
435 438 appenlight.environ_keys_whitelist =
436 439
437 440 ## list of keywords that should be blanked from request object
438 441 ## can be string with comma separated list of words in lowercase
439 442 ## (by default client will always blank keys that contain following words
440 443 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
441 444 ## this list can be extended with additional keywords set here
442 445 appenlight.request_keys_blacklist =
443 446
444 447 ## list of namespaces that should be ignored when gathering log entries
445 448 ## can be string with comma separated list of namespaces
446 449 ## (by default the client ignores own entries: appenlight_client.client)
447 450 appenlight.log_namespace_blacklist =
448 451
449 452
450 453 ################################################################################
451 454 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
452 455 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
453 456 ## execute malicious code after an exception is raised. ##
454 457 ################################################################################
455 458 set debug = false
456 459
457 460
458 461 ##############
459 462 ## STYLING ##
460 463 ##############
461 464 debug_style = false
462 465
463 466 ###########################################
464 467 ### MAIN RHODECODE DATABASE CONFIG ###
465 468 ###########################################
466 469 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
467 470 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
468 471 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
469 472 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
470 473
471 474 # see sqlalchemy docs for other advanced settings
472 475
473 476 ## print the sql statements to output
474 477 sqlalchemy.db1.echo = false
475 478 ## recycle the connections after this amount of seconds
476 479 sqlalchemy.db1.pool_recycle = 3600
477 480 sqlalchemy.db1.convert_unicode = true
478 481
479 482 ## the number of connections to keep open inside the connection pool.
480 483 ## 0 indicates no limit
481 484 #sqlalchemy.db1.pool_size = 5
482 485
483 486 ## the number of connections to allow in connection pool "overflow", that is
484 487 ## connections that can be opened above and beyond the pool_size setting,
485 488 ## which defaults to five.
486 489 #sqlalchemy.db1.max_overflow = 10
487 490
488 491
489 492 ##################
490 493 ### VCS CONFIG ###
491 494 ##################
492 495 vcs.server.enable = true
493 496 vcs.server = localhost:9901
494 497
495 498 ## Web server connectivity protocol, responsible for web based VCS operations
496 499 ## Available protocols are:
497 500 ## `http` - use http-rpc backend (default)
498 501 vcs.server.protocol = http
499 502
500 503 ## Push/Pull operations protocol, available options are:
501 504 ## `http` - use http-rpc backend (default)
502 505 ## `vcsserver.scm_app` - internal app (EE only)
503 506 vcs.scm_app_implementation = http
504 507
505 508 ## Push/Pull operations hooks protocol, available options are:
506 509 ## `http` - use http-rpc backend (default)
507 510 vcs.hooks.protocol = http
508 511 vcs.hooks.host = 127.0.0.1
509 512
510 513 vcs.server.log_level = debug
511 514 ## Start VCSServer with this instance as a subprocess, useful for development
512 515 vcs.start_server = false
513 516
514 517 ## List of enabled VCS backends, available options are:
515 518 ## `hg` - mercurial
516 519 ## `git` - git
517 520 ## `svn` - subversion
518 521 vcs.backends = hg, git, svn
519 522
520 523 vcs.connection_timeout = 3600
521 524 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
522 525 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
523 526 #vcs.svn.compatible_version = pre-1.8-compatible
524 527
525 528
526 529 ############################################################
527 530 ### Subversion proxy support (mod_dav_svn) ###
528 531 ### Maps RhodeCode repo groups into SVN paths for Apache ###
529 532 ############################################################
530 533 ## Enable or disable the config file generation.
531 534 svn.proxy.generate_config = false
532 535 ## Generate config file with `SVNListParentPath` set to `On`.
533 536 svn.proxy.list_parent_path = true
534 537 ## Set location and file name of generated config file.
535 538 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
536 539 ## Used as a prefix to the `Location` block in the generated config file.
537 540 ## In most cases it should be set to `/`.
538 541 svn.proxy.location_root = /
539 542 ## Command to reload the mod dav svn configuration on change.
540 543 ## Example: `/etc/init.d/apache2 reload`
541 544 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
542 545 ## If the timeout expires before the reload command finishes, the command will
543 546 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
544 547 #svn.proxy.reload_timeout = 10
545 548
546 549 ############################################################
547 550 ### SSH Support Settings ###
548 551 ############################################################
549 552
550 553 ## Defines if the authorized_keys file should be written on any change of
551 554 ## user ssh keys, setting this to false also disables the possibility of adding
552 555 ## ssh keys for users from web interface.
553 556 ssh.generate_authorized_keyfile = true
554 557
555 558 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
556 559 # ssh.authorized_keys_ssh_opts =
557 560
558 561 ## File to generate the authorized keys together with options
559 562 ## It is possible to have multiple key files specified in `sshd_config` e.g.
560 563 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
561 564 ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode
562 565
563 566 ## Command to execute the SSH wrapper. The binary is available in the
564 567 ## rhodecode installation directory.
565 568 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
566 569 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
567 570
568 571 ## Allow shell when executing the ssh-wrapper command
569 572 ssh.wrapper_cmd_allow_shell = false
570 573
571 574 ## Enables logging and detailed output sent back to the client. Useful for
572 575 ## debugging, shouldn't be used in production.
573 576 ssh.enable_debug_logging = false
574 577
575 578 ## Paths to binary executables, by default they are the names, but we can
576 579 ## override them if we want to use a custom one
577 580 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
578 581 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
579 582 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
580 583
581 584
582 585 ## Dummy marker to add new entries after.
583 586 ## Add any custom entries below. Please don't remove.
584 587 custom.conf = 1
585 588
586 589
587 590 ################################
588 591 ### LOGGING CONFIGURATION ####
589 592 ################################
590 593 [loggers]
591 594 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper
592 595
593 596 [handlers]
594 597 keys = console, console_sql
595 598
596 599 [formatters]
597 600 keys = generic, color_formatter, color_formatter_sql
598 601
599 602 #############
600 603 ## LOGGERS ##
601 604 #############
602 605 [logger_root]
603 606 level = NOTSET
604 607 handlers = console
605 608
606 609 [logger_routes]
607 610 level = DEBUG
608 611 handlers =
609 612 qualname = routes.middleware
610 613 ## "level = DEBUG" logs the route matched and routing variables.
611 614 propagate = 1
612 615
613 616 [logger_beaker]
614 617 level = DEBUG
615 618 handlers =
616 619 qualname = beaker.container
617 620 propagate = 1
618 621
619 622 [logger_rhodecode]
620 623 level = DEBUG
621 624 handlers =
622 625 qualname = rhodecode
623 626 propagate = 1
624 627
625 628 [logger_sqlalchemy]
626 629 level = ERROR
627 630 handlers = console_sql
628 631 qualname = sqlalchemy.engine
629 632 propagate = 0
630 633
631 634 [logger_ssh_wrapper]
632 635 level = DEBUG
633 636 handlers =
634 637 qualname = ssh_wrapper
635 638 propagate = 1
636 639
637 640
638 641 ##############
639 642 ## HANDLERS ##
640 643 ##############
641 644
642 645 [handler_console]
643 646 class = StreamHandler
644 647 args = (sys.stderr,)
645 648 level = DEBUG
646 649 formatter = generic
647 650
648 651 [handler_console_sql]
649 652 class = StreamHandler
650 653 args = (sys.stderr,)
651 654 level = WARN
652 655 formatter = generic
653 656
654 657 ################
655 658 ## FORMATTERS ##
656 659 ################
657 660
658 661 [formatter_generic]
659 662 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
660 663 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
661 664 datefmt = %Y-%m-%d %H:%M:%S
662 665
663 666 [formatter_color_formatter]
664 667 class = rhodecode.lib.logging_formatter.ColorFormatter
665 668 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
666 669 datefmt = %Y-%m-%d %H:%M:%S
667 670
668 671 [formatter_color_formatter_sql]
669 672 class = rhodecode.lib.logging_formatter.ColorFormatterSql
670 673 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
671 674 datefmt = %Y-%m-%d %H:%M:%S