caches: don't use beaker for file caches anymore
marcink -
r2846:bbc96602 default
@@ -1,705 +1,738 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send an error email from this address
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
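## For illustration only (placeholder values, assuming a TLS capable relay),
## an enabled SMTP block could look like:
#smtp_server = smtp.example.com
#smtp_username = rhodecode@example.com
#smtp_password = secret
#smtp_port = 587
#smtp_use_tls = true
#smtp_auth = LOGIN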
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 127.0.0.1
47 47 port = 5000
48 48
49 49 ##################################
50 50 ## WAITRESS WSGI SERVER ##
51 51 ## Recommended for Development ##
52 52 ##################################
53 53
54 54 use = egg:waitress#main
55 55 ## number of worker threads
56 56 threads = 5
57 57 ## MAX BODY SIZE 100GB
58 58 max_request_body_size = 107374182400
59 59 ## Use poll instead of select, fixes file descriptors limits problems.
60 60 ## May not work on old windows systems.
61 61 asyncore_use_poll = true
62 62
63 63
64 64 ##########################
65 65 ## GUNICORN WSGI SERVER ##
66 66 ##########################
67 67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68 68
69 69 #use = egg:gunicorn#main
70 70 ## Sets the number of process workers. You must set `instance_id = *`
71 71 ## when this option is set to more than one worker, recommended
72 72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 73 ## The `instance_id = *` must be set in the [app:main] section below
74 74 #workers = 2
75 75 ## number of threads for each of the worker, must be set to 1 for gevent
76 76 ## generally recommended to be at 1
77 77 #threads = 1
78 78 ## process name
79 79 #proc_name = rhodecode
80 80 ## type of worker class, one of sync, gevent
81 81 ## for bigger setups, using a worker class other than sync is recommended
82 82 #worker_class = gevent
83 83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 84 #worker_connections = 10
85 85 ## max number of requests that worker will handle before being gracefully
86 86 ## restarted, could prevent memory leaks
87 87 #max_requests = 1000
88 88 #max_requests_jitter = 30
89 89 ## amount of time a worker can spend handling a request before it
90 90 ## gets killed and restarted. Set to 6hrs
91 91 #timeout = 21600
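## For illustration only, a 4-CPU machine following the formula above would run
## (2 * 4 + 1) = 9 workers:
#workers = 9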
92 92
93 93
94 94 ## prefix middleware for RhodeCode.
95 95 ## recommended when using proxy setup.
96 96 ## allows serving RhodeCode under a URL prefix on the server.
97 97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 98 ## And set your prefix like: `prefix = /custom_prefix`
99 99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 100 ## to make your cookies only work on prefix url
101 101 [filter:proxy-prefix]
102 102 use = egg:PasteDeploy#prefix
103 103 prefix = /
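## For illustration only, to serve RhodeCode under https://server.com/custom_prefix
## set the prefix here and enable `filter-with = proxy-prefix` in [app:main] below:
#prefix = /custom_prefix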
104 104
105 105 [app:main]
106 106 use = egg:rhodecode-enterprise-ce
107 107
108 108 ## enable proxy prefix middleware, defined above
109 109 #filter-with = proxy-prefix
110 110
111 111 # During development we want to have the debug toolbar enabled
112 112 pyramid.includes =
113 113 pyramid_debugtoolbar
114 114 rhodecode.lib.middleware.request_wrapper
115 115
116 116 pyramid.reload_templates = true
117 117
118 118 debugtoolbar.hosts = 0.0.0.0/0
119 119 debugtoolbar.exclude_prefixes =
120 120 /css
121 121 /fonts
122 122 /images
123 123 /js
124 124
125 125 ## RHODECODE PLUGINS ##
126 126 rhodecode.includes =
127 127 rhodecode.api
128 128
129 129
130 130 # api prefix url
131 131 rhodecode.api.url = /_admin/api
132 132
133 133
134 134 ## END RHODECODE PLUGINS ##
135 135
136 136 ## encryption key used to encrypt social plugin tokens,
137 137 ## remote_urls with credentials etc, if not set it defaults to
138 138 ## `beaker.session.secret`
139 139 #rhodecode.encrypted_values.secret =
140 140
141 141 ## decryption strict mode (enabled by default). It controls if decryption raises
142 142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 143 #rhodecode.encrypted_values.strict = false
144 144
145 145 ## return gzipped responses from Rhodecode (static files/application)
146 146 gzip_responses = false
147 147
148 148 ## autogenerate javascript routes file on startup
149 149 generate_js_files = false
150 150
151 151 ## Optional Languages
152 152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
153 153 lang = en
154 154
155 155 ## perform a full repository scan on each server start, this should be
156 156 ## set to false after first startup, to allow faster server restarts.
157 157 startup.import_repos = false
158 158
159 159 ## Uncomment and set this path to use archive download cache.
160 160 ## Once enabled, generated archives will be cached at this location
161 161 ## and served from the cache during subsequent requests for the same archive of
162 162 ## the repository.
163 163 #archive_cache_dir = /tmp/tarballcache
164 164
165 165 ## URL at which the application is running. This is used for bootstrapping
166 166 ## requests in context when no web request is available. Used in ishell, or
167 167 ## SSH calls. Set this for events to receive proper url for SSH calls.
168 168 app.base_url = http://rhodecode.local
169 169
170 170 ## change this to unique ID for security
171 171 app_instance_uuid = rc-production
172 172
173 173 ## cut off limit for large diffs (size in bytes). If overall diff size on
174 174 ## a commit or pull request exceeds this limit, the diff will be displayed
175 175 ## partially. E.g 512000 == 512Kb
176 176 cut_off_limit_diff = 512000
177 177
178 178 ## cut off limit for large files inside diffs (size in bytes). Each individual
179 179 ## file inside diff which exceeds this limit will be displayed partially.
180 180 ## E.g 128000 == 128Kb
181 181 cut_off_limit_file = 128000
182 182
183 183 ## use cache version of scm repo everywhere
184 184 vcs_full_cache = true
185 185
186 186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
187 187 ## Normally this is controlled by proper http flags sent from http server
188 188 force_https = false
189 189
190 190 ## use Strict-Transport-Security headers
191 191 use_htsts = false
192 192
193 193 ## git rev filter option, --all is the default filter, if you need to
194 194 ## hide all refs in changelog switch this to --branches --tags
195 195 git_rev_filter = --branches --tags
196 196
197 197 # Set to true if your repos are exposed using the dumb protocol
198 198 git_update_server_info = false
199 199
200 200 ## RSS/ATOM feed options
201 201 rss_cut_off_limit = 256000
202 202 rss_items_per_page = 10
203 203 rss_include_diff = false
204 204
205 205 ## gist URL alias, used to create nicer urls for gist. This should be an
206 206 ## url that does rewrites to _admin/gists/{gistid}.
207 207 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
208 208 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
209 209 gist_alias_url =
210 210
211 211 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
212 212 ## used for access.
213 213 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
214 214 ## came from the logged-in user who owns this authentication token.
215 215 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
216 216 ## authentication token. Such a view would only be accessible when used together
217 217 ## with this authentication token
218 218 ##
219 219 ## list of all views can be found under `/_admin/permissions/auth_token_access`
220 220 ## The list should be "," separated and on a single line.
221 221 ##
222 222 ## Most common views to enable:
223 223 # RepoCommitsView:repo_commit_download
224 224 # RepoCommitsView:repo_commit_patch
225 225 # RepoCommitsView:repo_commit_raw
226 226 # RepoCommitsView:repo_commit_raw@TOKEN
227 227 # RepoFilesView:repo_files_diff
228 228 # RepoFilesView:repo_archivefile
229 229 # RepoFilesView:repo_file_raw
230 230 # GistView:*
231 231 api_access_controllers_whitelist =
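## For illustration only, allowing raw file and archive downloads plus all gist
## views via auth tokens:
#api_access_controllers_whitelist = RepoFilesView:repo_file_raw, RepoFilesView:repo_archivefile, GistView:*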
232 232
233 233 ## default encoding used to convert from and to unicode
234 234 ## can also be a comma separated list of encodings in case of mixed encodings
235 235 default_encoding = UTF-8
236 236
237 237 ## instance-id prefix
238 238 ## a prefix key for this instance used for cache invalidation when running
239 239 ## multiple instances of rhodecode, make sure it's globally unique for
240 240 ## all running rhodecode instances. Leave empty if you don't use it
241 241 instance_id =
242 242
243 243 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
244 244 ## of an authentication plugin even if it is disabled by its settings.
245 245 ## This could be useful if you are unable to log in to the system due to broken
246 246 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
247 247 ## module to log in again and fix the settings.
248 248 ##
249 249 ## Available builtin plugin IDs (hash is part of the ID):
250 250 ## egg:rhodecode-enterprise-ce#rhodecode
251 251 ## egg:rhodecode-enterprise-ce#pam
252 252 ## egg:rhodecode-enterprise-ce#ldap
253 253 ## egg:rhodecode-enterprise-ce#jasig_cas
254 254 ## egg:rhodecode-enterprise-ce#headers
255 255 ## egg:rhodecode-enterprise-ce#crowd
256 256 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
257 257
258 258 ## alternative return HTTP header for failed authentication. Default HTTP
259 259 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
260 260 ## handling that, causing a series of failed authentication calls.
261 261 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
262 262 ## This will be served instead of the default 401 on bad authentication
263 263 auth_ret_code =
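## For illustration only, returning 403 as described above:
#auth_ret_code = 403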
264 264
265 265 ## use special detection method when serving auth_ret_code, instead of serving
266 266 ## ret_code directly, use 401 initially (which triggers the credentials prompt)
267 267 ## and then serve auth_ret_code to clients
268 268 auth_ret_code_detection = false
269 269
270 270 ## locking return code. When repository is locked return this HTTP code. 2XX
271 271 ## codes don't break the transactions while 4XX codes do
272 272 lock_ret_code = 423
273 273
274 274 ## allows changing the repository location in the settings page
275 275 allow_repo_location_change = true
276 276
277 277 ## allows setting up custom hooks in the settings page
278 278 allow_custom_hooks_settings = true
279 279
280 280 ## generated license token, go to the license page in RhodeCode settings to obtain
281 281 ## new token
282 282 license_token =
283 283
284 284 ## supervisor connection uri, for managing supervisor and logs.
285 285 supervisor.uri =
286 286 ## supervisord group name/id that this RC instance should handle
287 287 supervisor.group_id = dev
288 288
289 289 ## Display extended labs settings
290 290 labs_settings_active = true
291 291
292 292 ####################################
293 293 ### CELERY CONFIG ####
294 294 ####################################
295 295 ## run: /path/to/celery worker \
296 296 ## -E --beat --app rhodecode.lib.celerylib.loader \
297 297 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
298 298 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
299 299
300 300 use_celery = false
301 301
302 302 ## connection url to the message broker (default rabbitmq)
303 303 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
304 304
305 305 ## maximum tasks to execute before worker restart
306 306 celery.max_tasks_per_child = 100
307 307
308 308 ## tasks will never be sent to the queue, but executed locally instead.
309 309 celery.task_always_eager = false
310 310
311 #####################################
312 ### DOGPILE CACHE ####
313 #####################################
314 ## Default cache dir for caches. Putting this into a ramdisk
315 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
316 ## of space
317 cache_dir = /tmp/rcdev/data
318
319 ## cache settings for permission tree, auth TTL.
320 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
321 rc_cache.cache_perms.expiration_time = 300
322 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
323
324 ## redis backend with distributed locks
325 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
326 #rc_cache.cache_perms.expiration_time = 300
327 #rc_cache.cache_perms.arguments.host = localhost
328 #rc_cache.cache_perms.arguments.port = 6379
329 #rc_cache.cache_perms.arguments.db = 0
330 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
331 #rc_cache.cache_perms.arguments.distributed_lock = true
332
333
334 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
335 rc_cache.cache_repo.expiration_time = 2592000
336 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
337
338 ## redis backend with distributed locks
339 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
340 #rc_cache.cache_repo.expiration_time = 2592000
341 ## this needs to be greater than expiration_time
342 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
343 #rc_cache.cache_repo.arguments.host = localhost
344 #rc_cache.cache_repo.arguments.port = 6379
345 #rc_cache.cache_repo.arguments.db = 1
346 #rc_cache.cache_repo.arguments.distributed_lock = true
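## For illustration only (hypothetical paths), keeping the file based caches on
## a ramdisk as suggested above:
#cache_dir = /tmpfs/data_ramdisk
#rc_cache.cache_perms.arguments.filename = /tmpfs/data_ramdisk/rc_cache_1
#rc_cache.cache_repo.arguments.filename = /tmpfs/data_ramdisk/rc_cache_2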
347
348
311 349 ####################################
312 350 ### BEAKER CACHE ####
313 351 ####################################
314 # default cache dir for templates. Putting this into a ramdisk
315 ## can boost performance, eg. %(here)s/data_ramdisk
316 cache_dir = %(here)s/data
317 352
318 353 ## locking and default file storage for Beaker. Putting this into a ramdisk
319 354 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
320 355 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
321 356 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
322 357
323 beaker.cache.regions = long_term, sql_cache_short, repo_cache_long
358 beaker.cache.regions = long_term, sql_cache_short
324 359
325 beaker.cache.long_term.type = memory
326 beaker.cache.long_term.expire = 36000
360 beaker.cache.long_term.type = memorylru_base
361 beaker.cache.long_term.expire = 172800
327 362 beaker.cache.long_term.key_length = 256
328 363
329 beaker.cache.sql_cache_short.type = memory
364 beaker.cache.sql_cache_short.type = memorylru_base
330 365 beaker.cache.sql_cache_short.expire = 10
331 366 beaker.cache.sql_cache_short.key_length = 256
332 367
333 beaker.cache.repo_cache_long.type = memorylru_base
334 beaker.cache.repo_cache_long.max_items = 4096
335 beaker.cache.repo_cache_long.expire = 2592000
336
337 ## default is memorylru_base cache, configure only if required
338 ## using multi-node or multi-worker setup
339 #beaker.cache.repo_cache_long.type = ext:memcached
340 #beaker.cache.repo_cache_long.url = localhost:11211
341 #beaker.cache.repo_cache_long.expire = 1209600
342 #beaker.cache.repo_cache_long.key_length = 256
343 368
344 369 ####################################
345 370 ### BEAKER SESSION ####
346 371 ####################################
347 372
348 373 ## .session.type is the type of storage used for the session; currently allowed
349 ## types are file, ext:memcached, ext:database, and memory (default).
374 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
350 375 beaker.session.type = file
351 beaker.session.data_dir = %(here)s/data/sessions/data
376 beaker.session.data_dir = %(here)s/data/sessions
352 377
353 378 ## db based session, fast, and allows easy management over logged in users
354 379 #beaker.session.type = ext:database
355 380 #beaker.session.table_name = db_session
356 381 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
357 382 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
358 383 #beaker.session.sa.pool_recycle = 3600
359 384 #beaker.session.sa.echo = false
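## memcached based session; illustrative sketch only, assuming beaker's standard
## ext:memcached backend and its `url` option, with memcached running locally:
#beaker.session.type = ext:memcached
#beaker.session.url = localhost:11211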
360 385
361 386 beaker.session.key = rhodecode
362 387 beaker.session.secret = develop-rc-uytcxaz
363 388 beaker.session.lock_dir = %(here)s/data/sessions/lock
364 389
365 390 ## Secure encrypted cookie. Requires AES and AES python libraries
366 391 ## you must disable beaker.session.secret to use this
367 392 #beaker.session.encrypt_key = key_for_encryption
368 393 #beaker.session.validate_key = validation_key
369 394
370 395 ## sets session as invalid (also logging out the user) if it has not been
371 396 ## accessed for a given amount of time in seconds
372 397 beaker.session.timeout = 2592000
373 398 beaker.session.httponly = true
374 399 ## Path to use for the cookie. Set to prefix if you use prefix middleware
375 400 #beaker.session.cookie_path = /custom_prefix
376 401
377 402 ## uncomment for https secure cookie
378 403 beaker.session.secure = false
379 404
380 405 ## auto save the session so you do not have to call .save()
381 406 beaker.session.auto = false
382 407
383 408 ## default cookie expiration time in seconds, set to `true` to expire
384 409 ## at browser close
385 410 #beaker.session.cookie_expires = 3600
386 411
387 412 ###################################
388 413 ## SEARCH INDEXING CONFIGURATION ##
389 414 ###################################
390 415 ## Full text search indexer is available in rhodecode-tools under
391 416 ## `rhodecode-tools index` command
392 417
393 418 ## WHOOSH Backend, doesn't require additional services to run
394 419 ## it works well with a few dozen repos
395 420 search.module = rhodecode.lib.index.whoosh
396 421 search.location = %(here)s/data/index
397 422
398 423 ########################################
399 424 ### CHANNELSTREAM CONFIG ####
400 425 ########################################
401 426 ## channelstream enables persistent connections and live notifications
402 427 ## in the system. It's also used by the chat system
403 428 channelstream.enabled = false
404 429
405 430 ## server address for channelstream server on the backend
406 431 channelstream.server = 127.0.0.1:9800
407 432
408 433 ## location of the channelstream server from outside world
409 434 ## use ws:// for http or wss:// for https. This address needs to be handled
410 435 ## by external HTTP server such as Nginx or Apache
411 436 ## see nginx/apache configuration examples in our docs
412 437 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
413 438 channelstream.secret = secret
414 439 channelstream.history.location = %(here)s/channelstream_history
415 440
416 441 ## Internal application path that Javascript uses to connect into.
417 442 ## If you use proxy-prefix the prefix should be added before /_channelstream
418 443 channelstream.proxy_path = /_channelstream
419 444
420 445
421 446 ###################################
422 447 ## APPENLIGHT CONFIG ##
423 448 ###################################
424 449
425 450 ## Appenlight is tailored to work with RhodeCode, see
426 451 ## http://appenlight.com for details on how to obtain an account
427 452
428 453 ## appenlight integration enabled
429 454 appenlight = false
430 455
431 456 appenlight.server_url = https://api.appenlight.com
432 457 appenlight.api_key = YOUR_API_KEY
433 458 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
434 459
435 460 # used for JS client
436 461 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
437 462
438 463 ## TWEAK AMOUNT OF INFO SENT HERE
439 464
440 465 ## enables 404 error logging (default False)
441 466 appenlight.report_404 = false
442 467
443 468 ## time in seconds after request is considered being slow (default 1)
444 469 appenlight.slow_request_time = 1
445 470
446 471 ## record slow requests in application
447 472 ## (needs to be enabled for slow datastore recording and time tracking)
448 473 appenlight.slow_requests = true
449 474
450 475 ## enable hooking to application loggers
451 476 appenlight.logging = true
452 477
453 478 ## minimum log level for log capture
454 479 appenlight.logging.level = WARNING
455 480
456 481 ## send logs only from erroneous/slow requests
457 482 ## (saves API quota for intensive logging)
458 483 appenlight.logging_on_error = false
459 484
460 485 ## list of additional keywords that should be grabbed from the environ object
461 486 ## can be string with comma separated list of words in lowercase
462 487 ## (by default client will always send following info:
463 488 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
464 489 ## start with HTTP*). This list can be extended with additional keywords here
465 490 appenlight.environ_keys_whitelist =
466 491
467 492 ## list of keywords that should be blanked from request object
468 493 ## can be string with comma separated list of words in lowercase
469 494 ## (by default client will always blank keys that contain following words
470 495 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
471 496 ## this list can be extended with additional keywords set here
472 497 appenlight.request_keys_blacklist =
473 498
474 499 ## list of namespaces that should be ignored when gathering log entries
475 500 ## can be string with comma separated list of namespaces
476 501 ## (by default the client ignores own entries: appenlight_client.client)
477 502 appenlight.log_namespace_blacklist =
478 503
479 504
480 505 ################################################################################
481 506 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
482 507 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
483 508 ## execute malicious code after an exception is raised. ##
484 509 ################################################################################
485 510 #set debug = false
486 511
487 512
488 513 ##############
489 514 ## STYLING ##
490 515 ##############
491 516 debug_style = true
492 517
493 518 ###########################################
494 519 ### MAIN RHODECODE DATABASE CONFIG ###
495 520 ###########################################
496 521 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
497 522 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
498 523 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
524 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
525
499 526 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
500 527
501 528 # see sqlalchemy docs for other advanced settings
502 529
503 530 ## print the sql statements to output
504 531 sqlalchemy.db1.echo = false
505 532 ## recycle the connections after this amount of seconds
506 533 sqlalchemy.db1.pool_recycle = 3600
507 534 sqlalchemy.db1.convert_unicode = true
508 535
509 536 ## the number of connections to keep open inside the connection pool.
510 537 ## 0 indicates no limit
511 538 #sqlalchemy.db1.pool_size = 5
512 539
513 540 ## the number of connections to allow in connection pool "overflow", that is
514 541 ## connections that can be opened above and beyond the pool_size setting,
515 542 ## which defaults to five.
516 543 #sqlalchemy.db1.max_overflow = 10
517 544
545 ## Connection check ping, used to detect broken database connections
546 ## could be enabled to better handle cases if MySQL has gone away errors
547 #sqlalchemy.db1.ping_connection = true
518 548
519 549 ##################
520 550 ### VCS CONFIG ###
521 551 ##################
522 552 vcs.server.enable = true
523 553 vcs.server = localhost:9900
524 554
525 555 ## Web server connectivity protocol, responsible for web based VCS operations
526 556 ## Available protocols are:
527 557 ## `http` - use http-rpc backend (default)
528 558 vcs.server.protocol = http
529 559
530 560 ## Push/Pull operations protocol, available options are:
531 561 ## `http` - use http-rpc backend (default)
532 562 ##
533 563 vcs.scm_app_implementation = http
534 564
535 565 ## Push/Pull operations hooks protocol, available options are:
536 566 ## `http` - use http-rpc backend (default)
537 567 vcs.hooks.protocol = http
538 568
539 569 ## Host on which this instance is listening for hooks. If vcsserver is in another location
540 570 ## this should be adjusted.
541 571 vcs.hooks.host = 127.0.0.1
542 572
543 573 vcs.server.log_level = debug
544 574 ## Start VCSServer with this instance as a subprocess, useful for development
545 575 vcs.start_server = false
546 576
547 577 ## List of enabled VCS backends, available options are:
548 578 ## `hg` - mercurial
549 579 ## `git` - git
550 580 ## `svn` - subversion
551 581 vcs.backends = hg, git, svn
552 582
553 583 vcs.connection_timeout = 3600
554 584 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
555 585 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
556 586 #vcs.svn.compatible_version = pre-1.8-compatible
557 587
558 588
559 589 ############################################################
560 590 ### Subversion proxy support (mod_dav_svn) ###
561 591 ### Maps RhodeCode repo groups into SVN paths for Apache ###
562 592 ############################################################
563 593 ## Enable or disable the config file generation.
564 594 svn.proxy.generate_config = false
565 595 ## Generate config file with `SVNListParentPath` set to `On`.
566 596 svn.proxy.list_parent_path = true
567 597 ## Set location and file name of generated config file.
568 598 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
569 599 ## alternative mod_dav config template. This needs to be a mako template
570 600 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
571 601 ## Used as a prefix to the `Location` block in the generated config file.
572 602 ## In most cases it should be set to `/`.
573 603 svn.proxy.location_root = /
574 604 ## Command to reload the mod dav svn configuration on change.
575 605 ## Example: `/etc/init.d/apache2 reload`
576 606 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
577 607 ## If the timeout expires before the reload command finishes, the command will
578 608 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
579 609 #svn.proxy.reload_timeout = 10
580 610
581 611 ############################################################
582 612 ### SSH Support Settings ###
583 613 ############################################################
584 614
585 615 ## Defines if a custom authorized_keys file should be created and written on
586 616 ## any change of user SSH keys. Setting this to false also disables the possibility
587 617 ## of adding SSH keys by users from the web interface. Super admins can still
588 618 ## manage SSH Keys.
589 619 ssh.generate_authorized_keyfile = false
590 620
591 621 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
592 622 # ssh.authorized_keys_ssh_opts =
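## For example, the default stated above spelled out explicitly:
# ssh.authorized_keys_ssh_opts = no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding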
593 623
594 624 ## Path to the authorized_keys file where the generated entries are placed.
595 625 ## It is possible to have multiple key files specified in `sshd_config` e.g.
596 626 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
597 627 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
598 628
599 629 ## Command to execute the SSH wrapper. The binary is available in the
600 630 ## rhodecode installation directory.
601 631 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
602 632 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
603 633
604 634 ## Allow shell when executing the ssh-wrapper command
605 635 ssh.wrapper_cmd_allow_shell = false
606 636
607 637 ## Enables logging and detailed output sent back to the client during SSH
608 638 ## operations. Useful for debugging, shouldn't be used in production.
609 639 ssh.enable_debug_logging = true
610 640
611 641 ## Paths to binary executables; by default they are just the binary names, but we can
612 642 ## override them if we want to use custom ones
613 643 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
614 644 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
615 645 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
616 646
617 647
618 648 ## Dummy marker to add new entries after.
619 649 ## Add any custom entries below. Please don't remove.
620 650 custom.conf = 1
621 651
622 652
623 653 ################################
624 654 ### LOGGING CONFIGURATION ####
625 655 ################################
626 656 [loggers]
627 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
657 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
628 658
629 659 [handlers]
630 660 keys = console, console_sql
631 661
632 662 [formatters]
633 663 keys = generic, color_formatter, color_formatter_sql
634 664
635 665 #############
636 666 ## LOGGERS ##
637 667 #############
638 668 [logger_root]
639 669 level = NOTSET
640 670 handlers = console
641 671
642 672 [logger_sqlalchemy]
643 673 level = INFO
644 674 handlers = console_sql
645 675 qualname = sqlalchemy.engine
646 676 propagate = 0
647 677
648 678 [logger_beaker]
649 679 level = DEBUG
650 680 handlers =
651 681 qualname = beaker.container
652 682 propagate = 1
653 683
654 684 [logger_rhodecode]
655 685 level = DEBUG
656 686 handlers =
657 687 qualname = rhodecode
658 688 propagate = 1
659 689
660 690 [logger_ssh_wrapper]
661 691 level = DEBUG
662 692 handlers =
663 693 qualname = ssh_wrapper
664 694 propagate = 1
665 695
666 696 [logger_celery]
667 697 level = DEBUG
668 698 handlers =
669 699 qualname = celery
670 700
671 701
672 702 ##############
673 703 ## HANDLERS ##
674 704 ##############
675 705
676 706 [handler_console]
677 707 class = StreamHandler
678 708 args = (sys.stderr, )
679 709 level = DEBUG
680 710 formatter = color_formatter
681 711
682 712 [handler_console_sql]
713 # "level = DEBUG" logs SQL queries and results.
714 # "level = INFO" logs SQL queries.
715 # "level = WARN" logs neither. (Recommended for production systems.)
683 716 class = StreamHandler
684 717 args = (sys.stderr, )
685 level = DEBUG
718 level = WARN
686 719 formatter = color_formatter_sql
687 720
688 721 ################
689 722 ## FORMATTERS ##
690 723 ################
691 724
692 725 [formatter_generic]
693 726 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
694 727 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
695 728 datefmt = %Y-%m-%d %H:%M:%S
696 729
697 730 [formatter_color_formatter]
698 731 class = rhodecode.lib.logging_formatter.ColorFormatter
699 732 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
700 733 datefmt = %Y-%m-%d %H:%M:%S
701 734
702 735 [formatter_color_formatter_sql]
703 736 class = rhodecode.lib.logging_formatter.ColorFormatterSql
704 737 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
705 738 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,674 +1,707 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, send an error email from this address
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be sent in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 127.0.0.1
47 47 port = 5000
48 48
49 49 ##################################
50 50 ## WAITRESS WSGI SERVER ##
51 51 ## Recommended for Development ##
52 52 ##################################
53 53
54 54 #use = egg:waitress#main
55 55 ## number of worker threads
56 56 #threads = 5
57 57 ## MAX BODY SIZE 100GB
58 58 #max_request_body_size = 107374182400
59 59 ## Use poll instead of select, fixes file descriptors limits problems.
60 60 ## May not work on old windows systems.
61 61 #asyncore_use_poll = true
62 62
63 63
64 64 ##########################
65 65 ## GUNICORN WSGI SERVER ##
66 66 ##########################
67 67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68 68
69 69 use = egg:gunicorn#main
70 70 ## Sets the number of process workers. You must set `instance_id = *`
71 71 ## when this option is set to more than one worker, recommended
72 72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 73 ## The `instance_id = *` must be set in the [app:main] section below
74 74 workers = 2
75 75 ## number of threads for each of the worker, must be set to 1 for gevent
76 76 ## generally recommended to be at 1
77 77 #threads = 1
78 78 ## process name
79 79 proc_name = rhodecode
80 80 ## type of worker class, one of sync, gevent
81 81 ## for bigger setups, using a worker class other than sync is recommended
82 82 worker_class = gevent
83 83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 84 #worker_connections = 10
85 85 ## max number of requests that worker will handle before being gracefully
86 86 ## restarted, could prevent memory leaks
87 87 max_requests = 1000
88 88 max_requests_jitter = 30
89 89 ## amount of time a worker can spend handling a request before it
90 90 ## gets killed and restarted. Set to 6hrs
91 91 timeout = 21600
92 92
93 93
94 94 ## prefix middleware for RhodeCode.
95 95 ## recommended when using proxy setup.
96 96 ## allows serving RhodeCode under a URL prefix on the server.
97 97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 98 ## And set your prefix like: `prefix = /custom_prefix`
99 99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 100 ## to make your cookies only work on prefix url
101 101 [filter:proxy-prefix]
102 102 use = egg:PasteDeploy#prefix
103 103 prefix = /
104 104
105 105 [app:main]
106 106 use = egg:rhodecode-enterprise-ce
107 107
108 108 ## enable proxy prefix middleware, defined above
109 109 #filter-with = proxy-prefix
110 110
111 111 ## encryption key used to encrypt social plugin tokens,
112 112 ## remote_urls with credentials etc, if not set it defaults to
113 113 ## `beaker.session.secret`
114 114 #rhodecode.encrypted_values.secret =
115 115
116 116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 118 #rhodecode.encrypted_values.strict = false
119 119
120 120 ## return gzipped responses from Rhodecode (static files/application)
121 121 gzip_responses = false
122 122
123 123 ## autogenerate javascript routes file on startup
124 124 generate_js_files = false
125 125
126 126 ## Optional Languages
127 127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 128 lang = en
129 129
130 130 ## perform a full repository scan on each server start, this should be
131 131 ## set to false after first startup, to allow faster server restarts.
132 132 startup.import_repos = false
133 133
134 134 ## Uncomment and set this path to use archive download cache.
135 135 ## Once enabled, generated archives will be cached at this location
136 136 ## and served from the cache during subsequent requests for the same archive of
137 137 ## the repository.
138 138 #archive_cache_dir = /tmp/tarballcache
139 139
140 140 ## URL at which the application is running. This is used for bootstrapping
141 141 ## requests in context when no web request is available. Used in ishell, or
142 142 ## SSH calls. Set this for events to receive proper url for SSH calls.
143 143 app.base_url = http://rhodecode.local
144 144
145 145 ## change this to unique ID for security
146 146 app_instance_uuid = rc-production
147 147
148 148 ## cut off limit for large diffs (size in bytes). If overall diff size on
149 149 ## a commit or pull request exceeds this limit, the diff will be displayed
150 150 ## partially. E.g 512000 == 512Kb
151 151 cut_off_limit_diff = 512000
152 152
153 153 ## cut off limit for large files inside diffs (size in bytes). Each individual
154 154 ## file inside diff which exceeds this limit will be displayed partially.
155 155 ## E.g 128000 == 128Kb
156 156 cut_off_limit_file = 128000
157 157
158 158 ## use cache version of scm repo everywhere
159 159 vcs_full_cache = true
160 160
161 161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
162 162 ## Normally this is controlled by proper http flags sent from http server
163 163 force_https = false
164 164
165 165 ## use Strict-Transport-Security headers
166 166 use_htsts = false
167 167
168 168 ## git rev filter option, --all is the default filter, if you need to
169 169 ## hide all refs in changelog switch this to --branches --tags
170 170 git_rev_filter = --branches --tags
171 171
172 172 # Set to true if your repos are exposed using the dumb protocol
173 173 git_update_server_info = false
174 174
175 175 ## RSS/ATOM feed options
176 176 rss_cut_off_limit = 256000
177 177 rss_items_per_page = 10
178 178 rss_include_diff = false
179 179
180 180 ## gist URL alias, used to create nicer urls for gist. This should be an
181 181 ## url that does rewrites to _admin/gists/{gistid}.
182 182 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
183 183 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
184 184 gist_alias_url =
185 185
186 186 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
187 187 ## used for access.
188 188 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
189 189 ## came from the logged-in user who owns this authentication token.
190 190 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
191 191 ## authentication token. Such a view would only be accessible when used together
192 192 ## with this authentication token
193 193 ##
194 194 ## list of all views can be found under `/_admin/permissions/auth_token_access`
195 195 ## The list should be "," separated and on a single line.
196 196 ##
197 197 ## Most common views to enable:
198 198 # RepoCommitsView:repo_commit_download
199 199 # RepoCommitsView:repo_commit_patch
200 200 # RepoCommitsView:repo_commit_raw
201 201 # RepoCommitsView:repo_commit_raw@TOKEN
202 202 # RepoFilesView:repo_files_diff
203 203 # RepoFilesView:repo_archivefile
204 204 # RepoFilesView:repo_file_raw
205 205 # GistView:*
206 206 api_access_controllers_whitelist =
207 207
208 208 ## default encoding used to convert from and to unicode
209 209 ## can also be a comma separated list of encodings in case of mixed encodings
210 210 default_encoding = UTF-8
211 211
212 212 ## instance-id prefix
213 213 ## a prefix key for this instance used for cache invalidation when running
214 214 ## multiple instances of rhodecode, make sure it's globally unique for
215 215 ## all running rhodecode instances. Leave empty if you don't use it
216 216 instance_id =
217 217
218 218 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
219 219 ## of an authentication plugin even if it is disabled by its settings.
220 220 ## This could be useful if you are unable to log in to the system due to broken
221 221 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
222 222 ## module to log in again and fix the settings.
223 223 ##
224 224 ## Available builtin plugin IDs (hash is part of the ID):
225 225 ## egg:rhodecode-enterprise-ce#rhodecode
226 226 ## egg:rhodecode-enterprise-ce#pam
227 227 ## egg:rhodecode-enterprise-ce#ldap
228 228 ## egg:rhodecode-enterprise-ce#jasig_cas
229 229 ## egg:rhodecode-enterprise-ce#headers
230 230 ## egg:rhodecode-enterprise-ce#crowd
231 231 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
232 232
233 233 ## alternative return HTTP header for failed authentication. Default HTTP
234 234 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
235 235 ## handling that, causing a series of failed authentication calls.
236 236 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
237 237 ## This will be served instead of the default 401 on bad authentication
238 238 auth_ret_code =
239 239
240 240 ## use special detection method when serving auth_ret_code, instead of serving
241 241 ## ret_code directly, use 401 initially (which triggers the credentials prompt)
242 242 ## and then serve auth_ret_code to clients
243 243 auth_ret_code_detection = false
244 244
245 245 ## locking return code. When repository is locked return this HTTP code. 2XX
246 246 ## codes don't break the transactions while 4XX codes do
247 247 lock_ret_code = 423
248 248
249 249 ## allows changing the repository location in the settings page
250 250 allow_repo_location_change = true
251 251
252 252 ## allows setting up custom hooks in the settings page
253 253 allow_custom_hooks_settings = true
254 254
255 255 ## generated license token, go to the license page in RhodeCode settings to obtain
256 256 ## new token
257 257 license_token =
258 258
259 259 ## supervisor connection uri, for managing supervisor and logs.
260 260 supervisor.uri =
261 261 ## supervisord group name/id that this RC instance should handle
262 262 supervisor.group_id = prod
263 263
264 264 ## Display extended labs settings
265 265 labs_settings_active = true
266 266
267 267 ####################################
268 268 ### CELERY CONFIG ####
269 269 ####################################
270 270 ## run: /path/to/celery worker \
271 271 ## -E --beat --app rhodecode.lib.celerylib.loader \
272 272 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
273 273 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
274 274
275 275 use_celery = false
276 276
277 277 ## connection url to the message broker (default rabbitmq)
278 278 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
279 279
280 280 ## maximum tasks to execute before worker restart
281 281 celery.max_tasks_per_child = 100
282 282
283 283 ## tasks will never be sent to the queue, but executed locally instead.
284 284 celery.task_always_eager = false
285 285
286 #####################################
287 ### DOGPILE CACHE ####
288 #####################################
289 ## Default cache dir for caches. Putting this into a ramdisk
290 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
291 ## of space
292 cache_dir = /tmp/rcdev/data
293
294 ## cache settings for permission tree, auth TTL.
295 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
296 rc_cache.cache_perms.expiration_time = 300
297 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
298
299 ## redis backend with distributed locks
300 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
301 #rc_cache.cache_perms.expiration_time = 300
302 #rc_cache.cache_perms.arguments.host = localhost
303 #rc_cache.cache_perms.arguments.port = 6379
304 #rc_cache.cache_perms.arguments.db = 0
305 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
306 #rc_cache.cache_perms.arguments.distributed_lock = true
307
308
309 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
310 rc_cache.cache_repo.expiration_time = 2592000
311 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
312
313 ## redis backend with distributed locks
314 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
315 #rc_cache.cache_repo.expiration_time = 2592000
316 ## this needs to be greater than expiration_time
317 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
318 #rc_cache.cache_repo.arguments.host = localhost
319 #rc_cache.cache_repo.arguments.port = 6379
320 #rc_cache.cache_repo.arguments.db = 1
321 #rc_cache.cache_repo.arguments.distributed_lock = true
322
323
286 324 ####################################
287 325 ### BEAKER CACHE ####
288 326 ####################################
289 # default cache dir for templates. Putting this into a ramdisk
290 ## can boost performance, eg. %(here)s/data_ramdisk
291 cache_dir = %(here)s/data
292 327
293 328 ## locking and default file storage for Beaker. Putting this into a ramdisk
294 329 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
295 330 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
296 331 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
297 332
298 beaker.cache.regions = long_term, sql_cache_short, repo_cache_long
333 beaker.cache.regions = long_term, sql_cache_short
299 334
300 335 beaker.cache.long_term.type = memory
301 beaker.cache.long_term.expire = 36000
336 beaker.cache.long_term.expire = 172800
302 337 beaker.cache.long_term.key_length = 256
303 338
304 339 beaker.cache.sql_cache_short.type = memory
305 340 beaker.cache.sql_cache_short.expire = 10
306 341 beaker.cache.sql_cache_short.key_length = 256
307 342
308 beaker.cache.repo_cache_long.type = memorylru_base
309 beaker.cache.repo_cache_long.max_items = 4096
310 beaker.cache.repo_cache_long.expire = 2592000
311
312 ## default is memorylru_base cache, configure only if required
313 ## using multi-node or multi-worker setup
314 #beaker.cache.repo_cache_long.type = ext:memcached
315 #beaker.cache.repo_cache_long.url = localhost:11211
316 #beaker.cache.repo_cache_long.expire = 1209600
317 #beaker.cache.repo_cache_long.key_length = 256
318 343
319 344 ####################################
320 345 ### BEAKER SESSION ####
321 346 ####################################
322 347
323 348 ## .session.type is the type of storage used for the session; currently allowed
324 ## types are file, ext:memcached, ext:database, and memory (default).
349 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
325 350 beaker.session.type = file
326 beaker.session.data_dir = %(here)s/data/sessions/data
351 beaker.session.data_dir = %(here)s/data/sessions
327 352
328 353 ## db based session, fast, and allows easy management over logged in users
329 354 #beaker.session.type = ext:database
330 355 #beaker.session.table_name = db_session
331 356 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
332 357 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
333 358 #beaker.session.sa.pool_recycle = 3600
334 359 #beaker.session.sa.echo = false
335 360
336 361 beaker.session.key = rhodecode
337 362 beaker.session.secret = production-rc-uytcxaz
338 363 beaker.session.lock_dir = %(here)s/data/sessions/lock
339 364
340 365 ## Secure encrypted cookie. Requires AES and AES python libraries
341 366 ## you must disable beaker.session.secret to use this
342 367 #beaker.session.encrypt_key = key_for_encryption
343 368 #beaker.session.validate_key = validation_key
344 369
345 370 ## sets session as invalid (also logging out the user) if it has not been
346 371 ## accessed for a given amount of time in seconds
347 372 beaker.session.timeout = 2592000
348 373 beaker.session.httponly = true
349 374 ## Path to use for the cookie. Set to prefix if you use prefix middleware
350 375 #beaker.session.cookie_path = /custom_prefix
351 376
352 377 ## uncomment for https secure cookie
353 378 beaker.session.secure = false
354 379
355 380 ## auto save the session so you do not have to call .save()
356 381 beaker.session.auto = false
357 382
358 383 ## default cookie expiration time in seconds, set to `true` to expire
359 384 ## at browser close
360 385 #beaker.session.cookie_expires = 3600
361 386
362 387 ###################################
363 388 ## SEARCH INDEXING CONFIGURATION ##
364 389 ###################################
365 390 ## Full text search indexer is available in rhodecode-tools under
366 391 ## `rhodecode-tools index` command
367 392
368 393 ## WHOOSH Backend, doesn't require additional services to run
369 394 ## it works well with a few dozen repos
370 395 search.module = rhodecode.lib.index.whoosh
371 396 search.location = %(here)s/data/index
372 397
373 398 ########################################
374 399 ### CHANNELSTREAM CONFIG ####
375 400 ########################################
376 401 ## channelstream enables persistent connections and live notifications
377 402 ## in the system. It's also used by the chat system
378 403 channelstream.enabled = false
379 404
380 405 ## server address for channelstream server on the backend
381 406 channelstream.server = 127.0.0.1:9800
382 407
383 408 ## location of the channelstream server from outside world
384 409 ## use ws:// for http or wss:// for https. This address needs to be handled
385 410 ## by external HTTP server such as Nginx or Apache
386 411 ## see nginx/apache configuration examples in our docs
387 412 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
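## For illustration only, when the site is served over https use wss://:
#channelstream.ws_url = wss://rhodecode.yourserver.com/_channelstream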
388 413 channelstream.secret = secret
389 414 channelstream.history.location = %(here)s/channelstream_history
390 415
391 416 ## Internal application path that Javascript uses to connect into.
392 417 ## If you use proxy-prefix the prefix should be added before /_channelstream
393 418 channelstream.proxy_path = /_channelstream
394 419
395 420
396 421 ###################################
397 422 ## APPENLIGHT CONFIG ##
398 423 ###################################
399 424
400 425 ## Appenlight is tailored to work with RhodeCode, see
401 426 ## http://appenlight.com for details on how to obtain an account
402 427
403 428 ## appenlight integration enabled
404 429 appenlight = false
405 430
406 431 appenlight.server_url = https://api.appenlight.com
407 432 appenlight.api_key = YOUR_API_KEY
408 433 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
409 434
410 435 # used for JS client
411 436 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
412 437
413 438 ## TWEAK AMOUNT OF INFO SENT HERE
414 439
415 440 ## enables 404 error logging (default False)
416 441 appenlight.report_404 = false
417 442
418 443 ## time in seconds after request is considered being slow (default 1)
419 444 appenlight.slow_request_time = 1
420 445
421 446 ## record slow requests in application
422 447 ## (needs to be enabled for slow datastore recording and time tracking)
423 448 appenlight.slow_requests = true
424 449
425 450 ## enable hooking to application loggers
426 451 appenlight.logging = true
427 452
428 453 ## minimum log level for log capture
429 454 appenlight.logging.level = WARNING
430 455
431 456 ## send logs only from erroneous/slow requests
432 457 ## (saves API quota for intensive logging)
433 458 appenlight.logging_on_error = false
434 459
435 460 ## list of additional keywords that should be grabbed from the environ object
436 461 ## can be string with comma separated list of words in lowercase
437 462 ## (by default client will always send following info:
438 463 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
439 464 ## start with HTTP*). This list can be extended with additional keywords here
440 465 appenlight.environ_keys_whitelist =
441 466
442 467 ## list of keywords that should be blanked from request object
443 468 ## can be string with comma separated list of words in lowercase
444 469 ## (by default client will always blank keys that contain following words
445 470 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
446 471 ## this list can be extended with additional keywords set here
447 472 appenlight.request_keys_blacklist =
448 473
449 474 ## list of namespaces that should be ignored when gathering log entries
450 475 ## can be string with comma separated list of namespaces
451 476 ## (by default the client ignores own entries: appenlight_client.client)
452 477 appenlight.log_namespace_blacklist =
453 478
454 479
455 480 ################################################################################
456 481 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
457 482 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
458 483 ## execute malicious code after an exception is raised. ##
459 484 ################################################################################
460 485 set debug = false
461 486
462 487
463 488 ###########################################
464 489 ### MAIN RHODECODE DATABASE CONFIG ###
465 490 ###########################################
466 491 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
467 492 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
468 493 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
494 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
495
469 496 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
470 497
471 498 # see sqlalchemy docs for other advanced settings
472 499
473 500 ## print the sql statements to output
474 501 sqlalchemy.db1.echo = false
475 502 ## recycle the connections after this amount of seconds
476 503 sqlalchemy.db1.pool_recycle = 3600
477 504 sqlalchemy.db1.convert_unicode = true
478 505
479 506 ## the number of connections to keep open inside the connection pool.
480 507 ## 0 indicates no limit
481 508 #sqlalchemy.db1.pool_size = 5
482 509
483 510 ## the number of connections to allow in connection pool "overflow", that is
484 511 ## connections that can be opened above and beyond the pool_size setting,
485 512 ## which defaults to five.
486 513 #sqlalchemy.db1.max_overflow = 10
487 514
515 ## Connection check ping, used to detect broken database connections
516 ## could be enabled to better handle cases if MySQL has gone away errors
517 #sqlalchemy.db1.ping_connection = true
488 518
489 519 ##################
490 520 ### VCS CONFIG ###
491 521 ##################
492 522 vcs.server.enable = true
493 523 vcs.server = localhost:9900
494 524
495 525 ## Web server connectivity protocol, responsible for web based VCS operations
496 526 ## Available protocols are:
497 527 ## `http` - use http-rpc backend (default)
498 528 vcs.server.protocol = http
499 529
500 530 ## Push/Pull operations protocol, available options are:
501 531 ## `http` - use http-rpc backend (default)
502 532 ##
503 533 vcs.scm_app_implementation = http
504 534
505 535 ## Push/Pull operations hooks protocol, available options are:
506 536 ## `http` - use http-rpc backend (default)
507 537 vcs.hooks.protocol = http
508 538 ## Host on which this instance is listening for hooks. If the vcsserver is in another location,
509 539 ## this should be adjusted.
510 540 vcs.hooks.host = 127.0.0.1
511 541
512 542 vcs.server.log_level = info
513 543 ## Start VCSServer with this instance as a subprocess, useful for development
514 544 vcs.start_server = false
515 545
516 546 ## List of enabled VCS backends, available options are:
517 547 ## `hg` - mercurial
518 548 ## `git` - git
519 549 ## `svn` - subversion
520 550 vcs.backends = hg, git, svn
521 551
522 552 vcs.connection_timeout = 3600
523 553 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
524 554 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
525 555 #vcs.svn.compatible_version = pre-1.8-compatible
526 556
527 557
528 558 ############################################################
529 559 ### Subversion proxy support (mod_dav_svn) ###
530 560 ### Maps RhodeCode repo groups into SVN paths for Apache ###
531 561 ############################################################
532 562 ## Enable or disable the config file generation.
533 563 svn.proxy.generate_config = false
534 564 ## Generate config file with `SVNListParentPath` set to `On`.
535 565 svn.proxy.list_parent_path = true
536 566 ## Set location and file name of generated config file.
537 567 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
538 568 ## alternative mod_dav config template. This needs to be a mako template
539 569 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
540 570 ## Used as a prefix to the `Location` block in the generated config file.
541 571 ## In most cases it should be set to `/`.
542 572 svn.proxy.location_root = /
543 573 ## Command to reload the mod_dav_svn configuration on change.
544 574 ## Example: `/etc/init.d/apache2 reload`
545 575 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
546 576 ## If the timeout expires before the reload command finishes, the command will
547 577 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
548 578 #svn.proxy.reload_timeout = 10
549 579
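The reload behaviour described above boils down to running the configured command with a time limit. A minimal sketch of those semantics (not RhodeCode's implementation; uses the Python 3 subprocess timeout):

import subprocess

# Hypothetical helper mirroring svn.proxy.reload_cmd and svn.proxy.reload_timeout.
def reload_mod_dav_svn(reload_cmd='/etc/init.d/apache2 reload', timeout=10):
    # timeout=0 means no limit; otherwise the command is killed when the timeout expires
    subprocess.run(reload_cmd, shell=True, check=True, timeout=timeout or None)
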
550 580 ############################################################
551 581 ### SSH Support Settings ###
552 582 ############################################################
553 583
554 584 ## Defines if a custom authorized_keys file should be created and written on
555 585 ## any change of user SSH keys. Setting this to false also disables the possibility
556 586 ## of adding SSH keys by users from the web interface. Super admins can still
557 587 ## manage SSH Keys.
558 588 ssh.generate_authorized_keyfile = false
559 589
560 590 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
561 591 # ssh.authorized_keys_ssh_opts =
562 592
563 593 ## Path to the authorized_keys file where the generated entries are placed.
564 594 ## It is possible to have multiple key files specified in `sshd_config` e.g.
565 595 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
566 596 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
567 597
568 598 ## Command to execute the SSH wrapper. The binary is available in the
569 599 ## rhodecode installation directory.
570 600 ## e.g. ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
571 601 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
572 602
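To make the pieces above concrete: with key-file generation enabled, every user key is written as one OpenSSH authorized_keys entry that forces the wrapper command and applies the restrictive options. A purely hypothetical sketch; the flags actually passed to rc-ssh-wrapper are not documented in this file and are assumed here:

# Hypothetical illustration of an entry written to ssh.authorized_keys_file_path.
SSH_OPTS = 'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding'
WRAPPER_CMD = '~/.rccontrol/community-1/rc-ssh-wrapper'

def authorized_keys_line(user_id, public_key):
    # OpenSSH format: options (including a forced command), then the public key
    return 'command="{wrapper} --user-id={uid}",{opts} {key}'.format(
        wrapper=WRAPPER_CMD, uid=user_id, opts=SSH_OPTS, key=public_key.strip())

print(authorized_keys_line(2, 'ssh-rsa AAAA...key... user@example.com'))
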
573 603 ## Allow shell when executing the ssh-wrapper command
574 604 ssh.wrapper_cmd_allow_shell = false
575 605
576 606 ## Enables logging and detailed output sent back to the client during SSH
577 607 ## operations. Useful for debugging; shouldn't be used in production.
578 608 ssh.enable_debug_logging = false
579 609
580 610 ## Paths to the binary executables; by default these are just the names, but we can
581 611 ## override them if we want to use custom ones
582 612 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
583 613 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
584 614 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
585 615
586 616
587 617 ## Dummy marker to add new entries after.
588 618 ## Add any custom entries below. Please don't remove.
589 619 custom.conf = 1
590 620
591 621
592 622 ################################
593 623 ### LOGGING CONFIGURATION ####
594 624 ################################
595 625 [loggers]
596 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
626 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
597 627
598 628 [handlers]
599 629 keys = console, console_sql
600 630
601 631 [formatters]
602 632 keys = generic, color_formatter, color_formatter_sql
603 633
604 634 #############
605 635 ## LOGGERS ##
606 636 #############
607 637 [logger_root]
608 638 level = NOTSET
609 639 handlers = console
610 640
611 641 [logger_sqlalchemy]
612 642 level = INFO
613 643 handlers = console_sql
614 644 qualname = sqlalchemy.engine
615 645 propagate = 0
616 646
617 647 [logger_beaker]
618 648 level = DEBUG
619 649 handlers =
620 650 qualname = beaker.container
621 651 propagate = 1
622 652
623 653 [logger_rhodecode]
624 654 level = DEBUG
625 655 handlers =
626 656 qualname = rhodecode
627 657 propagate = 1
628 658
629 659 [logger_ssh_wrapper]
630 660 level = DEBUG
631 661 handlers =
632 662 qualname = ssh_wrapper
633 663 propagate = 1
634 664
635 665 [logger_celery]
636 666 level = DEBUG
637 667 handlers =
638 668 qualname = celery
639 669
640 670
641 671 ##############
642 672 ## HANDLERS ##
643 673 ##############
644 674
645 675 [handler_console]
646 676 class = StreamHandler
647 677 args = (sys.stderr, )
648 678 level = INFO
649 679 formatter = generic
650 680
651 681 [handler_console_sql]
682 # "level = DEBUG" logs SQL queries and results.
683 # "level = INFO" logs SQL queries.
684 # "level = WARN" logs neither. (Recommended for production systems.)
652 685 class = StreamHandler
653 686 args = (sys.stderr, )
654 687 level = WARN
655 688 formatter = generic
656 689
657 690 ################
658 691 ## FORMATTERS ##
659 692 ################
660 693
661 694 [formatter_generic]
662 695 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
663 696 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
664 697 datefmt = %Y-%m-%d %H:%M:%S
665 698
666 699 [formatter_color_formatter]
667 700 class = rhodecode.lib.logging_formatter.ColorFormatter
668 701 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
669 702 datefmt = %Y-%m-%d %H:%M:%S
670 703
671 704 [formatter_color_formatter_sql]
672 705 class = rhodecode.lib.logging_formatter.ColorFormatterSql
673 706 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
674 707 datefmt = %Y-%m-%d %H:%M:%S
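The [loggers], [handlers] and [formatters] sections above follow the stdlib logging.config.fileConfig layout, which is roughly how `gunicorn --log-config rhodecode.ini` consumes them. A minimal sketch of loading them by hand (file name illustrative):

import logging
import logging.config

# Load only the logging-related sections of an ini laid out like the one above.
logging.config.fileConfig('rhodecode.ini', disable_existing_loggers=False)

logging.getLogger('rhodecode').info('logging configured from the ini file')
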
@@ -1,1254 +1,1241 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import datetime
23 23 import formencode
24 24 import formencode.htmlfill
25 25
26 26 from pyramid.httpexceptions import HTTPFound
27 27 from pyramid.view import view_config
28 28 from pyramid.renderers import render
29 29 from pyramid.response import Response
30 30
31 31 from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
32 32 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
33 33 from rhodecode.authentication.plugins import auth_rhodecode
34 34 from rhodecode.events import trigger
35 35 from rhodecode.model.db import true
36 36
37 37 from rhodecode.lib import audit_logger, rc_cache
38 38 from rhodecode.lib.exceptions import (
39 39 UserCreationError, UserOwnsReposException, UserOwnsRepoGroupsException,
40 40 UserOwnsUserGroupsException, DefaultUserException)
41 41 from rhodecode.lib.ext_json import json
42 42 from rhodecode.lib.auth import (
43 43 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
44 44 from rhodecode.lib import helpers as h
45 45 from rhodecode.lib.utils2 import safe_int, safe_unicode, AttributeDict
46 46 from rhodecode.model.auth_token import AuthTokenModel
47 47 from rhodecode.model.forms import (
48 48 UserForm, UserIndividualPermissionsForm, UserPermissionsForm,
49 49 UserExtraEmailForm, UserExtraIpForm)
50 50 from rhodecode.model.permission import PermissionModel
51 51 from rhodecode.model.repo_group import RepoGroupModel
52 52 from rhodecode.model.ssh_key import SshKeyModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.user_group import UserGroupModel
55 55 from rhodecode.model.db import (
56 56 or_, coalesce, IntegrityError, User, UserGroup, UserIpMap, UserEmailMap,
57 57 UserApiKeys, UserSshKeys, RepoGroup)
58 58 from rhodecode.model.meta import Session
59 59
60 60 log = logging.getLogger(__name__)
61 61
62 62
63 63 class AdminUsersView(BaseAppView, DataGridAppView):
64 64
65 65 def load_default_context(self):
66 66 c = self._get_local_tmpl_context()
67 67 return c
68 68
69 69 @LoginRequired()
70 70 @HasPermissionAllDecorator('hg.admin')
71 71 @view_config(
72 72 route_name='users', request_method='GET',
73 73 renderer='rhodecode:templates/admin/users/users.mako')
74 74 def users_list(self):
75 75 c = self.load_default_context()
76 76 return self._get_template_context(c)
77 77
78 78 @LoginRequired()
79 79 @HasPermissionAllDecorator('hg.admin')
80 80 @view_config(
81 81 # renderer defined below
82 82 route_name='users_data', request_method='GET',
83 83 renderer='json_ext', xhr=True)
84 84 def users_list_data(self):
85 85 self.load_default_context()
86 86 column_map = {
87 87 'first_name': 'name',
88 88 'last_name': 'lastname',
89 89 }
90 90 draw, start, limit = self._extract_chunk(self.request)
91 91 search_q, order_by, order_dir = self._extract_ordering(
92 92 self.request, column_map=column_map)
93 93 _render = self.request.get_partial_renderer(
94 94 'rhodecode:templates/data_table/_dt_elements.mako')
95 95
96 96 def user_actions(user_id, username):
97 97 return _render("user_actions", user_id, username)
98 98
99 99 users_data_total_count = User.query()\
100 100 .filter(User.username != User.DEFAULT_USER) \
101 101 .count()
102 102
103 103 users_data_total_inactive_count = User.query()\
104 104 .filter(User.username != User.DEFAULT_USER) \
105 105 .filter(User.active != true())\
106 106 .count()
107 107
108 108 # json generate
109 109 base_q = User.query().filter(User.username != User.DEFAULT_USER)
110 110 base_inactive_q = base_q.filter(User.active != true())
111 111
112 112 if search_q:
113 113 like_expression = u'%{}%'.format(safe_unicode(search_q))
114 114 base_q = base_q.filter(or_(
115 115 User.username.ilike(like_expression),
116 116 User._email.ilike(like_expression),
117 117 User.name.ilike(like_expression),
118 118 User.lastname.ilike(like_expression),
119 119 ))
120 120 base_inactive_q = base_q.filter(User.active != true())
121 121
122 122 users_data_total_filtered_count = base_q.count()
123 123 users_data_total_filtered_inactive_count = base_inactive_q.count()
124 124
125 125 sort_col = getattr(User, order_by, None)
126 126 if sort_col:
127 127 if order_dir == 'asc':
128 128 # handle null values properly to order by NULL last
129 129 if order_by in ['last_activity']:
130 130 sort_col = coalesce(sort_col, datetime.date.max)
131 131 sort_col = sort_col.asc()
132 132 else:
133 133 # handle null values properly to order by NULL last
134 134 if order_by in ['last_activity']:
135 135 sort_col = coalesce(sort_col, datetime.date.min)
136 136 sort_col = sort_col.desc()
137 137
138 138 base_q = base_q.order_by(sort_col)
139 139 base_q = base_q.offset(start).limit(limit)
140 140
141 141 users_list = base_q.all()
142 142
143 143 users_data = []
144 144 for user in users_list:
145 145 users_data.append({
146 146 "username": h.gravatar_with_user(self.request, user.username),
147 147 "email": user.email,
148 148 "first_name": user.first_name,
149 149 "last_name": user.last_name,
150 150 "last_login": h.format_date(user.last_login),
151 151 "last_activity": h.format_date(user.last_activity),
152 152 "active": h.bool2icon(user.active),
153 153 "active_raw": user.active,
154 154 "admin": h.bool2icon(user.admin),
155 155 "extern_type": user.extern_type,
156 156 "extern_name": user.extern_name,
157 157 "action": user_actions(user.user_id, user.username),
158 158 })
159 159 data = ({
160 160 'draw': draw,
161 161 'data': users_data,
162 162 'recordsTotal': users_data_total_count,
163 163 'recordsFiltered': users_data_total_filtered_count,
164 164 'recordsTotalInactive': users_data_total_inactive_count,
165 165 'recordsFilteredInactive': users_data_total_filtered_inactive_count
166 166 })
167 167
168 168 return data
169 169
170 170 def _set_personal_repo_group_template_vars(self, c_obj):
171 171 DummyUser = AttributeDict({
172 172 'username': '${username}',
173 173 'user_id': '${user_id}',
174 174 })
175 175 c_obj.default_create_repo_group = RepoGroupModel() \
176 176 .get_default_create_personal_repo_group()
177 177 c_obj.personal_repo_group_name = RepoGroupModel() \
178 178 .get_personal_group_name(DummyUser)
179 179
180 180 @LoginRequired()
181 181 @HasPermissionAllDecorator('hg.admin')
182 182 @view_config(
183 183 route_name='users_new', request_method='GET',
184 184 renderer='rhodecode:templates/admin/users/user_add.mako')
185 185 def users_new(self):
186 186 _ = self.request.translate
187 187 c = self.load_default_context()
188 188 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
189 189 self._set_personal_repo_group_template_vars(c)
190 190 return self._get_template_context(c)
191 191
192 192 @LoginRequired()
193 193 @HasPermissionAllDecorator('hg.admin')
194 194 @CSRFRequired()
195 195 @view_config(
196 196 route_name='users_create', request_method='POST',
197 197 renderer='rhodecode:templates/admin/users/user_add.mako')
198 198 def users_create(self):
199 199 _ = self.request.translate
200 200 c = self.load_default_context()
201 201 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
202 202 user_model = UserModel()
203 203 user_form = UserForm(self.request.translate)()
204 204 try:
205 205 form_result = user_form.to_python(dict(self.request.POST))
206 206 user = user_model.create(form_result)
207 207 Session().flush()
208 208 creation_data = user.get_api_data()
209 209 username = form_result['username']
210 210
211 211 audit_logger.store_web(
212 212 'user.create', action_data={'data': creation_data},
213 213 user=c.rhodecode_user)
214 214
215 215 user_link = h.link_to(
216 216 h.escape(username),
217 217 h.route_path('user_edit', user_id=user.user_id))
218 218 h.flash(h.literal(_('Created user %(user_link)s')
219 219 % {'user_link': user_link}), category='success')
220 220 Session().commit()
221 221 except formencode.Invalid as errors:
222 222 self._set_personal_repo_group_template_vars(c)
223 223 data = render(
224 224 'rhodecode:templates/admin/users/user_add.mako',
225 225 self._get_template_context(c), self.request)
226 226 html = formencode.htmlfill.render(
227 227 data,
228 228 defaults=errors.value,
229 229 errors=errors.error_dict or {},
230 230 prefix_error=False,
231 231 encoding="UTF-8",
232 232 force_defaults=False
233 233 )
234 234 return Response(html)
235 235 except UserCreationError as e:
236 236 h.flash(e, 'error')
237 237 except Exception:
238 238 log.exception("Exception creation of user")
239 239 h.flash(_('Error occurred during creation of user %s')
240 240 % self.request.POST.get('username'), category='error')
241 241 raise HTTPFound(h.route_path('users'))
242 242
243 243
244 244 class UsersView(UserAppView):
245 245 ALLOW_SCOPED_TOKENS = False
246 246 """
247 247 This view has alternative version inside EE, if modified please take a look
248 248 in there as well.
249 249 """
250 250
251 251 def load_default_context(self):
252 252 c = self._get_local_tmpl_context()
253 253 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
254 254 c.allowed_languages = [
255 255 ('en', 'English (en)'),
256 256 ('de', 'German (de)'),
257 257 ('fr', 'French (fr)'),
258 258 ('it', 'Italian (it)'),
259 259 ('ja', 'Japanese (ja)'),
260 260 ('pl', 'Polish (pl)'),
261 261 ('pt', 'Portuguese (pt)'),
262 262 ('ru', 'Russian (ru)'),
263 263 ('zh', 'Chinese (zh)'),
264 264 ]
265 265 req = self.request
266 266
267 267 c.available_permissions = req.registry.settings['available_permissions']
268 268 PermissionModel().set_global_permission_choices(
269 269 c, gettext_translator=req.translate)
270 270
271 271 return c
272 272
273 273 @LoginRequired()
274 274 @HasPermissionAllDecorator('hg.admin')
275 275 @CSRFRequired()
276 276 @view_config(
277 277 route_name='user_update', request_method='POST',
278 278 renderer='rhodecode:templates/admin/users/user_edit.mako')
279 279 def user_update(self):
280 280 _ = self.request.translate
281 281 c = self.load_default_context()
282 282
283 283 user_id = self.db_user_id
284 284 c.user = self.db_user
285 285
286 286 c.active = 'profile'
287 287 c.extern_type = c.user.extern_type
288 288 c.extern_name = c.user.extern_name
289 289 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
290 290 available_languages = [x[0] for x in c.allowed_languages]
291 291 _form = UserForm(self.request.translate, edit=True,
292 292 available_languages=available_languages,
293 293 old_data={'user_id': user_id,
294 294 'email': c.user.email})()
295 295 form_result = {}
296 296 old_values = c.user.get_api_data()
297 297 try:
298 298 form_result = _form.to_python(dict(self.request.POST))
299 299 skip_attrs = ['extern_type', 'extern_name']
300 300 # TODO: plugin should define if username can be updated
301 301 if c.extern_type != "rhodecode":
302 302 # forbid updating username for external accounts
303 303 skip_attrs.append('username')
304 304
305 305 UserModel().update_user(
306 306 user_id, skip_attrs=skip_attrs, **form_result)
307 307
308 308 audit_logger.store_web(
309 309 'user.edit', action_data={'old_data': old_values},
310 310 user=c.rhodecode_user)
311 311
312 312 Session().commit()
313 313 h.flash(_('User updated successfully'), category='success')
314 314 except formencode.Invalid as errors:
315 315 data = render(
316 316 'rhodecode:templates/admin/users/user_edit.mako',
317 317 self._get_template_context(c), self.request)
318 318 html = formencode.htmlfill.render(
319 319 data,
320 320 defaults=errors.value,
321 321 errors=errors.error_dict or {},
322 322 prefix_error=False,
323 323 encoding="UTF-8",
324 324 force_defaults=False
325 325 )
326 326 return Response(html)
327 327 except UserCreationError as e:
328 328 h.flash(e, 'error')
329 329 except Exception:
330 330 log.exception("Exception updating user")
331 331 h.flash(_('Error occurred during update of user %s')
332 332 % form_result.get('username'), category='error')
333 333 raise HTTPFound(h.route_path('user_edit', user_id=user_id))
334 334
335 335 @LoginRequired()
336 336 @HasPermissionAllDecorator('hg.admin')
337 337 @CSRFRequired()
338 338 @view_config(
339 339 route_name='user_delete', request_method='POST',
340 340 renderer='rhodecode:templates/admin/users/user_edit.mako')
341 341 def user_delete(self):
342 342 _ = self.request.translate
343 343 c = self.load_default_context()
344 344 c.user = self.db_user
345 345
346 346 _repos = c.user.repositories
347 347 _repo_groups = c.user.repository_groups
348 348 _user_groups = c.user.user_groups
349 349
350 350 handle_repos = None
351 351 handle_repo_groups = None
352 352 handle_user_groups = None
353 353 # dummy call for flash of handle
354 354 set_handle_flash_repos = lambda: None
355 355 set_handle_flash_repo_groups = lambda: None
356 356 set_handle_flash_user_groups = lambda: None
357 357
358 358 if _repos and self.request.POST.get('user_repos'):
359 359 do = self.request.POST['user_repos']
360 360 if do == 'detach':
361 361 handle_repos = 'detach'
362 362 set_handle_flash_repos = lambda: h.flash(
363 363 _('Detached %s repositories') % len(_repos),
364 364 category='success')
365 365 elif do == 'delete':
366 366 handle_repos = 'delete'
367 367 set_handle_flash_repos = lambda: h.flash(
368 368 _('Deleted %s repositories') % len(_repos),
369 369 category='success')
370 370
371 371 if _repo_groups and self.request.POST.get('user_repo_groups'):
372 372 do = self.request.POST['user_repo_groups']
373 373 if do == 'detach':
374 374 handle_repo_groups = 'detach'
375 375 set_handle_flash_repo_groups = lambda: h.flash(
376 376 _('Detached %s repository groups') % len(_repo_groups),
377 377 category='success')
378 378 elif do == 'delete':
379 379 handle_repo_groups = 'delete'
380 380 set_handle_flash_repo_groups = lambda: h.flash(
381 381 _('Deleted %s repository groups') % len(_repo_groups),
382 382 category='success')
383 383
384 384 if _user_groups and self.request.POST.get('user_user_groups'):
385 385 do = self.request.POST['user_user_groups']
386 386 if do == 'detach':
387 387 handle_user_groups = 'detach'
388 388 set_handle_flash_user_groups = lambda: h.flash(
389 389 _('Detached %s user groups') % len(_user_groups),
390 390 category='success')
391 391 elif do == 'delete':
392 392 handle_user_groups = 'delete'
393 393 set_handle_flash_user_groups = lambda: h.flash(
394 394 _('Deleted %s user groups') % len(_user_groups),
395 395 category='success')
396 396
397 397 old_values = c.user.get_api_data()
398 398 try:
399 399 UserModel().delete(c.user, handle_repos=handle_repos,
400 400 handle_repo_groups=handle_repo_groups,
401 401 handle_user_groups=handle_user_groups)
402 402
403 403 audit_logger.store_web(
404 404 'user.delete', action_data={'old_data': old_values},
405 405 user=c.rhodecode_user)
406 406
407 407 Session().commit()
408 408 set_handle_flash_repos()
409 409 set_handle_flash_repo_groups()
410 410 set_handle_flash_user_groups()
411 411 h.flash(_('Successfully deleted user'), category='success')
412 412 except (UserOwnsReposException, UserOwnsRepoGroupsException,
413 413 UserOwnsUserGroupsException, DefaultUserException) as e:
414 414 h.flash(e, category='warning')
415 415 except Exception:
416 416 log.exception("Exception during deletion of user")
417 417 h.flash(_('An error occurred during deletion of user'),
418 418 category='error')
419 419 raise HTTPFound(h.route_path('users'))
420 420
421 421 @LoginRequired()
422 422 @HasPermissionAllDecorator('hg.admin')
423 423 @view_config(
424 424 route_name='user_edit', request_method='GET',
425 425 renderer='rhodecode:templates/admin/users/user_edit.mako')
426 426 def user_edit(self):
427 427 _ = self.request.translate
428 428 c = self.load_default_context()
429 429 c.user = self.db_user
430 430
431 431 c.active = 'profile'
432 432 c.extern_type = c.user.extern_type
433 433 c.extern_name = c.user.extern_name
434 434 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
435 435
436 436 defaults = c.user.get_dict()
437 437 defaults.update({'language': c.user.user_data.get('language')})
438 438
439 439 data = render(
440 440 'rhodecode:templates/admin/users/user_edit.mako',
441 441 self._get_template_context(c), self.request)
442 442 html = formencode.htmlfill.render(
443 443 data,
444 444 defaults=defaults,
445 445 encoding="UTF-8",
446 446 force_defaults=False
447 447 )
448 448 return Response(html)
449 449
450 450 @LoginRequired()
451 451 @HasPermissionAllDecorator('hg.admin')
452 452 @view_config(
453 453 route_name='user_edit_advanced', request_method='GET',
454 454 renderer='rhodecode:templates/admin/users/user_edit.mako')
455 455 def user_edit_advanced(self):
456 456 _ = self.request.translate
457 457 c = self.load_default_context()
458 458
459 459 user_id = self.db_user_id
460 460 c.user = self.db_user
461 461
462 462 c.active = 'advanced'
463 463 c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
464 464 c.personal_repo_group_name = RepoGroupModel()\
465 465 .get_personal_group_name(c.user)
466 466
467 467 c.user_to_review_rules = sorted(
468 468 (x.user for x in c.user.user_review_rules),
469 469 key=lambda u: u.username.lower())
470 470
471 471 c.first_admin = User.get_first_super_admin()
472 472 defaults = c.user.get_dict()
473 473
474 474 # Interim workaround if the user participated on any pull requests as a
475 475 # reviewer.
476 476 has_review = len(c.user.reviewer_pull_requests)
477 477 c.can_delete_user = not has_review
478 478 c.can_delete_user_message = ''
479 479 inactive_link = h.link_to(
480 480 'inactive', h.route_path('user_edit', user_id=user_id, _anchor='active'))
481 481 if has_review == 1:
482 482 c.can_delete_user_message = h.literal(_(
483 483 'The user participates as reviewer in {} pull request and '
484 484 'cannot be deleted. \nYou can set the user to '
485 485 '"{}" instead of deleting it.').format(
486 486 has_review, inactive_link))
487 487 elif has_review:
488 488 c.can_delete_user_message = h.literal(_(
489 489 'The user participates as reviewer in {} pull requests and '
490 490 'cannot be deleted. \nYou can set the user to '
491 491 '"{}" instead of deleting it.').format(
492 492 has_review, inactive_link))
493 493
494 494 data = render(
495 495 'rhodecode:templates/admin/users/user_edit.mako',
496 496 self._get_template_context(c), self.request)
497 497 html = formencode.htmlfill.render(
498 498 data,
499 499 defaults=defaults,
500 500 encoding="UTF-8",
501 501 force_defaults=False
502 502 )
503 503 return Response(html)
504 504
505 505 @LoginRequired()
506 506 @HasPermissionAllDecorator('hg.admin')
507 507 @view_config(
508 508 route_name='user_edit_global_perms', request_method='GET',
509 509 renderer='rhodecode:templates/admin/users/user_edit.mako')
510 510 def user_edit_global_perms(self):
511 511 _ = self.request.translate
512 512 c = self.load_default_context()
513 513 c.user = self.db_user
514 514
515 515 c.active = 'global_perms'
516 516
517 517 c.default_user = User.get_default_user()
518 518 defaults = c.user.get_dict()
519 519 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
520 520 defaults.update(c.default_user.get_default_perms())
521 521 defaults.update(c.user.get_default_perms())
522 522
523 523 data = render(
524 524 'rhodecode:templates/admin/users/user_edit.mako',
525 525 self._get_template_context(c), self.request)
526 526 html = formencode.htmlfill.render(
527 527 data,
528 528 defaults=defaults,
529 529 encoding="UTF-8",
530 530 force_defaults=False
531 531 )
532 532 return Response(html)
533 533
534 534 @LoginRequired()
535 535 @HasPermissionAllDecorator('hg.admin')
536 536 @CSRFRequired()
537 537 @view_config(
538 538 route_name='user_edit_global_perms_update', request_method='POST',
539 539 renderer='rhodecode:templates/admin/users/user_edit.mako')
540 540 def user_edit_global_perms_update(self):
541 541 _ = self.request.translate
542 542 c = self.load_default_context()
543 543
544 544 user_id = self.db_user_id
545 545 c.user = self.db_user
546 546
547 547 c.active = 'global_perms'
548 548 try:
549 549 # first stage that verifies the checkbox
550 550 _form = UserIndividualPermissionsForm(self.request.translate)
551 551 form_result = _form.to_python(dict(self.request.POST))
552 552 inherit_perms = form_result['inherit_default_permissions']
553 553 c.user.inherit_default_permissions = inherit_perms
554 554 Session().add(c.user)
555 555
556 556 if not inherit_perms:
557 557 # only update the individual ones if we un check the flag
558 558 _form = UserPermissionsForm(
559 559 self.request.translate,
560 560 [x[0] for x in c.repo_create_choices],
561 561 [x[0] for x in c.repo_create_on_write_choices],
562 562 [x[0] for x in c.repo_group_create_choices],
563 563 [x[0] for x in c.user_group_create_choices],
564 564 [x[0] for x in c.fork_choices],
565 565 [x[0] for x in c.inherit_default_permission_choices])()
566 566
567 567 form_result = _form.to_python(dict(self.request.POST))
568 568 form_result.update({'perm_user_id': c.user.user_id})
569 569
570 570 PermissionModel().update_user_permissions(form_result)
571 571
572 572 # TODO(marcink): implement global permissions
573 573 # audit_log.store_web('user.edit.permissions')
574 574
575 575 Session().commit()
576 576 h.flash(_('User global permissions updated successfully'),
577 577 category='success')
578 578
579 579 except formencode.Invalid as errors:
580 580 data = render(
581 581 'rhodecode:templates/admin/users/user_edit.mako',
582 582 self._get_template_context(c), self.request)
583 583 html = formencode.htmlfill.render(
584 584 data,
585 585 defaults=errors.value,
586 586 errors=errors.error_dict or {},
587 587 prefix_error=False,
588 588 encoding="UTF-8",
589 589 force_defaults=False
590 590 )
591 591 return Response(html)
592 592 except Exception:
593 593 log.exception("Exception during permissions saving")
594 594 h.flash(_('An error occurred during permissions saving'),
595 595 category='error')
596 596 raise HTTPFound(h.route_path('user_edit_global_perms', user_id=user_id))
597 597
598 598 @LoginRequired()
599 599 @HasPermissionAllDecorator('hg.admin')
600 600 @CSRFRequired()
601 601 @view_config(
602 602 route_name='user_force_password_reset', request_method='POST',
603 603 renderer='rhodecode:templates/admin/users/user_edit.mako')
604 604 def user_force_password_reset(self):
605 605 """
606 606 toggle reset password flag for this user
607 607 """
608 608 _ = self.request.translate
609 609 c = self.load_default_context()
610 610
611 611 user_id = self.db_user_id
612 612 c.user = self.db_user
613 613
614 614 try:
615 615 old_value = c.user.user_data.get('force_password_change')
616 616 c.user.update_userdata(force_password_change=not old_value)
617 617
618 618 if old_value:
619 619 msg = _('Force password change disabled for user')
620 620 audit_logger.store_web(
621 621 'user.edit.password_reset.disabled',
622 622 user=c.rhodecode_user)
623 623 else:
624 624 msg = _('Force password change enabled for user')
625 625 audit_logger.store_web(
626 626 'user.edit.password_reset.enabled',
627 627 user=c.rhodecode_user)
628 628
629 629 Session().commit()
630 630 h.flash(msg, category='success')
631 631 except Exception:
632 632 log.exception("Exception during password reset for user")
633 633 h.flash(_('An error occurred during password reset for user'),
634 634 category='error')
635 635
636 636 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
637 637
638 638 @LoginRequired()
639 639 @HasPermissionAllDecorator('hg.admin')
640 640 @CSRFRequired()
641 641 @view_config(
642 642 route_name='user_create_personal_repo_group', request_method='POST',
643 643 renderer='rhodecode:templates/admin/users/user_edit.mako')
644 644 def user_create_personal_repo_group(self):
645 645 """
646 646 Create personal repository group for this user
647 647 """
648 648 from rhodecode.model.repo_group import RepoGroupModel
649 649
650 650 _ = self.request.translate
651 651 c = self.load_default_context()
652 652
653 653 user_id = self.db_user_id
654 654 c.user = self.db_user
655 655
656 656 personal_repo_group = RepoGroup.get_user_personal_repo_group(
657 657 c.user.user_id)
658 658 if personal_repo_group:
659 659 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
660 660
661 661 personal_repo_group_name = RepoGroupModel().get_personal_group_name(
662 662 c.user)
663 663 named_personal_group = RepoGroup.get_by_group_name(
664 664 personal_repo_group_name)
665 665 try:
666 666
667 667 if named_personal_group and named_personal_group.user_id == c.user.user_id:
668 668 # migrate the same named group, and mark it as personal
669 669 named_personal_group.personal = True
670 670 Session().add(named_personal_group)
671 671 Session().commit()
672 672 msg = _('Linked repository group `%s` as personal' % (
673 673 personal_repo_group_name,))
674 674 h.flash(msg, category='success')
675 675 elif not named_personal_group:
676 676 RepoGroupModel().create_personal_repo_group(c.user)
677 677
678 678 msg = _('Created repository group `%s`' % (
679 679 personal_repo_group_name,))
680 680 h.flash(msg, category='success')
681 681 else:
682 682 msg = _('Repository group `%s` is already taken' % (
683 683 personal_repo_group_name,))
684 684 h.flash(msg, category='warning')
685 685 except Exception:
686 686 log.exception("Exception during repository group creation")
687 687 msg = _(
688 688 'An error occurred during repository group creation for user')
689 689 h.flash(msg, category='error')
690 690 Session().rollback()
691 691
692 692 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
693 693
694 694 @LoginRequired()
695 695 @HasPermissionAllDecorator('hg.admin')
696 696 @view_config(
697 697 route_name='edit_user_auth_tokens', request_method='GET',
698 698 renderer='rhodecode:templates/admin/users/user_edit.mako')
699 699 def auth_tokens(self):
700 700 _ = self.request.translate
701 701 c = self.load_default_context()
702 702 c.user = self.db_user
703 703
704 704 c.active = 'auth_tokens'
705 705
706 706 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
707 707 c.role_values = [
708 708 (x, AuthTokenModel.cls._get_role_name(x))
709 709 for x in AuthTokenModel.cls.ROLES]
710 710 c.role_options = [(c.role_values, _("Role"))]
711 711 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
712 712 c.user.user_id, show_expired=True)
713 713 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
714 714 return self._get_template_context(c)
715 715
716 716 def maybe_attach_token_scope(self, token):
717 717 # implemented in EE edition
718 718 pass
719 719
720 720 @LoginRequired()
721 721 @HasPermissionAllDecorator('hg.admin')
722 722 @CSRFRequired()
723 723 @view_config(
724 724 route_name='edit_user_auth_tokens_add', request_method='POST')
725 725 def auth_tokens_add(self):
726 726 _ = self.request.translate
727 727 c = self.load_default_context()
728 728
729 729 user_id = self.db_user_id
730 730 c.user = self.db_user
731 731
732 732 user_data = c.user.get_api_data()
733 733 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
734 734 description = self.request.POST.get('description')
735 735 role = self.request.POST.get('role')
736 736
737 737 token = AuthTokenModel().create(
738 738 c.user.user_id, description, lifetime, role)
739 739 token_data = token.get_api_data()
740 740
741 741 self.maybe_attach_token_scope(token)
742 742 audit_logger.store_web(
743 743 'user.edit.token.add', action_data={
744 744 'data': {'token': token_data, 'user': user_data}},
745 745 user=self._rhodecode_user, )
746 746 Session().commit()
747 747
748 748 h.flash(_("Auth token successfully created"), category='success')
749 749 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
750 750
751 751 @LoginRequired()
752 752 @HasPermissionAllDecorator('hg.admin')
753 753 @CSRFRequired()
754 754 @view_config(
755 755 route_name='edit_user_auth_tokens_delete', request_method='POST')
756 756 def auth_tokens_delete(self):
757 757 _ = self.request.translate
758 758 c = self.load_default_context()
759 759
760 760 user_id = self.db_user_id
761 761 c.user = self.db_user
762 762
763 763 user_data = c.user.get_api_data()
764 764
765 765 del_auth_token = self.request.POST.get('del_auth_token')
766 766
767 767 if del_auth_token:
768 768 token = UserApiKeys.get_or_404(del_auth_token)
769 769 token_data = token.get_api_data()
770 770
771 771 AuthTokenModel().delete(del_auth_token, c.user.user_id)
772 772 audit_logger.store_web(
773 773 'user.edit.token.delete', action_data={
774 774 'data': {'token': token_data, 'user': user_data}},
775 775 user=self._rhodecode_user,)
776 776 Session().commit()
777 777 h.flash(_("Auth token successfully deleted"), category='success')
778 778
779 779 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
780 780
781 781 @LoginRequired()
782 782 @HasPermissionAllDecorator('hg.admin')
783 783 @view_config(
784 784 route_name='edit_user_ssh_keys', request_method='GET',
785 785 renderer='rhodecode:templates/admin/users/user_edit.mako')
786 786 def ssh_keys(self):
787 787 _ = self.request.translate
788 788 c = self.load_default_context()
789 789 c.user = self.db_user
790 790
791 791 c.active = 'ssh_keys'
792 792 c.default_key = self.request.GET.get('default_key')
793 793 c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
794 794 return self._get_template_context(c)
795 795
796 796 @LoginRequired()
797 797 @HasPermissionAllDecorator('hg.admin')
798 798 @view_config(
799 799 route_name='edit_user_ssh_keys_generate_keypair', request_method='GET',
800 800 renderer='rhodecode:templates/admin/users/user_edit.mako')
801 801 def ssh_keys_generate_keypair(self):
802 802 _ = self.request.translate
803 803 c = self.load_default_context()
804 804
805 805 c.user = self.db_user
806 806
807 807 c.active = 'ssh_keys_generate'
808 808 comment = 'RhodeCode-SSH {}'.format(c.user.email or '')
809 809 c.private, c.public = SshKeyModel().generate_keypair(comment=comment)
810 810
811 811 return self._get_template_context(c)
812 812
813 813 @LoginRequired()
814 814 @HasPermissionAllDecorator('hg.admin')
815 815 @CSRFRequired()
816 816 @view_config(
817 817 route_name='edit_user_ssh_keys_add', request_method='POST')
818 818 def ssh_keys_add(self):
819 819 _ = self.request.translate
820 820 c = self.load_default_context()
821 821
822 822 user_id = self.db_user_id
823 823 c.user = self.db_user
824 824
825 825 user_data = c.user.get_api_data()
826 826 key_data = self.request.POST.get('key_data')
827 827 description = self.request.POST.get('description')
828 828
829 829 fingerprint = 'unknown'
830 830 try:
831 831 if not key_data:
832 832 raise ValueError('Please add a valid public key')
833 833
834 834 key = SshKeyModel().parse_key(key_data.strip())
835 835 fingerprint = key.hash_md5()
836 836
837 837 ssh_key = SshKeyModel().create(
838 838 c.user.user_id, fingerprint, key.keydata, description)
839 839 ssh_key_data = ssh_key.get_api_data()
840 840
841 841 audit_logger.store_web(
842 842 'user.edit.ssh_key.add', action_data={
843 843 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
844 844 user=self._rhodecode_user, )
845 845 Session().commit()
846 846
847 847 # Trigger an event on change of keys.
848 848 trigger(SshKeyFileChangeEvent(), self.request.registry)
849 849
850 850 h.flash(_("Ssh Key successfully created"), category='success')
851 851
852 852 except IntegrityError:
853 853 log.exception("Exception during ssh key saving")
854 854 err = 'Such key with fingerprint `{}` already exists, ' \
855 855 'please use a different one'.format(fingerprint)
856 856 h.flash(_('An error occurred during ssh key saving: {}').format(err),
857 857 category='error')
858 858 except Exception as e:
859 859 log.exception("Exception during ssh key saving")
860 860 h.flash(_('An error occurred during ssh key saving: {}').format(e),
861 861 category='error')
862 862
863 863 return HTTPFound(
864 864 h.route_path('edit_user_ssh_keys', user_id=user_id))
865 865
866 866 @LoginRequired()
867 867 @HasPermissionAllDecorator('hg.admin')
868 868 @CSRFRequired()
869 869 @view_config(
870 870 route_name='edit_user_ssh_keys_delete', request_method='POST')
871 871 def ssh_keys_delete(self):
872 872 _ = self.request.translate
873 873 c = self.load_default_context()
874 874
875 875 user_id = self.db_user_id
876 876 c.user = self.db_user
877 877
878 878 user_data = c.user.get_api_data()
879 879
880 880 del_ssh_key = self.request.POST.get('del_ssh_key')
881 881
882 882 if del_ssh_key:
883 883 ssh_key = UserSshKeys.get_or_404(del_ssh_key)
884 884 ssh_key_data = ssh_key.get_api_data()
885 885
886 886 SshKeyModel().delete(del_ssh_key, c.user.user_id)
887 887 audit_logger.store_web(
888 888 'user.edit.ssh_key.delete', action_data={
889 889 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
890 890 user=self._rhodecode_user,)
891 891 Session().commit()
892 892 # Trigger an event on change of keys.
893 893 trigger(SshKeyFileChangeEvent(), self.request.registry)
894 894 h.flash(_("Ssh key successfully deleted"), category='success')
895 895
896 896 return HTTPFound(h.route_path('edit_user_ssh_keys', user_id=user_id))
897 897
898 898 @LoginRequired()
899 899 @HasPermissionAllDecorator('hg.admin')
900 900 @view_config(
901 901 route_name='edit_user_emails', request_method='GET',
902 902 renderer='rhodecode:templates/admin/users/user_edit.mako')
903 903 def emails(self):
904 904 _ = self.request.translate
905 905 c = self.load_default_context()
906 906 c.user = self.db_user
907 907
908 908 c.active = 'emails'
909 909 c.user_email_map = UserEmailMap.query() \
910 910 .filter(UserEmailMap.user == c.user).all()
911 911
912 912 return self._get_template_context(c)
913 913
914 914 @LoginRequired()
915 915 @HasPermissionAllDecorator('hg.admin')
916 916 @CSRFRequired()
917 917 @view_config(
918 918 route_name='edit_user_emails_add', request_method='POST')
919 919 def emails_add(self):
920 920 _ = self.request.translate
921 921 c = self.load_default_context()
922 922
923 923 user_id = self.db_user_id
924 924 c.user = self.db_user
925 925
926 926 email = self.request.POST.get('new_email')
927 927 user_data = c.user.get_api_data()
928 928 try:
929 929
930 930 form = UserExtraEmailForm(self.request.translate)()
931 931 data = form.to_python({'email': email})
932 932 email = data['email']
933 933
934 934 UserModel().add_extra_email(c.user.user_id, email)
935 935 audit_logger.store_web(
936 936 'user.edit.email.add',
937 937 action_data={'email': email, 'user': user_data},
938 938 user=self._rhodecode_user)
939 939 Session().commit()
940 940 h.flash(_("Added new email address `%s` for user account") % email,
941 941 category='success')
942 942 except formencode.Invalid as error:
943 943 h.flash(h.escape(error.error_dict['email']), category='error')
944 944 except IntegrityError:
945 945 log.warning("Email %s already exists", email)
946 946 h.flash(_('Email `{}` is already registered for another user.').format(email),
947 947 category='error')
948 948 except Exception:
949 949 log.exception("Exception during email saving")
950 950 h.flash(_('An error occurred during email saving'),
951 951 category='error')
952 952 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
953 953
954 954 @LoginRequired()
955 955 @HasPermissionAllDecorator('hg.admin')
956 956 @CSRFRequired()
957 957 @view_config(
958 958 route_name='edit_user_emails_delete', request_method='POST')
959 959 def emails_delete(self):
960 960 _ = self.request.translate
961 961 c = self.load_default_context()
962 962
963 963 user_id = self.db_user_id
964 964 c.user = self.db_user
965 965
966 966 email_id = self.request.POST.get('del_email_id')
967 967 user_model = UserModel()
968 968
969 969 email = UserEmailMap.query().get(email_id).email
970 970 user_data = c.user.get_api_data()
971 971 user_model.delete_extra_email(c.user.user_id, email_id)
972 972 audit_logger.store_web(
973 973 'user.edit.email.delete',
974 974 action_data={'email': email, 'user': user_data},
975 975 user=self._rhodecode_user)
976 976 Session().commit()
977 977 h.flash(_("Removed email address from user account"),
978 978 category='success')
979 979 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
980 980
981 981 @LoginRequired()
982 982 @HasPermissionAllDecorator('hg.admin')
983 983 @view_config(
984 984 route_name='edit_user_ips', request_method='GET',
985 985 renderer='rhodecode:templates/admin/users/user_edit.mako')
986 986 def ips(self):
987 987 _ = self.request.translate
988 988 c = self.load_default_context()
989 989 c.user = self.db_user
990 990
991 991 c.active = 'ips'
992 992 c.user_ip_map = UserIpMap.query() \
993 993 .filter(UserIpMap.user == c.user).all()
994 994
995 995 c.inherit_default_ips = c.user.inherit_default_permissions
996 996 c.default_user_ip_map = UserIpMap.query() \
997 997 .filter(UserIpMap.user == User.get_default_user()).all()
998 998
999 999 return self._get_template_context(c)
1000 1000
1001 1001 @LoginRequired()
1002 1002 @HasPermissionAllDecorator('hg.admin')
1003 1003 @CSRFRequired()
1004 1004 @view_config(
1005 1005 route_name='edit_user_ips_add', request_method='POST')
1006 1006 # NOTE(marcink): this view is allowed for default users, as we can
1007 1007 # edit their IP white list
1008 1008 def ips_add(self):
1009 1009 _ = self.request.translate
1010 1010 c = self.load_default_context()
1011 1011
1012 1012 user_id = self.db_user_id
1013 1013 c.user = self.db_user
1014 1014
1015 1015 user_model = UserModel()
1016 1016 desc = self.request.POST.get('description')
1017 1017 try:
1018 1018 ip_list = user_model.parse_ip_range(
1019 1019 self.request.POST.get('new_ip'))
1020 1020 except Exception as e:
1021 1021 ip_list = []
1022 1022 log.exception("Exception during ip saving")
1023 1023 h.flash(_('An error occurred during ip saving:%s' % (e,)),
1024 1024 category='error')
1025 1025 added = []
1026 1026 user_data = c.user.get_api_data()
1027 1027 for ip in ip_list:
1028 1028 try:
1029 1029 form = UserExtraIpForm(self.request.translate)()
1030 1030 data = form.to_python({'ip': ip})
1031 1031 ip = data['ip']
1032 1032
1033 1033 user_model.add_extra_ip(c.user.user_id, ip, desc)
1034 1034 audit_logger.store_web(
1035 1035 'user.edit.ip.add',
1036 1036 action_data={'ip': ip, 'user': user_data},
1037 1037 user=self._rhodecode_user)
1038 1038 Session().commit()
1039 1039 added.append(ip)
1040 1040 except formencode.Invalid as error:
1041 1041 msg = error.error_dict['ip']
1042 1042 h.flash(msg, category='error')
1043 1043 except Exception:
1044 1044 log.exception("Exception during ip saving")
1045 1045 h.flash(_('An error occurred during ip saving'),
1046 1046 category='error')
1047 1047 if added:
1048 1048 h.flash(
1049 1049 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
1050 1050 category='success')
1051 1051 if 'default_user' in self.request.POST:
1052 1052 # case for editing global IP list we do it for 'DEFAULT' user
1053 1053 raise HTTPFound(h.route_path('admin_permissions_ips'))
1054 1054 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1055 1055
1056 1056 @LoginRequired()
1057 1057 @HasPermissionAllDecorator('hg.admin')
1058 1058 @CSRFRequired()
1059 1059 @view_config(
1060 1060 route_name='edit_user_ips_delete', request_method='POST')
1061 1061 # NOTE(marcink): this view is allowed for default users, as we can
1062 1062 # edit their IP white list
1063 1063 def ips_delete(self):
1064 1064 _ = self.request.translate
1065 1065 c = self.load_default_context()
1066 1066
1067 1067 user_id = self.db_user_id
1068 1068 c.user = self.db_user
1069 1069
1070 1070 ip_id = self.request.POST.get('del_ip_id')
1071 1071 user_model = UserModel()
1072 1072 user_data = c.user.get_api_data()
1073 1073 ip = UserIpMap.query().get(ip_id).ip_addr
1074 1074 user_model.delete_extra_ip(c.user.user_id, ip_id)
1075 1075 audit_logger.store_web(
1076 1076 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
1077 1077 user=self._rhodecode_user)
1078 1078 Session().commit()
1079 1079 h.flash(_("Removed ip address from user whitelist"), category='success')
1080 1080
1081 1081 if 'default_user' in self.request.POST:
1082 1082 # case for editing global IP list we do it for 'DEFAULT' user
1083 1083 raise HTTPFound(h.route_path('admin_permissions_ips'))
1084 1084 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1085 1085
1086 1086 @LoginRequired()
1087 1087 @HasPermissionAllDecorator('hg.admin')
1088 1088 @view_config(
1089 1089 route_name='edit_user_groups_management', request_method='GET',
1090 1090 renderer='rhodecode:templates/admin/users/user_edit.mako')
1091 1091 def groups_management(self):
1092 1092 c = self.load_default_context()
1093 1093 c.user = self.db_user
1094 1094 c.data = c.user.group_member
1095 1095
1096 1096 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
1097 1097 for group in c.user.group_member]
1098 1098 c.groups = json.dumps(groups)
1099 1099 c.active = 'groups'
1100 1100
1101 1101 return self._get_template_context(c)
1102 1102
1103 1103 @LoginRequired()
1104 1104 @HasPermissionAllDecorator('hg.admin')
1105 1105 @CSRFRequired()
1106 1106 @view_config(
1107 1107 route_name='edit_user_groups_management_updates', request_method='POST')
1108 1108 def groups_management_updates(self):
1109 1109 _ = self.request.translate
1110 1110 c = self.load_default_context()
1111 1111
1112 1112 user_id = self.db_user_id
1113 1113 c.user = self.db_user
1114 1114
1115 1115 user_groups = set(self.request.POST.getall('users_group_id'))
1116 1116 user_groups_objects = []
1117 1117
1118 1118 for ugid in user_groups:
1119 1119 user_groups_objects.append(
1120 1120 UserGroupModel().get_group(safe_int(ugid)))
1121 1121 user_group_model = UserGroupModel()
1122 1122 added_to_groups, removed_from_groups = \
1123 1123 user_group_model.change_groups(c.user, user_groups_objects)
1124 1124
1125 1125 user_data = c.user.get_api_data()
1126 1126 for user_group_id in added_to_groups:
1127 1127 user_group = UserGroup.get(user_group_id)
1128 1128 old_values = user_group.get_api_data()
1129 1129 audit_logger.store_web(
1130 1130 'user_group.edit.member.add',
1131 1131 action_data={'user': user_data, 'old_data': old_values},
1132 1132 user=self._rhodecode_user)
1133 1133
1134 1134 for user_group_id in removed_from_groups:
1135 1135 user_group = UserGroup.get(user_group_id)
1136 1136 old_values = user_group.get_api_data()
1137 1137 audit_logger.store_web(
1138 1138 'user_group.edit.member.delete',
1139 1139 action_data={'user': user_data, 'old_data': old_values},
1140 1140 user=self._rhodecode_user)
1141 1141
1142 1142 Session().commit()
1143 1143 c.active = 'user_groups_management'
1144 1144 h.flash(_("Groups successfully changed"), category='success')
1145 1145
1146 1146 return HTTPFound(h.route_path(
1147 1147 'edit_user_groups_management', user_id=user_id))
1148 1148
1149 1149 @LoginRequired()
1150 1150 @HasPermissionAllDecorator('hg.admin')
1151 1151 @view_config(
1152 1152 route_name='edit_user_audit_logs', request_method='GET',
1153 1153 renderer='rhodecode:templates/admin/users/user_edit.mako')
1154 1154 def user_audit_logs(self):
1155 1155 _ = self.request.translate
1156 1156 c = self.load_default_context()
1157 1157 c.user = self.db_user
1158 1158
1159 1159 c.active = 'audit'
1160 1160
1161 1161 p = safe_int(self.request.GET.get('page', 1), 1)
1162 1162
1163 1163 filter_term = self.request.GET.get('filter')
1164 1164 user_log = UserModel().get_user_log(c.user, filter_term)
1165 1165
1166 1166 def url_generator(**kw):
1167 1167 if filter_term:
1168 1168 kw['filter'] = filter_term
1169 1169 return self.request.current_route_path(_query=kw)
1170 1170
1171 1171 c.audit_logs = h.Page(
1172 1172 user_log, page=p, items_per_page=10, url=url_generator)
1173 1173 c.filter_term = filter_term
1174 1174 return self._get_template_context(c)
1175 1175
1176 1176 @LoginRequired()
1177 1177 @HasPermissionAllDecorator('hg.admin')
1178 1178 @view_config(
1179 1179 route_name='edit_user_perms_summary', request_method='GET',
1180 1180 renderer='rhodecode:templates/admin/users/user_edit.mako')
1181 1181 def user_perms_summary(self):
1182 1182 _ = self.request.translate
1183 1183 c = self.load_default_context()
1184 1184 c.user = self.db_user
1185 1185
1186 1186 c.active = 'perms_summary'
1187 1187 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1188 1188
1189 1189 return self._get_template_context(c)
1190 1190
1191 1191 @LoginRequired()
1192 1192 @HasPermissionAllDecorator('hg.admin')
1193 1193 @view_config(
1194 1194 route_name='edit_user_perms_summary_json', request_method='GET',
1195 1195 renderer='json_ext')
1196 1196 def user_perms_summary_json(self):
1197 1197 self.load_default_context()
1198 1198 perm_user = self.db_user.AuthUser(ip_addr=self.request.remote_addr)
1199 1199
1200 1200 return perm_user.permissions
1201 1201
1202 def _get_user_cache_keys(self, cache_namespace_uid, keys):
1203 user_keys = []
1204 for k in sorted(keys):
1205 if k.startswith(cache_namespace_uid):
1206 user_keys.append(k)
1207 return user_keys
1208
1209 1202 @LoginRequired()
1210 1203 @HasPermissionAllDecorator('hg.admin')
1211 1204 @view_config(
1212 1205 route_name='edit_user_caches', request_method='GET',
1213 1206 renderer='rhodecode:templates/admin/users/user_edit.mako')
1214 1207 def user_caches(self):
1215 1208 _ = self.request.translate
1216 1209 c = self.load_default_context()
1217 1210 c.user = self.db_user
1218 1211
1219 1212 c.active = 'caches'
1220 1213 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1221 1214
1222 1215 cache_namespace_uid = 'cache_user_auth.{}'.format(self.db_user.user_id)
1223 1216 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1224 1217 c.backend = c.region.backend
1225 c.user_keys = self._get_user_cache_keys(
1226 cache_namespace_uid, c.region.backend.list_keys())
1218 c.user_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
1227 1219
1228 1220 return self._get_template_context(c)
1229 1221
1230 1222 @LoginRequired()
1231 1223 @HasPermissionAllDecorator('hg.admin')
1232 1224 @CSRFRequired()
1233 1225 @view_config(
1234 1226 route_name='edit_user_caches_update', request_method='POST')
1235 1227 def user_caches_update(self):
1236 1228 _ = self.request.translate
1237 1229 c = self.load_default_context()
1238 1230 c.user = self.db_user
1239 1231
1240 1232 c.active = 'caches'
1241 1233 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1242 1234
1243 1235 cache_namespace_uid = 'cache_user_auth.{}'.format(self.db_user.user_id)
1244 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1236 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid)
1245 1237
1246 c.user_keys = self._get_user_cache_keys(
1247 cache_namespace_uid, c.region.backend.list_keys())
1248 for k in c.user_keys:
1249 c.region.delete(k)
1250
1251 h.flash(_("Deleted {} cache keys").format(len(c.user_keys)), category='success')
1238 h.flash(_("Deleted {} cache keys").format(del_keys), category='success')
1252 1239
1253 1240 return HTTPFound(h.route_path(
1254 1241 'edit_user_caches', user_id=c.user.user_id))
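The users.py change above drops the hand-rolled key filtering in favour of the rc_cache helpers visible in the diff (list_keys(prefix=...) and clear_cache_namespace(...)). For readers unfamiliar with the pattern, here is a tiny dict-backed illustration of what "namespace-prefixed keys, listed and cleared by prefix" means; every name below is hypothetical and this is not RhodeCode's implementation:

# Dict-backed stand-in for a dogpile-style cache backend (hypothetical).
class TinyCache(object):
    def __init__(self):
        self._store = {}

    def set(self, key, value):
        self._store[key] = value

    def list_keys(self, prefix=''):
        return sorted(k for k in self._store if k.startswith(prefix))

    def clear_namespace(self, prefix):
        keys = self.list_keys(prefix=prefix)
        for key in keys:
            del self._store[key]
        return len(keys)  # the count surfaced in the flash message above


cache = TinyCache()
cache.set('cache_user_auth.42.permissions', {'repo1': 'repository.read'})
cache.set('cache_user_auth.7.permissions', {'repo1': 'repository.write'})

assert cache.list_keys(prefix='cache_user_auth.42') == ['cache_user_auth.42.permissions']
assert cache.clear_namespace('cache_user_auth.42') == 1
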
@@ -1,80 +1,88 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import logging
23 23
24 24 from pyramid.httpexceptions import HTTPFound
25 25 from pyramid.view import view_config
26 26
27 27 from rhodecode.apps._base import RepoAppView
28 28 from rhodecode.lib.auth import (
29 29 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
30 from rhodecode.lib import helpers as h
30 from rhodecode.lib import helpers as h, rc_cache
31 31 from rhodecode.lib import system_info
32 32 from rhodecode.model.meta import Session
33 33 from rhodecode.model.scm import ScmModel
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 37
38 38 class RepoCachesView(RepoAppView):
39 39 def load_default_context(self):
40 40 c = self._get_local_tmpl_context()
41 41 return c
42 42
43 43 @LoginRequired()
44 44 @HasRepoPermissionAnyDecorator('repository.admin')
45 45 @view_config(
46 46 route_name='edit_repo_caches', request_method='GET',
47 47 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
48 48 def repo_caches(self):
49 49 c = self.load_default_context()
50 50 c.active = 'caches'
51 51 cached_diffs_dir = c.rhodecode_db_repo.cached_diffs_dir
52 52 c.cached_diff_count = len(c.rhodecode_db_repo.cached_diffs())
53 53 c.cached_diff_size = 0
54 54 if os.path.isdir(cached_diffs_dir):
55 55 c.cached_diff_size = system_info.get_storage_size(cached_diffs_dir)
56 56 c.shadow_repos = c.rhodecode_db_repo.shadow_repos()
57
58 cache_namespace_uid = 'cache_repo.{}'.format(self.db_repo.repo_id)
59 c.region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
60 c.backend = c.region.backend
61 c.repo_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
62
57 63 return self._get_template_context(c)
58 64
59 65 @LoginRequired()
60 66 @HasRepoPermissionAnyDecorator('repository.admin')
61 67 @CSRFRequired()
62 68 @view_config(
63 69 route_name='edit_repo_caches', request_method='POST')
64 70 def repo_caches_purge(self):
65 71 _ = self.request.translate
66 72 c = self.load_default_context()
67 73 c.active = 'caches'
68 74
69 75 try:
70 76 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
77
71 78 Session().commit()
79
72 80 h.flash(_('Cache invalidation successful'),
73 81 category='success')
74 82 except Exception:
75 83 log.exception("Exception during cache invalidation")
76 84 h.flash(_('An error occurred during cache invalidation'),
77 85 category='error')
78 86
79 87 raise HTTPFound(h.route_path(
80 88 'edit_repo_caches', repo_name=self.db_repo_name)) No newline at end of file
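The repository caches view gets the same treatment: each repository owns a 'cache_repo.<repo_id>' namespace inside the 'cache_repo' region, and the admin "caches" page now shows the backend in use plus whatever keys currently sit under that prefix. A compact sketch of that read path follows; the helper and its repo_id parameter are illustrative, and the backend-name introspection is an assumption about how one might display it, the only grounded calls being get_or_create_region and list_keys as used above.

# Hypothetical read-only helper mirroring what repo_caches() exposes to the
# template: the region backend and the keys stored for one repository.
from rhodecode.lib import rc_cache

def list_repo_cache_keys(repo_id):
    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
    backend_name = type(region.backend).__name__  # backend depends on config
    repo_keys = sorted(region.backend.list_keys(prefix=cache_namespace_uid))
    return backend_name, repo_keys

The same 'cache_repo.<repo_id>' namespace is what the memoized file-tree and node-list computations later in this changeset write into; see the sketch after the files-view diff below.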
@@ -1,1289 +1,1278 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import itertools
22 22 import logging
23 23 import os
24 24 import shutil
25 25 import tempfile
26 26 import collections
27 27
28 28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31 from pyramid.response import Response
32 32
33 33 from rhodecode.apps._base import RepoAppView
34 34
35 35 from rhodecode.controllers.utils import parse_path_ref
36 from rhodecode.lib import diffs, helpers as h, caches
36 from rhodecode.lib import diffs, helpers as h, caches, rc_cache
37 37 from rhodecode.lib import audit_logger
38 38 from rhodecode.lib.exceptions import NonRelativePathError
39 39 from rhodecode.lib.codeblocks import (
40 40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
41 41 from rhodecode.lib.utils2 import (
42 42 convert_line_endings, detect_mode, safe_str, str2bool)
43 43 from rhodecode.lib.auth import (
44 44 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
45 45 from rhodecode.lib.vcs import path as vcspath
46 46 from rhodecode.lib.vcs.backends.base import EmptyCommit
47 47 from rhodecode.lib.vcs.conf import settings
48 48 from rhodecode.lib.vcs.nodes import FileNode
49 49 from rhodecode.lib.vcs.exceptions import (
50 50 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
51 51 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
52 52 NodeDoesNotExistError, CommitError, NodeError)
53 53
54 54 from rhodecode.model.scm import ScmModel
55 55 from rhodecode.model.db import Repository
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class RepoFilesView(RepoAppView):
61 61
62 62 @staticmethod
63 63 def adjust_file_path_for_svn(f_path, repo):
64 64 """
65 65 Computes the relative path of `f_path`.
66 66
67 67 This is mainly based on prefix matching of the recognized tags and
68 68 branches in the underlying repository.
69 69 """
70 70 tags_and_branches = itertools.chain(
71 71 repo.branches.iterkeys(),
72 72 repo.tags.iterkeys())
73 73 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
74 74
75 75 for name in tags_and_branches:
76 76 if f_path.startswith('{}/'.format(name)):
77 77 f_path = vcspath.relpath(f_path, name)
78 78 break
79 79 return f_path
80 80
81 81 def load_default_context(self):
82 82 c = self._get_local_tmpl_context(include_app_defaults=True)
83 83 c.rhodecode_repo = self.rhodecode_vcs_repo
84 84 return c
85 85
86 86 def _ensure_not_locked(self):
87 87 _ = self.request.translate
88 88
89 89 repo = self.db_repo
90 90 if repo.enable_locking and repo.locked[0]:
91 91 h.flash(_('This repository has been locked by %s on %s')
92 92 % (h.person_by_id(repo.locked[0]),
93 93 h.format_date(h.time_to_datetime(repo.locked[1]))),
94 94 'warning')
95 95 files_url = h.route_path(
96 96 'repo_files:default_path',
97 97 repo_name=self.db_repo_name, commit_id='tip')
98 98 raise HTTPFound(files_url)
99 99
100 100 def _get_commit_and_path(self):
101 101 default_commit_id = self.db_repo.landing_rev[1]
102 102 default_f_path = '/'
103 103
104 104 commit_id = self.request.matchdict.get(
105 105 'commit_id', default_commit_id)
106 106 f_path = self._get_f_path(self.request.matchdict, default_f_path)
107 107 return commit_id, f_path
108 108
109 109 def _get_default_encoding(self, c):
110 110 enc_list = getattr(c, 'default_encodings', [])
111 111 return enc_list[0] if enc_list else 'UTF-8'
112 112
113 113 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
114 114 """
115 115 This is a safe way to get a commit. If an error occurs, it redirects
116 116 to the tip with a proper message
117 117
118 118 :param commit_id: id of commit to fetch
119 119 :param redirect_after: toggle redirection
120 120 """
121 121 _ = self.request.translate
122 122
123 123 try:
124 124 return self.rhodecode_vcs_repo.get_commit(commit_id)
125 125 except EmptyRepositoryError:
126 126 if not redirect_after:
127 127 return None
128 128
129 129 _url = h.route_path(
130 130 'repo_files_add_file',
131 131 repo_name=self.db_repo_name, commit_id=0, f_path='',
132 132 _anchor='edit')
133 133
134 134 if h.HasRepoPermissionAny(
135 135 'repository.write', 'repository.admin')(self.db_repo_name):
136 136 add_new = h.link_to(
137 137 _('Click here to add a new file.'), _url, class_="alert-link")
138 138 else:
139 139 add_new = ""
140 140
141 141 h.flash(h.literal(
142 142 _('There are no files yet. %s') % add_new), category='warning')
143 143 raise HTTPFound(
144 144 h.route_path('repo_summary', repo_name=self.db_repo_name))
145 145
146 146 except (CommitDoesNotExistError, LookupError):
147 147 msg = _('No such commit exists for this repository')
148 148 h.flash(msg, category='error')
149 149 raise HTTPNotFound()
150 150 except RepositoryError as e:
151 151 h.flash(safe_str(h.escape(e)), category='error')
152 152 raise HTTPNotFound()
153 153
154 154 def _get_filenode_or_redirect(self, commit_obj, path):
155 155 """
156 156 Returns file_node. If an error occurs or the given path is a
157 157 directory, it redirects to the top-level path
158 158 """
159 159 _ = self.request.translate
160 160
161 161 try:
162 162 file_node = commit_obj.get_node(path)
163 163 if file_node.is_dir():
164 164 raise RepositoryError('The given path is a directory')
165 165 except CommitDoesNotExistError:
166 166 log.exception('No such commit exists for this repository')
167 167 h.flash(_('No such commit exists for this repository'), category='error')
168 168 raise HTTPNotFound()
169 169 except RepositoryError as e:
170 170 log.warning('Repository error while fetching '
171 171 'filenode `%s`. Err:%s', path, e)
172 172 h.flash(safe_str(h.escape(e)), category='error')
173 173 raise HTTPNotFound()
174 174
175 175 return file_node
176 176
177 177 def _is_valid_head(self, commit_id, repo):
178 178 # check if commit is a branch identifier- basically we cannot
179 179 # create multiple heads via file editing
180 180 valid_heads = repo.branches.keys() + repo.branches.values()
181 181
182 182 if h.is_svn(repo) and not repo.is_empty():
183 183 # Note: Subversion only has one head, we add it here in case there
184 184 # is no branch matched.
185 185 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
186 186
187 187 # check if commit is a branch name or branch hash
188 188 return commit_id in valid_heads
189 189
190 def _get_tree_cache_manager(self, namespace_type):
191 _namespace = caches.get_repo_namespace_key(
192 namespace_type, self.db_repo_name)
193 return caches.get_cache_manager('repo_cache_long', _namespace)
190 def _get_tree_at_commit(
191 self, c, commit_id, f_path, full_load=False):
192
193 repo_id = self.db_repo.repo_id
194 194
195 def _get_tree_at_commit(
196 self, c, commit_id, f_path, full_load=False, force=False):
197 def _cached_tree():
198 log.debug('Generating cached file tree for %s, %s, %s',
199 self.db_repo_name, commit_id, f_path)
195 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
196 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
197
198 @region.cache_on_arguments(namespace=cache_namespace_uid)
199 def compute_file_tree(repo_id, commit_id, f_path, full_load):
200 log.debug('Generating cached file tree for repo_id: %s, %s, %s',
201 repo_id, commit_id, f_path)
200 202
201 203 c.full_load = full_load
202 204 return render(
203 205 'rhodecode:templates/files/files_browser_tree.mako',
204 206 self._get_template_context(c), self.request)
205 207
206 cache_manager = self._get_tree_cache_manager(caches.FILE_TREE)
207
208 cache_key = caches.compute_key_from_params(
209 self.db_repo_name, commit_id, f_path)
210
211 if force:
212 # we want to force recompute of caches
213 cache_manager.remove_value(cache_key)
214
215 return cache_manager.get(cache_key, createfunc=_cached_tree)
208 return compute_file_tree(self.db_repo.repo_id, commit_id, f_path, full_load)
216 209
217 210 def _get_archive_spec(self, fname):
218 211 log.debug('Detecting archive spec for: `%s`', fname)
219 212
220 213 fileformat = None
221 214 ext = None
222 215 content_type = None
223 216 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
224 217 content_type, extension = ext_data
225 218
226 219 if fname.endswith(extension):
227 220 fileformat = a_type
228 221 log.debug('archive is of type: %s', fileformat)
229 222 ext = extension
230 223 break
231 224
232 225 if not fileformat:
233 226 raise ValueError()
234 227
235 228 # the leftover part of the whole fname is the commit id
236 229 commit_id = fname[:-len(ext)]
237 230
238 231 return commit_id, ext, fileformat, content_type
239 232
240 233 @LoginRequired()
241 234 @HasRepoPermissionAnyDecorator(
242 235 'repository.read', 'repository.write', 'repository.admin')
243 236 @view_config(
244 237 route_name='repo_archivefile', request_method='GET',
245 238 renderer=None)
246 239 def repo_archivefile(self):
247 240 # archive cache config
248 241 from rhodecode import CONFIG
249 242 _ = self.request.translate
250 243 self.load_default_context()
251 244
252 245 fname = self.request.matchdict['fname']
253 246 subrepos = self.request.GET.get('subrepos') == 'true'
254 247
255 248 if not self.db_repo.enable_downloads:
256 249 return Response(_('Downloads disabled'))
257 250
258 251 try:
259 252 commit_id, ext, fileformat, content_type = \
260 253 self._get_archive_spec(fname)
261 254 except ValueError:
262 255 return Response(_('Unknown archive type for: `{}`').format(
263 256 h.escape(fname)))
264 257
265 258 try:
266 259 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
267 260 except CommitDoesNotExistError:
268 261 return Response(_('Unknown commit_id {}').format(
269 262 h.escape(commit_id)))
270 263 except EmptyRepositoryError:
271 264 return Response(_('Empty repository'))
272 265
273 266 archive_name = '%s-%s%s%s' % (
274 267 safe_str(self.db_repo_name.replace('/', '_')),
275 268 '-sub' if subrepos else '',
276 269 safe_str(commit.short_id), ext)
277 270
278 271 use_cached_archive = False
279 272 archive_cache_enabled = CONFIG.get(
280 273 'archive_cache_dir') and not self.request.GET.get('no_cache')
281 274
282 275 if archive_cache_enabled:
283 276 # check if it's ok to write
284 277 if not os.path.isdir(CONFIG['archive_cache_dir']):
285 278 os.makedirs(CONFIG['archive_cache_dir'])
286 279 cached_archive_path = os.path.join(
287 280 CONFIG['archive_cache_dir'], archive_name)
288 281 if os.path.isfile(cached_archive_path):
289 282 log.debug('Found cached archive in %s', cached_archive_path)
290 283 fd, archive = None, cached_archive_path
291 284 use_cached_archive = True
292 285 else:
293 286 log.debug('Archive %s is not yet cached', archive_name)
294 287
295 288 if not use_cached_archive:
296 289 # generate new archive
297 290 fd, archive = tempfile.mkstemp()
298 291 log.debug('Creating new temp archive in %s', archive)
299 292 try:
300 293 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
301 294 except ImproperArchiveTypeError:
302 295 return _('Unknown archive type')
303 296 if archive_cache_enabled:
304 297 # if we generated the archive and we have cache enabled
305 298 # let's use this for future
306 299 log.debug('Storing new archive in %s', cached_archive_path)
307 300 shutil.move(archive, cached_archive_path)
308 301 archive = cached_archive_path
309 302
310 303 # store download action
311 304 audit_logger.store_web(
312 305 'repo.archive.download', action_data={
313 306 'user_agent': self.request.user_agent,
314 307 'archive_name': archive_name,
315 308 'archive_spec': fname,
316 309 'archive_cached': use_cached_archive},
317 310 user=self._rhodecode_user,
318 311 repo=self.db_repo,
319 312 commit=True
320 313 )
321 314
322 315 def get_chunked_archive(archive):
323 316 with open(archive, 'rb') as stream:
324 317 while True:
325 318 data = stream.read(16 * 1024)
326 319 if not data:
327 320 if fd: # fd means we used temporary file
328 321 os.close(fd)
329 322 if not archive_cache_enabled:
330 323 log.debug('Destroying temp archive %s', archive)
331 324 os.remove(archive)
332 325 break
333 326 yield data
334 327
335 328 response = Response(app_iter=get_chunked_archive(archive))
336 329 response.content_disposition = str(
337 330 'attachment; filename=%s' % archive_name)
338 331 response.content_type = str(content_type)
339 332
340 333 return response
341 334
342 335 def _get_file_node(self, commit_id, f_path):
343 336 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
344 337 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
345 338 try:
346 339 node = commit.get_node(f_path)
347 340 if node.is_dir():
348 341 raise NodeError('%s path is a %s not a file'
349 342 % (node, type(node)))
350 343 except NodeDoesNotExistError:
351 344 commit = EmptyCommit(
352 345 commit_id=commit_id,
353 346 idx=commit.idx,
354 347 repo=commit.repository,
355 348 alias=commit.repository.alias,
356 349 message=commit.message,
357 350 author=commit.author,
358 351 date=commit.date)
359 352 node = FileNode(f_path, '', commit=commit)
360 353 else:
361 354 commit = EmptyCommit(
362 355 repo=self.rhodecode_vcs_repo,
363 356 alias=self.rhodecode_vcs_repo.alias)
364 357 node = FileNode(f_path, '', commit=commit)
365 358 return node
366 359
367 360 @LoginRequired()
368 361 @HasRepoPermissionAnyDecorator(
369 362 'repository.read', 'repository.write', 'repository.admin')
370 363 @view_config(
371 364 route_name='repo_files_diff', request_method='GET',
372 365 renderer=None)
373 366 def repo_files_diff(self):
374 367 c = self.load_default_context()
375 368 f_path = self._get_f_path(self.request.matchdict)
376 369 diff1 = self.request.GET.get('diff1', '')
377 370 diff2 = self.request.GET.get('diff2', '')
378 371
379 372 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
380 373
381 374 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
382 375 line_context = self.request.GET.get('context', 3)
383 376
384 377 if not any((diff1, diff2)):
385 378 h.flash(
386 379 'Need query parameter "diff1" or "diff2" to generate a diff.',
387 380 category='error')
388 381 raise HTTPBadRequest()
389 382
390 383 c.action = self.request.GET.get('diff')
391 384 if c.action not in ['download', 'raw']:
392 385 compare_url = h.route_path(
393 386 'repo_compare',
394 387 repo_name=self.db_repo_name,
395 388 source_ref_type='rev',
396 389 source_ref=diff1,
397 390 target_repo=self.db_repo_name,
398 391 target_ref_type='rev',
399 392 target_ref=diff2,
400 393 _query=dict(f_path=f_path))
401 394 # redirect to new view if we render diff
402 395 raise HTTPFound(compare_url)
403 396
404 397 try:
405 398 node1 = self._get_file_node(diff1, path1)
406 399 node2 = self._get_file_node(diff2, f_path)
407 400 except (RepositoryError, NodeError):
408 401 log.exception("Exception while trying to get node from repository")
409 402 raise HTTPFound(
410 403 h.route_path('repo_files', repo_name=self.db_repo_name,
411 404 commit_id='tip', f_path=f_path))
412 405
413 406 if all(isinstance(node.commit, EmptyCommit)
414 407 for node in (node1, node2)):
415 408 raise HTTPNotFound()
416 409
417 410 c.commit_1 = node1.commit
418 411 c.commit_2 = node2.commit
419 412
420 413 if c.action == 'download':
421 414 _diff = diffs.get_gitdiff(node1, node2,
422 415 ignore_whitespace=ignore_whitespace,
423 416 context=line_context)
424 417 diff = diffs.DiffProcessor(_diff, format='gitdiff')
425 418
426 419 response = Response(self.path_filter.get_raw_patch(diff))
427 420 response.content_type = 'text/plain'
428 421 response.content_disposition = (
429 422 'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
430 423 )
431 424 charset = self._get_default_encoding(c)
432 425 if charset:
433 426 response.charset = charset
434 427 return response
435 428
436 429 elif c.action == 'raw':
437 430 _diff = diffs.get_gitdiff(node1, node2,
438 431 ignore_whitespace=ignore_whitespace,
439 432 context=line_context)
440 433 diff = diffs.DiffProcessor(_diff, format='gitdiff')
441 434
442 435 response = Response(self.path_filter.get_raw_patch(diff))
443 436 response.content_type = 'text/plain'
444 437 charset = self._get_default_encoding(c)
445 438 if charset:
446 439 response.charset = charset
447 440 return response
448 441
449 442 # in case we ever end up here
450 443 raise HTTPNotFound()
451 444
452 445 @LoginRequired()
453 446 @HasRepoPermissionAnyDecorator(
454 447 'repository.read', 'repository.write', 'repository.admin')
455 448 @view_config(
456 449 route_name='repo_files_diff_2way_redirect', request_method='GET',
457 450 renderer=None)
458 451 def repo_files_diff_2way_redirect(self):
459 452 """
460 453 Kept only to make OLD links work
461 454 """
462 455 f_path = self._get_f_path_unchecked(self.request.matchdict)
463 456 diff1 = self.request.GET.get('diff1', '')
464 457 diff2 = self.request.GET.get('diff2', '')
465 458
466 459 if not any((diff1, diff2)):
467 460 h.flash(
468 461 'Need query parameter "diff1" or "diff2" to generate a diff.',
469 462 category='error')
470 463 raise HTTPBadRequest()
471 464
472 465 compare_url = h.route_path(
473 466 'repo_compare',
474 467 repo_name=self.db_repo_name,
475 468 source_ref_type='rev',
476 469 source_ref=diff1,
477 470 target_ref_type='rev',
478 471 target_ref=diff2,
479 472 _query=dict(f_path=f_path, diffmode='sideside',
480 473 target_repo=self.db_repo_name,))
481 474 raise HTTPFound(compare_url)
482 475
483 476 @LoginRequired()
484 477 @HasRepoPermissionAnyDecorator(
485 478 'repository.read', 'repository.write', 'repository.admin')
486 479 @view_config(
487 480 route_name='repo_files', request_method='GET',
488 481 renderer=None)
489 482 @view_config(
490 483 route_name='repo_files:default_path', request_method='GET',
491 484 renderer=None)
492 485 @view_config(
493 486 route_name='repo_files:default_commit', request_method='GET',
494 487 renderer=None)
495 488 @view_config(
496 489 route_name='repo_files:rendered', request_method='GET',
497 490 renderer=None)
498 491 @view_config(
499 492 route_name='repo_files:annotated', request_method='GET',
500 493 renderer=None)
501 494 def repo_files(self):
502 495 c = self.load_default_context()
503 496
504 497 view_name = getattr(self.request.matched_route, 'name', None)
505 498
506 499 c.annotate = view_name == 'repo_files:annotated'
507 500 # default is false, but .rst/.md files later are auto rendered, we can
508 501 # overwrite auto rendering by setting this GET flag
509 502 c.renderer = view_name == 'repo_files:rendered' or \
510 503 not self.request.GET.get('no-render', False)
511 504
512 505 # redirect to given commit_id from form if given
513 506 get_commit_id = self.request.GET.get('at_rev', None)
514 507 if get_commit_id:
515 508 self._get_commit_or_redirect(get_commit_id)
516 509
517 510 commit_id, f_path = self._get_commit_and_path()
518 511 c.commit = self._get_commit_or_redirect(commit_id)
519 512 c.branch = self.request.GET.get('branch', None)
520 513 c.f_path = f_path
521 514
522 515 # prev link
523 516 try:
524 517 prev_commit = c.commit.prev(c.branch)
525 518 c.prev_commit = prev_commit
526 519 c.url_prev = h.route_path(
527 520 'repo_files', repo_name=self.db_repo_name,
528 521 commit_id=prev_commit.raw_id, f_path=f_path)
529 522 if c.branch:
530 523 c.url_prev += '?branch=%s' % c.branch
531 524 except (CommitDoesNotExistError, VCSError):
532 525 c.url_prev = '#'
533 526 c.prev_commit = EmptyCommit()
534 527
535 528 # next link
536 529 try:
537 530 next_commit = c.commit.next(c.branch)
538 531 c.next_commit = next_commit
539 532 c.url_next = h.route_path(
540 533 'repo_files', repo_name=self.db_repo_name,
541 534 commit_id=next_commit.raw_id, f_path=f_path)
542 535 if c.branch:
543 536 c.url_next += '?branch=%s' % c.branch
544 537 except (CommitDoesNotExistError, VCSError):
545 538 c.url_next = '#'
546 539 c.next_commit = EmptyCommit()
547 540
548 541 # files or dirs
549 542 try:
550 543 c.file = c.commit.get_node(f_path)
551 544 c.file_author = True
552 545 c.file_tree = ''
553 546
554 547 # load file content
555 548 if c.file.is_file():
556 549 c.lf_node = c.file.get_largefile_node()
557 550
558 551 c.file_source_page = 'true'
559 552 c.file_last_commit = c.file.last_commit
560 553 if c.file.size < c.visual.cut_off_limit_diff:
561 554 if c.annotate: # annotation has precedence over renderer
562 555 c.annotated_lines = filenode_as_annotated_lines_tokens(
563 556 c.file
564 557 )
565 558 else:
566 559 c.renderer = (
567 560 c.renderer and h.renderer_from_filename(c.file.path)
568 561 )
569 562 if not c.renderer:
570 563 c.lines = filenode_as_lines_tokens(c.file)
571 564
572 565 c.on_branch_head = self._is_valid_head(
573 566 commit_id, self.rhodecode_vcs_repo)
574 567
575 568 branch = c.commit.branch if (
576 569 c.commit.branch and '/' not in c.commit.branch) else None
577 570 c.branch_or_raw_id = branch or c.commit.raw_id
578 571 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
579 572
580 573 author = c.file_last_commit.author
581 574 c.authors = [[
582 575 h.email(author),
583 576 h.person(author, 'username_or_name_or_email'),
584 577 1
585 578 ]]
586 579
587 580 else: # load tree content at path
588 581 c.file_source_page = 'false'
589 582 c.authors = []
590 583 # this loads a simple tree without metadata to speed things up
591 584 # later via ajax we call repo_nodetree_full and fetch whole
592 585 c.file_tree = self._get_tree_at_commit(
593 586 c, c.commit.raw_id, f_path)
594 587
595 588 except RepositoryError as e:
596 589 h.flash(safe_str(h.escape(e)), category='error')
597 590 raise HTTPNotFound()
598 591
599 592 if self.request.environ.get('HTTP_X_PJAX'):
600 593 html = render('rhodecode:templates/files/files_pjax.mako',
601 594 self._get_template_context(c), self.request)
602 595 else:
603 596 html = render('rhodecode:templates/files/files.mako',
604 597 self._get_template_context(c), self.request)
605 598 return Response(html)
606 599
607 600 @HasRepoPermissionAnyDecorator(
608 601 'repository.read', 'repository.write', 'repository.admin')
609 602 @view_config(
610 603 route_name='repo_files:annotated_previous', request_method='GET',
611 604 renderer=None)
612 605 def repo_files_annotated_previous(self):
613 606 self.load_default_context()
614 607
615 608 commit_id, f_path = self._get_commit_and_path()
616 609 commit = self._get_commit_or_redirect(commit_id)
617 610 prev_commit_id = commit.raw_id
618 611 line_anchor = self.request.GET.get('line_anchor')
619 612 is_file = False
620 613 try:
621 614 _file = commit.get_node(f_path)
622 615 is_file = _file.is_file()
623 616 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
624 617 pass
625 618
626 619 if is_file:
627 620 history = commit.get_file_history(f_path)
628 621 prev_commit_id = history[1].raw_id \
629 622 if len(history) > 1 else prev_commit_id
630 623 prev_url = h.route_path(
631 624 'repo_files:annotated', repo_name=self.db_repo_name,
632 625 commit_id=prev_commit_id, f_path=f_path,
633 626 _anchor='L{}'.format(line_anchor))
634 627
635 628 raise HTTPFound(prev_url)
636 629
637 630 @LoginRequired()
638 631 @HasRepoPermissionAnyDecorator(
639 632 'repository.read', 'repository.write', 'repository.admin')
640 633 @view_config(
641 634 route_name='repo_nodetree_full', request_method='GET',
642 635 renderer=None, xhr=True)
643 636 @view_config(
644 637 route_name='repo_nodetree_full:default_path', request_method='GET',
645 638 renderer=None, xhr=True)
646 639 def repo_nodetree_full(self):
647 640 """
648 641 Returns rendered html of file tree that contains commit date,
649 642 author, commit_id for the specified combination of
650 643 repo, commit_id and file path
651 644 """
652 645 c = self.load_default_context()
653 646
654 647 commit_id, f_path = self._get_commit_and_path()
655 648 commit = self._get_commit_or_redirect(commit_id)
656 649 try:
657 650 dir_node = commit.get_node(f_path)
658 651 except RepositoryError as e:
659 652 return Response('error: {}'.format(h.escape(safe_str(e))))
660 653
661 654 if dir_node.is_file():
662 655 return Response('')
663 656
664 657 c.file = dir_node
665 658 c.commit = commit
666 659
667 # using force=True here, make a little trick. We flush the cache and
668 # compute it using the same key as without previous full_load, so now
669 # the fully loaded tree is now returned instead of partial,
670 # and we store this in caches
671 660 html = self._get_tree_at_commit(
672 c, commit.raw_id, dir_node.path, full_load=True, force=True)
661 c, commit.raw_id, dir_node.path, full_load=True)
673 662
674 663 return Response(html)
675 664
676 665 def _get_attachement_disposition(self, f_path):
677 666 return 'attachment; filename=%s' % \
678 667 safe_str(f_path.split(Repository.NAME_SEP)[-1])
679 668
680 669 @LoginRequired()
681 670 @HasRepoPermissionAnyDecorator(
682 671 'repository.read', 'repository.write', 'repository.admin')
683 672 @view_config(
684 673 route_name='repo_file_raw', request_method='GET',
685 674 renderer=None)
686 675 def repo_file_raw(self):
687 676 """
688 677 Action for "show as raw"; some mimetypes are rendered inline,
689 678 for example images and icons.
690 679 """
691 680 c = self.load_default_context()
692 681
693 682 commit_id, f_path = self._get_commit_and_path()
694 683 commit = self._get_commit_or_redirect(commit_id)
695 684 file_node = self._get_filenode_or_redirect(commit, f_path)
696 685
697 686 raw_mimetype_mapping = {
698 687 # map original mimetype to a mimetype used for "show as raw"
699 688 # you can also provide a content-disposition to override the
700 689 # default "attachment" disposition.
701 690 # orig_type: (new_type, new_dispo)
702 691
703 692 # show images inline:
704 693 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
705 694 # for example render an SVG with javascript inside or even render
706 695 # HTML.
707 696 'image/x-icon': ('image/x-icon', 'inline'),
708 697 'image/png': ('image/png', 'inline'),
709 698 'image/gif': ('image/gif', 'inline'),
710 699 'image/jpeg': ('image/jpeg', 'inline'),
711 700 'application/pdf': ('application/pdf', 'inline'),
712 701 }
713 702
714 703 mimetype = file_node.mimetype
715 704 try:
716 705 mimetype, disposition = raw_mimetype_mapping[mimetype]
717 706 except KeyError:
718 707 # we don't know anything special about this, handle it safely
719 708 if file_node.is_binary:
720 709 # do same as download raw for binary files
721 710 mimetype, disposition = 'application/octet-stream', 'attachment'
722 711 else:
723 712 # do not just use the original mimetype, but force text/plain,
724 713 # otherwise it would serve text/html and that might be unsafe.
725 714 # Note: underlying vcs library fakes text/plain mimetype if the
726 715 # mimetype can not be determined and it thinks it is not
727 716 # binary. This might lead to erroneous text display in some
728 717 # cases, but helps in other cases, like with text files
729 718 # without extension.
730 719 mimetype, disposition = 'text/plain', 'inline'
731 720
732 721 if disposition == 'attachment':
733 722 disposition = self._get_attachement_disposition(f_path)
734 723
735 724 def stream_node():
736 725 yield file_node.raw_bytes
737 726
738 727 response = Response(app_iter=stream_node())
739 728 response.content_disposition = disposition
740 729 response.content_type = mimetype
741 730
742 731 charset = self._get_default_encoding(c)
743 732 if charset:
744 733 response.charset = charset
745 734
746 735 return response
747 736
748 737 @LoginRequired()
749 738 @HasRepoPermissionAnyDecorator(
750 739 'repository.read', 'repository.write', 'repository.admin')
751 740 @view_config(
752 741 route_name='repo_file_download', request_method='GET',
753 742 renderer=None)
754 743 @view_config(
755 744 route_name='repo_file_download:legacy', request_method='GET',
756 745 renderer=None)
757 746 def repo_file_download(self):
758 747 c = self.load_default_context()
759 748
760 749 commit_id, f_path = self._get_commit_and_path()
761 750 commit = self._get_commit_or_redirect(commit_id)
762 751 file_node = self._get_filenode_or_redirect(commit, f_path)
763 752
764 753 if self.request.GET.get('lf'):
765 754 # only if lf get flag is passed, we download this file
766 755 # as LFS/Largefile
767 756 lf_node = file_node.get_largefile_node()
768 757 if lf_node:
769 758 # overwrite our pointer with the REAL large-file
770 759 file_node = lf_node
771 760
772 761 disposition = self._get_attachement_disposition(f_path)
773 762
774 763 def stream_node():
775 764 yield file_node.raw_bytes
776 765
777 766 response = Response(app_iter=stream_node())
778 767 response.content_disposition = disposition
779 768 response.content_type = file_node.mimetype
780 769
781 770 charset = self._get_default_encoding(c)
782 771 if charset:
783 772 response.charset = charset
784 773
785 774 return response
786 775
787 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
788 def _cached_nodes():
789 log.debug('Generating cached nodelist for %s, %s, %s',
790 repo_name, commit_id, f_path)
776 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
777
778 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
779 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
780
781 @region.cache_on_arguments(namespace=cache_namespace_uid)
782 def compute_file_search(repo_id, commit_id, f_path):
783 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
784 repo_id, commit_id, f_path)
791 785 try:
792 786 _d, _f = ScmModel().get_nodes(
793 787 repo_name, commit_id, f_path, flat=False)
794 788 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
795 789 log.exception(safe_str(e))
796 790 h.flash(safe_str(h.escape(e)), category='error')
797 791 raise HTTPFound(h.route_path(
798 792 'repo_files', repo_name=self.db_repo_name,
799 793 commit_id='tip', f_path='/'))
800 794 return _d + _f
801 795
802 cache_manager = self._get_tree_cache_manager(
803 caches.FILE_SEARCH_TREE_META)
804
805 cache_key = caches.compute_key_from_params(
806 repo_name, commit_id, f_path)
807 return cache_manager.get(cache_key, createfunc=_cached_nodes)
796 return compute_file_search(self.db_repo.repo_id, commit_id, f_path)
808 797
809 798 @LoginRequired()
810 799 @HasRepoPermissionAnyDecorator(
811 800 'repository.read', 'repository.write', 'repository.admin')
812 801 @view_config(
813 802 route_name='repo_files_nodelist', request_method='GET',
814 803 renderer='json_ext', xhr=True)
815 804 def repo_nodelist(self):
816 805 self.load_default_context()
817 806
818 807 commit_id, f_path = self._get_commit_and_path()
819 808 commit = self._get_commit_or_redirect(commit_id)
820 809
821 810 metadata = self._get_nodelist_at_commit(
822 self.db_repo_name, commit.raw_id, f_path)
811 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
823 812 return {'nodes': metadata}
824 813
825 814 def _create_references(
826 815 self, branches_or_tags, symbolic_reference, f_path):
827 816 items = []
828 817 for name, commit_id in branches_or_tags.items():
829 818 sym_ref = symbolic_reference(commit_id, name, f_path)
830 819 items.append((sym_ref, name))
831 820 return items
832 821
833 822 def _symbolic_reference(self, commit_id, name, f_path):
834 823 return commit_id
835 824
836 825 def _symbolic_reference_svn(self, commit_id, name, f_path):
837 826 new_f_path = vcspath.join(name, f_path)
838 827 return u'%s@%s' % (new_f_path, commit_id)
839 828
840 829 def _get_node_history(self, commit_obj, f_path, commits=None):
841 830 """
842 831 get commit history for given node
843 832
844 833 :param commit_obj: commit to calculate history
845 834 :param f_path: path for node to calculate history for
846 835 :param commits: if passed don't calculate history and take
847 836 commits defined in this list
848 837 """
849 838 _ = self.request.translate
850 839
851 840 # calculate history based on tip
852 841 tip = self.rhodecode_vcs_repo.get_commit()
853 842 if commits is None:
854 843 pre_load = ["author", "branch"]
855 844 try:
856 845 commits = tip.get_file_history(f_path, pre_load=pre_load)
857 846 except (NodeDoesNotExistError, CommitError):
858 847 # this node is not present at tip!
859 848 commits = commit_obj.get_file_history(f_path, pre_load=pre_load)
860 849
861 850 history = []
862 851 commits_group = ([], _("Changesets"))
863 852 for commit in commits:
864 853 branch = ' (%s)' % commit.branch if commit.branch else ''
865 854 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
866 855 commits_group[0].append((commit.raw_id, n_desc,))
867 856 history.append(commits_group)
868 857
869 858 symbolic_reference = self._symbolic_reference
870 859
871 860 if self.rhodecode_vcs_repo.alias == 'svn':
872 861 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
873 862 f_path, self.rhodecode_vcs_repo)
874 863 if adjusted_f_path != f_path:
875 864 log.debug(
876 865 'Recognized svn tag or branch in file "%s", using svn '
877 866 'specific symbolic references', f_path)
878 867 f_path = adjusted_f_path
879 868 symbolic_reference = self._symbolic_reference_svn
880 869
881 870 branches = self._create_references(
882 871 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path)
883 872 branches_group = (branches, _("Branches"))
884 873
885 874 tags = self._create_references(
886 875 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path)
887 876 tags_group = (tags, _("Tags"))
888 877
889 878 history.append(branches_group)
890 879 history.append(tags_group)
891 880
892 881 return history, commits
893 882
894 883 @LoginRequired()
895 884 @HasRepoPermissionAnyDecorator(
896 885 'repository.read', 'repository.write', 'repository.admin')
897 886 @view_config(
898 887 route_name='repo_file_history', request_method='GET',
899 888 renderer='json_ext')
900 889 def repo_file_history(self):
901 890 self.load_default_context()
902 891
903 892 commit_id, f_path = self._get_commit_and_path()
904 893 commit = self._get_commit_or_redirect(commit_id)
905 894 file_node = self._get_filenode_or_redirect(commit, f_path)
906 895
907 896 if file_node.is_file():
908 897 file_history, _hist = self._get_node_history(commit, f_path)
909 898
910 899 res = []
911 900 for obj in file_history:
912 901 res.append({
913 902 'text': obj[1],
914 903 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
915 904 })
916 905
917 906 data = {
918 907 'more': False,
919 908 'results': res
920 909 }
921 910 return data
922 911
923 912 log.warning('Cannot fetch history for directory')
924 913 raise HTTPBadRequest()
925 914
926 915 @LoginRequired()
927 916 @HasRepoPermissionAnyDecorator(
928 917 'repository.read', 'repository.write', 'repository.admin')
929 918 @view_config(
930 919 route_name='repo_file_authors', request_method='GET',
931 920 renderer='rhodecode:templates/files/file_authors_box.mako')
932 921 def repo_file_authors(self):
933 922 c = self.load_default_context()
934 923
935 924 commit_id, f_path = self._get_commit_and_path()
936 925 commit = self._get_commit_or_redirect(commit_id)
937 926 file_node = self._get_filenode_or_redirect(commit, f_path)
938 927
939 928 if not file_node.is_file():
940 929 raise HTTPBadRequest()
941 930
942 931 c.file_last_commit = file_node.last_commit
943 932 if self.request.GET.get('annotate') == '1':
944 933 # use _hist from annotation if annotation mode is on
945 934 commit_ids = set(x[1] for x in file_node.annotate)
946 935 _hist = (
947 936 self.rhodecode_vcs_repo.get_commit(commit_id)
948 937 for commit_id in commit_ids)
949 938 else:
950 939 _f_history, _hist = self._get_node_history(commit, f_path)
951 940 c.file_author = False
952 941
953 942 unique = collections.OrderedDict()
954 943 for commit in _hist:
955 944 author = commit.author
956 945 if author not in unique:
957 946 unique[commit.author] = [
958 947 h.email(author),
959 948 h.person(author, 'username_or_name_or_email'),
960 949 1 # counter
961 950 ]
962 951
963 952 else:
964 953 # increase counter
965 954 unique[commit.author][2] += 1
966 955
967 956 c.authors = [val for val in unique.values()]
968 957
969 958 return self._get_template_context(c)
970 959
971 960 @LoginRequired()
972 961 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
973 962 @view_config(
974 963 route_name='repo_files_remove_file', request_method='GET',
975 964 renderer='rhodecode:templates/files/files_delete.mako')
976 965 def repo_files_remove_file(self):
977 966 _ = self.request.translate
978 967 c = self.load_default_context()
979 968 commit_id, f_path = self._get_commit_and_path()
980 969
981 970 self._ensure_not_locked()
982 971
983 972 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
984 973 h.flash(_('You can only delete files with commit '
985 974 'being a valid branch '), category='warning')
986 975 raise HTTPFound(
987 976 h.route_path('repo_files',
988 977 repo_name=self.db_repo_name, commit_id='tip',
989 978 f_path=f_path))
990 979
991 980 c.commit = self._get_commit_or_redirect(commit_id)
992 981 c.file = self._get_filenode_or_redirect(c.commit, f_path)
993 982
994 983 c.default_message = _(
995 984 'Deleted file {} via RhodeCode Enterprise').format(f_path)
996 985 c.f_path = f_path
997 986
998 987 return self._get_template_context(c)
999 988
1000 989 @LoginRequired()
1001 990 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1002 991 @CSRFRequired()
1003 992 @view_config(
1004 993 route_name='repo_files_delete_file', request_method='POST',
1005 994 renderer=None)
1006 995 def repo_files_delete_file(self):
1007 996 _ = self.request.translate
1008 997
1009 998 c = self.load_default_context()
1010 999 commit_id, f_path = self._get_commit_and_path()
1011 1000
1012 1001 self._ensure_not_locked()
1013 1002
1014 1003 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1015 1004 h.flash(_('You can only delete files with commit '
1016 1005 'being a valid branch '), category='warning')
1017 1006 raise HTTPFound(
1018 1007 h.route_path('repo_files',
1019 1008 repo_name=self.db_repo_name, commit_id='tip',
1020 1009 f_path=f_path))
1021 1010
1022 1011 c.commit = self._get_commit_or_redirect(commit_id)
1023 1012 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1024 1013
1025 1014 c.default_message = _(
1026 1015 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1027 1016 c.f_path = f_path
1028 1017 node_path = f_path
1029 1018 author = self._rhodecode_db_user.full_contact
1030 1019 message = self.request.POST.get('message') or c.default_message
1031 1020 try:
1032 1021 nodes = {
1033 1022 node_path: {
1034 1023 'content': ''
1035 1024 }
1036 1025 }
1037 1026 ScmModel().delete_nodes(
1038 1027 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1039 1028 message=message,
1040 1029 nodes=nodes,
1041 1030 parent_commit=c.commit,
1042 1031 author=author,
1043 1032 )
1044 1033
1045 1034 h.flash(
1046 1035 _('Successfully deleted file `{}`').format(
1047 1036 h.escape(f_path)), category='success')
1048 1037 except Exception:
1049 1038 log.exception('Error during commit operation')
1050 1039 h.flash(_('Error occurred during commit'), category='error')
1051 1040 raise HTTPFound(
1052 1041 h.route_path('repo_commit', repo_name=self.db_repo_name,
1053 1042 commit_id='tip'))
1054 1043
1055 1044 @LoginRequired()
1056 1045 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1057 1046 @view_config(
1058 1047 route_name='repo_files_edit_file', request_method='GET',
1059 1048 renderer='rhodecode:templates/files/files_edit.mako')
1060 1049 def repo_files_edit_file(self):
1061 1050 _ = self.request.translate
1062 1051 c = self.load_default_context()
1063 1052 commit_id, f_path = self._get_commit_and_path()
1064 1053
1065 1054 self._ensure_not_locked()
1066 1055
1067 1056 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1068 1057 h.flash(_('You can only edit files with commit '
1069 1058 'being a valid branch '), category='warning')
1070 1059 raise HTTPFound(
1071 1060 h.route_path('repo_files',
1072 1061 repo_name=self.db_repo_name, commit_id='tip',
1073 1062 f_path=f_path))
1074 1063
1075 1064 c.commit = self._get_commit_or_redirect(commit_id)
1076 1065 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1077 1066
1078 1067 if c.file.is_binary:
1079 1068 files_url = h.route_path(
1080 1069 'repo_files',
1081 1070 repo_name=self.db_repo_name,
1082 1071 commit_id=c.commit.raw_id, f_path=f_path)
1083 1072 raise HTTPFound(files_url)
1084 1073
1085 1074 c.default_message = _(
1086 1075 'Edited file {} via RhodeCode Enterprise').format(f_path)
1087 1076 c.f_path = f_path
1088 1077
1089 1078 return self._get_template_context(c)
1090 1079
1091 1080 @LoginRequired()
1092 1081 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1093 1082 @CSRFRequired()
1094 1083 @view_config(
1095 1084 route_name='repo_files_update_file', request_method='POST',
1096 1085 renderer=None)
1097 1086 def repo_files_update_file(self):
1098 1087 _ = self.request.translate
1099 1088 c = self.load_default_context()
1100 1089 commit_id, f_path = self._get_commit_and_path()
1101 1090
1102 1091 self._ensure_not_locked()
1103 1092
1104 1093 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1105 1094 h.flash(_('You can only edit files with commit '
1106 1095 'being a valid branch '), category='warning')
1107 1096 raise HTTPFound(
1108 1097 h.route_path('repo_files',
1109 1098 repo_name=self.db_repo_name, commit_id='tip',
1110 1099 f_path=f_path))
1111 1100
1112 1101 c.commit = self._get_commit_or_redirect(commit_id)
1113 1102 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1114 1103
1115 1104 if c.file.is_binary:
1116 1105 raise HTTPFound(
1117 1106 h.route_path('repo_files',
1118 1107 repo_name=self.db_repo_name,
1119 1108 commit_id=c.commit.raw_id,
1120 1109 f_path=f_path))
1121 1110
1122 1111 c.default_message = _(
1123 1112 'Edited file {} via RhodeCode Enterprise').format(f_path)
1124 1113 c.f_path = f_path
1125 1114 old_content = c.file.content
1126 1115 sl = old_content.splitlines(1)
1127 1116 first_line = sl[0] if sl else ''
1128 1117
1129 1118 r_post = self.request.POST
1130 1119 # modes: 0 - Unix, 1 - Mac, 2 - DOS
1131 1120 mode = detect_mode(first_line, 0)
1132 1121 content = convert_line_endings(r_post.get('content', ''), mode)
1133 1122
1134 1123 message = r_post.get('message') or c.default_message
1135 1124 org_f_path = c.file.unicode_path
1136 1125 filename = r_post['filename']
1137 1126 org_filename = c.file.name
1138 1127
1139 1128 if content == old_content and filename == org_filename:
1140 1129 h.flash(_('No changes'), category='warning')
1141 1130 raise HTTPFound(
1142 1131 h.route_path('repo_commit', repo_name=self.db_repo_name,
1143 1132 commit_id='tip'))
1144 1133 try:
1145 1134 mapping = {
1146 1135 org_f_path: {
1147 1136 'org_filename': org_f_path,
1148 1137 'filename': os.path.join(c.file.dir_path, filename),
1149 1138 'content': content,
1150 1139 'lexer': '',
1151 1140 'op': 'mod',
1152 1141 }
1153 1142 }
1154 1143
1155 1144 ScmModel().update_nodes(
1156 1145 user=self._rhodecode_db_user.user_id,
1157 1146 repo=self.db_repo,
1158 1147 message=message,
1159 1148 nodes=mapping,
1160 1149 parent_commit=c.commit,
1161 1150 )
1162 1151
1163 1152 h.flash(
1164 1153 _('Successfully committed changes to file `{}`').format(
1165 1154 h.escape(f_path)), category='success')
1166 1155 except Exception:
1167 1156 log.exception('Error occurred during commit')
1168 1157 h.flash(_('Error occurred during commit'), category='error')
1169 1158 raise HTTPFound(
1170 1159 h.route_path('repo_commit', repo_name=self.db_repo_name,
1171 1160 commit_id='tip'))
1172 1161
1173 1162 @LoginRequired()
1174 1163 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1175 1164 @view_config(
1176 1165 route_name='repo_files_add_file', request_method='GET',
1177 1166 renderer='rhodecode:templates/files/files_add.mako')
1178 1167 def repo_files_add_file(self):
1179 1168 _ = self.request.translate
1180 1169 c = self.load_default_context()
1181 1170 commit_id, f_path = self._get_commit_and_path()
1182 1171
1183 1172 self._ensure_not_locked()
1184 1173
1185 1174 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1186 1175 if c.commit is None:
1187 1176 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1188 1177 c.default_message = (_('Added file via RhodeCode Enterprise'))
1189 1178 c.f_path = f_path.lstrip('/') # ensure not relative path
1190 1179
1191 1180 return self._get_template_context(c)
1192 1181
1193 1182 @LoginRequired()
1194 1183 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1195 1184 @CSRFRequired()
1196 1185 @view_config(
1197 1186 route_name='repo_files_create_file', request_method='POST',
1198 1187 renderer=None)
1199 1188 def repo_files_create_file(self):
1200 1189 _ = self.request.translate
1201 1190 c = self.load_default_context()
1202 1191 commit_id, f_path = self._get_commit_and_path()
1203 1192
1204 1193 self._ensure_not_locked()
1205 1194
1206 1195 r_post = self.request.POST
1207 1196
1208 1197 c.commit = self._get_commit_or_redirect(
1209 1198 commit_id, redirect_after=False)
1210 1199 if c.commit is None:
1211 1200 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1212 1201 c.default_message = (_('Added file via RhodeCode Enterprise'))
1213 1202 c.f_path = f_path
1214 1203 unix_mode = 0
1215 1204 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1216 1205
1217 1206 message = r_post.get('message') or c.default_message
1218 1207 filename = r_post.get('filename')
1219 1208 location = r_post.get('location', '') # dir location
1220 1209 file_obj = r_post.get('upload_file', None)
1221 1210
1222 1211 if file_obj is not None and hasattr(file_obj, 'filename'):
1223 1212 filename = r_post.get('filename_upload')
1224 1213 content = file_obj.file
1225 1214
1226 1215 if hasattr(content, 'file'):
1227 1216 # non posix systems store real file under file attr
1228 1217 content = content.file
1229 1218
1230 1219 if self.rhodecode_vcs_repo.is_empty:
1231 1220 default_redirect_url = h.route_path(
1232 1221 'repo_summary', repo_name=self.db_repo_name)
1233 1222 else:
1234 1223 default_redirect_url = h.route_path(
1235 1224 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1236 1225
1237 1226 # If there's no commit, redirect to repo summary
1238 1227 if type(c.commit) is EmptyCommit:
1239 1228 redirect_url = h.route_path(
1240 1229 'repo_summary', repo_name=self.db_repo_name)
1241 1230 else:
1242 1231 redirect_url = default_redirect_url
1243 1232
1244 1233 if not filename:
1245 1234 h.flash(_('No filename'), category='warning')
1246 1235 raise HTTPFound(redirect_url)
1247 1236
1248 1237 # extract the location from filename,
1249 1238 # allows using foo/bar.txt syntax to create subdirectories
1250 1239 subdir_loc = filename.rsplit('/', 1)
1251 1240 if len(subdir_loc) == 2:
1252 1241 location = os.path.join(location, subdir_loc[0])
1253 1242
1254 1243 # strip all crap out of file, just leave the basename
1255 1244 filename = os.path.basename(filename)
1256 1245 node_path = os.path.join(location, filename)
1257 1246 author = self._rhodecode_db_user.full_contact
1258 1247
1259 1248 try:
1260 1249 nodes = {
1261 1250 node_path: {
1262 1251 'content': content
1263 1252 }
1264 1253 }
1265 1254 ScmModel().create_nodes(
1266 1255 user=self._rhodecode_db_user.user_id,
1267 1256 repo=self.db_repo,
1268 1257 message=message,
1269 1258 nodes=nodes,
1270 1259 parent_commit=c.commit,
1271 1260 author=author,
1272 1261 )
1273 1262
1274 1263 h.flash(
1275 1264 _('Successfully committed new file `{}`').format(
1276 1265 h.escape(node_path)), category='success')
1277 1266 except NonRelativePathError:
1278 1267 log.exception('Non Relative path found')
1279 1268 h.flash(_(
1280 1269 'The location specified must be a relative path and must not '
1281 1270 'contain .. in the path'), category='warning')
1282 1271 raise HTTPFound(default_redirect_url)
1283 1272 except (NodeError, NodeAlreadyExistsError) as e:
1284 1273 h.flash(_(h.escape(e)), category='error')
1285 1274 except Exception:
1286 1275 log.exception('Error occurred during commit')
1287 1276 h.flash(_('Error occurred during commit'), category='error')
1288 1277
1289 1278 raise HTTPFound(default_redirect_url)
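Within the files view, the beaker cache_manager.get(key, createfunc=...) calls are replaced by dogpile-style memoization: _get_tree_at_commit and _get_nodelist_at_commit now wrap their expensive computation in a nested function decorated with region.cache_on_arguments(namespace=...), so the cache key is derived from the argument tuple. That also removes the old force=True trick in repo_nodetree_full, because full_load is part of the arguments and the fully loaded tree gets its own key. A self-contained sketch of the pattern, under the assumption that rc_cache regions behave like standard dogpile regions, is below; cached_file_tree and expensive_render are illustrative stand-ins, not code from the change.

# Hypothetical, simplified version of the memoization pattern used by
# _get_tree_at_commit / _get_nodelist_at_commit above.
from rhodecode.lib import rc_cache

def expensive_render(repo_id, commit_id, f_path, full_load):
    # stand-in for the real mako template rendering done in the view
    return 'tree:{}:{}:{}:{}'.format(repo_id, commit_id, f_path, full_load)

def cached_file_tree(repo_id, commit_id, f_path, full_load=False):
    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    @region.cache_on_arguments(namespace=cache_namespace_uid)
    def compute_file_tree(repo_id, commit_id, f_path, full_load):
        return expensive_render(repo_id, commit_id, f_path, full_load)

    # identical arguments hit the cache; a new combination (e.g. the
    # full_load=True variant) is computed and stored under its own key
    return compute_file_tree(repo_id, commit_id, f_path, full_load)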
@@ -1,372 +1,369 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import string
23 23
24 24 from pyramid.view import view_config
25 25 from beaker.cache import cache_region
26 26
27 27 from rhodecode.controllers import utils
28 28 from rhodecode.apps._base import RepoAppView
29 29 from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
30 from rhodecode.lib import caches, helpers as h
31 from rhodecode.lib.helpers import RepoPage
30 from rhodecode.lib import helpers as h, rc_cache
32 31 from rhodecode.lib.utils2 import safe_str, safe_int
33 32 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
34 33 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
35 34 from rhodecode.lib.ext_json import json
36 35 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.exceptions import CommitError, EmptyRepositoryError, \
38 CommitDoesNotExistError
36 from rhodecode.lib.vcs.exceptions import (
37 CommitError, EmptyRepositoryError, CommitDoesNotExistError)
39 38 from rhodecode.model.db import Statistics, CacheKey, User
40 39 from rhodecode.model.meta import Session
41 40 from rhodecode.model.repo import ReadmeFinder
42 41 from rhodecode.model.scm import ScmModel
43 42
44 43 log = logging.getLogger(__name__)
45 44
46 45
47 46 class RepoSummaryView(RepoAppView):
48 47
49 48 def load_default_context(self):
50 49 c = self._get_local_tmpl_context(include_app_defaults=True)
51 50 c.rhodecode_repo = None
52 51 if not c.repository_requirements_missing:
53 52 c.rhodecode_repo = self.rhodecode_vcs_repo
54 53 return c
55 54
56 55 def _get_readme_data(self, db_repo, default_renderer):
57 56 repo_name = db_repo.repo_name
58 57 log.debug('Looking for README file')
59 58
60 59 @cache_region('long_term')
61 60 def _generate_readme(cache_key):
62 61 readme_data = None
63 62 readme_node = None
64 63 readme_filename = None
65 64 commit = self._get_landing_commit_or_none(db_repo)
66 65 if commit:
67 66 log.debug("Searching for a README file.")
68 67 readme_node = ReadmeFinder(default_renderer).search(commit)
69 68 if readme_node:
70 69 relative_urls = {
71 70 'raw': h.route_path(
72 71 'repo_file_raw', repo_name=repo_name,
73 72 commit_id=commit.raw_id, f_path=readme_node.path),
74 73 'standard': h.route_path(
75 74 'repo_files', repo_name=repo_name,
76 75 commit_id=commit.raw_id, f_path=readme_node.path),
77 76 }
78 77 readme_data = self._render_readme_or_none(
79 78 commit, readme_node, relative_urls)
80 79 readme_filename = readme_node.path
81 80 return readme_data, readme_filename
82 81
83 82 invalidator_context = CacheKey.repo_context_cache(
84 83 _generate_readme, repo_name, CacheKey.CACHE_TYPE_README)
85 84
86 85 with invalidator_context as context:
87 86 context.invalidate()
88 87 computed = context.compute()
89 88
90 89 return computed
91 90
92 91 def _get_landing_commit_or_none(self, db_repo):
93 92 log.debug("Getting the landing commit.")
94 93 try:
95 94 commit = db_repo.get_landing_commit()
96 95 if not isinstance(commit, EmptyCommit):
97 96 return commit
98 97 else:
99 98 log.debug("Repository is empty, no README to render.")
100 99 except CommitError:
101 100 log.exception(
102 101 "Problem getting commit when trying to render the README.")
103 102
104 103 def _render_readme_or_none(self, commit, readme_node, relative_urls):
105 104 log.debug(
106 105 'Found README file `%s` rendering...', readme_node.path)
107 106 renderer = MarkupRenderer()
108 107 try:
109 108 html_source = renderer.render(
110 109 readme_node.content, filename=readme_node.path)
111 110 if relative_urls:
112 111 return relative_links(html_source, relative_urls)
113 112 return html_source
114 113 except Exception:
115 114 log.exception(
116 115 "Exception while trying to render the README")
117 116
118 117 def _load_commits_context(self, c):
119 118 p = safe_int(self.request.GET.get('page'), 1)
120 119 size = safe_int(self.request.GET.get('size'), 10)
121 120
122 121 def url_generator(**kw):
123 122 query_params = {
124 123 'size': size
125 124 }
126 125 query_params.update(kw)
127 126 return h.route_path(
128 127 'repo_summary_commits',
129 128 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
130 129
131 130 pre_load = ['author', 'branch', 'date', 'message']
132 131 try:
133 132 collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load)
134 133 except EmptyRepositoryError:
135 134 collection = self.rhodecode_vcs_repo
136 135
137 c.repo_commits = RepoPage(
136 c.repo_commits = h.RepoPage(
138 137 collection, page=p, items_per_page=size, url=url_generator)
139 138 page_ids = [x.raw_id for x in c.repo_commits]
140 139 c.comments = self.db_repo.get_comments(page_ids)
141 140 c.statuses = self.db_repo.statuses(page_ids)
142 141
143 142 @LoginRequired()
144 143 @HasRepoPermissionAnyDecorator(
145 144 'repository.read', 'repository.write', 'repository.admin')
146 145 @view_config(
147 146 route_name='repo_summary_commits', request_method='GET',
148 147 renderer='rhodecode:templates/summary/summary_commits.mako')
149 148 def summary_commits(self):
150 149 c = self.load_default_context()
151 150 self._load_commits_context(c)
152 151 return self._get_template_context(c)
153 152
154 153 @LoginRequired()
155 154 @HasRepoPermissionAnyDecorator(
156 155 'repository.read', 'repository.write', 'repository.admin')
157 156 @view_config(
158 157 route_name='repo_summary', request_method='GET',
159 158 renderer='rhodecode:templates/summary/summary.mako')
160 159 @view_config(
161 160 route_name='repo_summary_slash', request_method='GET',
162 161 renderer='rhodecode:templates/summary/summary.mako')
163 162 @view_config(
164 163 route_name='repo_summary_explicit', request_method='GET',
165 164 renderer='rhodecode:templates/summary/summary.mako')
166 165 def summary(self):
167 166 c = self.load_default_context()
168 167
169 168 # Prepare the clone URL
170 169 username = ''
171 170 if self._rhodecode_user.username != User.DEFAULT_USER:
172 171 username = safe_str(self._rhodecode_user.username)
173 172
174 173 _def_clone_uri = _def_clone_uri_id = c.clone_uri_tmpl
175 174 _def_clone_uri_ssh = c.clone_uri_ssh_tmpl
176 175
177 176 if '{repo}' in _def_clone_uri:
178 177 _def_clone_uri_id = _def_clone_uri.replace(
179 178 '{repo}', '_{repoid}')
180 179 elif '{repoid}' in _def_clone_uri:
181 180 _def_clone_uri_id = _def_clone_uri.replace(
182 181 '_{repoid}', '{repo}')
183 182
184 183 c.clone_repo_url = self.db_repo.clone_url(
185 184 user=username, uri_tmpl=_def_clone_uri)
186 185 c.clone_repo_url_id = self.db_repo.clone_url(
187 186 user=username, uri_tmpl=_def_clone_uri_id)
188 187 c.clone_repo_url_ssh = self.db_repo.clone_url(
189 188 uri_tmpl=_def_clone_uri_ssh, ssh=True)
190 189
191 190 # If enabled, get statistics data
192 191
193 192 c.show_stats = bool(self.db_repo.enable_statistics)
194 193
195 194 stats = Session().query(Statistics) \
196 195 .filter(Statistics.repository == self.db_repo) \
197 196 .scalar()
198 197
199 198 c.stats_percentage = 0
200 199
201 200 if stats and stats.languages:
202 201 c.no_data = False is self.db_repo.enable_statistics
203 202 lang_stats_d = json.loads(stats.languages)
204 203
205 204 # Sort first by decreasing count and second by the file extension,
206 205 # so we have a consistent output.
207 206 lang_stats_items = sorted(lang_stats_d.iteritems(),
208 207 key=lambda k: (-k[1], k[0]))[:10]
209 208 lang_stats = [(x, {"count": y,
210 209 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
211 210 for x, y in lang_stats_items]
212 211
213 212 c.trending_languages = json.dumps(lang_stats)
214 213 else:
215 214 c.no_data = True
216 215 c.trending_languages = json.dumps({})
217 216
218 217 scm_model = ScmModel()
219 218 c.enable_downloads = self.db_repo.enable_downloads
220 219 c.repository_followers = scm_model.get_followers(self.db_repo)
221 220 c.repository_forks = scm_model.get_forks(self.db_repo)
222 221 c.repository_is_user_following = scm_model.is_following_repo(
223 222 self.db_repo_name, self._rhodecode_user.user_id)
224 223
225 224 # first interaction with the VCS instance after here...
226 225 if c.repository_requirements_missing:
227 226 self.request.override_renderer = \
228 227 'rhodecode:templates/summary/missing_requirements.mako'
229 228 return self._get_template_context(c)
230 229
231 230 c.readme_data, c.readme_file = \
232 231 self._get_readme_data(self.db_repo, c.visual.default_renderer)
233 232
234 233 # loads the summary commits template context
235 234 self._load_commits_context(c)
236 235
237 236 return self._get_template_context(c)
238 237
239 238 def get_request_commit_id(self):
240 239 return self.request.matchdict['commit_id']
241 240
242 241 @LoginRequired()
243 242 @HasRepoPermissionAnyDecorator(
244 243 'repository.read', 'repository.write', 'repository.admin')
245 244 @view_config(
246 245 route_name='repo_stats', request_method='GET',
247 246 renderer='json_ext')
248 247 def repo_stats(self):
249 248 commit_id = self.get_request_commit_id()
249 show_stats = bool(self.db_repo.enable_statistics)
250 repo_id = self.db_repo.repo_id
250 251
251 _namespace = caches.get_repo_namespace_key(
252 caches.SUMMARY_STATS, self.db_repo_name)
253 show_stats = bool(self.db_repo.enable_statistics)
254 cache_manager = caches.get_cache_manager(
255 'repo_cache_long', _namespace)
256 _cache_key = caches.compute_key_from_params(
257 self.db_repo_name, commit_id, show_stats)
252 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
253 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
258 254
259 def compute_stats():
255 @region.cache_on_arguments(namespace=cache_namespace_uid)
256 def compute_stats(repo_id, commit_id, show_stats):
260 257 code_stats = {}
261 258 size = 0
262 259 try:
263 260 scm_instance = self.db_repo.scm_instance()
264 261 commit = scm_instance.get_commit(commit_id)
265 262
266 263 for node in commit.get_filenodes_generator():
267 264 size += node.size
268 265 if not show_stats:
269 266 continue
270 267 ext = string.lower(node.extension)
271 268 ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext)
272 269 if ext_info:
273 270 if ext in code_stats:
274 271 code_stats[ext]['count'] += 1
275 272 else:
276 273 code_stats[ext] = {"count": 1, "desc": ext_info}
277 274 except (EmptyRepositoryError, CommitDoesNotExistError):
278 275 pass
279 276 return {'size': h.format_byte_size_binary(size),
280 277 'code_stats': code_stats}
281 278
282 stats = cache_manager.get(_cache_key, createfunc=compute_stats)
279 stats = compute_stats(self.db_repo.repo_id, commit_id, show_stats)
283 280 return stats
284 281
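Note: the hunk above replaces the old beaker cache_manager.get(_cache_key, createfunc=...) call with a dogpile.cache region and a decorated compute function. A minimal sketch of that pattern in isolation, assuming a configured 'cache_repo' region and hypothetical repo_id / commit_id values (do_heavy_work is a made-up helper, not part of the changeset):

    from rhodecode.lib import rc_cache

    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    @region.cache_on_arguments(namespace=cache_namespace_uid)
    def compute(repo_id, commit_id, show_stats):
        # the positional arguments become part of the generated cache key
        return do_heavy_work(repo_id, commit_id, show_stats)

    stats = compute(repo_id, commit_id, True)
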
285 282 @LoginRequired()
286 283 @HasRepoPermissionAnyDecorator(
287 284 'repository.read', 'repository.write', 'repository.admin')
288 285 @view_config(
289 286 route_name='repo_refs_data', request_method='GET',
290 287 renderer='json_ext')
291 288 def repo_refs_data(self):
292 289 _ = self.request.translate
293 290 self.load_default_context()
294 291
295 292 repo = self.rhodecode_vcs_repo
296 293 refs_to_create = [
297 294 (_("Branch"), repo.branches, 'branch'),
298 295 (_("Tag"), repo.tags, 'tag'),
299 296 (_("Bookmark"), repo.bookmarks, 'book'),
300 297 ]
301 298 res = self._create_reference_data(
302 299 repo, self.db_repo_name, refs_to_create)
303 300 data = {
304 301 'more': False,
305 302 'results': res
306 303 }
307 304 return data
308 305
309 306 @LoginRequired()
310 307 @HasRepoPermissionAnyDecorator(
311 308 'repository.read', 'repository.write', 'repository.admin')
312 309 @view_config(
313 310 route_name='repo_refs_changelog_data', request_method='GET',
314 311 renderer='json_ext')
315 312 def repo_refs_changelog_data(self):
316 313 _ = self.request.translate
317 314 self.load_default_context()
318 315
319 316 repo = self.rhodecode_vcs_repo
320 317
321 318 refs_to_create = [
322 319 (_("Branches"), repo.branches, 'branch'),
323 320 (_("Closed branches"), repo.branches_closed, 'branch_closed'),
324 321 # TODO: enable when vcs can handle bookmarks filters
325 322 # (_("Bookmarks"), repo.bookmarks, "book"),
326 323 ]
327 324 res = self._create_reference_data(
328 325 repo, self.db_repo_name, refs_to_create)
329 326 data = {
330 327 'more': False,
331 328 'results': res
332 329 }
333 330 return data
334 331
335 332 def _create_reference_data(self, repo, full_repo_name, refs_to_create):
336 333 format_ref_id = utils.get_format_ref_id(repo)
337 334
338 335 result = []
339 336 for title, refs, ref_type in refs_to_create:
340 337 if refs:
341 338 result.append({
342 339 'text': title,
343 340 'children': self._create_reference_items(
344 341 repo, full_repo_name, refs, ref_type,
345 342 format_ref_id),
346 343 })
347 344 return result
348 345
349 346 def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
350 347 format_ref_id):
351 348 result = []
352 349 is_svn = h.is_svn(repo)
353 350 for ref_name, raw_id in refs.iteritems():
354 351 files_url = self._create_files_url(
355 352 repo, full_repo_name, ref_name, raw_id, is_svn)
356 353 result.append({
357 354 'text': ref_name,
358 355 'id': format_ref_id(ref_name, raw_id),
359 356 'raw_id': raw_id,
360 357 'type': ref_type,
361 358 'files_url': files_url,
362 359 })
363 360 return result
364 361
365 362 def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
366 363 use_commit_id = '/' in ref_name or is_svn
367 364 return h.route_path(
368 365 'repo_files',
369 366 repo_name=full_repo_name,
370 367 f_path=ref_name if is_svn else '',
371 368 commit_id=raw_id if use_commit_id else ref_name,
372 369 _query=dict(at=ref_name))
@@ -1,476 +1,483 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import logging
23 23 import traceback
24 24 import collections
25 25 import tempfile
26 26
27 27 from paste.gzipper import make_gzip_middleware
28 28 from pyramid.wsgi import wsgiapp
29 29 from pyramid.authorization import ACLAuthorizationPolicy
30 30 from pyramid.config import Configurator
31 31 from pyramid.settings import asbool, aslist
32 32 from pyramid.httpexceptions import (
33 33 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
34 34 from pyramid.events import ApplicationCreated
35 35 from pyramid.renderers import render_to_response
36 36
37 37 from rhodecode.model import meta
38 38 from rhodecode.config import patches
39 39 from rhodecode.config import utils as config_utils
40 40 from rhodecode.config.environment import load_pyramid_environment
41 41
42 42 from rhodecode.lib.middleware.vcs import VCSMiddleware
43 43 from rhodecode.lib.request import Request
44 44 from rhodecode.lib.vcs import VCSCommunicationError
45 45 from rhodecode.lib.exceptions import VCSServerUnavailable
46 46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
47 47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 48 from rhodecode.lib.celerylib.loader import configure_celery
49 49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50 50 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
51 51 from rhodecode.subscribers import (
52 52 scan_repositories_if_enabled, write_js_routes_if_enabled,
53 53 write_metadata_if_needed, inject_app_settings)
54 54
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 def is_http_error(response):
60 60 # error which should have traceback
61 61 return response.status_code > 499
62 62
63 63
64 64 def make_pyramid_app(global_config, **settings):
65 65 """
66 66 Constructs the WSGI application based on Pyramid.
67 67
68 68 Specials:
69 69
70 70 * The application can also be integrated like a plugin via the call to
71 71 `includeme`. This is accompanied by the other utility functions which
72 72 are called here. Change this with great care, so as not to break
73 73 cases where these fragments are assembled from another place.
74 74
75 75 """
76 76
77 77 # Allows using format-style "{ENV_NAME}" placeholders in the configuration. Each
78 78 # placeholder is replaced by the value of the environment variable "NAME".
79 79 environ = {
80 80 'ENV_{}'.format(key): value for key, value in os.environ.items()}
81 81
82 82 global_config = _substitute_values(global_config, environ)
83 83 settings = _substitute_values(settings, environ)
84 84
85 85 sanitize_settings_and_apply_defaults(settings)
86 86
87 87 config = Configurator(settings=settings)
88 88
89 89 # Apply compatibility patches
90 90 patches.inspect_getargspec()
91 91
92 92 load_pyramid_environment(global_config, settings)
93 93
94 94 # Static file view comes first
95 95 includeme_first(config)
96 96
97 97 includeme(config)
98 98
99 99 pyramid_app = config.make_wsgi_app()
100 100 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
101 101 pyramid_app.config = config
102 102
103 103 config.configure_celery(global_config['__file__'])
104 104 # creating the app uses a connection - return it after we are done
105 105 meta.Session.remove()
106 106
107 107 log.info('Pyramid app %s created and configured.', pyramid_app)
108 108 return pyramid_app
109 109
110 110
111 111 def not_found_view(request):
112 112 """
113 113 This creates the view which should be registered as not-found-view to
114 114 pyramid.
115 115 """
116 116
117 117 if not getattr(request, 'vcs_call', None):
118 118 # handle like regular case with our error_handler
119 119 return error_handler(HTTPNotFound(), request)
120 120
121 121 # handle not found view as a vcs call
122 122 settings = request.registry.settings
123 123 ae_client = getattr(request, 'ae_client', None)
124 124 vcs_app = VCSMiddleware(
125 125 HTTPNotFound(), request.registry, settings,
126 126 appenlight_client=ae_client)
127 127
128 128 return wsgiapp(vcs_app)(None, request)
129 129
130 130
131 131 def error_handler(exception, request):
132 132 import rhodecode
133 133 from rhodecode.lib import helpers
134 134
135 135 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
136 136
137 137 base_response = HTTPInternalServerError()
138 138 # prefer original exception for the response since it may have headers set
139 139 if isinstance(exception, HTTPException):
140 140 base_response = exception
141 141 elif isinstance(exception, VCSCommunicationError):
142 142 base_response = VCSServerUnavailable()
143 143
144 144 if is_http_error(base_response):
145 145 log.exception(
146 146 'error occurred handling this request for path: %s', request.path)
147 147
148 148 error_explanation = base_response.explanation or str(base_response)
149 149 if base_response.status_code == 404:
150 150 error_explanation += " Or you don't have permission to access it."
151 151 c = AttributeDict()
152 152 c.error_message = base_response.status
153 153 c.error_explanation = error_explanation
154 154 c.visual = AttributeDict()
155 155
156 156 c.visual.rhodecode_support_url = (
157 157 request.registry.settings.get('rhodecode_support_url') or
158 158 request.route_url('rhodecode_support')
159 159 )
160 160 c.redirect_time = 0
161 161 c.rhodecode_name = rhodecode_title
162 162 if not c.rhodecode_name:
163 163 c.rhodecode_name = 'Rhodecode'
164 164
165 165 c.causes = []
166 166 if is_http_error(base_response):
167 167 c.causes.append('Server is overloaded.')
168 168 c.causes.append('Server database connection is lost.')
169 169 c.causes.append('Server encountered an unexpected error.')
170 170
171 171 if hasattr(base_response, 'causes'):
172 172 c.causes = base_response.causes
173 173
174 174 c.messages = helpers.flash.pop_messages(request=request)
175 175 c.traceback = traceback.format_exc()
176 176 response = render_to_response(
177 177 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
178 178 response=base_response)
179 179
180 180 return response
181 181
182 182
183 183 def includeme_first(config):
184 184 # redirect automatic browser favicon.ico requests to correct place
185 185 def favicon_redirect(context, request):
186 186 return HTTPFound(
187 187 request.static_path('rhodecode:public/images/favicon.ico'))
188 188
189 189 config.add_view(favicon_redirect, route_name='favicon')
190 190 config.add_route('favicon', '/favicon.ico')
191 191
192 192 def robots_redirect(context, request):
193 193 return HTTPFound(
194 194 request.static_path('rhodecode:public/robots.txt'))
195 195
196 196 config.add_view(robots_redirect, route_name='robots')
197 197 config.add_route('robots', '/robots.txt')
198 198
199 199 config.add_static_view(
200 200 '_static/deform', 'deform:static')
201 201 config.add_static_view(
202 202 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
203 203
204 204
205 205 def includeme(config):
206 206 settings = config.registry.settings
207 207 config.set_request_factory(Request)
208 208
209 209 # plugin information
210 210 config.registry.rhodecode_plugins = collections.OrderedDict()
211 211
212 212 config.add_directive(
213 213 'register_rhodecode_plugin', register_rhodecode_plugin)
214 214
215 215 config.add_directive('configure_celery', configure_celery)
216 216
217 217 if asbool(settings.get('appenlight', 'false')):
218 218 config.include('appenlight_client.ext.pyramid_tween')
219 219
220 220 # Includes which are required. The application would fail without them.
221 221 config.include('pyramid_mako')
222 222 config.include('pyramid_beaker')
223 223 config.include('rhodecode.lib.caches')
224 224 config.include('rhodecode.lib.rc_cache')
225 225
226 226 config.include('rhodecode.authentication')
227 227 config.include('rhodecode.integrations')
228 228
229 229 # apps
230 230 config.include('rhodecode.apps._base')
231 231 config.include('rhodecode.apps.ops')
232 232
233 233 config.include('rhodecode.apps.admin')
234 234 config.include('rhodecode.apps.channelstream')
235 235 config.include('rhodecode.apps.login')
236 236 config.include('rhodecode.apps.home')
237 237 config.include('rhodecode.apps.journal')
238 238 config.include('rhodecode.apps.repository')
239 239 config.include('rhodecode.apps.repo_group')
240 240 config.include('rhodecode.apps.user_group')
241 241 config.include('rhodecode.apps.search')
242 242 config.include('rhodecode.apps.user_profile')
243 243 config.include('rhodecode.apps.user_group_profile')
244 244 config.include('rhodecode.apps.my_account')
245 245 config.include('rhodecode.apps.svn_support')
246 246 config.include('rhodecode.apps.ssh_support')
247 247 config.include('rhodecode.apps.gist')
248 248
249 249 config.include('rhodecode.apps.debug_style')
250 250 config.include('rhodecode.tweens')
251 251 config.include('rhodecode.api')
252 252
253 253 config.add_route(
254 254 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
255 255
256 256 config.add_translation_dirs('rhodecode:i18n/')
257 257 settings['default_locale_name'] = settings.get('lang', 'en')
258 258
259 259 # Add subscribers.
260 260 config.add_subscriber(inject_app_settings, ApplicationCreated)
261 261 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
262 262 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
263 263 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
264 264
265 265 # events
266 266 # TODO(marcink): this should be done when pyramid migration is finished
267 267 # config.add_subscriber(
268 268 # 'rhodecode.integrations.integrations_event_handler',
269 269 # 'rhodecode.events.RhodecodeEvent')
270 270
271 271 # request custom methods
272 272 config.add_request_method(
273 273 'rhodecode.lib.partial_renderer.get_partial_renderer',
274 274 'get_partial_renderer')
275 275
276 276 # Set the authorization policy.
277 277 authz_policy = ACLAuthorizationPolicy()
278 278 config.set_authorization_policy(authz_policy)
279 279
280 280 # Set the default renderer for HTML templates to mako.
281 281 config.add_mako_renderer('.html')
282 282
283 283 config.add_renderer(
284 284 name='json_ext',
285 285 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
286 286
287 287 # include RhodeCode plugins
288 288 includes = aslist(settings.get('rhodecode.includes', []))
289 289 for inc in includes:
290 290 config.include(inc)
291 291
292 292 # custom not-found view: if our pyramid app doesn't know how to handle
293 293 # the request, pass it to the potential VCS handling app
294 294 config.add_notfound_view(not_found_view)
295 295 if not settings.get('debugtoolbar.enabled', False):
296 296 # with the debugtoolbar disabled, handle all exceptions via the error_handler
297 297 config.add_view(error_handler, context=Exception)
298 298
299 299 # all errors including 403/404/50X
300 300 config.add_view(error_handler, context=HTTPError)
301 301
302 302
303 303 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
304 304 """
305 305 Apply outer WSGI middlewares around the application.
306 306 """
307 307 settings = config.registry.settings
308 308
309 309 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
310 310 pyramid_app = HttpsFixup(pyramid_app, settings)
311 311
312 312 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
313 313 pyramid_app, settings)
314 314 config.registry.ae_client = _ae_client
315 315
316 316 if settings['gzip_responses']:
317 317 pyramid_app = make_gzip_middleware(
318 318 pyramid_app, settings, compress_level=1)
319 319
320 320 # this should be the outer most middleware in the wsgi stack since
321 321 # middleware like Routes make database calls
322 322 def pyramid_app_with_cleanup(environ, start_response):
323 323 try:
324 324 return pyramid_app(environ, start_response)
325 325 finally:
326 326 # Dispose current database session and rollback uncommitted
327 327 # transactions.
328 328 meta.Session.remove()
329 329
330 330 # In a single-threaded server on a non-sqlite db we should see
331 331 # '0 Current Checked out connections' at the end of a request;
332 332 # if not, something somewhere is leaving a connection open
333 333 pool = meta.Base.metadata.bind.engine.pool
334 334 log.debug('sa pool status: %s', pool.status())
335 335
336 336 return pyramid_app_with_cleanup
337 337
338 338
339 339 def sanitize_settings_and_apply_defaults(settings):
340 340 """
341 341 Applies settings defaults and does all type conversion.
342 342
343 343 The goal is to move all settings parsing and preparation into this one
344 344 place, so that it is the only spot which deals with this part and the
345 345 rest of the application can rely fully on well-prepared settings.
346 346
347 347 This piece should later be split up per topic to avoid one big monster
348 348 function.
349 349 """
350 350
351 351 settings.setdefault('rhodecode.edition', 'Community Edition')
352 352
353 353 if 'mako.default_filters' not in settings:
354 354 # set custom default filters if we don't have it defined
355 355 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
356 356 settings['mako.default_filters'] = 'h_filter'
357 357
358 358 if 'mako.directories' not in settings:
359 359 mako_directories = settings.setdefault('mako.directories', [
360 360 # Base templates of the original application
361 361 'rhodecode:templates',
362 362 ])
363 363 log.debug(
364 364 "Using the following Mako template directories: %s",
365 365 mako_directories)
366 366
367 367 # Default includes, possible to change as a user
368 368 pyramid_includes = settings.setdefault('pyramid.includes', [
369 369 'rhodecode.lib.middleware.request_wrapper',
370 370 ])
371 371 log.debug(
372 372 "Using the following pyramid.includes: %s",
373 373 pyramid_includes)
374 374
375 375 # TODO: johbo: Re-think this, usually the call to config.include
376 376 # should allow to pass in a prefix.
377 377 settings.setdefault('rhodecode.api.url', '/_admin/api')
378 378
379 379 # Sanitize generic settings.
380 380 _list_setting(settings, 'default_encoding', 'UTF-8')
381 381 _bool_setting(settings, 'is_test', 'false')
382 382 _bool_setting(settings, 'gzip_responses', 'false')
383 383
384 384 # Call split out functions that sanitize settings for each topic.
385 385 _sanitize_appenlight_settings(settings)
386 386 _sanitize_vcs_settings(settings)
387 387 _sanitize_cache_settings(settings)
388 388
389 389 # configure instance id
390 390 config_utils.set_instance_id(settings)
391 391
392 392 return settings
393 393
394 394
395 395 def _sanitize_appenlight_settings(settings):
396 396 _bool_setting(settings, 'appenlight', 'false')
397 397
398 398
399 399 def _sanitize_vcs_settings(settings):
400 400 """
401 401 Applies settings defaults and does type conversion for all VCS related
402 402 settings.
403 403 """
404 404 _string_setting(settings, 'vcs.svn.compatible_version', '')
405 405 _string_setting(settings, 'git_rev_filter', '--all')
406 406 _string_setting(settings, 'vcs.hooks.protocol', 'http')
407 407 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
408 408 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
409 409 _string_setting(settings, 'vcs.server', '')
410 410 _string_setting(settings, 'vcs.server.log_level', 'debug')
411 411 _string_setting(settings, 'vcs.server.protocol', 'http')
412 412 _bool_setting(settings, 'startup.import_repos', 'false')
413 413 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
414 414 _bool_setting(settings, 'vcs.server.enable', 'true')
415 415 _bool_setting(settings, 'vcs.start_server', 'false')
416 416 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
417 417 _int_setting(settings, 'vcs.connection_timeout', 3600)
418 418
419 419 # Support legacy values of vcs.scm_app_implementation. Legacy
420 420 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
421 421 # which is now mapped to 'http'.
422 422 scm_app_impl = settings['vcs.scm_app_implementation']
423 423 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
424 424 settings['vcs.scm_app_implementation'] = 'http'
425 425
426 426
427 427 def _sanitize_cache_settings(settings):
428 428 _string_setting(settings, 'cache_dir',
429 429 os.path.join(tempfile.gettempdir(), 'rc_cache'))
430 430
431 431 _string_setting(settings, 'rc_cache.cache_perms.backend',
432 432 'dogpile.cache.rc.file_namespace')
433 433 _int_setting(settings, 'rc_cache.cache_perms.expiration_time',
434 434 60)
435 435 _string_setting(settings, 'rc_cache.cache_perms.arguments.filename',
436 436 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
437 437
438 _string_setting(settings, 'rc_cache.cache_repo.backend',
439 'dogpile.cache.rc.file_namespace')
440 _int_setting(settings, 'rc_cache.cache_repo.expiration_time',
441 60)
442 _string_setting(settings, 'rc_cache.cache_repo.arguments.filename',
443 os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
444
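As a rough illustration (an assumption, not part of the changeset), running the sanitizer above inside this module with an empty config shows which defaults get filled in for the new region:

    settings = {}
    _sanitize_cache_settings(settings)
    # with no rc_cache.cache_repo.* keys in the .ini, the helpers fill in, among others:
    #   settings['rc_cache.cache_repo.backend']            -> 'dogpile.cache.rc.file_namespace'
    #   settings['rc_cache.cache_repo.expiration_time']    -> 60
    #   settings['rc_cache.cache_repo.arguments.filename'] -> <tempdir>/rc_cache_2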
438 445
439 446 def _int_setting(settings, name, default):
440 447 settings[name] = int(settings.get(name, default))
441 448
442 449
443 450 def _bool_setting(settings, name, default):
444 451 input_val = settings.get(name, default)
445 452 if isinstance(input_val, unicode):
446 453 input_val = input_val.encode('utf8')
447 454 settings[name] = asbool(input_val)
448 455
449 456
450 457 def _list_setting(settings, name, default):
451 458 raw_value = settings.get(name, default)
452 459
453 460 old_separator = ','
454 461 if old_separator in raw_value:
455 462 # If we get a comma separated list, pass it to our own function.
456 463 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
457 464 else:
458 465 # Otherwise we assume it uses pyramids space/newline separation.
459 466 settings[name] = aslist(raw_value)
460 467
461 468
462 469 def _string_setting(settings, name, default, lower=True):
463 470 value = settings.get(name, default)
464 471 if lower:
465 472 value = value.lower()
466 473 settings[name] = value
467 474
468 475
469 476 def _substitute_values(mapping, substitutions):
470 477 result = {
471 478 # Note: Cannot use regular replacements, since they would clash
472 479 # with the implementation of ConfigParser. Using "format" instead.
473 480 key: value.format(**substitutions)
474 481 for key, value in mapping.items()
475 482 }
476 483 return result
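To make the "{ENV_NAME}" substitution concrete, a small sketch run inside this module (the setting value and the VCSSERVER_HOST variable are made up, and the variable is assumed to be set in the environment):

    import os

    environ = {'ENV_{}'.format(k): v for k, v in os.environ.items()}
    settings = {'vcs.server': '{ENV_VCSSERVER_HOST}:9900'}
    settings = _substitute_values(settings, environ)
    # settings['vcs.server'] is now '<value of $VCSSERVER_HOST>:9900'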
@@ -1,295 +1,226 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import functools
21 21
22 22 import beaker
23 23 import logging
24 24 import threading
25 25
26 26 from beaker.cache import _cache_decorate, cache_regions, region_invalidate
27 27 from sqlalchemy.exc import IntegrityError
28 28
29 29 from rhodecode.lib.utils import safe_str, sha1
30 30 from rhodecode.model.db import Session, CacheKey
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 FILE_TREE = 'cache_file_tree'
35 FILE_TREE_META = 'cache_file_tree_metadata'
36 FILE_SEARCH_TREE_META = 'cache_file_search_metadata'
37 SUMMARY_STATS = 'cache_summary_stats'
38
39 # This list of caches gets purged when invalidation happens
40 USED_REPO_CACHES = (FILE_TREE, FILE_SEARCH_TREE_META)
41 34
42 35 DEFAULT_CACHE_MANAGER_CONFIG = {
43 36 'type': 'memorylru_base',
44 37 'max_items': 10240,
45 38 'key_length': 256,
46 39 'enabled': True
47 40 }
48 41
49 42
50 43 def get_default_cache_settings(settings):
51 44 cache_settings = {}
52 45 for key in settings.keys():
53 46 for prefix in ['beaker.cache.', 'cache.']:
54 47 if key.startswith(prefix):
55 48 name = key.split(prefix)[1].strip()
56 49 cache_settings[name] = settings[key].strip()
57 50 return cache_settings
58 51
59 52
60 53 # set cache regions for beaker so celery can utilise it
61 54 def configure_caches(settings, default_region_settings=None):
62 55 cache_settings = {'regions': None}
63 56 # main cache settings used as default ...
64 57 cache_settings.update(get_default_cache_settings(settings))
65 58 default_region_settings = default_region_settings or \
66 59 {'type': DEFAULT_CACHE_MANAGER_CONFIG['type']}
67 60 if cache_settings['regions']:
68 61 for region in cache_settings['regions'].split(','):
69 62 region = region.strip()
70 63 region_settings = default_region_settings.copy()
71 64 for key, value in cache_settings.items():
72 65 if key.startswith(region):
73 66 region_settings[key.split(region + '.')[-1]] = value
74 67 log.debug('Configuring cache region `%s` with settings %s',
75 68 region, region_settings)
76 69 configure_cache_region(
77 70 region, region_settings, cache_settings)
78 71
79 72
80 73 def configure_cache_region(
81 74 region_name, region_settings, default_cache_kw, default_expire=60):
82 75 default_type = default_cache_kw.get('type', 'memory')
83 76 default_lock_dir = default_cache_kw.get('lock_dir')
84 77 default_data_dir = default_cache_kw.get('data_dir')
85 78
86 79 region_settings['lock_dir'] = region_settings.get('lock_dir', default_lock_dir)
87 80 region_settings['data_dir'] = region_settings.get('data_dir', default_data_dir)
88 81 region_settings['type'] = region_settings.get('type', default_type)
89 82 region_settings['expire'] = int(region_settings.get('expire', default_expire))
90 83
91 84 beaker.cache.cache_regions[region_name] = region_settings
92 85
93 86
94 87 def get_cache_manager(region_name, cache_name, custom_ttl=None):
95 88 """
96 89 Creates a Beaker cache manager. Such an instance can be used like this::
97 90
98 91 _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
99 92 cache_manager = caches.get_cache_manager('some_namespace_name', _namespace)
100 93 _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
101 94 def heavy_compute():
102 95 ...
103 96 result = cache_manager.get(_cache_key, createfunc=heavy_compute)
104 97
105 98 :param region_name: region from ini file
106 99 :param cache_name: custom cache name, usually prefix+repo_name. eg
107 100 file_switcher_repo1
108 101 :param custom_ttl: override .ini file timeout on this cache
109 102 :return: instance of cache manager
110 103 """
111 104
112 105 cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
113 106 if custom_ttl:
114 107 log.debug('Updating region %s with custom ttl: %s',
115 108 region_name, custom_ttl)
116 109 cache_config.update({'expire': custom_ttl})
117 110
118 111 return beaker.cache.Cache._get_cache(cache_name, cache_config)
119 112
120 113
121 114 def clear_cache_manager(cache_manager):
122 115 """
123 116 namespace = 'foobar'
124 117 cache_manager = get_cache_manager('some_namespace_name', namespace)
125 118 clear_cache_manager(cache_manager)
126 119 """
127 120
128 121 log.debug('Clearing all values for cache manager %s', cache_manager)
129 122 cache_manager.clear()
130 123
131 124
132 def clear_repo_caches(repo_name):
133 # invalidate cache manager for this repo
134 for prefix in USED_REPO_CACHES:
135 namespace = get_repo_namespace_key(prefix, repo_name)
136 cache_manager = get_cache_manager('repo_cache_long', namespace)
137 clear_cache_manager(cache_manager)
138
139
140 125 def compute_key_from_params(*args):
141 126 """
142 127 Helper to compute key from given params to be used in cache manager
143 128 """
144 129 return sha1("_".join(map(safe_str, args)))
145 130
146 131
147 132 def get_repo_namespace_key(prefix, repo_name):
148 133 return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))
149 134
150 135
151 def conditional_cache(region, cache_namespace, condition, func):
152 """
153 Conditional caching function use like::
154 def _c(arg):
155 # heavy computation function
156 return data
157
158 # depending on the condition the compute is wrapped in cache or not
159 compute = conditional_cache('short_term', 'cache_namespace_id',
160 condition=True, func=func)
161 return compute(arg)
162
163 :param region: name of cache region
164 :param cache_namespace: cache namespace
165 :param condition: condition for cache to be triggered, and
166 return data cached
167 :param func: wrapped heavy function to compute
168
169 """
170 wrapped = func
171 if condition:
172 log.debug('conditional_cache: True, wrapping call of '
173 'func: %s into %s region cache', region, func)
174
175 def _cache_wrap(region_name, cache_namespace):
176 """Return a caching wrapper"""
177
178 def decorate(func):
179 @functools.wraps(func)
180 def cached(*args, **kwargs):
181 if kwargs:
182 raise AttributeError(
183 'Usage of kwargs is not allowed. '
184 'Use only positional arguments in wrapped function')
185 manager = get_cache_manager(region_name, cache_namespace)
186 cache_key = compute_key_from_params(*args)
187
188 def go():
189 return func(*args, **kwargs)
190
191 # save org function name
192 go.__name__ = '_cached_%s' % (func.__name__,)
193
194 return manager.get(cache_key, createfunc=go)
195 return cached
196
197 return decorate
198
199 cached_region = _cache_wrap(region, cache_namespace)
200 wrapped = cached_region(func)
201
202 return wrapped
203
204
205 136 class ActiveRegionCache(object):
206 137 def __init__(self, context):
207 138 self.context = context
208 139
209 140 def invalidate(self, *args, **kwargs):
210 141 return False
211 142
212 143 def compute(self):
213 144 log.debug('Context cache: getting obj %s from cache', self.context)
214 145 return self.context.compute_func(self.context.cache_key)
215 146
216 147
217 148 class FreshRegionCache(ActiveRegionCache):
218 149 def invalidate(self):
219 150 log.debug('Context cache: invalidating cache for %s', self.context)
220 151 region_invalidate(
221 152 self.context.compute_func, None, self.context.cache_key)
222 153 return True
223 154
224 155
225 156 class InvalidationContext(object):
226 157 def __repr__(self):
227 158 return '<InvalidationContext:{}[{}]>'.format(
228 159 safe_str(self.repo_name), safe_str(self.cache_type))
229 160
230 161 def __init__(self, compute_func, repo_name, cache_type,
231 162 raise_exception=False, thread_scoped=False):
232 163 self.compute_func = compute_func
233 164 self.repo_name = repo_name
234 165 self.cache_type = cache_type
235 166 self.cache_key = compute_key_from_params(
236 167 repo_name, cache_type)
237 168 self.raise_exception = raise_exception
238 169
239 170 # Append the thread id to the cache key if this invalidation context
240 171 # should be scoped to the current thread.
241 172 if thread_scoped:
242 173 thread_id = threading.current_thread().ident
243 174 self.cache_key = '{cache_key}_{thread_id}'.format(
244 175 cache_key=self.cache_key, thread_id=thread_id)
245 176
246 177 def get_cache_obj(self):
247 178 cache_key = CacheKey.get_cache_key(
248 179 self.repo_name, self.cache_type)
249 180 cache_obj = CacheKey.get_active_cache(cache_key)
250 181 if not cache_obj:
251 182 cache_obj = CacheKey(cache_key, self.repo_name)
252 183 return cache_obj
253 184
254 185 def __enter__(self):
255 186 """
256 187 Test if the current object is valid, and return a CacheRegion wrapper
257 188 that does the invalidation and calculation
258 189 """
259 190
260 191 self.cache_obj = self.get_cache_obj()
261 192 if self.cache_obj.cache_active:
262 193 # means our cache obj exists and is marked as active, i.e. its
263 194 # cache is not outdated; we return the ActiveRegionCache invalidator
264 195 self.skip_cache_active_change = True
265 196 return ActiveRegionCache(self)
266 197
267 198 # the key either does not exist or is set to False; we return
268 199 # the real invalidator which re-computes the value. We additionally set
269 200 # the flag to actually update the database objects
270 201 self.skip_cache_active_change = False
271 202 return FreshRegionCache(self)
272 203
273 204 def __exit__(self, exc_type, exc_val, exc_tb):
274 205
275 206 if self.skip_cache_active_change:
276 207 return
277 208
278 209 try:
279 210 self.cache_obj.cache_active = True
280 211 Session().add(self.cache_obj)
281 212 Session().commit()
282 213 except IntegrityError:
283 214 # if we catch an integrity error, it means this object was inserted
284 215 # concurrently; the assumption is that's really an edge race-condition
285 216 # case and it's safe to skip it
286 217 Session().rollback()
287 218 except Exception:
288 219 log.exception('Failed to commit on cache key update')
289 220 Session().rollback()
290 221 if self.raise_exception:
291 222 raise
292 223
293 224
294 225 def includeme(config):
295 226 configure_caches(config.registry.settings)
@@ -1,66 +1,68 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from dogpile.cache import register_backend
22 22 from dogpile.cache import make_region
23 23
24 24 register_backend(
25 25 "dogpile.cache.rc.memory_lru", "rhodecode.lib.rc_cache.backends",
26 26 "LRUMemoryBackend")
27 27
28 28 register_backend(
29 29 "dogpile.cache.rc.file_namespace", "rhodecode.lib.rc_cache.backends",
30 30 "FileNamespaceBackend")
31 31
32 32 register_backend(
33 33 "dogpile.cache.rc.redis", "rhodecode.lib.rc_cache.backends",
34 34 "RedisPickleBackend")
35 35
36 36
37 37 from . import region_meta
38 from .utils import get_default_cache_settings, key_generator, get_or_create_region
38 from .utils import (
39 get_default_cache_settings, key_generator, get_or_create_region,
40 clear_cache_namespace)
39 41
40 42
41 43 def configure_dogpile_cache(settings):
42 44 cache_dir = settings.get('cache_dir')
43 45 if cache_dir:
44 46 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
45 47
46 48 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
47 49
48 50 # inspect available namespaces
49 51 avail_regions = set()
50 52 for key in rc_cache_data.keys():
51 53 namespace_name = key.split('.', 1)[0]
52 54 avail_regions.add(namespace_name)
53 55
54 56 # register them into namespace
55 57 for region_name in avail_regions:
56 58 new_region = make_region(
57 59 name=region_name,
58 60 function_key_generator=key_generator
59 61 )
60 62
61 63 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
62 64 region_meta.dogpile_cache_regions[region_name] = new_region
63 65
64 66
65 67 def includeme(config):
66 68 configure_dogpile_cache(config.registry.settings)
@@ -1,109 +1,120 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from dogpile.cache.backends import memory as memory_backend
22 22 from dogpile.cache.backends import file as file_backend
23 23 from dogpile.cache.backends import redis as redis_backend
24 24 from dogpile.cache.backends.file import NO_VALUE, compat
25 25
26 26 from rhodecode.lib.memory_lru_debug import LRUDict
27 27
28 28 _default_max_size = 1024
29 29
30 30
31 31 class LRUMemoryBackend(memory_backend.MemoryBackend):
32 32
33 33 def __init__(self, arguments):
34 34 max_size = arguments.pop('max_size', _default_max_size)
35 35 arguments['cache_dict'] = LRUDict(max_size)
36 36 super(LRUMemoryBackend, self).__init__(arguments)
37 37
38 38
39 39 class Serializer(object):
40 40 def _dumps(self, value):
41 41 return compat.pickle.dumps(value)
42 42
43 43 def _loads(self, value):
44 44 return compat.pickle.loads(value)
45 45
46 46
47 47 class FileNamespaceBackend(Serializer, file_backend.DBMBackend):
48 48
49 49 def __init__(self, arguments):
50 50 super(FileNamespaceBackend, self).__init__(arguments)
51 51
52 def list_keys(self):
52 def list_keys(self, prefix=''):
53 def cond(v):
54 if not prefix:
55 return True
56
57 if v.startswith(prefix):
58 return True
59 return False
60
53 61 with self._dbm_file(True) as dbm:
54 return dbm.keys()
62
63 return filter(cond, dbm.keys())
55 64
56 65 def get_store(self):
57 66 return self.filename
58 67
59 68 def get(self, key):
60 69 with self._dbm_file(False) as dbm:
61 70 if hasattr(dbm, 'get'):
62 71 value = dbm.get(key, NO_VALUE)
63 72 else:
64 73 # gdbm objects lack a .get method
65 74 try:
66 75 value = dbm[key]
67 76 except KeyError:
68 77 value = NO_VALUE
69 78 if value is not NO_VALUE:
70 79 value = self._loads(value)
71 80 return value
72 81
73 82 def set(self, key, value):
74 83 with self._dbm_file(True) as dbm:
75 84 dbm[key] = self._dumps(value)
76 85
77 86 def set_multi(self, mapping):
78 87 with self._dbm_file(True) as dbm:
79 88 for key, value in mapping.items():
80 89 dbm[key] = self._dumps(value)
81 90
82 91
83 92 class RedisPickleBackend(Serializer, redis_backend.RedisBackend):
84 def list_keys(self):
85 return self.client.keys()
93 def list_keys(self, prefix=''):
94 if prefix:
95 prefix = prefix + '*'
96 return self.client.keys(prefix)
86 97
87 98 def get_store(self):
88 99 return self.client.connection_pool
89 100
90 101 def set(self, key, value):
91 102 if self.redis_expiration_time:
92 103 self.client.setex(key, self.redis_expiration_time,
93 104 self._dumps(value))
94 105 else:
95 106 self.client.set(key, self._dumps(value))
96 107
97 108 def set_multi(self, mapping):
98 109 mapping = dict(
99 110 (k, self._dumps(v))
100 111 for k, v in mapping.items()
101 112 )
102 113
103 114 if not self.redis_expiration_time:
104 115 self.client.mset(mapping)
105 116 else:
106 117 pipe = self.client.pipeline()
107 118 for key, value in mapping.items():
108 119 pipe.setex(key, self.redis_expiration_time, value)
109 120 pipe.execute()
@@ -1,99 +1,107 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import os
21 21 import logging
22 22 from dogpile.cache import make_region
23 23
24 24 from rhodecode.lib.utils import safe_str, sha1
25 25 from . import region_meta
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
30 30 def get_default_cache_settings(settings, prefixes=None):
31 31 prefixes = prefixes or []
32 32 cache_settings = {}
33 33 for key in settings.keys():
34 34 for prefix in prefixes:
35 35 if key.startswith(prefix):
36 36 name = key.split(prefix)[1].strip()
37 37 val = settings[key]
38 38 if isinstance(val, basestring):
39 39 val = val.strip()
40 40 cache_settings[name] = val
41 41 return cache_settings
42 42
43 43
44 44 def compute_key_from_params(*args):
45 45 """
46 46 Helper to compute key from given params to be used in cache manager
47 47 """
48 48 return sha1("_".join(map(safe_str, args)))
49 49
50 50
51 51 def key_generator(namespace, fn):
52 52 fname = fn.__name__
53 53
54 54 def generate_key(*args):
55 55 namespace_pref = namespace or 'default'
56 56 arg_key = compute_key_from_params(*args)
57 57 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
58 58
59 59 return final_key
60 60
61 61 return generate_key
62 62
63 63
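A short sketch (values made up) of the keys this generator produces; compute_stats stands for a wrapped function such as the one decorated in the summary view of this changeset:

    gen = key_generator('cache_repo.42', compute_stats)
    key = gen(42, 'deadbeef', True)
    # key == 'cache_repo.42:compute_stats_' + sha1('42_deadbeef_True')
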
64 64 def get_or_create_region(region_name, region_namespace=None):
65 65 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
66 66 region_obj = region_meta.dogpile_cache_regions.get(region_name)
67 67 if not region_obj:
68 68 raise EnvironmentError(
69 69 'Region `{}` is not configured; existing regions: {}.'.format(
70 70 region_name, region_meta.dogpile_cache_regions.keys()))
71 71
72 72 region_uid_name = '{}:{}'.format(region_name, region_namespace)
73 73 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
74 74 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
75 75 if region_exist:
76 76 log.debug('Using already configured region: %s', region_namespace)
77 77 return region_exist
78 78 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
79 79 expiration_time = region_obj.expiration_time
80 80
81 81 if not os.path.isdir(cache_dir):
82 82 os.makedirs(cache_dir)
83 83 new_region = make_region(
84 84 name=region_uid_name, function_key_generator=key_generator
85 85 )
86 86 namespace_filename = os.path.join(
87 87 cache_dir, "{}.cache.dbm".format(region_namespace))
88 88 # special backend type that allows one db file per namespace
89 89 new_region.configure(
90 90 backend='dogpile.cache.rc.file_namespace',
91 91 expiration_time=expiration_time,
92 92 arguments={"filename": namespace_filename}
93 93 )
94 94
95 95 # create and save in region caches
96 96 log.debug('configuring new region: %s', region_uid_name)
97 97 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
98 98
99 99 return region_obj
100
101
102 def clear_cache_namespace(cache_region, cache_namespace_uid):
103 region = get_or_create_region(cache_region, cache_namespace_uid)
104 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
105 for k in cache_keys:
106 region.delete(k)
107 return len(cache_keys)
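Together with get_or_create_region() above, this helper forms the purge path used by ScmModel.mark_for_invalidation later in this changeset. A minimal usage sketch, assuming a known repo_id:

    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    removed = clear_cache_namespace('cache_repo', cache_namespace_uid)
    log.debug('removed %s cache keys for namespace %s', removed, cache_namespace_uid)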
@@ -1,812 +1,815 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import traceback
29 29 import logging
30 30 import cStringIO
31 31 import pkg_resources
32 32
33 33 from sqlalchemy import func
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 import rhodecode
37 37 from rhodecode.lib.vcs import get_backend
38 38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 39 from rhodecode.lib.vcs.nodes import FileNode
40 40 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 from rhodecode.lib import helpers as h
41 from rhodecode.lib import helpers as h, rc_cache
42 42 from rhodecode.lib.auth import (
43 43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
44 44 HasUserGroupPermissionAny)
45 45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
46 46 from rhodecode.lib import hooks_utils, caches
47 47 from rhodecode.lib.utils import (
48 48 get_filesystem_repos, make_db_config)
49 49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
50 50 from rhodecode.lib.system_info import get_system_info
51 51 from rhodecode.model import BaseModel
52 52 from rhodecode.model.db import (
53 53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
54 54 PullRequest)
55 55 from rhodecode.model.settings import VcsSettingsModel
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class UserTemp(object):
61 61 def __init__(self, user_id):
62 62 self.user_id = user_id
63 63
64 64 def __repr__(self):
65 65 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
66 66
67 67
68 68 class RepoTemp(object):
69 69 def __init__(self, repo_id):
70 70 self.repo_id = repo_id
71 71
72 72 def __repr__(self):
73 73 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
74 74
75 75
76 76 class SimpleCachedRepoList(object):
77 77 """
78 78 Lighter version of repository iteration, without the scm initialisation
79 79 and with cache usage
80 80 """
81 81 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
82 82 self.db_repo_list = db_repo_list
83 83 self.repos_path = repos_path
84 84 self.order_by = order_by
85 85 self.reversed = (order_by or '').startswith('-')
86 86 if not perm_set:
87 87 perm_set = ['repository.read', 'repository.write',
88 88 'repository.admin']
89 89 self.perm_set = perm_set
90 90
91 91 def __len__(self):
92 92 return len(self.db_repo_list)
93 93
94 94 def __repr__(self):
95 95 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
96 96
97 97 def __iter__(self):
98 98 for dbr in self.db_repo_list:
99 99 # check permission at this level
100 100 has_perm = HasRepoPermissionAny(*self.perm_set)(
101 101 dbr.repo_name, 'SimpleCachedRepoList check')
102 102 if not has_perm:
103 103 continue
104 104
105 105 tmp_d = {
106 106 'name': dbr.repo_name,
107 107 'dbrepo': dbr.get_dict(),
108 108 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
109 109 }
110 110 yield tmp_d
111 111
112 112
113 113 class _PermCheckIterator(object):
114 114
115 115 def __init__(
116 116 self, obj_list, obj_attr, perm_set, perm_checker,
117 117 extra_kwargs=None):
118 118 """
119 119 Creates an iterator from the given list of objects, additionally
120 120 checking permissions for them against the perm_set var
121 121
122 122 :param obj_list: list of db objects
123 123 :param obj_attr: attribute of object to pass into perm_checker
124 124 :param perm_set: list of permissions to check
125 125 :param perm_checker: callable to check permissions against
126 126 """
127 127 self.obj_list = obj_list
128 128 self.obj_attr = obj_attr
129 129 self.perm_set = perm_set
130 130 self.perm_checker = perm_checker
131 131 self.extra_kwargs = extra_kwargs or {}
132 132
133 133 def __len__(self):
134 134 return len(self.obj_list)
135 135
136 136 def __repr__(self):
137 137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
138 138
139 139 def __iter__(self):
140 140 checker = self.perm_checker(*self.perm_set)
141 141 for db_obj in self.obj_list:
142 142 # check permission at this level
143 143 name = getattr(db_obj, self.obj_attr, None)
144 144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
145 145 continue
146 146
147 147 yield db_obj
148 148
149 149
150 150 class RepoList(_PermCheckIterator):
151 151
152 152 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
153 153 if not perm_set:
154 154 perm_set = [
155 155 'repository.read', 'repository.write', 'repository.admin']
156 156
157 157 super(RepoList, self).__init__(
158 158 obj_list=db_repo_list,
159 159 obj_attr='repo_name', perm_set=perm_set,
160 160 perm_checker=HasRepoPermissionAny,
161 161 extra_kwargs=extra_kwargs)
162 162
163 163
164 164 class RepoGroupList(_PermCheckIterator):
165 165
166 166 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
167 167 if not perm_set:
168 168 perm_set = ['group.read', 'group.write', 'group.admin']
169 169
170 170 super(RepoGroupList, self).__init__(
171 171 obj_list=db_repo_group_list,
172 172 obj_attr='group_name', perm_set=perm_set,
173 173 perm_checker=HasRepoGroupPermissionAny,
174 174 extra_kwargs=extra_kwargs)
175 175
176 176
177 177 class UserGroupList(_PermCheckIterator):
178 178
179 179 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
180 180 if not perm_set:
181 181 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
182 182
183 183 super(UserGroupList, self).__init__(
184 184 obj_list=db_user_group_list,
185 185 obj_attr='users_group_name', perm_set=perm_set,
186 186 perm_checker=HasUserGroupPermissionAny,
187 187 extra_kwargs=extra_kwargs)
188 188
189 189
190 190 class ScmModel(BaseModel):
191 191 """
192 192 Generic Scm Model
193 193 """
194 194
195 195 @LazyProperty
196 196 def repos_path(self):
197 197 """
198 198 Gets the repositories root path from database
199 199 """
200 200
201 201 settings_model = VcsSettingsModel(sa=self.sa)
202 202 return settings_model.get_repos_location()
203 203
204 204 def repo_scan(self, repos_path=None):
205 205 """
206 206 Listing of repositories in given path. This path should not be a
207 207 repository itself. Return a dictionary of repository objects
208 208
209 209 :param repos_path: path to directory containing repositories
210 210 """
211 211
212 212 if repos_path is None:
213 213 repos_path = self.repos_path
214 214
215 215 log.info('scanning for repositories in %s', repos_path)
216 216
217 217 config = make_db_config()
218 218 config.set('extensions', 'largefiles', '')
219 219 repos = {}
220 220
221 221 for name, path in get_filesystem_repos(repos_path, recursive=True):
222 222 # name needs to be decomposed and put back together using the '/'
223 223 # since this is the internal storage separator for rhodecode
224 224 name = Repository.normalize_repo_name(name)
225 225
226 226 try:
227 227 if name in repos:
228 228 raise RepositoryError('Duplicate repository name %s '
229 229 'found in %s' % (name, path))
230 230 elif path[0] in rhodecode.BACKENDS:
231 231 klass = get_backend(path[0])
232 232 repos[name] = klass(path[1], config=config)
233 233 except OSError:
234 234 continue
235 235 log.debug('found %s paths with repositories', len(repos))
236 236 return repos
237 237
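A short sketch of how this scan is typically driven; the path below is made up for the example, and the return value maps normalized repository names to vcs backend instances, as described in the docstring above.

# hypothetical call; returns {normalized_repo_name: vcs_backend_instance}
found = ScmModel().repo_scan('/srv/repositories')
for repo_name, scm_repo in found.items():
    log.debug('%s -> %s', repo_name, scm_repo.alias)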
238 238 def get_repos(self, all_repos=None, sort_key=None):
239 239 """
240 240 Get all repositories from the db and for each repo create its
241 241 backend instance and fill that backend with information from the database
242 242
243 243 :param all_repos: list of repository names as strings;
244 244 pass a specific list of repositories, useful for filtering
245 245
246 246 :param sort_key: initial sorting of repositories
247 247 """
248 248 if all_repos is None:
249 249 all_repos = self.sa.query(Repository)\
250 250 .filter(Repository.group_id == None)\
251 251 .order_by(func.lower(Repository.repo_name)).all()
252 252 repo_iter = SimpleCachedRepoList(
253 253 all_repos, repos_path=self.repos_path, order_by=sort_key)
254 254 return repo_iter
255 255
256 256 def get_repo_groups(self, all_groups=None):
257 257 if all_groups is None:
258 258 all_groups = RepoGroup.query()\
259 259 .filter(RepoGroup.group_parent_id == None).all()
260 260 return [x for x in RepoGroupList(all_groups)]
261 261
262 262 def mark_for_invalidation(self, repo_name, delete=False):
263 263 """
264 264 Mark caches of this repo invalid in the database. `delete` flag
265 265 removes the cache entries
266 266
267 267 :param repo_name: the repo_name for which caches should be marked
268 268 invalid, or deleted
269 269 :param delete: delete the entry keys instead of setting bool
270 flag on them
270 flag on them, and also purge the caches used by the dogpile cache backend
271 271 """
272 272 CacheKey.set_invalidate(repo_name, delete=delete)
273 273 repo = Repository.get_by_repo_name(repo_name)
274 274
275 275 if repo:
276 repo_id = repo.repo_id
276 277 config = repo._config
277 278 config.set('extensions', 'largefiles', '')
278 279 repo.update_commit_cache(config=config, cs_cache=None)
279 caches.clear_repo_caches(repo_name)
280 if delete:
281 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
282 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
280 283
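The dogpile-based purge added above boils down to deriving a per-repository namespace uid from the repo_id and clearing it; a condensed sketch of the same path driven from the outside (the repository name is illustrative only):

# mirrors the invalidation calls above, outside of ScmModel
repo = Repository.get_by_repo_name('some-group/some-repo')
if repo:
    CacheKey.set_invalidate(repo.repo_name, delete=True)
    cache_namespace_uid = 'cache_repo.{}'.format(repo.repo_id)
    rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)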
281 284 def toggle_following_repo(self, follow_repo_id, user_id):
282 285
283 286 f = self.sa.query(UserFollowing)\
284 287 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
285 288 .filter(UserFollowing.user_id == user_id).scalar()
286 289
287 290 if f is not None:
288 291 try:
289 292 self.sa.delete(f)
290 293 return
291 294 except Exception:
292 295 log.error(traceback.format_exc())
293 296 raise
294 297
295 298 try:
296 299 f = UserFollowing()
297 300 f.user_id = user_id
298 301 f.follows_repo_id = follow_repo_id
299 302 self.sa.add(f)
300 303 except Exception:
301 304 log.error(traceback.format_exc())
302 305 raise
303 306
304 307 def toggle_following_user(self, follow_user_id, user_id):
305 308 f = self.sa.query(UserFollowing)\
306 309 .filter(UserFollowing.follows_user_id == follow_user_id)\
307 310 .filter(UserFollowing.user_id == user_id).scalar()
308 311
309 312 if f is not None:
310 313 try:
311 314 self.sa.delete(f)
312 315 return
313 316 except Exception:
314 317 log.error(traceback.format_exc())
315 318 raise
316 319
317 320 try:
318 321 f = UserFollowing()
319 322 f.user_id = user_id
320 323 f.follows_user_id = follow_user_id
321 324 self.sa.add(f)
322 325 except Exception:
323 326 log.error(traceback.format_exc())
324 327 raise
325 328
326 329 def is_following_repo(self, repo_name, user_id, cache=False):
327 330 r = self.sa.query(Repository)\
328 331 .filter(Repository.repo_name == repo_name).scalar()
329 332
330 333 f = self.sa.query(UserFollowing)\
331 334 .filter(UserFollowing.follows_repository == r)\
332 335 .filter(UserFollowing.user_id == user_id).scalar()
333 336
334 337 return f is not None
335 338
336 339 def is_following_user(self, username, user_id, cache=False):
337 340 u = User.get_by_username(username)
338 341
339 342 f = self.sa.query(UserFollowing)\
340 343 .filter(UserFollowing.follows_user == u)\
341 344 .filter(UserFollowing.user_id == user_id).scalar()
342 345
343 346 return f is not None
344 347
345 348 def get_followers(self, repo):
346 349 repo = self._get_repo(repo)
347 350
348 351 return self.sa.query(UserFollowing)\
349 352 .filter(UserFollowing.follows_repository == repo).count()
350 353
351 354 def get_forks(self, repo):
352 355 repo = self._get_repo(repo)
353 356 return self.sa.query(Repository)\
354 357 .filter(Repository.fork == repo).count()
355 358
356 359 def get_pull_requests(self, repo):
357 360 repo = self._get_repo(repo)
358 361 return self.sa.query(PullRequest)\
359 362 .filter(PullRequest.target_repo == repo)\
360 363 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
361 364
362 365 def mark_as_fork(self, repo, fork, user):
363 366 repo = self._get_repo(repo)
364 367 fork = self._get_repo(fork)
365 368 if fork and repo.repo_id == fork.repo_id:
366 369 raise Exception("Cannot set repository as fork of itself")
367 370
368 371 if fork and repo.repo_type != fork.repo_type:
369 372 raise RepositoryError(
370 373 "Cannot set repository as fork of repository with other type")
371 374
372 375 repo.fork = fork
373 376 self.sa.add(repo)
374 377 return repo
375 378
376 379 def pull_changes(self, repo, username, remote_uri=None):
377 380 dbrepo = self._get_repo(repo)
378 381 remote_uri = remote_uri or dbrepo.clone_uri
379 382 if not remote_uri:
380 383 raise Exception("This repository doesn't have a clone uri")
381 384
382 385 repo = dbrepo.scm_instance(cache=False)
383 386 # TODO: marcink fix this and re-enable since we need common logic
384 387 # for hg/git to remove hooks so we don't trigger them on fetching
385 388 # commits from the remote
386 389 repo.config.clear_section('hooks')
387 390
388 391 repo_name = dbrepo.repo_name
389 392 try:
390 393 # TODO: we need to make sure those operations call proper hooks !
391 394 repo.pull(remote_uri)
392 395
393 396 self.mark_for_invalidation(repo_name)
394 397 except Exception:
395 398 log.error(traceback.format_exc())
396 399 raise
397 400
398 401 def push_changes(self, repo, username, remote_uri=None):
399 402 dbrepo = self._get_repo(repo)
400 403 remote_uri = remote_uri or dbrepo.push_uri
401 404 if not remote_uri:
402 405 raise Exception("This repository doesn't have a push uri")
403 406
404 407 repo = dbrepo.scm_instance(cache=False)
405 408 repo.config.clear_section('hooks')
406 409
407 410 try:
408 411 repo.push(remote_uri)
409 412 except Exception:
410 413 log.error(traceback.format_exc())
411 414 raise
412 415
413 416 def commit_change(self, repo, repo_name, commit, user, author, message,
414 417 content, f_path):
415 418 """
416 419 Commits changes
417 420
418 421 :param repo: SCM instance
419 422
420 423 """
421 424 user = self._get_user(user)
422 425
423 426 # decoding here will ensure that we have properly encoded values;
424 427 # in any other case this will throw exceptions and deny the commit
425 428 content = safe_str(content)
426 429 path = safe_str(f_path)
427 430 # message and author need to be unicode;
428 431 # the proper backend should then translate that into the required type
429 432 message = safe_unicode(message)
430 433 author = safe_unicode(author)
431 434 imc = repo.in_memory_commit
432 435 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
433 436 try:
434 437 # TODO: handle pre-push action !
435 438 tip = imc.commit(
436 439 message=message, author=author, parents=[commit],
437 440 branch=commit.branch)
438 441 except Exception as e:
439 442 log.error(traceback.format_exc())
440 443 raise IMCCommitError(str(e))
441 444 finally:
442 445 # always clear caches; even if the commit fails we want a fresh object
443 446 self.mark_for_invalidation(repo_name)
444 447
445 448 # We trigger the post-push action
446 449 hooks_utils.trigger_post_push_hook(
447 450 username=user.username, action='push_local', repo_name=repo_name,
448 451 repo_alias=repo.alias, commit_ids=[tip.raw_id])
449 452 return tip
450 453
451 454 def _sanitize_path(self, f_path):
452 455 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
453 456 raise NonRelativePathError('%s is not a relative path' % f_path)
454 457 if f_path:
455 458 f_path = os.path.normpath(f_path)
456 459 return f_path
457 460
458 461 def get_dirnode_metadata(self, request, commit, dir_node):
459 462 if not dir_node.is_dir():
460 463 return []
461 464
462 465 data = []
463 466 for node in dir_node:
464 467 if not node.is_file():
465 468 # we skip everything that is not a file node
466 469 continue
467 470
468 471 last_commit = node.last_commit
469 472 last_commit_date = last_commit.date
470 473 data.append({
471 474 'name': node.name,
472 475 'size': h.format_byte_size_binary(node.size),
473 476 'modified_at': h.format_date(last_commit_date),
474 477 'modified_ts': last_commit_date.isoformat(),
475 478 'revision': last_commit.revision,
476 479 'short_id': last_commit.short_id,
477 480 'message': h.escape(last_commit.message),
478 481 'author': h.escape(last_commit.author),
479 482 'user_profile': h.gravatar_with_user(
480 483 request, last_commit.author),
481 484 })
482 485
483 486 return data
484 487
485 488 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
486 489 extended_info=False, content=False, max_file_bytes=None):
487 490 """
488 491 recursively walk the root dir and return a set of all paths in that dir,
489 492 based on the repository walk function
490 493
491 494 :param repo_name: name of repository
492 495 :param commit_id: commit id for which to list nodes
493 496 :param root_path: root path to list
494 497 :param flat: return entries as plain path strings; if False, returns dicts with details
495 498 :param max_file_bytes: will not return file contents over this limit
496 499
497 500 """
498 501 _files = list()
499 502 _dirs = list()
500 503 try:
501 504 _repo = self._get_repo(repo_name)
502 505 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
503 506 root_path = root_path.lstrip('/')
504 507 for __, dirs, files in commit.walk(root_path):
505 508 for f in files:
506 509 _content = None
507 510 _data = f.unicode_path
508 511 over_size_limit = (max_file_bytes is not None
509 512 and f.size > max_file_bytes)
510 513
511 514 if not flat:
512 515 _data = {
513 516 "name": h.escape(f.unicode_path),
514 517 "type": "file",
515 518 }
516 519 if extended_info:
517 520 _data.update({
518 521 "md5": f.md5,
519 522 "binary": f.is_binary,
520 523 "size": f.size,
521 524 "extension": f.extension,
522 525 "mimetype": f.mimetype,
523 526 "lines": f.lines()[0]
524 527 })
525 528
526 529 if content:
527 530 full_content = None
528 531 if not f.is_binary and not over_size_limit:
529 532 full_content = safe_str(f.content)
530 533
531 534 _data.update({
532 535 "content": full_content,
533 536 })
534 537 _files.append(_data)
535 538 for d in dirs:
536 539 _data = d.unicode_path
537 540 if not flat:
538 541 _data = {
539 542 "name": h.escape(d.unicode_path),
540 543 "type": "dir",
541 544 }
542 545 if extended_info:
543 546 _data.update({
544 547 "md5": None,
545 548 "binary": None,
546 549 "size": None,
547 550 "extension": None,
548 551 })
549 552 if content:
550 553 _data.update({
551 554 "content": None
552 555 })
553 556 _dirs.append(_data)
554 557 except RepositoryError:
555 558 log.debug("Exception in get_nodes", exc_info=True)
556 559 raise
557 560
558 561 return _dirs, _files
559 562
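A usage sketch for the flat and non-flat modes; the repository name and commit reference are placeholders, and the result shapes in the comments follow the branches of the code above.

# hypothetical example: flat mode yields plain paths, non-flat yields dicts
scm = ScmModel()
dirs, files = scm.get_nodes('some-repo', 'tip', root_path='/', flat=True)
# files -> [u'README.rst', u'docs/index.rst', ...]
dirs, files = scm.get_nodes(
    'some-repo', 'tip', flat=False, extended_info=True, max_file_bytes=1024)
# files -> [{'name': ..., 'type': 'file', 'md5': ..., 'size': ..., ...}, ...]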
560 563 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
561 564 author=None, trigger_push_hook=True):
562 565 """
563 566 Commits given multiple nodes into repo
564 567
565 568 :param user: RhodeCode User object or user_id, the committer
566 569 :param repo: RhodeCode Repository object
567 570 :param message: commit message
568 571 :param nodes: mapping {filename:{'content':content},...}
569 572 :param parent_commit: parent commit; if empty, this is the
570 573 initial commit
571 574 :param author: author of the commit, can be different than the committer,
572 575 but only for git
573 576 :param trigger_push_hook: trigger push hooks
574 577
575 578 :returns: new committed commit
576 579 """
577 580
578 581 user = self._get_user(user)
579 582 scm_instance = repo.scm_instance(cache=False)
580 583
581 584 processed_nodes = []
582 585 for f_path in nodes:
583 586 f_path = self._sanitize_path(f_path)
584 587 content = nodes[f_path]['content']
585 588 f_path = safe_str(f_path)
586 589 # decoding here will ensure that we have properly encoded values;
587 590 # in any other case this will throw exceptions and deny the commit
588 591 if isinstance(content, (basestring,)):
589 592 content = safe_str(content)
590 593 elif isinstance(content, (file, cStringIO.OutputType,)):
591 594 content = content.read()
592 595 else:
593 596 raise Exception('Content is of unrecognized type %s' % (
594 597 type(content)
595 598 ))
596 599 processed_nodes.append((f_path, content))
597 600
598 601 message = safe_unicode(message)
599 602 commiter = user.full_contact
600 603 author = safe_unicode(author) if author else commiter
601 604
602 605 imc = scm_instance.in_memory_commit
603 606
604 607 if not parent_commit:
605 608 parent_commit = EmptyCommit(alias=scm_instance.alias)
606 609
607 610 if isinstance(parent_commit, EmptyCommit):
608 611 # EmptyCommit means we're editing an empty repository
609 612 parents = None
610 613 else:
611 614 parents = [parent_commit]
612 615 # add multiple nodes
613 616 for path, content in processed_nodes:
614 617 imc.add(FileNode(path, content=content))
615 618 # TODO: handle pre push scenario
616 619 tip = imc.commit(message=message,
617 620 author=author,
618 621 parents=parents,
619 622 branch=parent_commit.branch)
620 623
621 624 self.mark_for_invalidation(repo.repo_name)
622 625 if trigger_push_hook:
623 626 hooks_utils.trigger_post_push_hook(
624 627 username=user.username, action='push_local',
625 628 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
626 629 commit_ids=[tip.raw_id])
627 630 return tip
628 631
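The nodes mapping expected here is keyed by file path, with a 'content' entry per file; a hedged sketch of a call (user, repo object and file paths are placeholders for the example):

# hypothetical call adding two files in one commit
nodes = {
    'docs/intro.rst': {'content': 'Intro\n=====\n'},
    'setup.cfg': {'content': '[metadata]\nname = example\n'},
}
tip = ScmModel().create_nodes(
    user=cur_user, repo=db_repo, message=u'Add docs and config',
    nodes=nodes, trigger_push_hook=False)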
629 632 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
630 633 author=None, trigger_push_hook=True):
631 634 user = self._get_user(user)
632 635 scm_instance = repo.scm_instance(cache=False)
633 636
634 637 message = safe_unicode(message)
635 638 commiter = user.full_contact
636 639 author = safe_unicode(author) if author else commiter
637 640
638 641 imc = scm_instance.in_memory_commit
639 642
640 643 if not parent_commit:
641 644 parent_commit = EmptyCommit(alias=scm_instance.alias)
642 645
643 646 if isinstance(parent_commit, EmptyCommit):
644 647 # EmptyCommit means we're editing an empty repository
645 648 parents = None
646 649 else:
647 650 parents = [parent_commit]
648 651
649 652 # add multiple nodes
650 653 for _filename, data in nodes.items():
651 654 # new filename, can be renamed from the old one; also sanitize
652 655 # the path against any tricks with relative paths like ../../ etc.
653 656 filename = self._sanitize_path(data['filename'])
654 657 old_filename = self._sanitize_path(_filename)
655 658 content = data['content']
656 659
657 660 filenode = FileNode(old_filename, content=content)
658 661 op = data['op']
659 662 if op == 'add':
660 663 imc.add(filenode)
661 664 elif op == 'del':
662 665 imc.remove(filenode)
663 666 elif op == 'mod':
664 667 if filename != old_filename:
665 668 # TODO: handle renames more efficiently, needs vcs lib
666 669 # changes
667 670 imc.remove(filenode)
668 671 imc.add(FileNode(filename, content=content))
669 672 else:
670 673 imc.change(filenode)
671 674
672 675 try:
673 676 # TODO: handle pre push scenario
674 677 # commit changes
675 678 tip = imc.commit(message=message,
676 679 author=author,
677 680 parents=parents,
678 681 branch=parent_commit.branch)
679 682 except NodeNotChangedError:
680 683 raise
681 684 except Exception as e:
682 685 log.exception("Unexpected exception during call to imc.commit")
683 686 raise IMCCommitError(str(e))
684 687 finally:
685 688 # always clear caches; even if the commit fails we want a fresh object
686 689 self.mark_for_invalidation(repo.repo_name)
687 690
688 691 if trigger_push_hook:
689 692 hooks_utils.trigger_post_push_hook(
690 693 username=user.username, action='push_local',
691 694 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
692 695 commit_ids=[tip.raw_id])
693 696
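update_nodes reads each entry's 'filename', 'content' and 'op' ('add', 'del' or 'mod', where 'mod' with a new filename is handled as remove plus add); a sketch of the expected mapping, with placeholder paths and objects:

# hypothetical mapping: keys are the old filenames
nodes = {
    'docs/old_name.rst': {
        'filename': 'docs/new_name.rst',   # rename handled as remove + add
        'content': 'updated text\n',
        'op': 'mod',
    },
    'obsolete.txt': {'filename': 'obsolete.txt', 'content': '', 'op': 'del'},
}
ScmModel().update_nodes(
    user=cur_user, repo=db_repo, message=u'Rename and clean up', nodes=nodes)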
694 697 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
695 698 author=None, trigger_push_hook=True):
696 699 """
697 700 Deletes the given nodes from `repo`
698 701
699 702 :param user: RhodeCode User object or user_id, the committer
700 703 :param repo: RhodeCode Repository object
701 704 :param message: commit message
702 705 :param nodes: mapping {filename:{'content':content},...}
703 706 :param parent_commit: parent commit; if empty, this is the initial
704 707 commit
705 708 :param author: author of the commit, can be different than the committer,
706 709 but only for git
707 710 :param trigger_push_hook: trigger push hooks
708 711
709 712 :returns: new commit after deletion
710 713 """
711 714
712 715 user = self._get_user(user)
713 716 scm_instance = repo.scm_instance(cache=False)
714 717
715 718 processed_nodes = []
716 719 for f_path in nodes:
717 720 f_path = self._sanitize_path(f_path)
718 721 # content can be empty, but for compatibility it allows the same dict
719 722 # structure as add_nodes
720 723 content = nodes[f_path].get('content')
721 724 processed_nodes.append((f_path, content))
722 725
723 726 message = safe_unicode(message)
724 727 commiter = user.full_contact
725 728 author = safe_unicode(author) if author else commiter
726 729
727 730 imc = scm_instance.in_memory_commit
728 731
729 732 if not parent_commit:
730 733 parent_commit = EmptyCommit(alias=scm_instance.alias)
731 734
732 735 if isinstance(parent_commit, EmptyCommit):
733 736 # EmptyCommit means we're editing an empty repository
734 737 parents = None
735 738 else:
736 739 parents = [parent_commit]
737 740 # add multiple nodes
738 741 for path, content in processed_nodes:
739 742 imc.remove(FileNode(path, content=content))
740 743
741 744 # TODO: handle pre push scenario
742 745 tip = imc.commit(message=message,
743 746 author=author,
744 747 parents=parents,
745 748 branch=parent_commit.branch)
746 749
747 750 self.mark_for_invalidation(repo.repo_name)
748 751 if trigger_push_hook:
749 752 hooks_utils.trigger_post_push_hook(
750 753 username=user.username, action='push_local',
751 754 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
752 755 commit_ids=[tip.raw_id])
753 756 return tip
754 757
755 758 def strip(self, repo, commit_id, branch):
756 759 scm_instance = repo.scm_instance(cache=False)
757 760 scm_instance.config.clear_section('hooks')
758 761 scm_instance.strip(commit_id, branch)
759 762 self.mark_for_invalidation(repo.repo_name)
760 763
761 764 def get_unread_journal(self):
762 765 return self.sa.query(UserLog).count()
763 766
764 767 def get_repo_landing_revs(self, translator, repo=None):
765 768 """
766 769 Generates select options with tags, branches and bookmarks (for hg only),
767 770 grouped by type
768 771
769 772 :param repo: repository name or Repository object
770 773 """
771 774 _ = translator
772 775 repo = self._get_repo(repo)
773 776
774 777 hist_l = [
775 778 ['rev:tip', _('latest tip')]
776 779 ]
777 780 choices = [
778 781 'rev:tip'
779 782 ]
780 783
781 784 if not repo:
782 785 return choices, hist_l
783 786
784 787 repo = repo.scm_instance()
785 788
786 789 branches_group = (
787 790 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
788 791 for b in repo.branches],
789 792 _("Branches"))
790 793 hist_l.append(branches_group)
791 794 choices.extend([x[0] for x in branches_group[0]])
792 795
793 796 if repo.alias == 'hg':
794 797 bookmarks_group = (
795 798 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
796 799 for b in repo.bookmarks],
797 800 _("Bookmarks"))
798 801 hist_l.append(bookmarks_group)
799 802 choices.extend([x[0] for x in bookmarks_group[0]])
800 803
801 804 tags_group = (
802 805 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
803 806 for t in repo.tags],
804 807 _("Tags"))
805 808 hist_l.append(tags_group)
806 809 choices.extend([x[0] for x in tags_group[0]])
807 810
808 811 return choices, hist_l
809 812
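The two returned structures mirror each other: choices is a flat list of ref identifiers, hist_l holds the grouped options used to build the select widget. A sketch of the shape, with example names and a stand-in translator:

# illustrative call and result shape for a Mercurial repository
translator = lambda s: s                      # stand-in for the real translator
choices, hist_l = ScmModel().get_repo_landing_revs(translator, repo=db_repo)
# choices -> ['rev:tip', 'branch:default', 'book:feature-x', 'tag:v1.0']
# hist_l  -> [['rev:tip', 'latest tip'],
#             ([('branch:default', 'default')], 'Branches'),
#             ([('book:feature-x', 'feature-x')], 'Bookmarks'),
#             ([('tag:v1.0', 'v1.0')], 'Tags')]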
810 813 def get_server_info(self, environ=None):
811 814 server_info = get_system_info(environ)
812 815 return server_info
@@ -1,98 +1,133 b''
1 1 <div class="panel panel-default">
2 2 <div class="panel-heading">
3 3 <h3 class="panel-title">${_('Invalidate Cache for Repository')}</h3>
4 4 </div>
5 5 <div class="panel-body">
6 6
7 7 <h4>${_('Manually invalidate the repository cache. On the next access the repository cache will be recreated.')}</h4>
8 8
9 9 <p>
10 10 ${_('Cache purging can be automated with the API call below. It can be run periodically, e.g. from crontab.')}
11 11 <br/>
12 12 <code>
13 13 ${h.api_call_example(method='invalidate_cache', args={"repoid": c.rhodecode_db_repo.repo_name})}
14 14 </code>
15 15 </p>
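For reference, the automated purge mentioned above is a plain JSON-RPC POST to the API endpoint; a minimal Python 2 sketch suitable for a cron job, where the URL, auth token and repoid are placeholders to adjust, and the payload layout follows the standard RhodeCode API call format:

# hypothetical cron script; adjust url, token and repoid before use
import json
import urllib2

payload = {
    'id': 1,
    'auth_token': 'SECRET_AUTH_TOKEN',
    'method': 'invalidate_cache',
    'args': {'repoid': 'some-group/some-repo'},
}
req = urllib2.Request(
    'https://rhodecode.example.com/_admin/api',
    data=json.dumps(payload),
    headers={'Content-Type': 'application/json'})
print(urllib2.urlopen(req).read())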
16 16
17 17 ${h.secure_form(h.route_path('edit_repo_caches', repo_name=c.repo_name), request=request)}
18 18 <div class="form">
19 19 <div class="fields">
20 20 ${h.submit('reset_cache_%s' % c.rhodecode_db_repo.repo_name,_('Invalidate repository cache'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to invalidate repository cache')+"');")}
21 21 </div>
22 22 </div>
23 23 ${h.end_form()}
24 24
25 25 </div>
26 26 </div>
27 27
28 28
29 29 <div class="panel panel-default">
30 30 <div class="panel-heading">
31 31 <h3 class="panel-title">
32 32 ${(_ungettext('List of repository caches (%(count)s entry)', 'List of repository caches (%(count)s entries)' ,len(c.rhodecode_db_repo.cache_keys)) % {'count': len(c.rhodecode_db_repo.cache_keys)})}
33 33 </h3>
34 34 </div>
35 35 <div class="panel-body">
36 36 <div class="field" >
37 37 <table class="rctable edit_cache">
38 38 <tr>
39 39 <th>${_('Prefix')}</th>
40 40 <th>${_('Key')}</th>
41 41 <th>${_('Active')}</th>
42 42 </tr>
43 43 %for cache in c.rhodecode_db_repo.cache_keys:
44 44 <tr>
45 45 <td class="td-prefix">${cache.get_prefix() or '-'}</td>
46 46 <td class="td-cachekey">${cache.cache_key}</td>
47 47 <td class="td-active">${h.bool2icon(cache.cache_active)}</td>
48 48 </tr>
49 49 %endfor
50 50 </table>
51 51 </div>
52 52 </div>
53 53 </div>
54 54
55 55 <div class="panel panel-default">
56 56 <div class="panel-heading">
57 <h3 class="panel-title">
58 ${_('Cache keys')}
59 </h3>
60 </div>
61 <div class="panel-body">
62 <p>
63 Cache keys used for storing cached values of repository stats,
64 file tree history and file tree search.
65 Invalidating the cache will remove those entries.
66 </p>
67 <pre>
68 region: ${c.region.name}
69 backend: ${c.region.actual_backend.__class__}
70 store: ${c.region.actual_backend.get_store()}
71
72
73 % if c.repo_keys:
74 ${len(c.repo_keys)} <a href="#showKeys" onclick="$('#show-keys').toggle()">${_('Show all')}</a>
75 <span id="show-keys" style="display: none">
76 % for k in c.repo_keys:
77 - ${k}
78 % endfor
79 </span>
80 % else:
81 NO KEYS FOUND
82 % endif
83
84 </pre>
85
86 </div>
87 </div>
88
89
90 <div class="panel panel-default">
91 <div class="panel-heading">
57 92 <h3 class="panel-title">${_('Shadow Repositories')}</h3>
58 93 </div>
59 94 <div class="panel-body">
60 95 <table class="rctable edit_cache">
61 96 % if c.shadow_repos:
62 97 % for shadow_repo in c.shadow_repos:
63 98 <tr>
64 99 <td>${shadow_repo}</td>
65 100 </tr>
66 101 % endfor
67 102 % else:
68 103 <tr>
69 104 <td>${_('No Shadow repositories exist for this repository.')}</td>
70 105 </tr>
71 106 % endif
72 107
73 108 </table>
74 109 </div>
75 110 </div>
76 111
77 112
78 113 <div class="panel panel-default">
79 114 <div class="panel-heading">
80 115 <h3 class="panel-title">${_('Diff Caches')}</h3>
81 116 </div>
82 117 <div class="panel-body">
83 118 <table class="rctable edit_cache">
84 119 <tr>
85 120 <td>${_('Cached diff name')}:</td>
86 121 <td>${c.rhodecode_db_repo.cached_diffs_relative_dir}</td>
87 122 </tr>
88 123 <tr>
89 124 <td>${_('Cached diff files')}:</td>
90 125 <td>${c.cached_diff_count}</td>
91 126 </tr>
92 127 <tr>
93 128 <td>${_('Cached diff size')}:</td>
94 129 <td>${h.format_byte_size(c.cached_diff_size)}</td>
95 130 </tr>
96 131 </table>
97 132 </div>
98 133 </div>
@@ -1,29 +1,41 b''
1 1 <%namespace name="base" file="/base/base.mako"/>
2 2
3 3 <div class="panel panel-default">
4 4 <div class="panel-heading">
5 5 <h3 class="panel-title">${_('Caches')}</h3>
6 6 </div>
7 7 <div class="panel-body">
8 <p>
9 Cache keys used for storing cached values of user permissions and the authentication plugin cache.
10 Invalidating the cache will remove those entries.
11 </p>
12
8 13 <pre>
9 14 region: ${c.region.name}
10 15 backend: ${c.region.actual_backend.__class__}
11 16 store: ${c.region.actual_backend.get_store()}
12 17
18 % if c.user_keys:
19 ${len(c.user_keys)} <a href="#showKeys" onclick="$('#show-keys').toggle()">${_('Show all')}</a>
20 <span id="show-keys" style="display: none">
13 21 % for k in c.user_keys:
14 22 - ${k}
15 23 % endfor
24 </span>
25 % else:
26 NO KEYS FOUND
27 % endif
16 28 </pre>
17
29 <p></p>
18 30 ${h.secure_form(h.route_path('edit_user_caches_update', user_id=c.user.user_id), request=request)}
19 31 <div class="form">
20 32 <div class="fields">
21 33 ${h.submit('reset_cache_%s' % c.user.user_id, _('Invalidate user cache'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to invalidate user cache')+"');")}
22 34 </div>
23 35 </div>
24 36 ${h.end_form()}
25 37
26 38 </div>
27 39 </div>
28 40
29 41
@@ -1,683 +1,671 b''
1 1
2 2
3 3 ################################################################################
4 4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 5 # The %(here)s variable will be replaced with the parent directory of this file#
6 6 ################################################################################
7 7
8 8 [DEFAULT]
9 9 debug = true
10 10
11 11 ################################################################################
12 12 ## EMAIL CONFIGURATION ##
13 13 ## Uncomment and replace with the email address which should receive ##
14 14 ## any error reports after an application crash ##
15 15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 16 ################################################################################
17 17
18 18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 19 #email_prefix = [RhodeCode]
20 20
21 21 ## email FROM address all mails will be sent
22 22 #app_email_from = rhodecode-noreply@localhost
23 23
24 24 ## Uncomment and replace with the address which should receive any error report
25 25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 26 #email_to = admin@localhost
27 27
28 28 ## in case of Application errors, sent an error email form
29 29 #error_email_from = rhodecode_error@localhost
30 30
31 31 ## additional error message to be send in case of server crash
32 32 #error_message =
33 33
34 34
35 35 #smtp_server = mail.server.com
36 36 #smtp_username =
37 37 #smtp_password =
38 38 #smtp_port =
39 39 #smtp_use_tls = false
40 40 #smtp_use_ssl = true
41 41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 42 #smtp_auth =
43 43
44 44 [server:main]
45 45 ## COMMON ##
46 46 host = 0.0.0.0
47 47 port = 5000
48 48
49 49 ##########################
50 50 ## GUNICORN WSGI SERVER ##
51 51 ##########################
52 52 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
53 53
54 54 use = egg:gunicorn#main
55 55 ## Sets the number of process workers. You must set `instance_id = *`
56 56 ## when this option is set to more than one worker, recommended
57 57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
58 58 ## The `instance_id = *` must be set in the [app:main] section below
59 59 #workers = 2
60 60 ## number of threads for each of the worker, must be set to 1 for gevent
61 61 ## generally recommended to be at 1
62 62 #threads = 1
63 63 ## process name
64 64 #proc_name = rhodecode
65 65 ## type of worker class, one of sync, gevent
66 66 ## recommended for bigger setup is using of of other than sync one
67 67 #worker_class = sync
68 68 ## The maximum number of simultaneous clients. Valid only for Gevent
69 69 #worker_connections = 10
70 70 ## max number of requests that worker will handle before being gracefully
71 71 ## restarted, could prevent memory leaks
72 72 #max_requests = 1000
73 73 #max_requests_jitter = 30
74 74 ## amount of time a worker can spend with handling a request before it
75 75 ## gets killed and restarted. Set to 6hrs
76 76 #timeout = 21600
77 77
78 78 ## prefix middleware for RhodeCode.
79 79 ## recommended when using proxy setup.
80 80 ## allows to set RhodeCode under a prefix in server.
81 81 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
82 82 ## And set your prefix like: `prefix = /custom_prefix`
83 83 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
84 84 ## to make your cookies only work on prefix url
85 85 [filter:proxy-prefix]
86 86 use = egg:PasteDeploy#prefix
87 87 prefix = /
88 88
89 89 [app:main]
90 90 is_test = True
91 91 use = egg:rhodecode-enterprise-ce
92 92
93 93 ## enable proxy prefix middleware, defined above
94 94 #filter-with = proxy-prefix
95 95
96 96
97 97 ## RHODECODE PLUGINS ##
98 98 rhodecode.includes = rhodecode.api
99 99
100 100 # api prefix url
101 101 rhodecode.api.url = /_admin/api
102 102
103 103
104 104 ## END RHODECODE PLUGINS ##
105 105
106 106 ## encryption key used to encrypt social plugin tokens,
107 107 ## remote_urls with credentials etc, if not set it defaults to
108 108 ## `beaker.session.secret`
109 109 #rhodecode.encrypted_values.secret =
110 110
111 111 ## decryption strict mode (enabled by default). It controls if decryption raises
112 112 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 113 #rhodecode.encrypted_values.strict = false
114 114
115 115 ## return gzipped responses from Rhodecode (static files/application)
116 116 gzip_responses = false
117 117
118 118 ## autogenerate javascript routes file on startup
119 119 generate_js_files = false
120 120
121 121 ## Optional Languages
122 122 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
123 123 lang = en
124 124
125 125 ## perform a full repository scan on each server start, this should be
126 126 ## set to false after first startup, to allow faster server restarts.
127 127 startup.import_repos = true
128 128
129 129 ## Uncomment and set this path to use archive download cache.
130 130 ## Once enabled, generated archives will be cached at this location
131 131 ## and served from the cache during subsequent requests for the same archive of
132 132 ## the repository.
133 133 #archive_cache_dir = /tmp/tarballcache
134 134
135 135 ## URL at which the application is running. This is used for bootstrapping
136 136 ## requests in context when no web request is available. Used in ishell, or
137 137 ## SSH calls. Set this for events to receive proper url for SSH calls.
138 138 app.base_url = http://rhodecode.local
139 139
140 140 ## change this to unique ID for security
141 141 app_instance_uuid = rc-production
142 142
143 143 ## cut off limit for large diffs (size in bytes)
144 144 cut_off_limit_diff = 1024000
145 145 cut_off_limit_file = 256000
146 146
147 147 ## use cache version of scm repo everywhere
148 148 vcs_full_cache = false
149 149
150 150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 151 ## Normally this is controlled by proper http flags sent from http server
152 152 force_https = false
153 153
154 154 ## use Strict-Transport-Security headers
155 155 use_htsts = false
156 156
157 157 ## git rev filter option, --all is the default filter, if you need to
158 158 ## hide all refs in changelog switch this to --branches --tags
159 159 git_rev_filter = --all
160 160
161 161 # Set to true if your repos are exposed using the dumb protocol
162 162 git_update_server_info = false
163 163
164 164 ## RSS/ATOM feed options
165 165 rss_cut_off_limit = 256000
166 166 rss_items_per_page = 10
167 167 rss_include_diff = false
168 168
169 169 ## gist URL alias, used to create nicer urls for gist. This should be an
170 170 ## url that does rewrites to _admin/gists/{gistid}.
171 171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
172 172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
173 173 gist_alias_url =
174 174
175 175 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
176 176 ## used for access.
177 177 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
178 178 ## came from the logged-in user who owns this authentication token.
179 179 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
180 180 ## authentication token. Such a view would only be accessible when used together
181 181 ## with this authentication token
182 182 ##
183 183 ## list of all views can be found under `/_admin/permissions/auth_token_access`
184 184 ## The list should be "," separated and on a single line.
185 185 ##
186 186 ## Most common views to enable:
187 187 # RepoCommitsView:repo_commit_download
188 188 # RepoCommitsView:repo_commit_patch
189 189 # RepoCommitsView:repo_commit_raw
190 190 # RepoCommitsView:repo_commit_raw@TOKEN
191 191 # RepoFilesView:repo_files_diff
192 192 # RepoFilesView:repo_archivefile
193 193 # RepoFilesView:repo_file_raw
194 194 # GistView:*
195 195 api_access_controllers_whitelist =
196 196
197 197 ## default encoding used to convert from and to unicode
198 198 ## can be also a comma separated list of encoding in case of mixed encodings
199 199 default_encoding = UTF-8
200 200
201 201 ## instance-id prefix
202 202 ## a prefix key for this instance used for cache invalidation when running
203 203 ## multiple instances of rhodecode, make sure it's globally unique for
204 204 ## all running rhodecode instances. Leave empty if you don't use it
205 205 instance_id =
206 206
207 207 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
208 208 ## of an authentication plugin even if it is disabled by its settings.
209 209 ## This could be useful if you are unable to log in to the system due to broken
210 210 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
211 211 ## module to log in again and fix the settings.
212 212 ##
213 213 ## Available builtin plugin IDs (hash is part of the ID):
214 214 ## egg:rhodecode-enterprise-ce#rhodecode
215 215 ## egg:rhodecode-enterprise-ce#pam
216 216 ## egg:rhodecode-enterprise-ce#ldap
217 217 ## egg:rhodecode-enterprise-ce#jasig_cas
218 218 ## egg:rhodecode-enterprise-ce#headers
219 219 ## egg:rhodecode-enterprise-ce#crowd
220 220 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
221 221
222 222 ## alternative return HTTP header for failed authentication. Default HTTP
223 223 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
224 224 ## handling that, causing a series of failed authentication calls.
225 225 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
226 226 ## This will be served instead of the default 401 on bad authentication
227 227 auth_ret_code =
228 228
229 229 ## use special detection method when serving auth_ret_code, instead of serving
230 230 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
231 231 ## and then serve auth_ret_code to clients
232 232 auth_ret_code_detection = false
233 233
234 234 ## locking return code. When repository is locked return this HTTP code. 2XX
235 235 ## codes don't break the transactions while 4XX codes do
236 236 lock_ret_code = 423
237 237
238 238 ## allows to change the repository location in settings page
239 239 allow_repo_location_change = true
240 240
241 241 ## allows to setup custom hooks in settings page
242 242 allow_custom_hooks_settings = true
243 243
244 244 ## generated license token, goto license page in RhodeCode settings to obtain
245 245 ## new token
246 246 license_token = abra-cada-bra1-rce3
247 247
248 248 ## supervisor connection uri, for managing supervisor and logs.
249 249 supervisor.uri =
250 250 ## supervisord group name/id we only want this RC instance to handle
251 251 supervisor.group_id = dev
252 252
253 253 ## Display extended labs settings
254 254 labs_settings_active = true
255 255
256 256 ####################################
257 257 ### CELERY CONFIG ####
258 258 ####################################
259 259 use_celery = false
260 260 broker.host = localhost
261 261 broker.vhost = rabbitmqhost
262 262 broker.port = 5672
263 263 broker.user = rabbitmq
264 264 broker.password = qweqwe
265 265
266 266 celery.imports = rhodecode.lib.celerylib.tasks
267 267
268 268 celery.result.backend = amqp
269 269 celery.result.dburi = amqp://
270 270 celery.result.serialier = json
271 271
272 272 #celery.send.task.error.emails = true
273 273 #celery.amqp.task.result.expires = 18000
274 274
275 275 celeryd.concurrency = 2
276 276 #celeryd.log.file = celeryd.log
277 277 celeryd.log.level = debug
278 278 celeryd.max.tasks.per.child = 1
279 279
280 280 ## tasks will never be sent to the queue, but executed locally instead.
281 281 celery.always.eager = false
282 282
283 283 ####################################
284 284 ### BEAKER CACHE ####
285 285 ####################################
286 286 # default cache dir for templates. Putting this into a ramdisk
287 287 ## can boost performance, eg. %(here)s/data_ramdisk
288 288 cache_dir = %(here)s/data
289 289
290 290 ## locking and default file storage for Beaker. Putting this into a ramdisk
291 291 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
292 292 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
293 293 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
294 294
295 beaker.cache.regions = long_term, sql_cache_short, repo_cache_long
295 beaker.cache.regions = long_term, sql_cache_short
296 296
297 297 beaker.cache.long_term.type = memory
298 298 beaker.cache.long_term.expire = 36000
299 299 beaker.cache.long_term.key_length = 256
300 300
301 301 beaker.cache.sql_cache_short.type = memory
302 302 beaker.cache.sql_cache_short.expire = 1
303 303 beaker.cache.sql_cache_short.key_length = 256
304 304
305 beaker.cache.repo_cache_long.type = memorylru_base
306 beaker.cache.repo_cache_long.max_items = 4096
307 beaker.cache.repo_cache_long.expire = 2592000
308
309 ## default is memorylru_base cache, configure only if required
310 ## using multi-node or multi-worker setup
311 #beaker.cache.repo_cache_long.type = ext:memcached
312 #beaker.cache.repo_cache_long.url = localhost:11211
313 #beaker.cache.repo_cache_long.expire = 1209600
314 #beaker.cache.repo_cache_long.key_length = 256
315
316
317 305 #####################################
318 306 ### DOGPILE CACHE ####
319 307 #####################################
320 308
321 309 ## permission tree cache settings
322 310 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
323 311 rc_cache.cache_perms.expiration_time = 0
324 312 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
325 313
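The rc_cache.* settings above follow dogpile.cache's region configuration. As a rough analogy only (this is not RhodeCode's internal wiring, and it uses the stock dbm backend rather than the custom dogpile.cache.rc.file_namespace one), a hand-built region with the same parameters would look roughly like this:

# illustrative dogpile.cache region loosely matching the settings above
from dogpile.cache import make_region

perms_region = make_region().configure(
    'dogpile.cache.dbm',                # stand-in for dogpile.cache.rc.file_namespace
    expiration_time=0,                  # mirrors rc_cache.cache_perms.expiration_time above
    arguments={'filename': '/tmp/rc_cache_1'},
)

@perms_region.cache_on_arguments()
def permission_tree(user_id):
    # placeholder for the real (expensive) permission computation
    return {'user_id': user_id, 'repositories': {}}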
326 314 ####################################
327 315 ### BEAKER SESSION ####
328 316 ####################################
329 317
330 318 ## .session.type is the type of storage used for the session; currently allowed
331 319 ## types are file, ext:memcached, ext:database, and memory (default).
332 320 beaker.session.type = file
333 321 beaker.session.data_dir = %(here)s/rc/data/sessions/data
334 322
335 323 ## db based session, fast, and allows easy management over logged in users
336 324 #beaker.session.type = ext:database
337 325 #beaker.session.table_name = db_session
338 326 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
339 327 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
340 328 #beaker.session.sa.pool_recycle = 3600
341 329 #beaker.session.sa.echo = false
342 330
343 331 beaker.session.key = rhodecode
344 332 beaker.session.secret = test-rc-uytcxaz
345 333 beaker.session.lock_dir = %(here)s/rc/data/sessions/lock
346 334
347 335 ## Secure encrypted cookie. Requires AES and AES python libraries
348 336 ## you must disable beaker.session.secret to use this
349 337 #beaker.session.encrypt_key = key_for_encryption
350 338 #beaker.session.validate_key = validation_key
351 339
352 340 ## sets session as invalid (also logging out the user) if it has not been
353 341 ## accessed for given amount of time in seconds
354 342 beaker.session.timeout = 2592000
355 343 beaker.session.httponly = true
356 344 ## Path to use for the cookie. Set to prefix if you use prefix middleware
357 345 #beaker.session.cookie_path = /custom_prefix
358 346
359 347 ## uncomment for https secure cookie
360 348 beaker.session.secure = false
361 349
362 350 ## auto save the session to not to use .save()
363 351 beaker.session.auto = false
364 352
365 353 ## default cookie expiration time in seconds, set to `true` to set expire
366 354 ## at browser close
367 355 #beaker.session.cookie_expires = 3600
368 356
369 357 ###################################
370 358 ## SEARCH INDEXING CONFIGURATION ##
371 359 ###################################
372 360 ## Full text search indexer is available in rhodecode-tools under
373 361 ## `rhodecode-tools index` command
374 362
375 363 ## WHOOSH Backend, doesn't require additional services to run
376 364 ## it works well with a few dozen repos
377 365 search.module = rhodecode.lib.index.whoosh
378 366 search.location = %(here)s/data/index
379 367
380 368 ########################################
381 369 ### CHANNELSTREAM CONFIG ####
382 370 ########################################
383 371 ## channelstream enables persistent connections and live notification
384 372 ## in the system. It's also used by the chat system
385 373
386 374 channelstream.enabled = false
387 375
388 376 ## server address for channelstream server on the backend
389 377 channelstream.server = 127.0.0.1:9800
390 378 ## location of the channelstream server from outside world
391 379 ## use ws:// for http or wss:// for https. This address needs to be handled
392 380 ## by external HTTP server such as Nginx or Apache
393 381 ## see nginx/apache configuration examples in our docs
394 382 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
395 383 channelstream.secret = secret
396 384 channelstream.history.location = %(here)s/channelstream_history
397 385
398 386 ## Internal application path that Javascript uses to connect into.
399 387 ## If you use proxy-prefix the prefix should be added before /_channelstream
400 388 channelstream.proxy_path = /_channelstream
401 389
402 390
403 391 ###################################
404 392 ## APPENLIGHT CONFIG ##
405 393 ###################################
406 394
407 395 ## Appenlight is tailored to work with RhodeCode, see
408 396 ## http://appenlight.com for details how to obtain an account
409 397
410 398 ## appenlight integration enabled
411 399 appenlight = false
412 400
413 401 appenlight.server_url = https://api.appenlight.com
414 402 appenlight.api_key = YOUR_API_KEY
415 403 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
416 404
417 405 # used for JS client
418 406 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
419 407
420 408 ## TWEAK AMOUNT OF INFO SENT HERE
421 409
422 410 ## enables 404 error logging (default False)
423 411 appenlight.report_404 = false
424 412
425 413 ## time in seconds after request is considered being slow (default 1)
426 414 appenlight.slow_request_time = 1
427 415
428 416 ## record slow requests in application
429 417 ## (needs to be enabled for slow datastore recording and time tracking)
430 418 appenlight.slow_requests = true
431 419
432 420 ## enable hooking to application loggers
433 421 appenlight.logging = true
434 422
435 423 ## minimum log level for log capture
436 424 appenlight.logging.level = WARNING
437 425
438 426 ## send logs only from erroneous/slow requests
439 427 ## (saves API quota for intensive logging)
440 428 appenlight.logging_on_error = false
441 429
442 430 ## list of additional keywords that should be grabbed from the environ object
443 431 ## can be string with comma separated list of words in lowercase
444 432 ## (by default the client will always send the following info:
445 433 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
446 434 ## start with HTTP*); this list can be extended with additional keywords here
447 435 appenlight.environ_keys_whitelist =
448 436
449 437 ## list of keywords that should be blanked from request object
450 438 ## can be string with comma separated list of words in lowercase
451 439 ## (by default the client will always blank keys that contain the following words:
452 440 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');
453 441 ## this list can be extended with additional keywords set here
454 442 appenlight.request_keys_blacklist =
455 443
456 444 ## list of namespaces that should be ignored when gathering log entries
457 445 ## can be string with comma separated list of namespaces
458 446 ## (by default the client ignores its own entries: appenlight_client.client)
459 447 appenlight.log_namespace_blacklist =
460 448
461 449
462 450 ################################################################################
463 451 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
464 452 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
465 453 ## execute malicious code after an exception is raised. ##
466 454 ################################################################################
467 455 set debug = false
468 456
469 457
470 458 ##############
471 459 ## STYLING ##
472 460 ##############
473 461 debug_style = false
474 462
475 463 ###########################################
476 464 ### MAIN RHODECODE DATABASE CONFIG ###
477 465 ###########################################
478 466 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
479 467 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
480 468 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
481 469 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
482 470
483 471 # see sqlalchemy docs for other advanced settings
484 472
485 473 ## print the sql statements to output
486 474 sqlalchemy.db1.echo = false
487 475 ## recycle the connections after this amount of seconds
488 476 sqlalchemy.db1.pool_recycle = 3600
489 477 sqlalchemy.db1.convert_unicode = true
490 478
491 479 ## the number of connections to keep open inside the connection pool.
492 480 ## 0 indicates no limit
493 481 #sqlalchemy.db1.pool_size = 5
494 482
495 483 ## the number of connections to allow in connection pool "overflow", that is
496 484 ## connections that can be opened above and beyond the pool_size setting,
497 485 ## which defaults to five.
498 486 #sqlalchemy.db1.max_overflow = 10
499 487
500 488
501 489 ##################
502 490 ### VCS CONFIG ###
503 491 ##################
504 492 vcs.server.enable = true
505 493 vcs.server = localhost:9901
506 494
507 495 ## Web server connectivity protocol, responsible for web-based VCS operations
508 496 ## Available protocols are:
509 497 ## `http` - use http-rpc backend (default)
510 498 vcs.server.protocol = http
511 499
512 500 ## Push/Pull operations protocol, available options are:
513 501 ## `http` - use http-rpc backend (default)
514 502 ## `vcsserver.scm_app` - internal app (EE only)
515 503 vcs.scm_app_implementation = http
516 504
517 505 ## Push/Pull operations hooks protocol, available options are:
518 506 ## `http` - use http-rpc backend (default)
519 507 vcs.hooks.protocol = http
520 508 vcs.hooks.host = 127.0.0.1
521 509
522 510 vcs.server.log_level = debug
523 511 ## Start VCSServer with this instance as a subprocess, useful for development
524 512 vcs.start_server = false
525 513
526 514 ## List of enabled VCS backends, available options are:
527 515 ## `hg` - mercurial
528 516 ## `git` - git
529 517 ## `svn` - subversion
530 518 vcs.backends = hg, git, svn
531 519
532 520 vcs.connection_timeout = 3600
533 521 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
534 522 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
535 523 #vcs.svn.compatible_version = pre-1.8-compatible
536 524
537 525
538 526 ############################################################
539 527 ### Subversion proxy support (mod_dav_svn) ###
540 528 ### Maps RhodeCode repo groups into SVN paths for Apache ###
541 529 ############################################################
542 530 ## Enable or disable the config file generation.
543 531 svn.proxy.generate_config = false
544 532 ## Generate config file with `SVNListParentPath` set to `On`.
545 533 svn.proxy.list_parent_path = true
546 534 ## Set location and file name of generated config file.
547 535 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
548 536 ## Used as a prefix to the `Location` block in the generated config file.
549 537 ## In most cases it should be set to `/`.
550 538 svn.proxy.location_root = /
551 539 ## Command to reload the mod dav svn configuration on change.
552 540 ## Example: `/etc/init.d/apache2 reload`
553 541 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
554 542 ## If the timeout expires before the reload command finishes, the command will
555 543 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
556 544 #svn.proxy.reload_timeout = 10
557 545
558 546 ############################################################
559 547 ### SSH Support Settings ###
560 548 ############################################################
561 549
562 550 ## Defines if the authorized_keys file should be written on any change of
563 551 ## user ssh keys, setting this to false also disables the possibility of adding
564 552 ## ssh keys for users from web interface.
565 553 ssh.generate_authorized_keyfile = true
566 554
567 555 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
568 556 # ssh.authorized_keys_ssh_opts =
569 557
570 558 ## File to generate the authorized keys together with options
571 559 ## It is possible to have multiple key files specified in `sshd_config` e.g.
572 560 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
573 561 ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode
574 562
575 563 ## Command to execute the SSH wrapper. The binary is available in the
576 564 ## rhodecode installation directory.
577 565 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
578 566 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
579 567
580 568 ## Allow shell when executing the ssh-wrapper command
581 569 ssh.wrapper_cmd_allow_shell = false
582 570
583 571 ## Enables logging, and detailed output sent back to the client. Useful for
584 572 ## debugging, shouldn't be used in production.
585 573 ssh.enable_debug_logging = false
586 574
587 575 ## Paths to binary executables, by default they are the names, but we can
588 576 ## override them if we want to use a custom one
589 577 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
590 578 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
591 579 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
592 580
593 581
594 582 ## Dummy marker to add new entries after.
595 583 ## Add any custom entries below. Please don't remove.
596 584 custom.conf = 1
597 585
598 586
599 587 ################################
600 588 ### LOGGING CONFIGURATION ####
601 589 ################################
602 590 [loggers]
603 591 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper
604 592
605 593 [handlers]
606 594 keys = console, console_sql
607 595
608 596 [formatters]
609 597 keys = generic, color_formatter, color_formatter_sql
610 598
611 599 #############
612 600 ## LOGGERS ##
613 601 #############
614 602 [logger_root]
615 603 level = NOTSET
616 604 handlers = console
617 605
618 606 [logger_routes]
619 607 level = DEBUG
620 608 handlers =
621 609 qualname = routes.middleware
622 610 ## "level = DEBUG" logs the route matched and routing variables.
623 611 propagate = 1
624 612
625 613 [logger_beaker]
626 614 level = DEBUG
627 615 handlers =
628 616 qualname = beaker.container
629 617 propagate = 1
630 618
631 619 [logger_rhodecode]
632 620 level = DEBUG
633 621 handlers =
634 622 qualname = rhodecode
635 623 propagate = 1
636 624
637 625 [logger_sqlalchemy]
638 626 level = ERROR
639 627 handlers = console_sql
640 628 qualname = sqlalchemy.engine
641 629 propagate = 0
642 630
643 631 [logger_ssh_wrapper]
644 632 level = DEBUG
645 633 handlers =
646 634 qualname = ssh_wrapper
647 635 propagate = 1
648 636
649 637
650 638 ##############
651 639 ## HANDLERS ##
652 640 ##############
653 641
654 642 [handler_console]
655 643 class = StreamHandler
656 644 args = (sys.stderr,)
657 645 level = DEBUG
658 646 formatter = generic
659 647
660 648 [handler_console_sql]
661 649 class = StreamHandler
662 650 args = (sys.stderr,)
663 651 level = WARN
664 652 formatter = generic
665 653
666 654 ################
667 655 ## FORMATTERS ##
668 656 ################
669 657
670 658 [formatter_generic]
671 659 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
672 660 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
673 661 datefmt = %Y-%m-%d %H:%M:%S
674 662
675 663 [formatter_color_formatter]
676 664 class = rhodecode.lib.logging_formatter.ColorFormatter
677 665 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
678 666 datefmt = %Y-%m-%d %H:%M:%S
679 667
680 668 [formatter_color_formatter_sql]
681 669 class = rhodecode.lib.logging_formatter.ColorFormatterSql
682 670 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
683 671 datefmt = %Y-%m-%d %H:%M:%S