caches: don't use beaker for file caches anymore

Author: marcink
Revision: r2846:bbc96602 (default branch)
@@ -1,705 +1,738 @@
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent from
21 ## email FROM address all mails will be sent from
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, send an error email from this address
28 ## in case of Application errors, send an error email from this address
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of server crash
31 ## additional error message to be sent in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 use = egg:waitress#main
54 use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 threads = 5
56 threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 max_request_body_size = 107374182400
58 max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
60 ## May not work on old windows systems.
61 asyncore_use_poll = true
61 asyncore_use_poll = true
62
62
63
63
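The waitress options above (threads, max body size, poll) have direct equivalents in waitress's programmatic API. A minimal sketch, assuming a placeholder WSGI callable named `app` that is not part of this configuration; the keyword names mirror the ini keys:

    # Sketch only: serve a WSGI app with waitress using values equivalent to the ini settings above.
    from waitress import serve

    def app(environ, start_response):
        # trivial placeholder application
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'hello']

    serve(
        app,
        host='127.0.0.1',
        port=5000,
        threads=5,                           # threads = 5
        max_request_body_size=107374182400,  # MAX BODY SIZE 100GB
        asyncore_use_poll=True,              # asyncore_use_poll = true
    )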
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 #use = egg:gunicorn#main
69 #use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 #workers = 2
74 #workers = 2
75 ## number of threads for each of the worker, must be set to 1 for gevent
75 ## number of threads for each of the worker, must be set to 1 for gevent
76 ## generally recommended to be at 1
76 ## generally recommended to be at 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 #proc_name = rhodecode
79 #proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups it is recommended to use a worker class other than sync
81 ## for bigger setups it is recommended to use a worker class other than sync
82 #worker_class = gevent
82 #worker_class = gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 #max_requests = 1000
87 #max_requests = 1000
88 #max_requests_jitter = 30
88 #max_requests_jitter = 30
89 ## amount of time a worker can spend handling a request before it
89 ## amount of time a worker can spend handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 #timeout = 21600
91 #timeout = 21600
92
92
93
93
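A quick way to derive the recommended worker count from the (2 * NUMBER_OF_CPUS + 1) rule mentioned above; this is only an illustrative Python snippet, not something read by this config:

    import multiprocessing

    # recommended gunicorn worker count per the comment above: 2 * CPUs + 1
    workers = multiprocessing.cpu_count() * 2 + 1
    print(workers)  # e.g. 5 on a 2-CPU machine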
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows serving RhodeCode under a URL prefix on the server.
96 ## allows serving RhodeCode under a URL prefix on the server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 # During development we want to have the debug toolbar enabled
111 # During development we want to have the debug toolbar enabled
112 pyramid.includes =
112 pyramid.includes =
113 pyramid_debugtoolbar
113 pyramid_debugtoolbar
114 rhodecode.lib.middleware.request_wrapper
114 rhodecode.lib.middleware.request_wrapper
115
115
116 pyramid.reload_templates = true
116 pyramid.reload_templates = true
117
117
118 debugtoolbar.hosts = 0.0.0.0/0
118 debugtoolbar.hosts = 0.0.0.0/0
119 debugtoolbar.exclude_prefixes =
119 debugtoolbar.exclude_prefixes =
120 /css
120 /css
121 /fonts
121 /fonts
122 /images
122 /images
123 /js
123 /js
124
124
125 ## RHODECODE PLUGINS ##
125 ## RHODECODE PLUGINS ##
126 rhodecode.includes =
126 rhodecode.includes =
127 rhodecode.api
127 rhodecode.api
128
128
129
129
130 # api prefix url
130 # api prefix url
131 rhodecode.api.url = /_admin/api
131 rhodecode.api.url = /_admin/api
132
132
133
133
134 ## END RHODECODE PLUGINS ##
134 ## END RHODECODE PLUGINS ##
135
135
136 ## encryption key used to encrypt social plugin tokens,
136 ## encryption key used to encrypt social plugin tokens,
137 ## remote_urls with credentials etc, if not set it defaults to
137 ## remote_urls with credentials etc, if not set it defaults to
138 ## `beaker.session.secret`
138 ## `beaker.session.secret`
139 #rhodecode.encrypted_values.secret =
139 #rhodecode.encrypted_values.secret =
140
140
141 ## decryption strict mode (enabled by default). It controls if decryption raises
141 ## decryption strict mode (enabled by default). It controls if decryption raises
142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
142 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
143 #rhodecode.encrypted_values.strict = false
143 #rhodecode.encrypted_values.strict = false
144
144
145 ## return gzipped responses from Rhodecode (static files/application)
145 ## return gzipped responses from Rhodecode (static files/application)
146 gzip_responses = false
146 gzip_responses = false
147
147
148 ## autogenerate javascript routes file on startup
148 ## autogenerate javascript routes file on startup
149 generate_js_files = false
149 generate_js_files = false
150
150
151 ## Optional Languages
151 ## Optional Languages
152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
152 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
153 lang = en
153 lang = en
154
154
155 ## perform a full repository scan on each server start, this should be
155 ## perform a full repository scan on each server start, this should be
156 ## set to false after first startup, to allow faster server restarts.
156 ## set to false after first startup, to allow faster server restarts.
157 startup.import_repos = false
157 startup.import_repos = false
158
158
159 ## Uncomment and set this path to use archive download cache.
159 ## Uncomment and set this path to use archive download cache.
160 ## Once enabled, generated archives will be cached at this location
160 ## Once enabled, generated archives will be cached at this location
161 ## and served from the cache during subsequent requests for the same archive of
161 ## and served from the cache during subsequent requests for the same archive of
162 ## the repository.
162 ## the repository.
163 #archive_cache_dir = /tmp/tarballcache
163 #archive_cache_dir = /tmp/tarballcache
164
164
165 ## URL at which the application is running. This is used for bootstrapping
165 ## URL at which the application is running. This is used for bootstrapping
166 ## requests in context when no web request is available. Used in ishell, or
166 ## requests in context when no web request is available. Used in ishell, or
167 ## SSH calls. Set this for events to receive a proper URL for SSH calls.
167 ## SSH calls. Set this for events to receive a proper URL for SSH calls.
168 app.base_url = http://rhodecode.local
168 app.base_url = http://rhodecode.local
169
169
170 ## change this to unique ID for security
170 ## change this to unique ID for security
171 app_instance_uuid = rc-production
171 app_instance_uuid = rc-production
172
172
173 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
173 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
174 ## commit or pull request exceeds this limit, the diff will be displayed
174 ## commit or pull request exceeds this limit, the diff will be displayed
175 ## partially. E.g 512000 == 512Kb
175 ## partially. E.g 512000 == 512Kb
176 cut_off_limit_diff = 512000
176 cut_off_limit_diff = 512000
177
177
178 ## cut off limit for large files inside diffs (size in bytes). Each individual
178 ## cut off limit for large files inside diffs (size in bytes). Each individual
179 ## file inside diff which exceeds this limit will be displayed partially.
179 ## file inside diff which exceeds this limit will be displayed partially.
180 ## E.g 128000 == 128Kb
180 ## E.g 128000 == 128Kb
181 cut_off_limit_file = 128000
181 cut_off_limit_file = 128000
182
182
183 ## use cached version of scm repo everywhere
183 ## use cached version of scm repo everywhere
184 vcs_full_cache = true
184 vcs_full_cache = true
185
185
186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
186 ## force https in RhodeCode, fixes https redirects, assumes it's always https
187 ## Normally this is controlled by proper http flags sent from http server
187 ## Normally this is controlled by proper http flags sent from http server
188 force_https = false
188 force_https = false
189
189
190 ## use Strict-Transport-Security headers
190 ## use Strict-Transport-Security headers
191 use_htsts = false
191 use_htsts = false
192
192
193 ## git rev filter option, --all is the default filter, if you need to
193 ## git rev filter option, --all is the default filter, if you need to
194 ## hide all refs in changelog switch this to --branches --tags
194 ## hide all refs in changelog switch this to --branches --tags
195 git_rev_filter = --branches --tags
195 git_rev_filter = --branches --tags
196
196
197 # Set to true if your repos are exposed using the dumb protocol
197 # Set to true if your repos are exposed using the dumb protocol
198 git_update_server_info = false
198 git_update_server_info = false
199
199
200 ## RSS/ATOM feed options
200 ## RSS/ATOM feed options
201 rss_cut_off_limit = 256000
201 rss_cut_off_limit = 256000
202 rss_items_per_page = 10
202 rss_items_per_page = 10
203 rss_include_diff = false
203 rss_include_diff = false
204
204
205 ## gist URL alias, used to create nicer urls for gist. This should be an
205 ## gist URL alias, used to create nicer urls for gist. This should be an
206 ## url that does rewrites to _admin/gists/{gistid}.
206 ## url that does rewrites to _admin/gists/{gistid}.
207 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
207 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
208 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
208 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
209 gist_alias_url =
209 gist_alias_url =
210
210
211 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
211 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
212 ## used for access.
212 ## used for access.
213 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
213 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
214 ## came from the logged-in user who owns this authentication token.
214 ## came from the logged-in user who owns this authentication token.
215 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
215 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
216 ## authentication token. Such a view will only be accessible when used together
216 ## authentication token. Such a view will only be accessible when used together
217 ## with this authentication token
217 ## with this authentication token
218 ##
218 ##
219 ## list of all views can be found under `/_admin/permissions/auth_token_access`
219 ## list of all views can be found under `/_admin/permissions/auth_token_access`
220 ## The list should be "," separated and on a single line.
220 ## The list should be "," separated and on a single line.
221 ##
221 ##
222 ## Most common views to enable:
222 ## Most common views to enable:
223 # RepoCommitsView:repo_commit_download
223 # RepoCommitsView:repo_commit_download
224 # RepoCommitsView:repo_commit_patch
224 # RepoCommitsView:repo_commit_patch
225 # RepoCommitsView:repo_commit_raw
225 # RepoCommitsView:repo_commit_raw
226 # RepoCommitsView:repo_commit_raw@TOKEN
226 # RepoCommitsView:repo_commit_raw@TOKEN
227 # RepoFilesView:repo_files_diff
227 # RepoFilesView:repo_files_diff
228 # RepoFilesView:repo_archivefile
228 # RepoFilesView:repo_archivefile
229 # RepoFilesView:repo_file_raw
229 # RepoFilesView:repo_file_raw
230 # GistView:*
230 # GistView:*
231 api_access_controllers_whitelist =
231 api_access_controllers_whitelist =
232
232
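As an illustration of the ?auth_token=TOKEN_HASH mechanism described above, a hedged Python sketch using the requests library; the host and file URL are hypothetical and only stand in for one of the whitelisted views (e.g. RepoFilesView:repo_file_raw):

    import requests

    # Hypothetical raw-file URL; the exact URL layout depends on your RhodeCode
    # instance and repository.
    url = 'https://rhodecode.example.com/myrepo/raw/tip/README.rst'

    # The auth token authenticates the request as the token's owner, provided the
    # view is listed in api_access_controllers_whitelist.
    resp = requests.get(url, params={'auth_token': 'TOKEN_HASH'})
    print(resp.status_code)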
233 ## default encoding used to convert from and to unicode
233 ## default encoding used to convert from and to unicode
234 ## can also be a comma separated list of encodings in case of mixed encodings
234 ## can also be a comma separated list of encodings in case of mixed encodings
235 default_encoding = UTF-8
235 default_encoding = UTF-8
236
236
237 ## instance-id prefix
237 ## instance-id prefix
238 ## a prefix key for this instance used for cache invalidation when running
238 ## a prefix key for this instance used for cache invalidation when running
239 ## multiple instances of rhodecode, make sure it's globally unique for
239 ## multiple instances of rhodecode, make sure it's globally unique for
240 ## all running rhodecode instances. Leave empty if you don't use it
240 ## all running rhodecode instances. Leave empty if you don't use it
241 instance_id =
241 instance_id =
242
242
243 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
243 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
244 ## of an authentication plugin even if it is disabled by its settings.
244 ## of an authentication plugin even if it is disabled by its settings.
245 ## This could be useful if you are unable to log in to the system due to broken
245 ## This could be useful if you are unable to log in to the system due to broken
246 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
246 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
247 ## module to log in again and fix the settings.
247 ## module to log in again and fix the settings.
248 ##
248 ##
249 ## Available builtin plugin IDs (hash is part of the ID):
249 ## Available builtin plugin IDs (hash is part of the ID):
250 ## egg:rhodecode-enterprise-ce#rhodecode
250 ## egg:rhodecode-enterprise-ce#rhodecode
251 ## egg:rhodecode-enterprise-ce#pam
251 ## egg:rhodecode-enterprise-ce#pam
252 ## egg:rhodecode-enterprise-ce#ldap
252 ## egg:rhodecode-enterprise-ce#ldap
253 ## egg:rhodecode-enterprise-ce#jasig_cas
253 ## egg:rhodecode-enterprise-ce#jasig_cas
254 ## egg:rhodecode-enterprise-ce#headers
254 ## egg:rhodecode-enterprise-ce#headers
255 ## egg:rhodecode-enterprise-ce#crowd
255 ## egg:rhodecode-enterprise-ce#crowd
256 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
256 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
257
257
258 ## alternative return HTTP header for failed authentication. Default HTTP
258 ## alternative return HTTP header for failed authentication. Default HTTP
259 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
259 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
260 ## handling that, causing a series of failed authentication calls.
260 ## handling that, causing a series of failed authentication calls.
261 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
261 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
262 ## This will be served instead of the default 401 on bad authentication
262 ## This will be served instead of the default 401 on bad authentication
263 auth_ret_code =
263 auth_ret_code =
264
264
265 ## use special detection method when serving auth_ret_code, instead of serving
265 ## use special detection method when serving auth_ret_code, instead of serving
266 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
266 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
267 ## and then serve auth_ret_code to clients
267 ## and then serve auth_ret_code to clients
268 auth_ret_code_detection = false
268 auth_ret_code_detection = false
269
269
270 ## locking return code. When repository is locked return this HTTP code. 2XX
270 ## locking return code. When repository is locked return this HTTP code. 2XX
271 ## codes don't break the transactions while 4XX codes do
271 ## codes don't break the transactions while 4XX codes do
272 lock_ret_code = 423
272 lock_ret_code = 423
273
273
274 ## allows changing the repository location in the settings page
274 ## allows changing the repository location in the settings page
275 allow_repo_location_change = true
275 allow_repo_location_change = true
276
276
277 ## allows setting up custom hooks in the settings page
277 ## allows setting up custom hooks in the settings page
278 allow_custom_hooks_settings = true
278 allow_custom_hooks_settings = true
279
279
280 ## generated license token, go to the license page in RhodeCode settings to obtain
280 ## generated license token, go to the license page in RhodeCode settings to obtain
281 ## a new token
281 ## a new token
282 license_token =
282 license_token =
283
283
284 ## supervisor connection uri, for managing supervisor and logs.
284 ## supervisor connection uri, for managing supervisor and logs.
285 supervisor.uri =
285 supervisor.uri =
286 ## supervisord group name/id we only want this RC instance to handle
286 ## supervisord group name/id we only want this RC instance to handle
287 supervisor.group_id = dev
287 supervisor.group_id = dev
288
288
289 ## Display extended labs settings
289 ## Display extended labs settings
290 labs_settings_active = true
290 labs_settings_active = true
291
291
292 ####################################
292 ####################################
293 ### CELERY CONFIG ####
293 ### CELERY CONFIG ####
294 ####################################
294 ####################################
295 ## run: /path/to/celery worker \
295 ## run: /path/to/celery worker \
296 ## -E --beat --app rhodecode.lib.celerylib.loader \
296 ## -E --beat --app rhodecode.lib.celerylib.loader \
297 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
297 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
298 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
298 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
299
299
300 use_celery = false
300 use_celery = false
301
301
302 ## connection url to the message broker (default rabbitmq)
302 ## connection url to the message broker (default rabbitmq)
303 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
303 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
304
304
305 ## maximum tasks to execute before worker restart
305 ## maximum tasks to execute before worker restart
306 celery.max_tasks_per_child = 100
306 celery.max_tasks_per_child = 100
307
307
308 ## tasks will never be sent to the queue, but executed locally instead.
308 ## tasks will never be sent to the queue, but executed locally instead.
309 celery.task_always_eager = false
309 celery.task_always_eager = false
310
310
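For context on the celery settings above, a minimal sketch of how a broker URL and task_always_eager behave in a plain Celery app; the task shown is hypothetical and unrelated to RhodeCode's own tasks:

    from celery import Celery

    # Hypothetical app using the same broker URL format as celery.broker_url above.
    app = Celery('demo', broker='amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost')

    # Mirrors celery.task_always_eager: the ini default is false; True runs tasks
    # in-process instead of sending them to the queue (handy without a broker).
    app.conf.task_always_eager = True

    @app.task
    def add(x, y):
        return x + y

    print(add.delay(2, 3).get())  # executed eagerly, prints 5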
311 #####################################
312 ### DOGPILE CACHE ####
313 #####################################
314 ## Default cache dir for caches. Putting this into a ramdisk
315 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
316 ## of space
317 cache_dir = /tmp/rcdev/data
318
319 ## cache settings for permission tree, auth TTL.
320 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
321 rc_cache.cache_perms.expiration_time = 300
322 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
323
324 ## redis backend with distributed locks
325 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
326 #rc_cache.cache_perms.expiration_time = 300
327 #rc_cache.cache_perms.arguments.host = localhost
328 #rc_cache.cache_perms.arguments.port = 6379
329 #rc_cache.cache_perms.arguments.db = 0
330 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
331 #rc_cache.cache_perms.arguments.distributed_lock = true
332
333
334 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
335 rc_cache.cache_repo.expiration_time = 2592000
336 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
337
338 ## redis backend with distributed locks
339 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
340 #rc_cache.cache_repo.expiration_time = 2592000
341 ## this needs to be greater than expiration_time
342 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
343 #rc_cache.cache_repo.arguments.host = localhost
344 #rc_cache.cache_repo.arguments.port = 6379
345 #rc_cache.cache_repo.arguments.db = 1
346 #rc_cache.cache_repo.arguments.distributed_lock = true
347
348
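The new rc_cache.* regions are dogpile.cache regions; the dogpile.cache.rc.* backend names are RhodeCode-specific wrappers, so the sketch below uses a stock dogpile backend purely to illustrate how such a region caches function results (the expiration_time matches cache_perms above):

    from dogpile.cache import make_region

    # Stock in-memory backend, for illustration only; RhodeCode wires up its own
    # dogpile.cache.rc.file_namespace / .redis backends from the ini values.
    region = make_region().configure(
        'dogpile.cache.memory',
        expiration_time=300,  # like rc_cache.cache_perms.expiration_time
    )

    @region.cache_on_arguments()
    def compute_permissions(user_id):
        # expensive lookup, cached per user_id for expiration_time seconds
        return {'user_id': user_id, 'perms': ['repository.read']}

    compute_permissions(1)  # computed
    compute_permissions(1)  # served from the cache region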
311 ####################################
349 ####################################
312 ### BEAKER CACHE ####
350 ### BEAKER CACHE ####
313 ####################################
351 ####################################
314 # default cache dir for templates. Putting this into a ramdisk
315 ## can boost performance, eg. %(here)s/data_ramdisk
316 cache_dir = %(here)s/data
317
352
318 ## locking and default file storage for Beaker. Putting this into a ramdisk
353 ## locking and default file storage for Beaker. Putting this into a ramdisk
319 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
354 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
320 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
355 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
321 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
356 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
322
357
323 beaker.cache.regions = long_term, sql_cache_short, repo_cache_long
358 beaker.cache.regions = long_term, sql_cache_short
324
359
325 beaker.cache.long_term.type = memory
360 beaker.cache.long_term.type = memorylru_base
326 beaker.cache.long_term.expire = 36000
361 beaker.cache.long_term.expire = 172800
327 beaker.cache.long_term.key_length = 256
362 beaker.cache.long_term.key_length = 256
328
363
329 beaker.cache.sql_cache_short.type = memory
364 beaker.cache.sql_cache_short.type = memorylru_base
330 beaker.cache.sql_cache_short.expire = 10
365 beaker.cache.sql_cache_short.expire = 10
331 beaker.cache.sql_cache_short.key_length = 256
366 beaker.cache.sql_cache_short.key_length = 256
332
367
333 beaker.cache.repo_cache_long.type = memorylru_base
334 beaker.cache.repo_cache_long.max_items = 4096
335 beaker.cache.repo_cache_long.expire = 2592000
336
337 ## default is memorylru_base cache, configure only if required
338 ## using multi-node or multi-worker setup
339 #beaker.cache.repo_cache_long.type = ext:memcached
340 #beaker.cache.repo_cache_long.url = localhost:11211
341 #beaker.cache.repo_cache_long.expire = 1209600
342 #beaker.cache.repo_cache_long.key_length = 256
343
368
344 ####################################
369 ####################################
345 ### BEAKER SESSION ####
370 ### BEAKER SESSION ####
346 ####################################
371 ####################################
347
372
348 ## .session.type is the type of storage used for the session; currently allowed
373 ## .session.type is the type of storage used for the session; currently allowed
349 ## types are file, ext:memcached, ext:database, and memory (default).
374 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
350 beaker.session.type = file
375 beaker.session.type = file
351 beaker.session.data_dir = %(here)s/data/sessions/data
376 beaker.session.data_dir = %(here)s/data/sessions
352
377
353 ## db based session, fast, and allows easy management over logged in users
378 ## db based session, fast, and allows easy management over logged in users
354 #beaker.session.type = ext:database
379 #beaker.session.type = ext:database
355 #beaker.session.table_name = db_session
380 #beaker.session.table_name = db_session
356 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
381 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
357 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
382 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
358 #beaker.session.sa.pool_recycle = 3600
383 #beaker.session.sa.pool_recycle = 3600
359 #beaker.session.sa.echo = false
384 #beaker.session.sa.echo = false
360
385
361 beaker.session.key = rhodecode
386 beaker.session.key = rhodecode
362 beaker.session.secret = develop-rc-uytcxaz
387 beaker.session.secret = develop-rc-uytcxaz
363 beaker.session.lock_dir = %(here)s/data/sessions/lock
388 beaker.session.lock_dir = %(here)s/data/sessions/lock
364
389
365 ## Secure encrypted cookie. Requires AES and AES python libraries
390 ## Secure encrypted cookie. Requires AES and AES python libraries
366 ## you must disable beaker.session.secret to use this
391 ## you must disable beaker.session.secret to use this
367 #beaker.session.encrypt_key = key_for_encryption
392 #beaker.session.encrypt_key = key_for_encryption
368 #beaker.session.validate_key = validation_key
393 #beaker.session.validate_key = validation_key
369
394
370 ## sets session as invalid(also logging out user) if it haven not been
395 ## sets session as invalid(also logging out user) if it haven not been
371 ## accessed for given amount of time in seconds
396 ## accessed for given amount of time in seconds
372 beaker.session.timeout = 2592000
397 beaker.session.timeout = 2592000
373 beaker.session.httponly = true
398 beaker.session.httponly = true
374 ## Path to use for the cookie. Set to prefix if you use prefix middleware
399 ## Path to use for the cookie. Set to prefix if you use prefix middleware
375 #beaker.session.cookie_path = /custom_prefix
400 #beaker.session.cookie_path = /custom_prefix
376
401
377 ## uncomment for https secure cookie
402 ## uncomment for https secure cookie
378 beaker.session.secure = false
403 beaker.session.secure = false
379
404
380 ## auto save the session so that you do not need to call .save()
405 ## auto save the session so that you do not need to call .save()
381 beaker.session.auto = false
406 beaker.session.auto = false
382
407
383 ## default cookie expiration time in seconds, set to `true` to set expire
408 ## default cookie expiration time in seconds, set to `true` to set expire
384 ## at browser close
409 ## at browser close
385 #beaker.session.cookie_expires = 3600
410 #beaker.session.cookie_expires = 3600
386
411
387 ###################################
412 ###################################
388 ## SEARCH INDEXING CONFIGURATION ##
413 ## SEARCH INDEXING CONFIGURATION ##
389 ###################################
414 ###################################
390 ## Full text search indexer is available in rhodecode-tools under
415 ## Full text search indexer is available in rhodecode-tools under
391 ## `rhodecode-tools index` command
416 ## `rhodecode-tools index` command
392
417
393 ## WHOOSH Backend, doesn't require additional services to run
418 ## WHOOSH Backend, doesn't require additional services to run
394 ## it works well with a few dozen repos
419 ## it works well with a few dozen repos
395 search.module = rhodecode.lib.index.whoosh
420 search.module = rhodecode.lib.index.whoosh
396 search.location = %(here)s/data/index
421 search.location = %(here)s/data/index
397
422
398 ########################################
423 ########################################
399 ### CHANNELSTREAM CONFIG ####
424 ### CHANNELSTREAM CONFIG ####
400 ########################################
425 ########################################
401 ## channelstream enables persistent connections and live notification
426 ## channelstream enables persistent connections and live notification
402 ## in the system. It's also used by the chat system
427 ## in the system. It's also used by the chat system
403 channelstream.enabled = false
428 channelstream.enabled = false
404
429
405 ## server address for channelstream server on the backend
430 ## server address for channelstream server on the backend
406 channelstream.server = 127.0.0.1:9800
431 channelstream.server = 127.0.0.1:9800
407
432
408 ## location of the channelstream server from outside world
433 ## location of the channelstream server from outside world
409 ## use ws:// for http or wss:// for https. This address needs to be handled
434 ## use ws:// for http or wss:// for https. This address needs to be handled
410 ## by external HTTP server such as Nginx or Apache
435 ## by external HTTP server such as Nginx or Apache
411 ## see nginx/apache configuration examples in our docs
436 ## see nginx/apache configuration examples in our docs
412 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
437 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
413 channelstream.secret = secret
438 channelstream.secret = secret
414 channelstream.history.location = %(here)s/channelstream_history
439 channelstream.history.location = %(here)s/channelstream_history
415
440
416 ## Internal application path that Javascript uses to connect into.
441 ## Internal application path that Javascript uses to connect into.
417 ## If you use proxy-prefix the prefix should be added before /_channelstream
442 ## If you use proxy-prefix the prefix should be added before /_channelstream
418 channelstream.proxy_path = /_channelstream
443 channelstream.proxy_path = /_channelstream
419
444
420
445
421 ###################################
446 ###################################
422 ## APPENLIGHT CONFIG ##
447 ## APPENLIGHT CONFIG ##
423 ###################################
448 ###################################
424
449
425 ## Appenlight is tailored to work with RhodeCode, see
450 ## Appenlight is tailored to work with RhodeCode, see
426 ## http://appenlight.com for details how to obtain an account
451 ## http://appenlight.com for details how to obtain an account
427
452
428 ## appenlight integration enabled
453 ## appenlight integration enabled
429 appenlight = false
454 appenlight = false
430
455
431 appenlight.server_url = https://api.appenlight.com
456 appenlight.server_url = https://api.appenlight.com
432 appenlight.api_key = YOUR_API_KEY
457 appenlight.api_key = YOUR_API_KEY
433 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
458 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
434
459
435 # used for JS client
460 # used for JS client
436 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
461 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
437
462
438 ## TWEAK AMOUNT OF INFO SENT HERE
463 ## TWEAK AMOUNT OF INFO SENT HERE
439
464
440 ## enables 404 error logging (default False)
465 ## enables 404 error logging (default False)
441 appenlight.report_404 = false
466 appenlight.report_404 = false
442
467
443 ## time in seconds after which a request is considered slow (default 1)
468 ## time in seconds after which a request is considered slow (default 1)
444 appenlight.slow_request_time = 1
469 appenlight.slow_request_time = 1
445
470
446 ## record slow requests in application
471 ## record slow requests in application
447 ## (needs to be enabled for slow datastore recording and time tracking)
472 ## (needs to be enabled for slow datastore recording and time tracking)
448 appenlight.slow_requests = true
473 appenlight.slow_requests = true
449
474
450 ## enable hooking to application loggers
475 ## enable hooking to application loggers
451 appenlight.logging = true
476 appenlight.logging = true
452
477
453 ## minimum log level for log capture
478 ## minimum log level for log capture
454 appenlight.logging.level = WARNING
479 appenlight.logging.level = WARNING
455
480
456 ## send logs only from erroneous/slow requests
481 ## send logs only from erroneous/slow requests
457 ## (saves API quota for intensive logging)
482 ## (saves API quota for intensive logging)
458 appenlight.logging_on_error = false
483 appenlight.logging_on_error = false
459
484
460 ## list of additional keywords that should be grabbed from environ object
485 ## list of additional keywords that should be grabbed from environ object
461 ## can be string with comma separated list of words in lowercase
486 ## can be string with comma separated list of words in lowercase
462 ## (by default client will always send following info:
487 ## (by default client will always send following info:
463 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
488 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
464 ## start with HTTP*); this list can be extended with additional keywords here
489 ## start with HTTP*); this list can be extended with additional keywords here
465 appenlight.environ_keys_whitelist =
490 appenlight.environ_keys_whitelist =
466
491
467 ## list of keywords that should be blanked from request object
492 ## list of keywords that should be blanked from request object
468 ## can be string with comma separated list of words in lowercase
493 ## can be string with comma separated list of words in lowercase
469 ## (by default client will always blank keys that contain following words
494 ## (by default client will always blank keys that contain following words
470 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
495 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
471 ## this list can be extended with additional keywords set here
496 ## this list can be extended with additional keywords set here
472 appenlight.request_keys_blacklist =
497 appenlight.request_keys_blacklist =
473
498
474 ## list of namespaces that should be ignored when gathering log entries
499 ## list of namespaces that should be ignored when gathering log entries
475 ## can be string with comma separated list of namespaces
500 ## can be string with comma separated list of namespaces
476 ## (by default the client ignores own entries: appenlight_client.client)
501 ## (by default the client ignores own entries: appenlight_client.client)
477 appenlight.log_namespace_blacklist =
502 appenlight.log_namespace_blacklist =
478
503
479
504
480 ################################################################################
505 ################################################################################
481 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
506 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
482 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
507 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
483 ## execute malicious code after an exception is raised. ##
508 ## execute malicious code after an exception is raised. ##
484 ################################################################################
509 ################################################################################
485 #set debug = false
510 #set debug = false
486
511
487
512
488 ##############
513 ##############
489 ## STYLING ##
514 ## STYLING ##
490 ##############
515 ##############
491 debug_style = true
516 debug_style = true
492
517
493 ###########################################
518 ###########################################
494 ### MAIN RHODECODE DATABASE CONFIG ###
519 ### MAIN RHODECODE DATABASE CONFIG ###
495 ###########################################
520 ###########################################
496 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
521 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
497 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
522 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
498 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
523 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
524 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
525
499 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
526 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
500
527
501 # see sqlalchemy docs for other advanced settings
528 # see sqlalchemy docs for other advanced settings
502
529
503 ## print the sql statements to output
530 ## print the sql statements to output
504 sqlalchemy.db1.echo = false
531 sqlalchemy.db1.echo = false
505 ## recycle the connections after this amount of seconds
532 ## recycle the connections after this amount of seconds
506 sqlalchemy.db1.pool_recycle = 3600
533 sqlalchemy.db1.pool_recycle = 3600
507 sqlalchemy.db1.convert_unicode = true
534 sqlalchemy.db1.convert_unicode = true
508
535
509 ## the number of connections to keep open inside the connection pool.
536 ## the number of connections to keep open inside the connection pool.
510 ## 0 indicates no limit
537 ## 0 indicates no limit
511 #sqlalchemy.db1.pool_size = 5
538 #sqlalchemy.db1.pool_size = 5
512
539
513 ## the number of connections to allow in connection pool "overflow", that is
540 ## the number of connections to allow in connection pool "overflow", that is
514 ## connections that can be opened above and beyond the pool_size setting,
541 ## connections that can be opened above and beyond the pool_size setting,
515 ## which defaults to five.
542 ## which defaults to five.
516 #sqlalchemy.db1.max_overflow = 10
543 #sqlalchemy.db1.max_overflow = 10
517
544
545 ## Connection check ping, used to detect broken database connections
546 ## could be enabled to better handle "MySQL server has gone away" errors
547 #sqlalchemy.db1.ping_connection = true
518
548
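For reference, the db1 options above correspond to standard SQLAlchemy engine arguments; a hedged sketch (pool_pre_ping is SQLAlchemy's stock connection-check mechanism and is assumed here to be what ping_connection enables):

    from sqlalchemy import create_engine, text

    # Sketch only: build an engine with the same knobs the sqlalchemy.db1.* keys expose.
    engine = create_engine(
        'sqlite:///rhodecode.db',  # sqlalchemy.db1.url
        echo=False,                # sqlalchemy.db1.echo
        pool_recycle=3600,         # sqlalchemy.db1.pool_recycle
        pool_pre_ping=True,        # assumed equivalent of sqlalchemy.db1.ping_connection
    )

    with engine.connect() as conn:
        print(conn.execute(text('select 1')).scalar())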
519 ##################
549 ##################
520 ### VCS CONFIG ###
550 ### VCS CONFIG ###
521 ##################
551 ##################
522 vcs.server.enable = true
552 vcs.server.enable = true
523 vcs.server = localhost:9900
553 vcs.server = localhost:9900
524
554
525 ## Web server connectivity protocol, responsible for web based VCS operations
555 ## Web server connectivity protocol, responsible for web based VCS operations
526 ## Available protocols are:
556 ## Available protocols are:
527 ## `http` - use http-rpc backend (default)
557 ## `http` - use http-rpc backend (default)
528 vcs.server.protocol = http
558 vcs.server.protocol = http
529
559
530 ## Push/Pull operations protocol, available options are:
560 ## Push/Pull operations protocol, available options are:
531 ## `http` - use http-rpc backend (default)
561 ## `http` - use http-rpc backend (default)
532 ##
562 ##
533 vcs.scm_app_implementation = http
563 vcs.scm_app_implementation = http
534
564
535 ## Push/Pull operations hooks protocol, available options are:
565 ## Push/Pull operations hooks protocol, available options are:
536 ## `http` - use http-rpc backend (default)
566 ## `http` - use http-rpc backend (default)
537 vcs.hooks.protocol = http
567 vcs.hooks.protocol = http
538
568
539 ## Host on which this instance is listening for hooks. If the vcsserver is in another location,
569 ## Host on which this instance is listening for hooks. If the vcsserver is in another location,
540 ## this should be adjusted.
570 ## this should be adjusted.
541 vcs.hooks.host = 127.0.0.1
571 vcs.hooks.host = 127.0.0.1
542
572
543 vcs.server.log_level = debug
573 vcs.server.log_level = debug
544 ## Start VCSServer with this instance as a subprocess, useful for development
574 ## Start VCSServer with this instance as a subprocess, useful for development
545 vcs.start_server = false
575 vcs.start_server = false
546
576
547 ## List of enabled VCS backends, available options are:
577 ## List of enabled VCS backends, available options are:
548 ## `hg` - mercurial
578 ## `hg` - mercurial
549 ## `git` - git
579 ## `git` - git
550 ## `svn` - subversion
580 ## `svn` - subversion
551 vcs.backends = hg, git, svn
581 vcs.backends = hg, git, svn
552
582
553 vcs.connection_timeout = 3600
583 vcs.connection_timeout = 3600
554 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
584 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
555 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
585 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
556 #vcs.svn.compatible_version = pre-1.8-compatible
586 #vcs.svn.compatible_version = pre-1.8-compatible
557
587
558
588
559 ############################################################
589 ############################################################
560 ### Subversion proxy support (mod_dav_svn) ###
590 ### Subversion proxy support (mod_dav_svn) ###
561 ### Maps RhodeCode repo groups into SVN paths for Apache ###
591 ### Maps RhodeCode repo groups into SVN paths for Apache ###
562 ############################################################
592 ############################################################
563 ## Enable or disable the config file generation.
593 ## Enable or disable the config file generation.
564 svn.proxy.generate_config = false
594 svn.proxy.generate_config = false
565 ## Generate config file with `SVNListParentPath` set to `On`.
595 ## Generate config file with `SVNListParentPath` set to `On`.
566 svn.proxy.list_parent_path = true
596 svn.proxy.list_parent_path = true
567 ## Set location and file name of generated config file.
597 ## Set location and file name of generated config file.
568 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
598 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
569 ## alternative mod_dav config template. This needs to be a mako template
599 ## alternative mod_dav config template. This needs to be a mako template
570 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
600 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
571 ## Used as a prefix to the `Location` block in the generated config file.
601 ## Used as a prefix to the `Location` block in the generated config file.
572 ## In most cases it should be set to `/`.
602 ## In most cases it should be set to `/`.
573 svn.proxy.location_root = /
603 svn.proxy.location_root = /
574 ## Command to reload the mod dav svn configuration on change.
604 ## Command to reload the mod dav svn configuration on change.
575 ## Example: `/etc/init.d/apache2 reload`
605 ## Example: `/etc/init.d/apache2 reload`
576 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
606 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
577 ## If the timeout expires before the reload command finishes, the command will
607 ## If the timeout expires before the reload command finishes, the command will
578 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
608 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
579 #svn.proxy.reload_timeout = 10
609 #svn.proxy.reload_timeout = 10
580
610
581 ############################################################
611 ############################################################
582 ### SSH Support Settings ###
612 ### SSH Support Settings ###
583 ############################################################
613 ############################################################
584
614
585 ## Defines if a custom authorized_keys file should be created and written on
615 ## Defines if a custom authorized_keys file should be created and written on
586 ## any change of user ssh keys. Setting this to false also disables the possibility
616 ## any change of user ssh keys. Setting this to false also disables the possibility
587 ## of adding SSH keys by users from the web interface. Super admins can still
617 ## of adding SSH keys by users from the web interface. Super admins can still
588 ## manage SSH Keys.
618 ## manage SSH Keys.
589 ssh.generate_authorized_keyfile = false
619 ssh.generate_authorized_keyfile = false
590
620
591 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
621 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
592 # ssh.authorized_keys_ssh_opts =
622 # ssh.authorized_keys_ssh_opts =
593
623
594 ## Path to the authorized_keys file where the generated entries are placed.
624 ## Path to the authorized_keys file where the generated entries are placed.
595 ## It is possible to have multiple key files specified in `sshd_config` e.g.
625 ## It is possible to have multiple key files specified in `sshd_config` e.g.
596 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
626 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
597 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
627 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
598
628
599 ## Command to execute the SSH wrapper. The binary is available in the
629 ## Command to execute the SSH wrapper. The binary is available in the
600 ## rhodecode installation directory.
630 ## rhodecode installation directory.
601 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
631 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
602 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
632 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
603
633
604 ## Allow shell when executing the ssh-wrapper command
634 ## Allow shell when executing the ssh-wrapper command
605 ssh.wrapper_cmd_allow_shell = false
635 ssh.wrapper_cmd_allow_shell = false
606
636
607 ## Enables logging, and detailed output sent back to the client during SSH
637 ## Enables logging, and detailed output sent back to the client during SSH
608 ## operations. Useful for debugging; shouldn't be used in production.
638 ## operations. Useful for debugging; shouldn't be used in production.
609 ssh.enable_debug_logging = true
639 ssh.enable_debug_logging = true
610
640
611 ## Paths to binary executable, by default they are the names, but we can
641 ## Paths to binary executable, by default they are the names, but we can
612 ## override them if we want to use a custom one
642 ## override them if we want to use a custom one
613 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
643 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
614 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
644 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
615 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
645 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
616
646
617
647
618 ## Dummy marker to add new entries after.
648 ## Dummy marker to add new entries after.
619 ## Add any custom entries below. Please don't remove.
649 ## Add any custom entries below. Please don't remove.
620 custom.conf = 1
650 custom.conf = 1
621
651
622
652
623 ################################
653 ################################
624 ### LOGGING CONFIGURATION ####
654 ### LOGGING CONFIGURATION ####
625 ################################
655 ################################
626 [loggers]
656 [loggers]
627 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
657 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
628
658
629 [handlers]
659 [handlers]
630 keys = console, console_sql
660 keys = console, console_sql
631
661
632 [formatters]
662 [formatters]
633 keys = generic, color_formatter, color_formatter_sql
663 keys = generic, color_formatter, color_formatter_sql
634
664
635 #############
665 #############
636 ## LOGGERS ##
666 ## LOGGERS ##
637 #############
667 #############
638 [logger_root]
668 [logger_root]
639 level = NOTSET
669 level = NOTSET
640 handlers = console
670 handlers = console
641
671
642 [logger_sqlalchemy]
672 [logger_sqlalchemy]
643 level = INFO
673 level = INFO
644 handlers = console_sql
674 handlers = console_sql
645 qualname = sqlalchemy.engine
675 qualname = sqlalchemy.engine
646 propagate = 0
676 propagate = 0
647
677
648 [logger_beaker]
678 [logger_beaker]
649 level = DEBUG
679 level = DEBUG
650 handlers =
680 handlers =
651 qualname = beaker.container
681 qualname = beaker.container
652 propagate = 1
682 propagate = 1
653
683
654 [logger_rhodecode]
684 [logger_rhodecode]
655 level = DEBUG
685 level = DEBUG
656 handlers =
686 handlers =
657 qualname = rhodecode
687 qualname = rhodecode
658 propagate = 1
688 propagate = 1
659
689
660 [logger_ssh_wrapper]
690 [logger_ssh_wrapper]
661 level = DEBUG
691 level = DEBUG
662 handlers =
692 handlers =
663 qualname = ssh_wrapper
693 qualname = ssh_wrapper
664 propagate = 1
694 propagate = 1
665
695
666 [logger_celery]
696 [logger_celery]
667 level = DEBUG
697 level = DEBUG
668 handlers =
698 handlers =
669 qualname = celery
699 qualname = celery
670
700
671
701
672 ##############
702 ##############
673 ## HANDLERS ##
703 ## HANDLERS ##
674 ##############
704 ##############
675
705
676 [handler_console]
706 [handler_console]
677 class = StreamHandler
707 class = StreamHandler
678 args = (sys.stderr, )
708 args = (sys.stderr, )
679 level = DEBUG
709 level = DEBUG
680 formatter = color_formatter
710 formatter = color_formatter
681
711
682 [handler_console_sql]
712 [handler_console_sql]
713 # "level = DEBUG" logs SQL queries and results.
714 # "level = INFO" logs SQL queries.
715 # "level = WARN" logs neither. (Recommended for production systems.)
683 class = StreamHandler
716 class = StreamHandler
684 args = (sys.stderr, )
717 args = (sys.stderr, )
685 level = DEBUG
718 level = WARN
686 formatter = color_formatter_sql
719 formatter = color_formatter_sql
687
720
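The level comments added to handler_console_sql reflect standard Python logging behaviour for the sqlalchemy.engine logger; a small stand-alone sketch of the same idea:

    import logging

    logging.basicConfig(level=logging.WARN,
                        format='%(levelname)-5.5s [%(name)s] %(message)s')

    # INFO on sqlalchemy.engine logs SQL statements, DEBUG adds result rows,
    # WARN keeps the output quiet -- the same trade-off described above.
    logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)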
688 ################
721 ################
689 ## FORMATTERS ##
722 ## FORMATTERS ##
690 ################
723 ################
691
724
692 [formatter_generic]
725 [formatter_generic]
693 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
726 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
694 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
727 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
695 datefmt = %Y-%m-%d %H:%M:%S
728 datefmt = %Y-%m-%d %H:%M:%S
696
729
697 [formatter_color_formatter]
730 [formatter_color_formatter]
698 class = rhodecode.lib.logging_formatter.ColorFormatter
731 class = rhodecode.lib.logging_formatter.ColorFormatter
699 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
732 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
700 datefmt = %Y-%m-%d %H:%M:%S
733 datefmt = %Y-%m-%d %H:%M:%S
701
734
702 [formatter_color_formatter_sql]
735 [formatter_color_formatter_sql]
703 class = rhodecode.lib.logging_formatter.ColorFormatterSql
736 class = rhodecode.lib.logging_formatter.ColorFormatterSql
704 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
737 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
705 datefmt = %Y-%m-%d %H:%M:%S
738 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,674 +1,707 @@
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, sent an error email form
28 ## in case of Application errors, sent an error email form
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be send in case of server crash
31 ## additional error message to be send in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 #use = egg:waitress#main
54 #use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 #threads = 5
56 #threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 #max_request_body_size = 107374182400
58 #max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptor limit problems.
59 ## Use poll instead of select, fixes file descriptor limit problems.
60 ## May not work on old Windows systems.
60 ## May not work on old Windows systems.
61 #asyncore_use_poll = true
61 #asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 use = egg:gunicorn#main
69 use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 workers = 2
74 workers = 2
75 ## number of threads for each worker, must be set to 1 for gevent
75 ## number of threads for each worker, must be set to 1 for gevent
76 ## generally recommended to be 1
76 ## generally recommended to be 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 proc_name = rhodecode
79 proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups it is recommended to use a worker class other than sync
81 ## for bigger setups it is recommended to use a worker class other than sync
82 worker_class = gevent
82 worker_class = gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
86 ## restarted, could prevent memory leaks
87 max_requests = 1000
87 max_requests = 1000
88 max_requests_jitter = 30
88 max_requests_jitter = 30
89 ## amount of time a worker can spend handling a request before it
89 ## amount of time a worker can spend handling a request before it
90 ## gets killed and restarted. Set to 6hrs
90 ## gets killed and restarted. Set to 6hrs
91 timeout = 21600
91 timeout = 21600
92
92
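The recommended worker count from the comments above is a simple formula; the following sketch (illustration only, not part of the shipped config) computes it:

import multiprocessing

def recommended_gunicorn_workers(cpus=None):
    # recommended value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs -> 5 workers
    if cpus is None:
        cpus = multiprocessing.cpu_count()
    return 2 * cpus + 1

print(recommended_gunicorn_workers(2))  # prints 5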
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
95 ## recommended when using proxy setup.
96 ## allows setting RhodeCode under a prefix on the server.
96 ## allows setting RhodeCode under a prefix on the server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
104
104
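Conceptually, the prefix filter moves the configured prefix from PATH_INFO into SCRIPT_NAME so the application generates URLs that include it when running behind a proxy. A rough WSGI sketch of the idea (an illustration, not PasteDeploy's actual implementation):

def prefix_middleware(app, prefix='/custom_prefix'):
    # the prefix value here is the hypothetical example used in the comments above
    def wrapped(environ, start_response):
        path = environ.get('PATH_INFO', '')
        if path.startswith(prefix):
            environ['SCRIPT_NAME'] = environ.get('SCRIPT_NAME', '') + prefix
            environ['PATH_INFO'] = path[len(prefix):] or '/'
        return app(environ, start_response)
    return wrapped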
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 ## encryption key used to encrypt social plugin tokens,
111 ## encryption key used to encrypt social plugin tokens,
112 ## remote_urls with credentials etc, if not set it defaults to
112 ## remote_urls with credentials etc, if not set it defaults to
113 ## `beaker.session.secret`
113 ## `beaker.session.secret`
114 #rhodecode.encrypted_values.secret =
114 #rhodecode.encrypted_values.secret =
115
115
116 ## decryption strict mode (enabled by default). It controls if decryption raises
116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 #rhodecode.encrypted_values.strict = false
118 #rhodecode.encrypted_values.strict = false
119
119
120 ## return gzipped responses from Rhodecode (static files/application)
120 ## return gzipped responses from Rhodecode (static files/application)
121 gzip_responses = false
121 gzip_responses = false
122
122
123 ## autogenerate javascript routes file on startup
123 ## autogenerate javascript routes file on startup
124 generate_js_files = false
124 generate_js_files = false
125
125
126 ## Optional Languages
126 ## Optional Languages
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 lang = en
128 lang = en
129
129
130 ## perform a full repository scan on each server start, this should be
130 ## perform a full repository scan on each server start, this should be
131 ## set to false after first startup, to allow faster server restarts.
131 ## set to false after first startup, to allow faster server restarts.
132 startup.import_repos = false
132 startup.import_repos = false
133
133
134 ## Uncomment and set this path to use archive download cache.
134 ## Uncomment and set this path to use archive download cache.
135 ## Once enabled, generated archives will be cached at this location
135 ## Once enabled, generated archives will be cached at this location
136 ## and served from the cache during subsequent requests for the same archive of
136 ## and served from the cache during subsequent requests for the same archive of
137 ## the repository.
137 ## the repository.
138 #archive_cache_dir = /tmp/tarballcache
138 #archive_cache_dir = /tmp/tarballcache
139
139
140 ## URL at which the application is running. This is used for bootstrapping
140 ## URL at which the application is running. This is used for bootstrapping
141 ## requests in context when no web request is available. Used in ishell, or
141 ## requests in context when no web request is available. Used in ishell, or
142 ## SSH calls. Set this for events to receive proper url for SSH calls.
142 ## SSH calls. Set this for events to receive proper url for SSH calls.
143 app.base_url = http://rhodecode.local
143 app.base_url = http://rhodecode.local
144
144
145 ## change this to unique ID for security
145 ## change this to unique ID for security
146 app_instance_uuid = rc-production
146 app_instance_uuid = rc-production
147
147
148 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
148 ## cut off limit for large diffs (size in bytes). If the overall diff size of a
149 ## commit or pull request exceeds this limit, the diff will be displayed
149 ## commit or pull request exceeds this limit, the diff will be displayed
150 ## partially. E.g. 512000 == 512KB
150 ## partially. E.g. 512000 == 512KB
151 cut_off_limit_diff = 512000
151 cut_off_limit_diff = 512000
152
152
153 ## cut off limit for large files inside diffs (size in bytes). Each individual
153 ## cut off limit for large files inside diffs (size in bytes). Each individual
154 ## file inside diff which exceeds this limit will be displayed partially.
154 ## file inside diff which exceeds this limit will be displayed partially.
155 ## E.g 128000 == 128Kb
155 ## E.g 128000 == 128Kb
156 cut_off_limit_file = 128000
156 cut_off_limit_file = 128000
157
157
158 ## use cache version of scm repo everywhere
158 ## use cache version of scm repo everywhere
159 vcs_full_cache = true
159 vcs_full_cache = true
160
160
161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
161 ## force https in RhodeCode, fixes https redirects, assumes it's always https
162 ## Normally this is controlled by proper http flags sent from http server
162 ## Normally this is controlled by proper http flags sent from http server
163 force_https = false
163 force_https = false
164
164
165 ## use Strict-Transport-Security headers
165 ## use Strict-Transport-Security headers
166 use_htsts = false
166 use_htsts = false
167
167
168 ## git rev filter option, --all is the default filter, if you need to
168 ## git rev filter option, --all is the default filter, if you need to
169 ## hide all refs in changelog switch this to --branches --tags
169 ## hide all refs in changelog switch this to --branches --tags
170 git_rev_filter = --branches --tags
170 git_rev_filter = --branches --tags
171
171
172 # Set to true if your repos are exposed using the dumb protocol
172 # Set to true if your repos are exposed using the dumb protocol
173 git_update_server_info = false
173 git_update_server_info = false
174
174
175 ## RSS/ATOM feed options
175 ## RSS/ATOM feed options
176 rss_cut_off_limit = 256000
176 rss_cut_off_limit = 256000
177 rss_items_per_page = 10
177 rss_items_per_page = 10
178 rss_include_diff = false
178 rss_include_diff = false
179
179
180 ## gist URL alias, used to create nicer urls for gist. This should be an
180 ## gist URL alias, used to create nicer urls for gist. This should be an
181 ## url that does rewrites to _admin/gists/{gistid}.
181 ## url that does rewrites to _admin/gists/{gistid}.
182 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
182 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
183 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
183 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
184 gist_alias_url =
184 gist_alias_url =
185
185
186 ## List of views (using glob pattern syntax) that AUTH TOKENS can be
186 ## List of views (using glob pattern syntax) that AUTH TOKENS can be
187 ## used to access.
187 ## used to access.
188 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
188 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
189 ## came from the logged-in user who owns this authentication token.
189 ## came from the logged-in user who owns this authentication token.
190 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
190 ## Additionally, the @TOKEN syntax can be used to bind the view to a specific
191 ## authentication token. Such a view would only be accessible when used together
191 ## authentication token. Such a view would only be accessible when used together
192 ## with this authentication token
192 ## with this authentication token
193 ##
193 ##
194 ## list of all views can be found under `/_admin/permissions/auth_token_access`
194 ## list of all views can be found under `/_admin/permissions/auth_token_access`
195 ## The list should be "," separated and on a single line.
195 ## The list should be "," separated and on a single line.
196 ##
196 ##
197 ## Most common views to enable:
197 ## Most common views to enable:
198 # RepoCommitsView:repo_commit_download
198 # RepoCommitsView:repo_commit_download
199 # RepoCommitsView:repo_commit_patch
199 # RepoCommitsView:repo_commit_patch
200 # RepoCommitsView:repo_commit_raw
200 # RepoCommitsView:repo_commit_raw
201 # RepoCommitsView:repo_commit_raw@TOKEN
201 # RepoCommitsView:repo_commit_raw@TOKEN
202 # RepoFilesView:repo_files_diff
202 # RepoFilesView:repo_files_diff
203 # RepoFilesView:repo_archivefile
203 # RepoFilesView:repo_archivefile
204 # RepoFilesView:repo_file_raw
204 # RepoFilesView:repo_file_raw
205 # GistView:*
205 # GistView:*
206 api_access_controllers_whitelist =
206 api_access_controllers_whitelist =
207
207
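For example, with GistView:* added to the whitelist above, a request such as http[s]://rhodecode.server/_admin/gists/{gistid}?auth_token=TOKEN_HASH would be authenticated as the owner of that token.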
208 ## default encoding used to convert from and to unicode
208 ## default encoding used to convert from and to unicode
209 ## can be also a comma separated list of encoding in case of mixed encodings
209 ## can be also a comma separated list of encoding in case of mixed encodings
210 default_encoding = UTF-8
210 default_encoding = UTF-8
211
211
212 ## instance-id prefix
212 ## instance-id prefix
213 ## a prefix key for this instance used for cache invalidation when running
213 ## a prefix key for this instance used for cache invalidation when running
214 ## multiple instances of rhodecode, make sure it's globally unique for
214 ## multiple instances of rhodecode, make sure it's globally unique for
215 ## all running rhodecode instances. Leave empty if you don't use it
215 ## all running rhodecode instances. Leave empty if you don't use it
216 instance_id =
216 instance_id =
217
217
218 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
218 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
219 ## of an authentication plugin even if it is disabled by its settings.
219 ## of an authentication plugin even if it is disabled by its settings.
220 ## This could be useful if you are unable to log in to the system due to broken
220 ## This could be useful if you are unable to log in to the system due to broken
221 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
221 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
222 ## module to log in again and fix the settings.
222 ## module to log in again and fix the settings.
223 ##
223 ##
224 ## Available builtin plugin IDs (hash is part of the ID):
224 ## Available builtin plugin IDs (hash is part of the ID):
225 ## egg:rhodecode-enterprise-ce#rhodecode
225 ## egg:rhodecode-enterprise-ce#rhodecode
226 ## egg:rhodecode-enterprise-ce#pam
226 ## egg:rhodecode-enterprise-ce#pam
227 ## egg:rhodecode-enterprise-ce#ldap
227 ## egg:rhodecode-enterprise-ce#ldap
228 ## egg:rhodecode-enterprise-ce#jasig_cas
228 ## egg:rhodecode-enterprise-ce#jasig_cas
229 ## egg:rhodecode-enterprise-ce#headers
229 ## egg:rhodecode-enterprise-ce#headers
230 ## egg:rhodecode-enterprise-ce#crowd
230 ## egg:rhodecode-enterprise-ce#crowd
231 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
231 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
232
232
233 ## alternative HTTP response code for failed authentication. The default HTTP
233 ## alternative HTTP response code for failed authentication. The default HTTP
234 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
234 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
235 ## handling that, causing a series of failed authentication calls.
235 ## handling that, causing a series of failed authentication calls.
236 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
236 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
237 ## This will be served instead of the default 401 on bad authentication
237 ## This will be served instead of the default 401 on bad authentication
238 auth_ret_code =
238 auth_ret_code =
239
239
240 ## use special detection method when serving auth_ret_code, instead of serving
240 ## use special detection method when serving auth_ret_code, instead of serving
241 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
241 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
242 ## and then serve auth_ret_code to clients
242 ## and then serve auth_ret_code to clients
243 auth_ret_code_detection = false
243 auth_ret_code_detection = false
244
244
245 ## locking return code. When repository is locked return this HTTP code. 2XX
245 ## locking return code. When repository is locked return this HTTP code. 2XX
246 ## codes don't break the transactions while 4XX codes do
246 ## codes don't break the transactions while 4XX codes do
247 lock_ret_code = 423
247 lock_ret_code = 423
248
248
249 ## allows changing the repository location from the settings page
249 ## allows changing the repository location from the settings page
250 allow_repo_location_change = true
250 allow_repo_location_change = true
251
251
252 ## allows setting up custom hooks from the settings page
252 ## allows setting up custom hooks from the settings page
253 allow_custom_hooks_settings = true
253 allow_custom_hooks_settings = true
254
254
255 ## generated license token, go to the license page in RhodeCode settings to obtain
255 ## generated license token, go to the license page in RhodeCode settings to obtain
256 ## a new token
256 ## a new token
257 license_token =
257 license_token =
258
258
259 ## supervisor connection uri, for managing supervisor and logs.
259 ## supervisor connection uri, for managing supervisor and logs.
260 supervisor.uri =
260 supervisor.uri =
261 ## supervisord group name/id we only want this RC instance to handle
261 ## supervisord group name/id we only want this RC instance to handle
262 supervisor.group_id = prod
262 supervisor.group_id = prod
263
263
264 ## Display extended labs settings
264 ## Display extended labs settings
265 labs_settings_active = true
265 labs_settings_active = true
266
266
267 ####################################
267 ####################################
268 ### CELERY CONFIG ####
268 ### CELERY CONFIG ####
269 ####################################
269 ####################################
270 ## run: /path/to/celery worker \
270 ## run: /path/to/celery worker \
271 ## -E --beat --app rhodecode.lib.celerylib.loader \
271 ## -E --beat --app rhodecode.lib.celerylib.loader \
272 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
272 ## --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
273 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
273 ## --loglevel DEBUG --ini /path/to/rhodecode.ini
274
274
275 use_celery = false
275 use_celery = false
276
276
277 ## connection url to the message broker (default rabbitmq)
277 ## connection url to the message broker (default rabbitmq)
278 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
278 celery.broker_url = amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost
279
279
280 ## maximum tasks to execute before worker restart
280 ## maximum tasks to execute before worker restart
281 celery.max_tasks_per_child = 100
281 celery.max_tasks_per_child = 100
282
282
283 ## tasks will never be sent to the queue, but executed locally instead.
283 ## tasks will never be sent to the queue, but executed locally instead.
284 celery.task_always_eager = false
284 celery.task_always_eager = false
285
285
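For reference, the broker URL and eager flag above map onto a standard Celery application. A minimal sketch (the app and task names are assumptions; RhodeCode uses its own loader, rhodecode.lib.celerylib.loader, configured from this file):

from celery import Celery

# Broker URL copied from celery.broker_url above; names are illustrative only.
app = Celery('rc_tasks',
             broker='amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost')
app.conf.task_always_eager = False  # mirrors celery.task_always_eager

@app.task
def ping():
    return 'pong'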
286 #####################################
287 ### DOGPILE CACHE ####
288 #####################################
289 ## Default cache dir for caches. Putting this into a ramdisk
290 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
291 ## of space
292 cache_dir = /tmp/rcdev/data
293
294 ## cache settings for permission tree, auth TTL.
295 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
296 rc_cache.cache_perms.expiration_time = 300
297 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
298
299 ## redis backend with distributed locks
300 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
301 #rc_cache.cache_perms.expiration_time = 300
302 #rc_cache.cache_perms.arguments.host = localhost
303 #rc_cache.cache_perms.arguments.port = 6379
304 #rc_cache.cache_perms.arguments.db = 0
305 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
306 #rc_cache.cache_perms.arguments.distributed_lock = true
307
308
309 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
310 rc_cache.cache_repo.expiration_time = 2592000
311 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
312
313 ## redis backend with distributed locks
314 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
315 #rc_cache.cache_repo.expiration_time = 2592000
316 ## this needs to be greater than expiration_time
317 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
318 #rc_cache.cache_repo.arguments.host = localhost
319 #rc_cache.cache_repo.arguments.port = 6379
320 #rc_cache.cache_repo.arguments.db = 1
321 #rc_cache.cache_repo.arguments.distributed_lock = true
322
323
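The rc_cache.* options above configure dogpile.cache regions; RhodeCode ships its own dogpile.cache.rc.* backends. A minimal sketch of the same idea using the stock dogpile.cache file (DBM) backend, for illustration only:

from dogpile.cache import make_region

# Stock dogpile.cache backend used here for illustration; the ini above uses
# RhodeCode's own dogpile.cache.rc.file_namespace backend instead.
region = make_region().configure(
    'dogpile.cache.dbm',
    expiration_time=300,  # matches rc_cache.cache_perms.expiration_time
    arguments={'filename': '/tmp/rc_cache_example.dbm'},  # illustrative path
)

@region.cache_on_arguments()
def load_permissions(user_id):
    # expensive computation, cached for expiration_time seconds
    return {'user_id': user_id, 'permissions': []}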
286 ####################################
324 ####################################
287 ### BEAKER CACHE ####
325 ### BEAKER CACHE ####
288 ####################################
326 ####################################
289 # default cache dir for templates. Putting this into a ramdisk
290 ## can boost performance, eg. %(here)s/data_ramdisk
291 cache_dir = %(here)s/data
292
327
293 ## locking and default file storage for Beaker. Putting this into a ramdisk
328 ## locking and default file storage for Beaker. Putting this into a ramdisk
294 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
329 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
295 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
330 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
296 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
331 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
297
332
298 beaker.cache.regions = long_term, sql_cache_short, repo_cache_long
333 beaker.cache.regions = long_term, sql_cache_short
299
334
300 beaker.cache.long_term.type = memory
335 beaker.cache.long_term.type = memory
301 beaker.cache.long_term.expire = 36000
336 beaker.cache.long_term.expire = 172800
302 beaker.cache.long_term.key_length = 256
337 beaker.cache.long_term.key_length = 256
303
338
304 beaker.cache.sql_cache_short.type = memory
339 beaker.cache.sql_cache_short.type = memory
305 beaker.cache.sql_cache_short.expire = 10
340 beaker.cache.sql_cache_short.expire = 10
306 beaker.cache.sql_cache_short.key_length = 256
341 beaker.cache.sql_cache_short.key_length = 256
307
342
308 beaker.cache.repo_cache_long.type = memorylru_base
309 beaker.cache.repo_cache_long.max_items = 4096
310 beaker.cache.repo_cache_long.expire = 2592000
311
312 ## default is memorylru_base cache, configure only if required
313 ## using multi-node or multi-worker setup
314 #beaker.cache.repo_cache_long.type = ext:memcached
315 #beaker.cache.repo_cache_long.url = localhost:11211
316 #beaker.cache.repo_cache_long.expire = 1209600
317 #beaker.cache.repo_cache_long.key_length = 256
318
343
319 ####################################
344 ####################################
320 ### BEAKER SESSION ####
345 ### BEAKER SESSION ####
321 ####################################
346 ####################################
322
347
323 ## .session.type is the type of storage used for the session; currently allowed
348 ## .session.type is the type of storage used for the session; currently allowed
324 ## types are file, ext:memcached, ext:database, and memory (default).
349 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
325 beaker.session.type = file
350 beaker.session.type = file
326 beaker.session.data_dir = %(here)s/data/sessions/data
351 beaker.session.data_dir = %(here)s/data/sessions
327
352
328 ## db based session, fast, and allows easy management over logged in users
353 ## db based session, fast, and allows easy management over logged in users
329 #beaker.session.type = ext:database
354 #beaker.session.type = ext:database
330 #beaker.session.table_name = db_session
355 #beaker.session.table_name = db_session
331 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
356 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
332 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
357 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
333 #beaker.session.sa.pool_recycle = 3600
358 #beaker.session.sa.pool_recycle = 3600
334 #beaker.session.sa.echo = false
359 #beaker.session.sa.echo = false
335
360
336 beaker.session.key = rhodecode
361 beaker.session.key = rhodecode
337 beaker.session.secret = production-rc-uytcxaz
362 beaker.session.secret = production-rc-uytcxaz
338 beaker.session.lock_dir = %(here)s/data/sessions/lock
363 beaker.session.lock_dir = %(here)s/data/sessions/lock
339
364
340 ## Secure encrypted cookie. Requires AES and AES python libraries
365 ## Secure encrypted cookie. Requires AES and AES python libraries
341 ## you must disable beaker.session.secret to use this
366 ## you must disable beaker.session.secret to use this
342 #beaker.session.encrypt_key = key_for_encryption
367 #beaker.session.encrypt_key = key_for_encryption
343 #beaker.session.validate_key = validation_key
368 #beaker.session.validate_key = validation_key
344
369
345 ## sets the session as invalid (also logging out the user) if it has not been
370 ## sets the session as invalid (also logging out the user) if it has not been
346 ## accessed for the given amount of time in seconds
371 ## accessed for the given amount of time in seconds
347 beaker.session.timeout = 2592000
372 beaker.session.timeout = 2592000
348 beaker.session.httponly = true
373 beaker.session.httponly = true
349 ## Path to use for the cookie. Set to prefix if you use prefix middleware
374 ## Path to use for the cookie. Set to prefix if you use prefix middleware
350 #beaker.session.cookie_path = /custom_prefix
375 #beaker.session.cookie_path = /custom_prefix
351
376
352 ## uncomment for https secure cookie
377 ## uncomment for https secure cookie
353 beaker.session.secure = false
378 beaker.session.secure = false
354
379
355 ## auto-save the session so there is no need to call .save()
380 ## auto-save the session so there is no need to call .save()
356 beaker.session.auto = false
381 beaker.session.auto = false
357
382
358 ## default cookie expiration time in seconds, set to `true` to set expire
383 ## default cookie expiration time in seconds, set to `true` to set expire
359 ## at browser close
384 ## at browser close
360 #beaker.session.cookie_expires = 3600
385 #beaker.session.cookie_expires = 3600
361
386
362 ###################################
387 ###################################
363 ## SEARCH INDEXING CONFIGURATION ##
388 ## SEARCH INDEXING CONFIGURATION ##
364 ###################################
389 ###################################
365 ## Full text search indexer is available in rhodecode-tools under
390 ## Full text search indexer is available in rhodecode-tools under
366 ## `rhodecode-tools index` command
391 ## `rhodecode-tools index` command
367
392
368 ## WHOOSH Backend, doesn't require additional services to run
393 ## WHOOSH Backend, doesn't require additional services to run
369 ## it works well with a few dozen repos
394 ## it works well with a few dozen repos
370 search.module = rhodecode.lib.index.whoosh
395 search.module = rhodecode.lib.index.whoosh
371 search.location = %(here)s/data/index
396 search.location = %(here)s/data/index
372
397
373 ########################################
398 ########################################
374 ### CHANNELSTREAM CONFIG ####
399 ### CHANNELSTREAM CONFIG ####
375 ########################################
400 ########################################
376 ## channelstream enables persistent connections and live notification
401 ## channelstream enables persistent connections and live notification
377 ## in the system. It's also used by the chat system
402 ## in the system. It's also used by the chat system
378 channelstream.enabled = false
403 channelstream.enabled = false
379
404
380 ## server address for channelstream server on the backend
405 ## server address for channelstream server on the backend
381 channelstream.server = 127.0.0.1:9800
406 channelstream.server = 127.0.0.1:9800
382
407
383 ## location of the channelstream server from outside world
408 ## location of the channelstream server from outside world
384 ## use ws:// for http or wss:// for https. This address needs to be handled
409 ## use ws:// for http or wss:// for https. This address needs to be handled
385 ## by external HTTP server such as Nginx or Apache
410 ## by external HTTP server such as Nginx or Apache
386 ## see nginx/apache configuration examples in our docs
411 ## see nginx/apache configuration examples in our docs
387 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
412 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
388 channelstream.secret = secret
413 channelstream.secret = secret
389 channelstream.history.location = %(here)s/channelstream_history
414 channelstream.history.location = %(here)s/channelstream_history
390
415
391 ## Internal application path that Javascript uses to connect into.
416 ## Internal application path that Javascript uses to connect into.
392 ## If you use proxy-prefix the prefix should be added before /_channelstream
417 ## If you use proxy-prefix the prefix should be added before /_channelstream
393 channelstream.proxy_path = /_channelstream
418 channelstream.proxy_path = /_channelstream
394
419
395
420
396 ###################################
421 ###################################
397 ## APPENLIGHT CONFIG ##
422 ## APPENLIGHT CONFIG ##
398 ###################################
423 ###################################
399
424
400 ## Appenlight is tailored to work with RhodeCode, see
425 ## Appenlight is tailored to work with RhodeCode, see
401 ## http://appenlight.com for details how to obtain an account
426 ## http://appenlight.com for details how to obtain an account
402
427
403 ## appenlight integration enabled
428 ## appenlight integration enabled
404 appenlight = false
429 appenlight = false
405
430
406 appenlight.server_url = https://api.appenlight.com
431 appenlight.server_url = https://api.appenlight.com
407 appenlight.api_key = YOUR_API_KEY
432 appenlight.api_key = YOUR_API_KEY
408 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
433 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
409
434
410 # used for JS client
435 # used for JS client
411 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
436 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
412
437
413 ## TWEAK AMOUNT OF INFO SENT HERE
438 ## TWEAK AMOUNT OF INFO SENT HERE
414
439
415 ## enables 404 error logging (default False)
440 ## enables 404 error logging (default False)
416 appenlight.report_404 = false
441 appenlight.report_404 = false
417
442
418 ## time in seconds after request is considered being slow (default 1)
443 ## time in seconds after request is considered being slow (default 1)
419 appenlight.slow_request_time = 1
444 appenlight.slow_request_time = 1
420
445
421 ## record slow requests in application
446 ## record slow requests in application
422 ## (needs to be enabled for slow datastore recording and time tracking)
447 ## (needs to be enabled for slow datastore recording and time tracking)
423 appenlight.slow_requests = true
448 appenlight.slow_requests = true
424
449
425 ## enable hooking to application loggers
450 ## enable hooking to application loggers
426 appenlight.logging = true
451 appenlight.logging = true
427
452
428 ## minimum log level for log capture
453 ## minimum log level for log capture
429 appenlight.logging.level = WARNING
454 appenlight.logging.level = WARNING
430
455
431 ## send logs only from erroneous/slow requests
456 ## send logs only from erroneous/slow requests
432 ## (saves API quota for intensive logging)
457 ## (saves API quota for intensive logging)
433 appenlight.logging_on_error = false
458 appenlight.logging_on_error = false
434
459
435 ## list of additional keywords that should be grabbed from the environ object
460 ## list of additional keywords that should be grabbed from the environ object
436 ## can be string with comma separated list of words in lowercase
461 ## can be string with comma separated list of words in lowercase
437 ## (by default client will always send following info:
462 ## (by default client will always send following info:
438 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
463 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
439 ## start with HTTP*); this list can be extended with additional keywords here
464 ## start with HTTP*); this list can be extended with additional keywords here
440 appenlight.environ_keys_whitelist =
465 appenlight.environ_keys_whitelist =
441
466
442 ## list of keywords that should be blanked from request object
467 ## list of keywords that should be blanked from request object
443 ## can be string with comma separated list of words in lowercase
468 ## can be string with comma separated list of words in lowercase
444 ## (by default client will always blank keys that contain following words
469 ## (by default client will always blank keys that contain following words
445 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
470 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
446 ## this list can be extended with additional keywords set here
471 ## this list can be extended with additional keywords set here
447 appenlight.request_keys_blacklist =
472 appenlight.request_keys_blacklist =
448
473
449 ## list of namespaces that should be ignored when gathering log entries
474 ## list of namespaces that should be ignored when gathering log entries
450 ## can be string with comma separated list of namespaces
475 ## can be string with comma separated list of namespaces
451 ## (by default the client ignores own entries: appenlight_client.client)
476 ## (by default the client ignores own entries: appenlight_client.client)
452 appenlight.log_namespace_blacklist =
477 appenlight.log_namespace_blacklist =
453
478
454
479
455 ################################################################################
480 ################################################################################
456 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
481 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
457 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
482 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
458 ## execute malicious code after an exception is raised. ##
483 ## execute malicious code after an exception is raised. ##
459 ################################################################################
484 ################################################################################
460 set debug = false
485 set debug = false
461
486
462
487
463 ###########################################
488 ###########################################
464 ### MAIN RHODECODE DATABASE CONFIG ###
489 ### MAIN RHODECODE DATABASE CONFIG ###
465 ###########################################
490 ###########################################
466 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
491 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
467 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
492 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
468 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
493 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
494 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
495
469 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
496 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
470
497
471 # see sqlalchemy docs for other advanced settings
498 # see sqlalchemy docs for other advanced settings
472
499
473 ## print the sql statements to output
500 ## print the sql statements to output
474 sqlalchemy.db1.echo = false
501 sqlalchemy.db1.echo = false
475 ## recycle the connections after this amount of seconds
502 ## recycle the connections after this amount of seconds
476 sqlalchemy.db1.pool_recycle = 3600
503 sqlalchemy.db1.pool_recycle = 3600
477 sqlalchemy.db1.convert_unicode = true
504 sqlalchemy.db1.convert_unicode = true
478
505
479 ## the number of connections to keep open inside the connection pool.
506 ## the number of connections to keep open inside the connection pool.
480 ## 0 indicates no limit
507 ## 0 indicates no limit
481 #sqlalchemy.db1.pool_size = 5
508 #sqlalchemy.db1.pool_size = 5
482
509
483 ## the number of connections to allow in connection pool "overflow", that is
510 ## the number of connections to allow in connection pool "overflow", that is
484 ## connections that can be opened above and beyond the pool_size setting,
511 ## connections that can be opened above and beyond the pool_size setting,
485 ## which defaults to five.
512 ## which defaults to five.
486 #sqlalchemy.db1.max_overflow = 10
513 #sqlalchemy.db1.max_overflow = 10
487
514
515 ## Connection check ping, used to detect broken database connections
516 ## could be enabled to better handle cases of "MySQL has gone away" errors
517 #sqlalchemy.db1.ping_connection = true
488
518
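The sqlalchemy.db1.* keys above correspond to standard SQLAlchemy engine options; a minimal sketch of the equivalent engine creation (pool_pre_ping is the stock SQLAlchemy counterpart of the ping_connection idea):

from sqlalchemy import create_engine

# URL taken from the commented-out example above; values mirror the settings.
engine = create_engine(
    'postgresql://postgres:qweqwe@localhost/rhodecode',
    echo=False,           # sqlalchemy.db1.echo
    pool_recycle=3600,    # sqlalchemy.db1.pool_recycle
    pool_size=5,          # sqlalchemy.db1.pool_size
    max_overflow=10,      # sqlalchemy.db1.max_overflow
    pool_pre_ping=True,   # connection check ping
)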
489 ##################
519 ##################
490 ### VCS CONFIG ###
520 ### VCS CONFIG ###
491 ##################
521 ##################
492 vcs.server.enable = true
522 vcs.server.enable = true
493 vcs.server = localhost:9900
523 vcs.server = localhost:9900
494
524
495 ## Web server connectivity protocol, responsible for web-based VCS operations
525 ## Web server connectivity protocol, responsible for web-based VCS operations
496 ## Available protocols are:
526 ## Available protocols are:
497 ## `http` - use http-rpc backend (default)
527 ## `http` - use http-rpc backend (default)
498 vcs.server.protocol = http
528 vcs.server.protocol = http
499
529
500 ## Push/Pull operations protocol, available options are:
530 ## Push/Pull operations protocol, available options are:
501 ## `http` - use http-rpc backend (default)
531 ## `http` - use http-rpc backend (default)
502 ##
532 ##
503 vcs.scm_app_implementation = http
533 vcs.scm_app_implementation = http
504
534
505 ## Push/Pull operations hooks protocol, available options are:
535 ## Push/Pull operations hooks protocol, available options are:
506 ## `http` - use http-rpc backend (default)
536 ## `http` - use http-rpc backend (default)
507 vcs.hooks.protocol = http
537 vcs.hooks.protocol = http
508 ## Host on which this instance is listening for hooks. If vcsserver is in another location,
538 ## Host on which this instance is listening for hooks. If vcsserver is in another location,
509 ## this should be adjusted.
539 ## this should be adjusted.
510 vcs.hooks.host = 127.0.0.1
540 vcs.hooks.host = 127.0.0.1
511
541
512 vcs.server.log_level = info
542 vcs.server.log_level = info
513 ## Start VCSServer with this instance as a subprocess, useful for development
543 ## Start VCSServer with this instance as a subprocess, useful for development
514 vcs.start_server = false
544 vcs.start_server = false
515
545
516 ## List of enabled VCS backends, available options are:
546 ## List of enabled VCS backends, available options are:
517 ## `hg` - mercurial
547 ## `hg` - mercurial
518 ## `git` - git
548 ## `git` - git
519 ## `svn` - subversion
549 ## `svn` - subversion
520 vcs.backends = hg, git, svn
550 vcs.backends = hg, git, svn
521
551
522 vcs.connection_timeout = 3600
552 vcs.connection_timeout = 3600
523 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
553 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
524 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
554 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
525 #vcs.svn.compatible_version = pre-1.8-compatible
555 #vcs.svn.compatible_version = pre-1.8-compatible
526
556
527
557
528 ############################################################
558 ############################################################
529 ### Subversion proxy support (mod_dav_svn) ###
559 ### Subversion proxy support (mod_dav_svn) ###
530 ### Maps RhodeCode repo groups into SVN paths for Apache ###
560 ### Maps RhodeCode repo groups into SVN paths for Apache ###
531 ############################################################
561 ############################################################
532 ## Enable or disable the config file generation.
562 ## Enable or disable the config file generation.
533 svn.proxy.generate_config = false
563 svn.proxy.generate_config = false
534 ## Generate config file with `SVNListParentPath` set to `On`.
564 ## Generate config file with `SVNListParentPath` set to `On`.
535 svn.proxy.list_parent_path = true
565 svn.proxy.list_parent_path = true
536 ## Set location and file name of generated config file.
566 ## Set location and file name of generated config file.
537 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
567 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
538 ## alternative mod_dav config template. This needs to be a mako template
568 ## alternative mod_dav config template. This needs to be a mako template
539 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
569 #svn.proxy.config_template = ~/.rccontrol/enterprise-1/custom_svn_conf.mako
540 ## Used as a prefix to the `Location` block in the generated config file.
570 ## Used as a prefix to the `Location` block in the generated config file.
541 ## In most cases it should be set to `/`.
571 ## In most cases it should be set to `/`.
542 svn.proxy.location_root = /
572 svn.proxy.location_root = /
543 ## Command to reload the mod dav svn configuration on change.
573 ## Command to reload the mod dav svn configuration on change.
544 ## Example: `/etc/init.d/apache2 reload`
574 ## Example: `/etc/init.d/apache2 reload`
545 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
575 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
546 ## If the timeout expires before the reload command finishes, the command will
576 ## If the timeout expires before the reload command finishes, the command will
547 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
577 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
548 #svn.proxy.reload_timeout = 10
578 #svn.proxy.reload_timeout = 10
549
579
550 ############################################################
580 ############################################################
551 ### SSH Support Settings ###
581 ### SSH Support Settings ###
552 ############################################################
582 ############################################################
553
583
554 ## Defines if a custom authorized_keys file should be created and written on
584 ## Defines if a custom authorized_keys file should be created and written on
555 ## any change of user SSH keys. Setting this to false also disables the possibility
585 ## any change of user SSH keys. Setting this to false also disables the possibility
556 ## of adding SSH keys by users from web interface. Super admins can still
586 ## of adding SSH keys by users from web interface. Super admins can still
557 ## manage SSH Keys.
587 ## manage SSH Keys.
558 ssh.generate_authorized_keyfile = false
588 ssh.generate_authorized_keyfile = false
559
589
560 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
590 ## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
561 # ssh.authorized_keys_ssh_opts =
591 # ssh.authorized_keys_ssh_opts =
562
592
563 ## Path to the authorized_keys file where the generated entries are placed.
593 ## Path to the authorized_keys file where the generated entries are placed.
564 ## It is possible to have multiple key files specified in `sshd_config` e.g.
594 ## It is possible to have multiple key files specified in `sshd_config` e.g.
565 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
595 ## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
566 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
596 ssh.authorized_keys_file_path = ~/.ssh/authorized_keys_rhodecode
567
597
568 ## Command to execute the SSH wrapper. The binary is available in the
598 ## Command to execute the SSH wrapper. The binary is available in the
569 ## rhodecode installation directory.
599 ## rhodecode installation directory.
570 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
600 ## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
571 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
601 ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper
572
602
573 ## Allow shell when executing the ssh-wrapper command
603 ## Allow shell when executing the ssh-wrapper command
574 ssh.wrapper_cmd_allow_shell = false
604 ssh.wrapper_cmd_allow_shell = false
575
605
576 ## Enables logging, and detailed output sent back to the client during SSH
606 ## Enables logging, and detailed output sent back to the client during SSH
577 ## operations. Useful for debugging, shouldn't be used in production.
607 ## operations. Useful for debugging, shouldn't be used in production.
578 ssh.enable_debug_logging = false
608 ssh.enable_debug_logging = false
579
609
580 ## Paths to binary executable, by default they are the names, but we can
610 ## Paths to binary executable, by default they are the names, but we can
581 ## override them if we want to use a custom one
611 ## override them if we want to use a custom one
582 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
612 ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
583 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
613 ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
584 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
614 ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve
585
615
586
616
587 ## Dummy marker to add new entries after.
617 ## Dummy marker to add new entries after.
588 ## Add any custom entries below. Please don't remove.
618 ## Add any custom entries below. Please don't remove.
589 custom.conf = 1
619 custom.conf = 1
590
620
591
621
592 ################################
622 ################################
593 ### LOGGING CONFIGURATION ####
623 ### LOGGING CONFIGURATION ####
594 ################################
624 ################################
595 [loggers]
625 [loggers]
596 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
626 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
597
627
598 [handlers]
628 [handlers]
599 keys = console, console_sql
629 keys = console, console_sql
600
630
601 [formatters]
631 [formatters]
602 keys = generic, color_formatter, color_formatter_sql
632 keys = generic, color_formatter, color_formatter_sql
603
633
604 #############
634 #############
605 ## LOGGERS ##
635 ## LOGGERS ##
606 #############
636 #############
607 [logger_root]
637 [logger_root]
608 level = NOTSET
638 level = NOTSET
609 handlers = console
639 handlers = console
610
640
611 [logger_sqlalchemy]
641 [logger_sqlalchemy]
612 level = INFO
642 level = INFO
613 handlers = console_sql
643 handlers = console_sql
614 qualname = sqlalchemy.engine
644 qualname = sqlalchemy.engine
615 propagate = 0
645 propagate = 0
616
646
617 [logger_beaker]
647 [logger_beaker]
618 level = DEBUG
648 level = DEBUG
619 handlers =
649 handlers =
620 qualname = beaker.container
650 qualname = beaker.container
621 propagate = 1
651 propagate = 1
622
652
623 [logger_rhodecode]
653 [logger_rhodecode]
624 level = DEBUG
654 level = DEBUG
625 handlers =
655 handlers =
626 qualname = rhodecode
656 qualname = rhodecode
627 propagate = 1
657 propagate = 1
628
658
629 [logger_ssh_wrapper]
659 [logger_ssh_wrapper]
630 level = DEBUG
660 level = DEBUG
631 handlers =
661 handlers =
632 qualname = ssh_wrapper
662 qualname = ssh_wrapper
633 propagate = 1
663 propagate = 1
634
664
635 [logger_celery]
665 [logger_celery]
636 level = DEBUG
666 level = DEBUG
637 handlers =
667 handlers =
638 qualname = celery
668 qualname = celery
639
669
640
670
641 ##############
671 ##############
642 ## HANDLERS ##
672 ## HANDLERS ##
643 ##############
673 ##############
644
674
645 [handler_console]
675 [handler_console]
646 class = StreamHandler
676 class = StreamHandler
647 args = (sys.stderr, )
677 args = (sys.stderr, )
648 level = INFO
678 level = INFO
649 formatter = generic
679 formatter = generic
650
680
651 [handler_console_sql]
681 [handler_console_sql]
682 # "level = DEBUG" logs SQL queries and results.
683 # "level = INFO" logs SQL queries.
684 # "level = WARN" logs neither. (Recommended for production systems.)
652 class = StreamHandler
685 class = StreamHandler
653 args = (sys.stderr, )
686 args = (sys.stderr, )
654 level = WARN
687 level = WARN
655 formatter = generic
688 formatter = generic
656
689
657 ################
690 ################
658 ## FORMATTERS ##
691 ## FORMATTERS ##
659 ################
692 ################
660
693
661 [formatter_generic]
694 [formatter_generic]
662 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
695 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
663 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
696 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
664 datefmt = %Y-%m-%d %H:%M:%S
697 datefmt = %Y-%m-%d %H:%M:%S
665
698
666 [formatter_color_formatter]
699 [formatter_color_formatter]
667 class = rhodecode.lib.logging_formatter.ColorFormatter
700 class = rhodecode.lib.logging_formatter.ColorFormatter
668 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
701 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
669 datefmt = %Y-%m-%d %H:%M:%S
702 datefmt = %Y-%m-%d %H:%M:%S
670
703
671 [formatter_color_formatter_sql]
704 [formatter_color_formatter_sql]
672 class = rhodecode.lib.logging_formatter.ColorFormatterSql
705 class = rhodecode.lib.logging_formatter.ColorFormatterSql
673 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
706 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
674 datefmt = %Y-%m-%d %H:%M:%S
707 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,1254 +1,1241 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import datetime
22 import datetime
23 import formencode
23 import formencode
24 import formencode.htmlfill
24 import formencode.htmlfill
25
25
26 from pyramid.httpexceptions import HTTPFound
26 from pyramid.httpexceptions import HTTPFound
27 from pyramid.view import view_config
27 from pyramid.view import view_config
28 from pyramid.renderers import render
28 from pyramid.renderers import render
29 from pyramid.response import Response
29 from pyramid.response import Response
30
30
31 from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
31 from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
32 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
32 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
33 from rhodecode.authentication.plugins import auth_rhodecode
33 from rhodecode.authentication.plugins import auth_rhodecode
34 from rhodecode.events import trigger
34 from rhodecode.events import trigger
35 from rhodecode.model.db import true
35 from rhodecode.model.db import true
36
36
37 from rhodecode.lib import audit_logger, rc_cache
37 from rhodecode.lib import audit_logger, rc_cache
38 from rhodecode.lib.exceptions import (
38 from rhodecode.lib.exceptions import (
39 UserCreationError, UserOwnsReposException, UserOwnsRepoGroupsException,
39 UserCreationError, UserOwnsReposException, UserOwnsRepoGroupsException,
40 UserOwnsUserGroupsException, DefaultUserException)
40 UserOwnsUserGroupsException, DefaultUserException)
41 from rhodecode.lib.ext_json import json
41 from rhodecode.lib.ext_json import json
42 from rhodecode.lib.auth import (
42 from rhodecode.lib.auth import (
43 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
43 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
44 from rhodecode.lib import helpers as h
44 from rhodecode.lib import helpers as h
45 from rhodecode.lib.utils2 import safe_int, safe_unicode, AttributeDict
45 from rhodecode.lib.utils2 import safe_int, safe_unicode, AttributeDict
46 from rhodecode.model.auth_token import AuthTokenModel
46 from rhodecode.model.auth_token import AuthTokenModel
47 from rhodecode.model.forms import (
47 from rhodecode.model.forms import (
48 UserForm, UserIndividualPermissionsForm, UserPermissionsForm,
48 UserForm, UserIndividualPermissionsForm, UserPermissionsForm,
49 UserExtraEmailForm, UserExtraIpForm)
49 UserExtraEmailForm, UserExtraIpForm)
50 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.permission import PermissionModel
51 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.ssh_key import SshKeyModel
52 from rhodecode.model.ssh_key import SshKeyModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.db import (
55 from rhodecode.model.db import (
56 or_, coalesce, IntegrityError, User, UserGroup, UserIpMap, UserEmailMap,
57 UserApiKeys, UserSshKeys, RepoGroup)
57 UserApiKeys, UserSshKeys, RepoGroup)
58 from rhodecode.model.meta import Session
58 from rhodecode.model.meta import Session
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62
62
63 class AdminUsersView(BaseAppView, DataGridAppView):
63 class AdminUsersView(BaseAppView, DataGridAppView):
64
64
65 def load_default_context(self):
65 def load_default_context(self):
66 c = self._get_local_tmpl_context()
66 c = self._get_local_tmpl_context()
67 return c
67 return c
68
68
69 @LoginRequired()
69 @LoginRequired()
70 @HasPermissionAllDecorator('hg.admin')
70 @HasPermissionAllDecorator('hg.admin')
71 @view_config(
71 @view_config(
72 route_name='users', request_method='GET',
72 route_name='users', request_method='GET',
73 renderer='rhodecode:templates/admin/users/users.mako')
73 renderer='rhodecode:templates/admin/users/users.mako')
74 def users_list(self):
74 def users_list(self):
75 c = self.load_default_context()
75 c = self.load_default_context()
76 return self._get_template_context(c)
76 return self._get_template_context(c)
77
77
78 @LoginRequired()
78 @LoginRequired()
79 @HasPermissionAllDecorator('hg.admin')
79 @HasPermissionAllDecorator('hg.admin')
80 @view_config(
80 @view_config(
81 # renderer defined below
81 # renderer defined below
82 route_name='users_data', request_method='GET',
82 route_name='users_data', request_method='GET',
83 renderer='json_ext', xhr=True)
83 renderer='json_ext', xhr=True)
84 def users_list_data(self):
84 def users_list_data(self):
85 self.load_default_context()
85 self.load_default_context()
86 column_map = {
86 column_map = {
87 'first_name': 'name',
87 'first_name': 'name',
88 'last_name': 'lastname',
88 'last_name': 'lastname',
89 }
89 }
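# column_map translates the column names sent by the data grid into User
# attribute names, so the ordering extracted below can be resolved with
# getattr(User, order_by).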
90 draw, start, limit = self._extract_chunk(self.request)
90 draw, start, limit = self._extract_chunk(self.request)
91 search_q, order_by, order_dir = self._extract_ordering(
91 search_q, order_by, order_dir = self._extract_ordering(
92 self.request, column_map=column_map)
92 self.request, column_map=column_map)
93 _render = self.request.get_partial_renderer(
93 _render = self.request.get_partial_renderer(
94 'rhodecode:templates/data_table/_dt_elements.mako')
94 'rhodecode:templates/data_table/_dt_elements.mako')
95
95
96 def user_actions(user_id, username):
96 def user_actions(user_id, username):
97 return _render("user_actions", user_id, username)
97 return _render("user_actions", user_id, username)
98
98
99 users_data_total_count = User.query()\
99 users_data_total_count = User.query()\
100 .filter(User.username != User.DEFAULT_USER) \
100 .filter(User.username != User.DEFAULT_USER) \
101 .count()
101 .count()
102
102
103 users_data_total_inactive_count = User.query()\
103 users_data_total_inactive_count = User.query()\
104 .filter(User.username != User.DEFAULT_USER) \
104 .filter(User.username != User.DEFAULT_USER) \
105 .filter(User.active != true())\
105 .filter(User.active != true())\
106 .count()
106 .count()
107
107
108 # json generate
108 # json generate
109 base_q = User.query().filter(User.username != User.DEFAULT_USER)
109 base_q = User.query().filter(User.username != User.DEFAULT_USER)
110 base_inactive_q = base_q.filter(User.active != true())
110 base_inactive_q = base_q.filter(User.active != true())
111
111
112 if search_q:
112 if search_q:
113 like_expression = u'%{}%'.format(safe_unicode(search_q))
113 like_expression = u'%{}%'.format(safe_unicode(search_q))
114 base_q = base_q.filter(or_(
114 base_q = base_q.filter(or_(
115 User.username.ilike(like_expression),
115 User.username.ilike(like_expression),
116 User._email.ilike(like_expression),
116 User._email.ilike(like_expression),
117 User.name.ilike(like_expression),
117 User.name.ilike(like_expression),
118 User.lastname.ilike(like_expression),
118 User.lastname.ilike(like_expression),
119 ))
119 ))
120 base_inactive_q = base_q.filter(User.active != true())
120 base_inactive_q = base_q.filter(User.active != true())
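# rebuild the inactive query from the already-filtered base query, so the
# "filtered inactive" count below respects the current search term as well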
121
121
122 users_data_total_filtered_count = base_q.count()
122 users_data_total_filtered_count = base_q.count()
123 users_data_total_filtered_inactive_count = base_inactive_q.count()
123 users_data_total_filtered_inactive_count = base_inactive_q.count()
124
124
125 sort_col = getattr(User, order_by, None)
125 sort_col = getattr(User, order_by, None)
126 if sort_col:
126 if sort_col:
127 if order_dir == 'asc':
127 if order_dir == 'asc':
128 # handle null values properly to order by NULL last
128 # handle null values properly to order by NULL last
129 if order_by in ['last_activity']:
129 if order_by in ['last_activity']:
130 sort_col = coalesce(sort_col, datetime.date.max)
130 sort_col = coalesce(sort_col, datetime.date.max)
131 sort_col = sort_col.asc()
131 sort_col = sort_col.asc()
132 else:
132 else:
133 # handle null values properly to order by NULL last
133 # handle null values properly to order by NULL last
134 if order_by in ['last_activity']:
134 if order_by in ['last_activity']:
135 sort_col = coalesce(sort_col, datetime.date.min)
135 sort_col = coalesce(sort_col, datetime.date.min)
136 sort_col = sort_col.desc()
136 sort_col = sort_col.desc()
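# coalesce() substitutes a sentinel date for NULL last_activity values:
# coalesce(last_activity, date.max) sorts users with no recorded activity at
# the end of an ascending listing, while date.min does the same when
# descending.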
137
137
138 base_q = base_q.order_by(sort_col)
138 base_q = base_q.order_by(sort_col)
139 base_q = base_q.offset(start).limit(limit)
139 base_q = base_q.offset(start).limit(limit)
140
140
141 users_list = base_q.all()
141 users_list = base_q.all()
142
142
143 users_data = []
143 users_data = []
144 for user in users_list:
144 for user in users_list:
145 users_data.append({
145 users_data.append({
146 "username": h.gravatar_with_user(self.request, user.username),
146 "username": h.gravatar_with_user(self.request, user.username),
147 "email": user.email,
147 "email": user.email,
148 "first_name": user.first_name,
148 "first_name": user.first_name,
149 "last_name": user.last_name,
149 "last_name": user.last_name,
150 "last_login": h.format_date(user.last_login),
150 "last_login": h.format_date(user.last_login),
151 "last_activity": h.format_date(user.last_activity),
151 "last_activity": h.format_date(user.last_activity),
152 "active": h.bool2icon(user.active),
152 "active": h.bool2icon(user.active),
153 "active_raw": user.active,
153 "active_raw": user.active,
154 "admin": h.bool2icon(user.admin),
154 "admin": h.bool2icon(user.admin),
155 "extern_type": user.extern_type,
155 "extern_type": user.extern_type,
156 "extern_name": user.extern_name,
156 "extern_name": user.extern_name,
157 "action": user_actions(user.user_id, user.username),
157 "action": user_actions(user.user_id, user.username),
158 })
158 })
159 data = ({
159 data = ({
160 'draw': draw,
160 'draw': draw,
161 'data': users_data,
161 'data': users_data,
162 'recordsTotal': users_data_total_count,
162 'recordsTotal': users_data_total_count,
163 'recordsFiltered': users_data_total_filtered_count,
163 'recordsFiltered': users_data_total_filtered_count,
164 'recordsTotalInactive': users_data_total_inactive_count,
164 'recordsTotalInactive': users_data_total_inactive_count,
165 'recordsFilteredInactive': users_data_total_filtered_inactive_count
165 'recordsFilteredInactive': users_data_total_filtered_inactive_count
166 })
166 })
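# For illustration only (not part of the original file): the dict above follows
# the DataTables server-side processing shape, roughly
#   {"draw": 2, "data": [...], "recordsTotal": 120, "recordsFiltered": 7,
#    "recordsTotalInactive": 15, "recordsFilteredInactive": 1}
# where the *Total counters ignore the search filter and the *Filtered
# counters respect it.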
167
167
168 return data
168 return data
169
169
170 def _set_personal_repo_group_template_vars(self, c_obj):
170 def _set_personal_repo_group_template_vars(self, c_obj):
171 DummyUser = AttributeDict({
171 DummyUser = AttributeDict({
172 'username': '${username}',
172 'username': '${username}',
173 'user_id': '${user_id}',
173 'user_id': '${user_id}',
174 })
174 })
175 c_obj.default_create_repo_group = RepoGroupModel() \
175 c_obj.default_create_repo_group = RepoGroupModel() \
176 .get_default_create_personal_repo_group()
176 .get_default_create_personal_repo_group()
177 c_obj.personal_repo_group_name = RepoGroupModel() \
177 c_obj.personal_repo_group_name = RepoGroupModel() \
178 .get_personal_group_name(DummyUser)
178 .get_personal_group_name(DummyUser)
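# The '${username}' / '${user_id}' markers above are literal placeholders: the
# computed group name (e.g. something like 'u/${username}') is presumably
# substituted with real values later, in the template or client-side, once a
# concrete user is being created.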
179
179
180 @LoginRequired()
180 @LoginRequired()
181 @HasPermissionAllDecorator('hg.admin')
181 @HasPermissionAllDecorator('hg.admin')
182 @view_config(
182 @view_config(
183 route_name='users_new', request_method='GET',
183 route_name='users_new', request_method='GET',
184 renderer='rhodecode:templates/admin/users/user_add.mako')
184 renderer='rhodecode:templates/admin/users/user_add.mako')
185 def users_new(self):
185 def users_new(self):
186 _ = self.request.translate
186 _ = self.request.translate
187 c = self.load_default_context()
187 c = self.load_default_context()
188 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
188 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
189 self._set_personal_repo_group_template_vars(c)
189 self._set_personal_repo_group_template_vars(c)
190 return self._get_template_context(c)
190 return self._get_template_context(c)
191
191
192 @LoginRequired()
192 @LoginRequired()
193 @HasPermissionAllDecorator('hg.admin')
193 @HasPermissionAllDecorator('hg.admin')
194 @CSRFRequired()
194 @CSRFRequired()
195 @view_config(
195 @view_config(
196 route_name='users_create', request_method='POST',
196 route_name='users_create', request_method='POST',
197 renderer='rhodecode:templates/admin/users/user_add.mako')
197 renderer='rhodecode:templates/admin/users/user_add.mako')
198 def users_create(self):
198 def users_create(self):
199 _ = self.request.translate
199 _ = self.request.translate
200 c = self.load_default_context()
200 c = self.load_default_context()
201 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
201 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
202 user_model = UserModel()
202 user_model = UserModel()
203 user_form = UserForm(self.request.translate)()
203 user_form = UserForm(self.request.translate)()
204 try:
204 try:
205 form_result = user_form.to_python(dict(self.request.POST))
205 form_result = user_form.to_python(dict(self.request.POST))
206 user = user_model.create(form_result)
206 user = user_model.create(form_result)
207 Session().flush()
207 Session().flush()
208 creation_data = user.get_api_data()
208 creation_data = user.get_api_data()
209 username = form_result['username']
209 username = form_result['username']
210
210
211 audit_logger.store_web(
211 audit_logger.store_web(
212 'user.create', action_data={'data': creation_data},
212 'user.create', action_data={'data': creation_data},
213 user=c.rhodecode_user)
213 user=c.rhodecode_user)
214
214
215 user_link = h.link_to(
215 user_link = h.link_to(
216 h.escape(username),
216 h.escape(username),
217 h.route_path('user_edit', user_id=user.user_id))
217 h.route_path('user_edit', user_id=user.user_id))
218 h.flash(h.literal(_('Created user %(user_link)s')
218 h.flash(h.literal(_('Created user %(user_link)s')
219 % {'user_link': user_link}), category='success')
219 % {'user_link': user_link}), category='success')
220 Session().commit()
220 Session().commit()
221 except formencode.Invalid as errors:
221 except formencode.Invalid as errors:
222 self._set_personal_repo_group_template_vars(c)
222 self._set_personal_repo_group_template_vars(c)
223 data = render(
223 data = render(
224 'rhodecode:templates/admin/users/user_add.mako',
224 'rhodecode:templates/admin/users/user_add.mako',
225 self._get_template_context(c), self.request)
225 self._get_template_context(c), self.request)
226 html = formencode.htmlfill.render(
226 html = formencode.htmlfill.render(
227 data,
227 data,
228 defaults=errors.value,
228 defaults=errors.value,
229 errors=errors.error_dict or {},
229 errors=errors.error_dict or {},
230 prefix_error=False,
230 prefix_error=False,
231 encoding="UTF-8",
231 encoding="UTF-8",
232 force_defaults=False
232 force_defaults=False
233 )
233 )
234 return Response(html)
234 return Response(html)
235 except UserCreationError as e:
235 except UserCreationError as e:
236 h.flash(e, 'error')
236 h.flash(e, 'error')
237 except Exception:
237 except Exception:
238 log.exception("Exception during creation of user")
239 h.flash(_('An error occurred during creation of user %s')
240 % self.request.POST.get('username'), category='error')
240 % self.request.POST.get('username'), category='error')
241 raise HTTPFound(h.route_path('users'))
241 raise HTTPFound(h.route_path('users'))
242
242
243
243
244 class UsersView(UserAppView):
244 class UsersView(UserAppView):
245 ALLOW_SCOPED_TOKENS = False
245 ALLOW_SCOPED_TOKENS = False
246 """
246 """
247 This view has alternative version inside EE, if modified please take a look
247 This view has alternative version inside EE, if modified please take a look
248 in there as well.
248 in there as well.
249 """
249 """
250
250
251 def load_default_context(self):
251 def load_default_context(self):
252 c = self._get_local_tmpl_context()
252 c = self._get_local_tmpl_context()
253 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
253 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
254 c.allowed_languages = [
254 c.allowed_languages = [
255 ('en', 'English (en)'),
255 ('en', 'English (en)'),
256 ('de', 'German (de)'),
256 ('de', 'German (de)'),
257 ('fr', 'French (fr)'),
257 ('fr', 'French (fr)'),
258 ('it', 'Italian (it)'),
258 ('it', 'Italian (it)'),
259 ('ja', 'Japanese (ja)'),
259 ('ja', 'Japanese (ja)'),
260 ('pl', 'Polish (pl)'),
260 ('pl', 'Polish (pl)'),
261 ('pt', 'Portuguese (pt)'),
261 ('pt', 'Portuguese (pt)'),
262 ('ru', 'Russian (ru)'),
262 ('ru', 'Russian (ru)'),
263 ('zh', 'Chinese (zh)'),
263 ('zh', 'Chinese (zh)'),
264 ]
264 ]
265 req = self.request
265 req = self.request
266
266
267 c.available_permissions = req.registry.settings['available_permissions']
267 c.available_permissions = req.registry.settings['available_permissions']
268 PermissionModel().set_global_permission_choices(
268 PermissionModel().set_global_permission_choices(
269 c, gettext_translator=req.translate)
269 c, gettext_translator=req.translate)
270
270
271 return c
271 return c
272
272
273 @LoginRequired()
273 @LoginRequired()
274 @HasPermissionAllDecorator('hg.admin')
274 @HasPermissionAllDecorator('hg.admin')
275 @CSRFRequired()
275 @CSRFRequired()
276 @view_config(
276 @view_config(
277 route_name='user_update', request_method='POST',
277 route_name='user_update', request_method='POST',
278 renderer='rhodecode:templates/admin/users/user_edit.mako')
278 renderer='rhodecode:templates/admin/users/user_edit.mako')
279 def user_update(self):
279 def user_update(self):
280 _ = self.request.translate
280 _ = self.request.translate
281 c = self.load_default_context()
281 c = self.load_default_context()
282
282
283 user_id = self.db_user_id
283 user_id = self.db_user_id
284 c.user = self.db_user
284 c.user = self.db_user
285
285
286 c.active = 'profile'
286 c.active = 'profile'
287 c.extern_type = c.user.extern_type
287 c.extern_type = c.user.extern_type
288 c.extern_name = c.user.extern_name
288 c.extern_name = c.user.extern_name
289 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
289 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
290 available_languages = [x[0] for x in c.allowed_languages]
290 available_languages = [x[0] for x in c.allowed_languages]
291 _form = UserForm(self.request.translate, edit=True,
291 _form = UserForm(self.request.translate, edit=True,
292 available_languages=available_languages,
292 available_languages=available_languages,
293 old_data={'user_id': user_id,
293 old_data={'user_id': user_id,
294 'email': c.user.email})()
294 'email': c.user.email})()
295 form_result = {}
295 form_result = {}
296 old_values = c.user.get_api_data()
296 old_values = c.user.get_api_data()
297 try:
297 try:
298 form_result = _form.to_python(dict(self.request.POST))
298 form_result = _form.to_python(dict(self.request.POST))
299 skip_attrs = ['extern_type', 'extern_name']
299 skip_attrs = ['extern_type', 'extern_name']
300 # TODO: plugin should define if username can be updated
300 # TODO: plugin should define if username can be updated
301 if c.extern_type != "rhodecode":
301 if c.extern_type != "rhodecode":
302 # forbid updating username for external accounts
302 # forbid updating username for external accounts
303 skip_attrs.append('username')
303 skip_attrs.append('username')
304
304
305 UserModel().update_user(
305 UserModel().update_user(
306 user_id, skip_attrs=skip_attrs, **form_result)
306 user_id, skip_attrs=skip_attrs, **form_result)
307
307
308 audit_logger.store_web(
308 audit_logger.store_web(
309 'user.edit', action_data={'old_data': old_values},
309 'user.edit', action_data={'old_data': old_values},
310 user=c.rhodecode_user)
310 user=c.rhodecode_user)
311
311
312 Session().commit()
312 Session().commit()
313 h.flash(_('User updated successfully'), category='success')
313 h.flash(_('User updated successfully'), category='success')
314 except formencode.Invalid as errors:
314 except formencode.Invalid as errors:
315 data = render(
315 data = render(
316 'rhodecode:templates/admin/users/user_edit.mako',
316 'rhodecode:templates/admin/users/user_edit.mako',
317 self._get_template_context(c), self.request)
317 self._get_template_context(c), self.request)
318 html = formencode.htmlfill.render(
318 html = formencode.htmlfill.render(
319 data,
319 data,
320 defaults=errors.value,
320 defaults=errors.value,
321 errors=errors.error_dict or {},
321 errors=errors.error_dict or {},
322 prefix_error=False,
322 prefix_error=False,
323 encoding="UTF-8",
323 encoding="UTF-8",
324 force_defaults=False
324 force_defaults=False
325 )
325 )
326 return Response(html)
326 return Response(html)
327 except UserCreationError as e:
327 except UserCreationError as e:
328 h.flash(e, 'error')
328 h.flash(e, 'error')
329 except Exception:
329 except Exception:
330 log.exception("Exception updating user")
330 log.exception("Exception updating user")
331 h.flash(_('An error occurred during update of user %s')
332 % form_result.get('username'), category='error')
332 % form_result.get('username'), category='error')
333 raise HTTPFound(h.route_path('user_edit', user_id=user_id))
333 raise HTTPFound(h.route_path('user_edit', user_id=user_id))
334
334
335 @LoginRequired()
335 @LoginRequired()
336 @HasPermissionAllDecorator('hg.admin')
336 @HasPermissionAllDecorator('hg.admin')
337 @CSRFRequired()
337 @CSRFRequired()
338 @view_config(
338 @view_config(
339 route_name='user_delete', request_method='POST',
339 route_name='user_delete', request_method='POST',
340 renderer='rhodecode:templates/admin/users/user_edit.mako')
340 renderer='rhodecode:templates/admin/users/user_edit.mako')
341 def user_delete(self):
341 def user_delete(self):
342 _ = self.request.translate
342 _ = self.request.translate
343 c = self.load_default_context()
343 c = self.load_default_context()
344 c.user = self.db_user
344 c.user = self.db_user
345
345
346 _repos = c.user.repositories
346 _repos = c.user.repositories
347 _repo_groups = c.user.repository_groups
347 _repo_groups = c.user.repository_groups
348 _user_groups = c.user.user_groups
348 _user_groups = c.user.user_groups
349
349
350 handle_repos = None
350 handle_repos = None
351 handle_repo_groups = None
351 handle_repo_groups = None
352 handle_user_groups = None
352 handle_user_groups = None
353 # no-op defaults for the flash handlers assigned below
354 set_handle_flash_repos = lambda: None
354 set_handle_flash_repos = lambda: None
355 set_handle_flash_repo_groups = lambda: None
355 set_handle_flash_repo_groups = lambda: None
356 set_handle_flash_user_groups = lambda: None
356 set_handle_flash_user_groups = lambda: None
357
357
358 if _repos and self.request.POST.get('user_repos'):
358 if _repos and self.request.POST.get('user_repos'):
359 do = self.request.POST['user_repos']
359 do = self.request.POST['user_repos']
360 if do == 'detach':
360 if do == 'detach':
361 handle_repos = 'detach'
361 handle_repos = 'detach'
362 set_handle_flash_repos = lambda: h.flash(
362 set_handle_flash_repos = lambda: h.flash(
363 _('Detached %s repositories') % len(_repos),
363 _('Detached %s repositories') % len(_repos),
364 category='success')
364 category='success')
365 elif do == 'delete':
365 elif do == 'delete':
366 handle_repos = 'delete'
366 handle_repos = 'delete'
367 set_handle_flash_repos = lambda: h.flash(
367 set_handle_flash_repos = lambda: h.flash(
368 _('Deleted %s repositories') % len(_repos),
368 _('Deleted %s repositories') % len(_repos),
369 category='success')
369 category='success')
370
370
371 if _repo_groups and self.request.POST.get('user_repo_groups'):
371 if _repo_groups and self.request.POST.get('user_repo_groups'):
372 do = self.request.POST['user_repo_groups']
372 do = self.request.POST['user_repo_groups']
373 if do == 'detach':
373 if do == 'detach':
374 handle_repo_groups = 'detach'
374 handle_repo_groups = 'detach'
375 set_handle_flash_repo_groups = lambda: h.flash(
375 set_handle_flash_repo_groups = lambda: h.flash(
376 _('Detached %s repository groups') % len(_repo_groups),
376 _('Detached %s repository groups') % len(_repo_groups),
377 category='success')
377 category='success')
378 elif do == 'delete':
378 elif do == 'delete':
379 handle_repo_groups = 'delete'
379 handle_repo_groups = 'delete'
380 set_handle_flash_repo_groups = lambda: h.flash(
380 set_handle_flash_repo_groups = lambda: h.flash(
381 _('Deleted %s repository groups') % len(_repo_groups),
381 _('Deleted %s repository groups') % len(_repo_groups),
382 category='success')
382 category='success')
383
383
384 if _user_groups and self.request.POST.get('user_user_groups'):
384 if _user_groups and self.request.POST.get('user_user_groups'):
385 do = self.request.POST['user_user_groups']
385 do = self.request.POST['user_user_groups']
386 if do == 'detach':
386 if do == 'detach':
387 handle_user_groups = 'detach'
387 handle_user_groups = 'detach'
388 set_handle_flash_user_groups = lambda: h.flash(
388 set_handle_flash_user_groups = lambda: h.flash(
389 _('Detached %s user groups') % len(_user_groups),
389 _('Detached %s user groups') % len(_user_groups),
390 category='success')
390 category='success')
391 elif do == 'delete':
391 elif do == 'delete':
392 handle_user_groups = 'delete'
392 handle_user_groups = 'delete'
393 set_handle_flash_user_groups = lambda: h.flash(
393 set_handle_flash_user_groups = lambda: h.flash(
394 _('Deleted %s user groups') % len(_user_groups),
394 _('Deleted %s user groups') % len(_user_groups),
395 category='success')
395 category='success')
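# Summary of the form protocol above: each of the 'user_repos',
# 'user_repo_groups' and 'user_user_groups' fields may carry either 'detach'
# or 'delete'. 'delete' removes the owned objects together with the user,
# while 'detach' keeps them and presumably re-assigns their ownership
# (both cases are handled inside UserModel().delete() below).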
396
396
397 old_values = c.user.get_api_data()
397 old_values = c.user.get_api_data()
398 try:
398 try:
399 UserModel().delete(c.user, handle_repos=handle_repos,
399 UserModel().delete(c.user, handle_repos=handle_repos,
400 handle_repo_groups=handle_repo_groups,
400 handle_repo_groups=handle_repo_groups,
401 handle_user_groups=handle_user_groups)
401 handle_user_groups=handle_user_groups)
402
402
403 audit_logger.store_web(
403 audit_logger.store_web(
404 'user.delete', action_data={'old_data': old_values},
404 'user.delete', action_data={'old_data': old_values},
405 user=c.rhodecode_user)
405 user=c.rhodecode_user)
406
406
407 Session().commit()
407 Session().commit()
408 set_handle_flash_repos()
408 set_handle_flash_repos()
409 set_handle_flash_repo_groups()
409 set_handle_flash_repo_groups()
410 set_handle_flash_user_groups()
410 set_handle_flash_user_groups()
411 h.flash(_('Successfully deleted user'), category='success')
411 h.flash(_('Successfully deleted user'), category='success')
412 except (UserOwnsReposException, UserOwnsRepoGroupsException,
412 except (UserOwnsReposException, UserOwnsRepoGroupsException,
413 UserOwnsUserGroupsException, DefaultUserException) as e:
413 UserOwnsUserGroupsException, DefaultUserException) as e:
414 h.flash(e, category='warning')
414 h.flash(e, category='warning')
415 except Exception:
415 except Exception:
416 log.exception("Exception during deletion of user")
416 log.exception("Exception during deletion of user")
417 h.flash(_('An error occurred during deletion of user'),
417 h.flash(_('An error occurred during deletion of user'),
418 category='error')
418 category='error')
419 raise HTTPFound(h.route_path('users'))
419 raise HTTPFound(h.route_path('users'))
420
420
421 @LoginRequired()
421 @LoginRequired()
422 @HasPermissionAllDecorator('hg.admin')
422 @HasPermissionAllDecorator('hg.admin')
423 @view_config(
423 @view_config(
424 route_name='user_edit', request_method='GET',
424 route_name='user_edit', request_method='GET',
425 renderer='rhodecode:templates/admin/users/user_edit.mako')
425 renderer='rhodecode:templates/admin/users/user_edit.mako')
426 def user_edit(self):
426 def user_edit(self):
427 _ = self.request.translate
427 _ = self.request.translate
428 c = self.load_default_context()
428 c = self.load_default_context()
429 c.user = self.db_user
429 c.user = self.db_user
430
430
431 c.active = 'profile'
431 c.active = 'profile'
432 c.extern_type = c.user.extern_type
432 c.extern_type = c.user.extern_type
433 c.extern_name = c.user.extern_name
433 c.extern_name = c.user.extern_name
434 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
434 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
435
435
436 defaults = c.user.get_dict()
436 defaults = c.user.get_dict()
437 defaults.update({'language': c.user.user_data.get('language')})
437 defaults.update({'language': c.user.user_data.get('language')})
438
438
439 data = render(
439 data = render(
440 'rhodecode:templates/admin/users/user_edit.mako',
440 'rhodecode:templates/admin/users/user_edit.mako',
441 self._get_template_context(c), self.request)
441 self._get_template_context(c), self.request)
442 html = formencode.htmlfill.render(
442 html = formencode.htmlfill.render(
443 data,
443 data,
444 defaults=defaults,
444 defaults=defaults,
445 encoding="UTF-8",
445 encoding="UTF-8",
446 force_defaults=False
446 force_defaults=False
447 )
447 )
448 return Response(html)
448 return Response(html)
449
449
450 @LoginRequired()
450 @LoginRequired()
451 @HasPermissionAllDecorator('hg.admin')
451 @HasPermissionAllDecorator('hg.admin')
452 @view_config(
452 @view_config(
453 route_name='user_edit_advanced', request_method='GET',
453 route_name='user_edit_advanced', request_method='GET',
454 renderer='rhodecode:templates/admin/users/user_edit.mako')
454 renderer='rhodecode:templates/admin/users/user_edit.mako')
455 def user_edit_advanced(self):
455 def user_edit_advanced(self):
456 _ = self.request.translate
456 _ = self.request.translate
457 c = self.load_default_context()
457 c = self.load_default_context()
458
458
459 user_id = self.db_user_id
459 user_id = self.db_user_id
460 c.user = self.db_user
460 c.user = self.db_user
461
461
462 c.active = 'advanced'
462 c.active = 'advanced'
463 c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
463 c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
464 c.personal_repo_group_name = RepoGroupModel()\
464 c.personal_repo_group_name = RepoGroupModel()\
465 .get_personal_group_name(c.user)
465 .get_personal_group_name(c.user)
466
466
467 c.user_to_review_rules = sorted(
467 c.user_to_review_rules = sorted(
468 (x.user for x in c.user.user_review_rules),
468 (x.user for x in c.user.user_review_rules),
469 key=lambda u: u.username.lower())
469 key=lambda u: u.username.lower())
470
470
471 c.first_admin = User.get_first_super_admin()
471 c.first_admin = User.get_first_super_admin()
472 defaults = c.user.get_dict()
472 defaults = c.user.get_dict()
473
473
474 # Interim workaround: block deletion if the user has participated in any
475 # pull requests as a reviewer.
476 has_review = len(c.user.reviewer_pull_requests)
476 has_review = len(c.user.reviewer_pull_requests)
477 c.can_delete_user = not has_review
477 c.can_delete_user = not has_review
478 c.can_delete_user_message = ''
478 c.can_delete_user_message = ''
479 inactive_link = h.link_to(
479 inactive_link = h.link_to(
480 'inactive', h.route_path('user_edit', user_id=user_id, _anchor='active'))
480 'inactive', h.route_path('user_edit', user_id=user_id, _anchor='active'))
481 if has_review == 1:
481 if has_review == 1:
482 c.can_delete_user_message = h.literal(_(
482 c.can_delete_user_message = h.literal(_(
483 'The user participates as a reviewer in {} pull request and '
484 'cannot be deleted. \nYou can set the user to '
484 'cannot be deleted. \nYou can set the user to '
485 '"{}" instead of deleting it.').format(
485 '"{}" instead of deleting it.').format(
486 has_review, inactive_link))
486 has_review, inactive_link))
487 elif has_review:
487 elif has_review:
488 c.can_delete_user_message = h.literal(_(
488 c.can_delete_user_message = h.literal(_(
489 'The user participates as a reviewer in {} pull requests and '
490 'cannot be deleted. \nYou can set the user to '
490 'cannot be deleted. \nYou can set the user to '
491 '"{}" instead of deleting it.').format(
491 '"{}" instead of deleting it.').format(
492 has_review, inactive_link))
492 has_review, inactive_link))
493
493
494 data = render(
494 data = render(
495 'rhodecode:templates/admin/users/user_edit.mako',
495 'rhodecode:templates/admin/users/user_edit.mako',
496 self._get_template_context(c), self.request)
496 self._get_template_context(c), self.request)
497 html = formencode.htmlfill.render(
497 html = formencode.htmlfill.render(
498 data,
498 data,
499 defaults=defaults,
499 defaults=defaults,
500 encoding="UTF-8",
500 encoding="UTF-8",
501 force_defaults=False
501 force_defaults=False
502 )
502 )
503 return Response(html)
503 return Response(html)
504
504
505 @LoginRequired()
505 @LoginRequired()
506 @HasPermissionAllDecorator('hg.admin')
506 @HasPermissionAllDecorator('hg.admin')
507 @view_config(
507 @view_config(
508 route_name='user_edit_global_perms', request_method='GET',
508 route_name='user_edit_global_perms', request_method='GET',
509 renderer='rhodecode:templates/admin/users/user_edit.mako')
509 renderer='rhodecode:templates/admin/users/user_edit.mako')
510 def user_edit_global_perms(self):
510 def user_edit_global_perms(self):
511 _ = self.request.translate
511 _ = self.request.translate
512 c = self.load_default_context()
512 c = self.load_default_context()
513 c.user = self.db_user
513 c.user = self.db_user
514
514
515 c.active = 'global_perms'
515 c.active = 'global_perms'
516
516
517 c.default_user = User.get_default_user()
517 c.default_user = User.get_default_user()
518 defaults = c.user.get_dict()
518 defaults = c.user.get_dict()
519 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
519 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
520 defaults.update(c.default_user.get_default_perms())
520 defaults.update(c.default_user.get_default_perms())
521 defaults.update(c.user.get_default_perms())
521 defaults.update(c.user.get_default_perms())
522
522
523 data = render(
523 data = render(
524 'rhodecode:templates/admin/users/user_edit.mako',
524 'rhodecode:templates/admin/users/user_edit.mako',
525 self._get_template_context(c), self.request)
525 self._get_template_context(c), self.request)
526 html = formencode.htmlfill.render(
526 html = formencode.htmlfill.render(
527 data,
527 data,
528 defaults=defaults,
528 defaults=defaults,
529 encoding="UTF-8",
529 encoding="UTF-8",
530 force_defaults=False
530 force_defaults=False
531 )
531 )
532 return Response(html)
532 return Response(html)
533
533
534 @LoginRequired()
534 @LoginRequired()
535 @HasPermissionAllDecorator('hg.admin')
535 @HasPermissionAllDecorator('hg.admin')
536 @CSRFRequired()
536 @CSRFRequired()
537 @view_config(
537 @view_config(
538 route_name='user_edit_global_perms_update', request_method='POST',
538 route_name='user_edit_global_perms_update', request_method='POST',
539 renderer='rhodecode:templates/admin/users/user_edit.mako')
539 renderer='rhodecode:templates/admin/users/user_edit.mako')
540 def user_edit_global_perms_update(self):
540 def user_edit_global_perms_update(self):
541 _ = self.request.translate
541 _ = self.request.translate
542 c = self.load_default_context()
542 c = self.load_default_context()
543
543
544 user_id = self.db_user_id
544 user_id = self.db_user_id
545 c.user = self.db_user
545 c.user = self.db_user
546
546
547 c.active = 'global_perms'
547 c.active = 'global_perms'
548 try:
548 try:
549 # first stage: validate only the inherit-permissions checkbox
550 _form = UserIndividualPermissionsForm(self.request.translate)
550 _form = UserIndividualPermissionsForm(self.request.translate)
551 form_result = _form.to_python(dict(self.request.POST))
551 form_result = _form.to_python(dict(self.request.POST))
552 inherit_perms = form_result['inherit_default_permissions']
552 inherit_perms = form_result['inherit_default_permissions']
553 c.user.inherit_default_permissions = inherit_perms
553 c.user.inherit_default_permissions = inherit_perms
554 Session().add(c.user)
554 Session().add(c.user)
555
555
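# Second stage: the full per-permission form is only validated and applied
# when inheritance is switched off; with inheritance on, the default user's
# permissions keep applying and the individual values can be ignored.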
556 if not inherit_perms:
556 if not inherit_perms:
557 # only update the individual permissions if the inherit flag is unchecked
558 _form = UserPermissionsForm(
558 _form = UserPermissionsForm(
559 self.request.translate,
559 self.request.translate,
560 [x[0] for x in c.repo_create_choices],
560 [x[0] for x in c.repo_create_choices],
561 [x[0] for x in c.repo_create_on_write_choices],
561 [x[0] for x in c.repo_create_on_write_choices],
562 [x[0] for x in c.repo_group_create_choices],
562 [x[0] for x in c.repo_group_create_choices],
563 [x[0] for x in c.user_group_create_choices],
563 [x[0] for x in c.user_group_create_choices],
564 [x[0] for x in c.fork_choices],
564 [x[0] for x in c.fork_choices],
565 [x[0] for x in c.inherit_default_permission_choices])()
565 [x[0] for x in c.inherit_default_permission_choices])()
566
566
567 form_result = _form.to_python(dict(self.request.POST))
567 form_result = _form.to_python(dict(self.request.POST))
568 form_result.update({'perm_user_id': c.user.user_id})
568 form_result.update({'perm_user_id': c.user.user_id})
569
569
570 PermissionModel().update_user_permissions(form_result)
570 PermissionModel().update_user_permissions(form_result)
571
571
572 # TODO(marcink): implement global permissions
572 # TODO(marcink): implement global permissions
573 # audit_log.store_web('user.edit.permissions')
573 # audit_log.store_web('user.edit.permissions')
574
574
575 Session().commit()
575 Session().commit()
576 h.flash(_('User global permissions updated successfully'),
576 h.flash(_('User global permissions updated successfully'),
577 category='success')
577 category='success')
578
578
579 except formencode.Invalid as errors:
579 except formencode.Invalid as errors:
580 data = render(
580 data = render(
581 'rhodecode:templates/admin/users/user_edit.mako',
581 'rhodecode:templates/admin/users/user_edit.mako',
582 self._get_template_context(c), self.request)
582 self._get_template_context(c), self.request)
583 html = formencode.htmlfill.render(
583 html = formencode.htmlfill.render(
584 data,
584 data,
585 defaults=errors.value,
585 defaults=errors.value,
586 errors=errors.error_dict or {},
586 errors=errors.error_dict or {},
587 prefix_error=False,
587 prefix_error=False,
588 encoding="UTF-8",
588 encoding="UTF-8",
589 force_defaults=False
589 force_defaults=False
590 )
590 )
591 return Response(html)
591 return Response(html)
592 except Exception:
592 except Exception:
593 log.exception("Exception during permissions saving")
593 log.exception("Exception during permissions saving")
594 h.flash(_('An error occurred during permissions saving'),
594 h.flash(_('An error occurred during permissions saving'),
595 category='error')
595 category='error')
596 raise HTTPFound(h.route_path('user_edit_global_perms', user_id=user_id))
596 raise HTTPFound(h.route_path('user_edit_global_perms', user_id=user_id))
597
597
598 @LoginRequired()
598 @LoginRequired()
599 @HasPermissionAllDecorator('hg.admin')
599 @HasPermissionAllDecorator('hg.admin')
600 @CSRFRequired()
600 @CSRFRequired()
601 @view_config(
601 @view_config(
602 route_name='user_force_password_reset', request_method='POST',
602 route_name='user_force_password_reset', request_method='POST',
603 renderer='rhodecode:templates/admin/users/user_edit.mako')
603 renderer='rhodecode:templates/admin/users/user_edit.mako')
604 def user_force_password_reset(self):
604 def user_force_password_reset(self):
605 """
605 """
606 Toggle the force password change flag for this user.
607 """
607 """
608 _ = self.request.translate
608 _ = self.request.translate
609 c = self.load_default_context()
609 c = self.load_default_context()
610
610
611 user_id = self.db_user_id
611 user_id = self.db_user_id
612 c.user = self.db_user
612 c.user = self.db_user
613
613
614 try:
614 try:
615 old_value = c.user.user_data.get('force_password_change')
615 old_value = c.user.user_data.get('force_password_change')
616 c.user.update_userdata(force_password_change=not old_value)
616 c.user.update_userdata(force_password_change=not old_value)
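# the flag is kept in the user_data JSON blob; presumably it is evaluated at
# login time to force the user through a password change before continuing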
617
617
618 if old_value:
618 if old_value:
619 msg = _('Force password change disabled for user')
619 msg = _('Force password change disabled for user')
620 audit_logger.store_web(
620 audit_logger.store_web(
621 'user.edit.password_reset.disabled',
621 'user.edit.password_reset.disabled',
622 user=c.rhodecode_user)
622 user=c.rhodecode_user)
623 else:
623 else:
624 msg = _('Force password change enabled for user')
624 msg = _('Force password change enabled for user')
625 audit_logger.store_web(
625 audit_logger.store_web(
626 'user.edit.password_reset.enabled',
626 'user.edit.password_reset.enabled',
627 user=c.rhodecode_user)
627 user=c.rhodecode_user)
628
628
629 Session().commit()
629 Session().commit()
630 h.flash(msg, category='success')
630 h.flash(msg, category='success')
631 except Exception:
631 except Exception:
632 log.exception("Exception during password reset for user")
632 log.exception("Exception during password reset for user")
633 h.flash(_('An error occurred during password reset for user'),
633 h.flash(_('An error occurred during password reset for user'),
634 category='error')
634 category='error')
635
635
636 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
636 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
637
637
638 @LoginRequired()
638 @LoginRequired()
639 @HasPermissionAllDecorator('hg.admin')
639 @HasPermissionAllDecorator('hg.admin')
640 @CSRFRequired()
640 @CSRFRequired()
641 @view_config(
641 @view_config(
642 route_name='user_create_personal_repo_group', request_method='POST',
642 route_name='user_create_personal_repo_group', request_method='POST',
643 renderer='rhodecode:templates/admin/users/user_edit.mako')
643 renderer='rhodecode:templates/admin/users/user_edit.mako')
644 def user_create_personal_repo_group(self):
644 def user_create_personal_repo_group(self):
645 """
645 """
646 Create personal repository group for this user
646 Create personal repository group for this user
647 """
647 """
648 from rhodecode.model.repo_group import RepoGroupModel
648 from rhodecode.model.repo_group import RepoGroupModel
649
649
650 _ = self.request.translate
650 _ = self.request.translate
651 c = self.load_default_context()
651 c = self.load_default_context()
652
652
653 user_id = self.db_user_id
653 user_id = self.db_user_id
654 c.user = self.db_user
654 c.user = self.db_user
655
655
656 personal_repo_group = RepoGroup.get_user_personal_repo_group(
656 personal_repo_group = RepoGroup.get_user_personal_repo_group(
657 c.user.user_id)
657 c.user.user_id)
658 if personal_repo_group:
658 if personal_repo_group:
659 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
659 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
660
660
661 personal_repo_group_name = RepoGroupModel().get_personal_group_name(
661 personal_repo_group_name = RepoGroupModel().get_personal_group_name(
662 c.user)
662 c.user)
663 named_personal_group = RepoGroup.get_by_group_name(
663 named_personal_group = RepoGroup.get_by_group_name(
664 personal_repo_group_name)
664 personal_repo_group_name)
665 try:
665 try:
666
666
667 if named_personal_group and named_personal_group.user_id == c.user.user_id:
667 if named_personal_group and named_personal_group.user_id == c.user.user_id:
668 # reuse the existing group with the same name and mark it as personal
669 named_personal_group.personal = True
669 named_personal_group.personal = True
670 Session().add(named_personal_group)
670 Session().add(named_personal_group)
671 Session().commit()
671 Session().commit()
672 msg = _('Linked repository group `%s` as personal') % (
673 personal_repo_group_name,)
674 h.flash(msg, category='success')
674 h.flash(msg, category='success')
675 elif not named_personal_group:
675 elif not named_personal_group:
676 RepoGroupModel().create_personal_repo_group(c.user)
676 RepoGroupModel().create_personal_repo_group(c.user)
677
677
678 msg = _('Created repository group `%s`') % (
679 personal_repo_group_name,)
680 h.flash(msg, category='success')
680 h.flash(msg, category='success')
681 else:
681 else:
682 msg = _('Repository group `%s` is already taken') % (
683 personal_repo_group_name,)
684 h.flash(msg, category='warning')
684 h.flash(msg, category='warning')
685 except Exception:
685 except Exception:
686 log.exception("Exception during repository group creation")
686 log.exception("Exception during repository group creation")
687 msg = _(
687 msg = _(
688 'An error occurred during repository group creation for user')
688 'An error occurred during repository group creation for user')
689 h.flash(msg, category='error')
689 h.flash(msg, category='error')
690 Session().rollback()
690 Session().rollback()
691
691
692 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
692 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
693
693
694 @LoginRequired()
694 @LoginRequired()
695 @HasPermissionAllDecorator('hg.admin')
695 @HasPermissionAllDecorator('hg.admin')
696 @view_config(
696 @view_config(
697 route_name='edit_user_auth_tokens', request_method='GET',
697 route_name='edit_user_auth_tokens', request_method='GET',
698 renderer='rhodecode:templates/admin/users/user_edit.mako')
698 renderer='rhodecode:templates/admin/users/user_edit.mako')
699 def auth_tokens(self):
699 def auth_tokens(self):
700 _ = self.request.translate
700 _ = self.request.translate
701 c = self.load_default_context()
701 c = self.load_default_context()
702 c.user = self.db_user
702 c.user = self.db_user
703
703
704 c.active = 'auth_tokens'
704 c.active = 'auth_tokens'
705
705
706 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
706 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
707 c.role_values = [
707 c.role_values = [
708 (x, AuthTokenModel.cls._get_role_name(x))
708 (x, AuthTokenModel.cls._get_role_name(x))
709 for x in AuthTokenModel.cls.ROLES]
709 for x in AuthTokenModel.cls.ROLES]
710 c.role_options = [(c.role_values, _("Role"))]
710 c.role_options = [(c.role_values, _("Role"))]
711 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
711 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
712 c.user.user_id, show_expired=True)
712 c.user.user_id, show_expired=True)
713 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
713 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
714 return self._get_template_context(c)
714 return self._get_template_context(c)
715
715
716 def maybe_attach_token_scope(self, token):
716 def maybe_attach_token_scope(self, token):
717 # implemented in EE edition
717 # implemented in EE edition
718 pass
718 pass
719
719
720 @LoginRequired()
720 @LoginRequired()
721 @HasPermissionAllDecorator('hg.admin')
721 @HasPermissionAllDecorator('hg.admin')
722 @CSRFRequired()
722 @CSRFRequired()
723 @view_config(
723 @view_config(
724 route_name='edit_user_auth_tokens_add', request_method='POST')
724 route_name='edit_user_auth_tokens_add', request_method='POST')
725 def auth_tokens_add(self):
725 def auth_tokens_add(self):
726 _ = self.request.translate
726 _ = self.request.translate
727 c = self.load_default_context()
727 c = self.load_default_context()
728
728
729 user_id = self.db_user_id
729 user_id = self.db_user_id
730 c.user = self.db_user
730 c.user = self.db_user
731
731
732 user_data = c.user.get_api_data()
732 user_data = c.user.get_api_data()
733 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
733 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
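# lifetime comes from the POST form; -1 (the safe_int fallback) is assumed to
# mean a non-expiring token, matching the choices offered by
# AuthTokenModel.get_lifetime_values()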
734 description = self.request.POST.get('description')
734 description = self.request.POST.get('description')
735 role = self.request.POST.get('role')
735 role = self.request.POST.get('role')
736
736
737 token = AuthTokenModel().create(
737 token = AuthTokenModel().create(
738 c.user.user_id, description, lifetime, role)
738 c.user.user_id, description, lifetime, role)
739 token_data = token.get_api_data()
739 token_data = token.get_api_data()
740
740
741 self.maybe_attach_token_scope(token)
741 self.maybe_attach_token_scope(token)
742 audit_logger.store_web(
742 audit_logger.store_web(
743 'user.edit.token.add', action_data={
743 'user.edit.token.add', action_data={
744 'data': {'token': token_data, 'user': user_data}},
744 'data': {'token': token_data, 'user': user_data}},
745 user=self._rhodecode_user, )
745 user=self._rhodecode_user, )
746 Session().commit()
746 Session().commit()
747
747
748 h.flash(_("Auth token successfully created"), category='success')
748 h.flash(_("Auth token successfully created"), category='success')
749 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
749 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
750
750
751 @LoginRequired()
751 @LoginRequired()
752 @HasPermissionAllDecorator('hg.admin')
752 @HasPermissionAllDecorator('hg.admin')
753 @CSRFRequired()
753 @CSRFRequired()
754 @view_config(
754 @view_config(
755 route_name='edit_user_auth_tokens_delete', request_method='POST')
755 route_name='edit_user_auth_tokens_delete', request_method='POST')
756 def auth_tokens_delete(self):
756 def auth_tokens_delete(self):
757 _ = self.request.translate
757 _ = self.request.translate
758 c = self.load_default_context()
758 c = self.load_default_context()
759
759
760 user_id = self.db_user_id
760 user_id = self.db_user_id
761 c.user = self.db_user
761 c.user = self.db_user
762
762
763 user_data = c.user.get_api_data()
763 user_data = c.user.get_api_data()
764
764
765 del_auth_token = self.request.POST.get('del_auth_token')
765 del_auth_token = self.request.POST.get('del_auth_token')
766
766
767 if del_auth_token:
767 if del_auth_token:
768 token = UserApiKeys.get_or_404(del_auth_token)
768 token = UserApiKeys.get_or_404(del_auth_token)
769 token_data = token.get_api_data()
769 token_data = token.get_api_data()
770
770
771 AuthTokenModel().delete(del_auth_token, c.user.user_id)
771 AuthTokenModel().delete(del_auth_token, c.user.user_id)
772 audit_logger.store_web(
772 audit_logger.store_web(
773 'user.edit.token.delete', action_data={
773 'user.edit.token.delete', action_data={
774 'data': {'token': token_data, 'user': user_data}},
774 'data': {'token': token_data, 'user': user_data}},
775 user=self._rhodecode_user,)
775 user=self._rhodecode_user,)
776 Session().commit()
776 Session().commit()
777 h.flash(_("Auth token successfully deleted"), category='success')
777 h.flash(_("Auth token successfully deleted"), category='success')
778
778
779 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
779 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
780
780
781 @LoginRequired()
781 @LoginRequired()
782 @HasPermissionAllDecorator('hg.admin')
782 @HasPermissionAllDecorator('hg.admin')
783 @view_config(
783 @view_config(
784 route_name='edit_user_ssh_keys', request_method='GET',
784 route_name='edit_user_ssh_keys', request_method='GET',
785 renderer='rhodecode:templates/admin/users/user_edit.mako')
785 renderer='rhodecode:templates/admin/users/user_edit.mako')
786 def ssh_keys(self):
786 def ssh_keys(self):
787 _ = self.request.translate
787 _ = self.request.translate
788 c = self.load_default_context()
788 c = self.load_default_context()
789 c.user = self.db_user
789 c.user = self.db_user
790
790
791 c.active = 'ssh_keys'
791 c.active = 'ssh_keys'
792 c.default_key = self.request.GET.get('default_key')
792 c.default_key = self.request.GET.get('default_key')
793 c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
793 c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
794 return self._get_template_context(c)
794 return self._get_template_context(c)
795
795
796 @LoginRequired()
796 @LoginRequired()
797 @HasPermissionAllDecorator('hg.admin')
797 @HasPermissionAllDecorator('hg.admin')
798 @view_config(
798 @view_config(
799 route_name='edit_user_ssh_keys_generate_keypair', request_method='GET',
799 route_name='edit_user_ssh_keys_generate_keypair', request_method='GET',
800 renderer='rhodecode:templates/admin/users/user_edit.mako')
800 renderer='rhodecode:templates/admin/users/user_edit.mako')
801 def ssh_keys_generate_keypair(self):
801 def ssh_keys_generate_keypair(self):
802 _ = self.request.translate
802 _ = self.request.translate
803 c = self.load_default_context()
803 c = self.load_default_context()
804
804
805 c.user = self.db_user
805 c.user = self.db_user
806
806
807 c.active = 'ssh_keys_generate'
807 c.active = 'ssh_keys_generate'
808 comment = 'RhodeCode-SSH {}'.format(c.user.email or '')
808 comment = 'RhodeCode-SSH {}'.format(c.user.email or '')
809 c.private, c.public = SshKeyModel().generate_keypair(comment=comment)
809 c.private, c.public = SshKeyModel().generate_keypair(comment=comment)
810
810
811 return self._get_template_context(c)
811 return self._get_template_context(c)
812
812
813 @LoginRequired()
813 @LoginRequired()
814 @HasPermissionAllDecorator('hg.admin')
814 @HasPermissionAllDecorator('hg.admin')
815 @CSRFRequired()
815 @CSRFRequired()
816 @view_config(
816 @view_config(
817 route_name='edit_user_ssh_keys_add', request_method='POST')
817 route_name='edit_user_ssh_keys_add', request_method='POST')
818 def ssh_keys_add(self):
818 def ssh_keys_add(self):
819 _ = self.request.translate
819 _ = self.request.translate
820 c = self.load_default_context()
820 c = self.load_default_context()
821
821
822 user_id = self.db_user_id
822 user_id = self.db_user_id
823 c.user = self.db_user
823 c.user = self.db_user
824
824
825 user_data = c.user.get_api_data()
825 user_data = c.user.get_api_data()
826 key_data = self.request.POST.get('key_data')
826 key_data = self.request.POST.get('key_data')
827 description = self.request.POST.get('description')
827 description = self.request.POST.get('description')
828
828
829 fingerprint = 'unknown'
829 fingerprint = 'unknown'
830 try:
830 try:
831 if not key_data:
831 if not key_data:
832 raise ValueError('Please add a valid public key')
832 raise ValueError('Please add a valid public key')
833
833
834 key = SshKeyModel().parse_key(key_data.strip())
834 key = SshKeyModel().parse_key(key_data.strip())
835 fingerprint = key.hash_md5()
835 fingerprint = key.hash_md5()
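# Illustrative only: key_data is expected to be a single OpenSSH public-key
# line such as 'ssh-rsa AAAAB3Nza... user@host' (example value, not from the
# original file), and hash_md5() is assumed to return the colon-separated MD5
# fingerprint used for the uniqueness check below.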
836
836
837 ssh_key = SshKeyModel().create(
837 ssh_key = SshKeyModel().create(
838 c.user.user_id, fingerprint, key.keydata, description)
838 c.user.user_id, fingerprint, key.keydata, description)
839 ssh_key_data = ssh_key.get_api_data()
839 ssh_key_data = ssh_key.get_api_data()
840
840
841 audit_logger.store_web(
841 audit_logger.store_web(
842 'user.edit.ssh_key.add', action_data={
842 'user.edit.ssh_key.add', action_data={
843 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
843 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
844 user=self._rhodecode_user, )
844 user=self._rhodecode_user, )
845 Session().commit()
845 Session().commit()
846
846
847 # Trigger an event on change of keys.
847 # Trigger an event on change of keys.
848 trigger(SshKeyFileChangeEvent(), self.request.registry)
848 trigger(SshKeyFileChangeEvent(), self.request.registry)
849
849
850 h.flash(_("Ssh Key successfully created"), category='success')
850 h.flash(_("Ssh Key successfully created"), category='success')
851
851
852 except IntegrityError:
852 except IntegrityError:
853 log.exception("Exception during ssh key saving")
853 log.exception("Exception during ssh key saving")
854 err = 'A key with fingerprint `{}` already exists, ' \
855 'please use a different one'.format(fingerprint)
856 h.flash(_('An error occurred during ssh key saving: {}').format(err),
856 h.flash(_('An error occurred during ssh key saving: {}').format(err),
857 category='error')
857 category='error')
858 except Exception as e:
858 except Exception as e:
859 log.exception("Exception during ssh key saving")
859 log.exception("Exception during ssh key saving")
860 h.flash(_('An error occurred during ssh key saving: {}').format(e),
860 h.flash(_('An error occurred during ssh key saving: {}').format(e),
861 category='error')
861 category='error')
862
862
863 return HTTPFound(
863 return HTTPFound(
864 h.route_path('edit_user_ssh_keys', user_id=user_id))
864 h.route_path('edit_user_ssh_keys', user_id=user_id))
865
865
866 @LoginRequired()
866 @LoginRequired()
867 @HasPermissionAllDecorator('hg.admin')
867 @HasPermissionAllDecorator('hg.admin')
868 @CSRFRequired()
868 @CSRFRequired()
869 @view_config(
869 @view_config(
870 route_name='edit_user_ssh_keys_delete', request_method='POST')
870 route_name='edit_user_ssh_keys_delete', request_method='POST')
871 def ssh_keys_delete(self):
871 def ssh_keys_delete(self):
872 _ = self.request.translate
872 _ = self.request.translate
873 c = self.load_default_context()
873 c = self.load_default_context()
874
874
875 user_id = self.db_user_id
875 user_id = self.db_user_id
876 c.user = self.db_user
876 c.user = self.db_user
877
877
878 user_data = c.user.get_api_data()
878 user_data = c.user.get_api_data()
879
879
880 del_ssh_key = self.request.POST.get('del_ssh_key')
880 del_ssh_key = self.request.POST.get('del_ssh_key')
881
881
882 if del_ssh_key:
882 if del_ssh_key:
883 ssh_key = UserSshKeys.get_or_404(del_ssh_key)
883 ssh_key = UserSshKeys.get_or_404(del_ssh_key)
884 ssh_key_data = ssh_key.get_api_data()
884 ssh_key_data = ssh_key.get_api_data()
885
885
886 SshKeyModel().delete(del_ssh_key, c.user.user_id)
886 SshKeyModel().delete(del_ssh_key, c.user.user_id)
887 audit_logger.store_web(
887 audit_logger.store_web(
888 'user.edit.ssh_key.delete', action_data={
888 'user.edit.ssh_key.delete', action_data={
889 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
889 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
890 user=self._rhodecode_user,)
890 user=self._rhodecode_user,)
891 Session().commit()
891 Session().commit()
892 # Trigger an event on change of keys.
892 # Trigger an event on change of keys.
893 trigger(SshKeyFileChangeEvent(), self.request.registry)
893 trigger(SshKeyFileChangeEvent(), self.request.registry)
894 h.flash(_("Ssh key successfully deleted"), category='success')
894 h.flash(_("Ssh key successfully deleted"), category='success')
895
895
896 return HTTPFound(h.route_path('edit_user_ssh_keys', user_id=user_id))
896 return HTTPFound(h.route_path('edit_user_ssh_keys', user_id=user_id))
897
897
898 @LoginRequired()
898 @LoginRequired()
899 @HasPermissionAllDecorator('hg.admin')
899 @HasPermissionAllDecorator('hg.admin')
900 @view_config(
900 @view_config(
901 route_name='edit_user_emails', request_method='GET',
901 route_name='edit_user_emails', request_method='GET',
902 renderer='rhodecode:templates/admin/users/user_edit.mako')
902 renderer='rhodecode:templates/admin/users/user_edit.mako')
903 def emails(self):
903 def emails(self):
904 _ = self.request.translate
904 _ = self.request.translate
905 c = self.load_default_context()
905 c = self.load_default_context()
906 c.user = self.db_user
906 c.user = self.db_user
907
907
908 c.active = 'emails'
908 c.active = 'emails'
909 c.user_email_map = UserEmailMap.query() \
909 c.user_email_map = UserEmailMap.query() \
910 .filter(UserEmailMap.user == c.user).all()
910 .filter(UserEmailMap.user == c.user).all()
911
911
912 return self._get_template_context(c)
912 return self._get_template_context(c)
913
913
914 @LoginRequired()
914 @LoginRequired()
915 @HasPermissionAllDecorator('hg.admin')
915 @HasPermissionAllDecorator('hg.admin')
916 @CSRFRequired()
916 @CSRFRequired()
917 @view_config(
917 @view_config(
918 route_name='edit_user_emails_add', request_method='POST')
918 route_name='edit_user_emails_add', request_method='POST')
919 def emails_add(self):
919 def emails_add(self):
920 _ = self.request.translate
920 _ = self.request.translate
921 c = self.load_default_context()
921 c = self.load_default_context()
922
922
923 user_id = self.db_user_id
923 user_id = self.db_user_id
924 c.user = self.db_user
924 c.user = self.db_user
925
925
926 email = self.request.POST.get('new_email')
926 email = self.request.POST.get('new_email')
927 user_data = c.user.get_api_data()
927 user_data = c.user.get_api_data()
928 try:
928 try:
929
929
930 form = UserExtraEmailForm(self.request.translate)()
930 form = UserExtraEmailForm(self.request.translate)()
931 data = form.to_python({'email': email})
931 data = form.to_python({'email': email})
932 email = data['email']
932 email = data['email']
933
933
934 UserModel().add_extra_email(c.user.user_id, email)
934 UserModel().add_extra_email(c.user.user_id, email)
935 audit_logger.store_web(
935 audit_logger.store_web(
936 'user.edit.email.add',
936 'user.edit.email.add',
937 action_data={'email': email, 'user': user_data},
937 action_data={'email': email, 'user': user_data},
938 user=self._rhodecode_user)
938 user=self._rhodecode_user)
939 Session().commit()
939 Session().commit()
940 h.flash(_("Added new email address `%s` for user account") % email,
940 h.flash(_("Added new email address `%s` for user account") % email,
941 category='success')
941 category='success')
942 except formencode.Invalid as error:
942 except formencode.Invalid as error:
943 h.flash(h.escape(error.error_dict['email']), category='error')
943 h.flash(h.escape(error.error_dict['email']), category='error')
944 except IntegrityError:
944 except IntegrityError:
945 log.warning("Email %s already exists", email)
945 log.warning("Email %s already exists", email)
946 h.flash(_('Email `{}` is already registered for another user.').format(email),
946 h.flash(_('Email `{}` is already registered for another user.').format(email),
947 category='error')
947 category='error')
948 except Exception:
948 except Exception:
949 log.exception("Exception during email saving")
949 log.exception("Exception during email saving")
950 h.flash(_('An error occurred during email saving'),
950 h.flash(_('An error occurred during email saving'),
951 category='error')
951 category='error')
952 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
952 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
953
953
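The email handling above relies on formencode validation: to_python() either returns cleaned data or raises Invalid, whose error_dict supplies the flashed message. A minimal sketch, using a simplified schema rather than RhodeCode's actual UserExtraEmailForm factory:

# Simplified formencode schema; not the actual UserExtraEmailForm factory.
import formencode
from formencode import validators


class ExtraEmailSchema(formencode.Schema):
    email = validators.Email(not_empty=True)


schema = ExtraEmailSchema()
try:
    data = schema.to_python({'email': 'someone@example.com'})
    print(data['email'])               # cleaned, validated value
except formencode.Invalid as error:
    print(error.error_dict['email'])   # what gets flashed to the admin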
954 @LoginRequired()
954 @LoginRequired()
955 @HasPermissionAllDecorator('hg.admin')
955 @HasPermissionAllDecorator('hg.admin')
956 @CSRFRequired()
956 @CSRFRequired()
957 @view_config(
957 @view_config(
958 route_name='edit_user_emails_delete', request_method='POST')
958 route_name='edit_user_emails_delete', request_method='POST')
959 def emails_delete(self):
959 def emails_delete(self):
960 _ = self.request.translate
960 _ = self.request.translate
961 c = self.load_default_context()
961 c = self.load_default_context()
962
962
963 user_id = self.db_user_id
963 user_id = self.db_user_id
964 c.user = self.db_user
964 c.user = self.db_user
965
965
966 email_id = self.request.POST.get('del_email_id')
966 email_id = self.request.POST.get('del_email_id')
967 user_model = UserModel()
967 user_model = UserModel()
968
968
969 email = UserEmailMap.query().get(email_id).email
969 email = UserEmailMap.query().get(email_id).email
970 user_data = c.user.get_api_data()
970 user_data = c.user.get_api_data()
971 user_model.delete_extra_email(c.user.user_id, email_id)
971 user_model.delete_extra_email(c.user.user_id, email_id)
972 audit_logger.store_web(
972 audit_logger.store_web(
973 'user.edit.email.delete',
973 'user.edit.email.delete',
974 action_data={'email': email, 'user': user_data},
974 action_data={'email': email, 'user': user_data},
975 user=self._rhodecode_user)
975 user=self._rhodecode_user)
976 Session().commit()
976 Session().commit()
977 h.flash(_("Removed email address from user account"),
977 h.flash(_("Removed email address from user account"),
978 category='success')
978 category='success')
979 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
979 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
980
980
981 @LoginRequired()
981 @LoginRequired()
982 @HasPermissionAllDecorator('hg.admin')
982 @HasPermissionAllDecorator('hg.admin')
983 @view_config(
983 @view_config(
984 route_name='edit_user_ips', request_method='GET',
984 route_name='edit_user_ips', request_method='GET',
985 renderer='rhodecode:templates/admin/users/user_edit.mako')
985 renderer='rhodecode:templates/admin/users/user_edit.mako')
986 def ips(self):
986 def ips(self):
987 _ = self.request.translate
987 _ = self.request.translate
988 c = self.load_default_context()
988 c = self.load_default_context()
989 c.user = self.db_user
989 c.user = self.db_user
990
990
991 c.active = 'ips'
991 c.active = 'ips'
992 c.user_ip_map = UserIpMap.query() \
992 c.user_ip_map = UserIpMap.query() \
993 .filter(UserIpMap.user == c.user).all()
993 .filter(UserIpMap.user == c.user).all()
994
994
995 c.inherit_default_ips = c.user.inherit_default_permissions
995 c.inherit_default_ips = c.user.inherit_default_permissions
996 c.default_user_ip_map = UserIpMap.query() \
996 c.default_user_ip_map = UserIpMap.query() \
997 .filter(UserIpMap.user == User.get_default_user()).all()
997 .filter(UserIpMap.user == User.get_default_user()).all()
998
998
999 return self._get_template_context(c)
999 return self._get_template_context(c)
1000
1000
1001 @LoginRequired()
1001 @LoginRequired()
1002 @HasPermissionAllDecorator('hg.admin')
1002 @HasPermissionAllDecorator('hg.admin')
1003 @CSRFRequired()
1003 @CSRFRequired()
1004 @view_config(
1004 @view_config(
1005 route_name='edit_user_ips_add', request_method='POST')
1005 route_name='edit_user_ips_add', request_method='POST')
1006 # NOTE(marcink): this view is allowed for default users, as we can
1006 # NOTE(marcink): this view is allowed for default users, as we can
1007 # edit their IP white list
1007 # edit their IP white list
1008 def ips_add(self):
1008 def ips_add(self):
1009 _ = self.request.translate
1009 _ = self.request.translate
1010 c = self.load_default_context()
1010 c = self.load_default_context()
1011
1011
1012 user_id = self.db_user_id
1012 user_id = self.db_user_id
1013 c.user = self.db_user
1013 c.user = self.db_user
1014
1014
1015 user_model = UserModel()
1015 user_model = UserModel()
1016 desc = self.request.POST.get('description')
1016 desc = self.request.POST.get('description')
1017 try:
1017 try:
1018 ip_list = user_model.parse_ip_range(
1018 ip_list = user_model.parse_ip_range(
1019 self.request.POST.get('new_ip'))
1019 self.request.POST.get('new_ip'))
1020 except Exception as e:
1020 except Exception as e:
1021 ip_list = []
1021 ip_list = []
1022 log.exception("Exception during ip saving")
1022 log.exception("Exception during ip saving")
1023 h.flash(_('An error occurred during ip saving:%s' % (e,)),
1023 h.flash(_('An error occurred during ip saving:%s' % (e,)),
1024 category='error')
1024 category='error')
1025 added = []
1025 added = []
1026 user_data = c.user.get_api_data()
1026 user_data = c.user.get_api_data()
1027 for ip in ip_list:
1027 for ip in ip_list:
1028 try:
1028 try:
1029 form = UserExtraIpForm(self.request.translate)()
1029 form = UserExtraIpForm(self.request.translate)()
1030 data = form.to_python({'ip': ip})
1030 data = form.to_python({'ip': ip})
1031 ip = data['ip']
1031 ip = data['ip']
1032
1032
1033 user_model.add_extra_ip(c.user.user_id, ip, desc)
1033 user_model.add_extra_ip(c.user.user_id, ip, desc)
1034 audit_logger.store_web(
1034 audit_logger.store_web(
1035 'user.edit.ip.add',
1035 'user.edit.ip.add',
1036 action_data={'ip': ip, 'user': user_data},
1036 action_data={'ip': ip, 'user': user_data},
1037 user=self._rhodecode_user)
1037 user=self._rhodecode_user)
1038 Session().commit()
1038 Session().commit()
1039 added.append(ip)
1039 added.append(ip)
1040 except formencode.Invalid as error:
1040 except formencode.Invalid as error:
1041 msg = error.error_dict['ip']
1041 msg = error.error_dict['ip']
1042 h.flash(msg, category='error')
1042 h.flash(msg, category='error')
1043 except Exception:
1043 except Exception:
1044 log.exception("Exception during ip saving")
1044 log.exception("Exception during ip saving")
1045 h.flash(_('An error occurred during ip saving'),
1045 h.flash(_('An error occurred during ip saving'),
1046 category='error')
1046 category='error')
1047 if added:
1047 if added:
1048 h.flash(
1048 h.flash(
1049 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
1049 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
1050 category='success')
1050 category='success')
1051 if 'default_user' in self.request.POST:
1051 if 'default_user' in self.request.POST:
1052 # case for editing global IP list we do it for 'DEFAULT' user
1052 # case for editing global IP list we do it for 'DEFAULT' user
1053 raise HTTPFound(h.route_path('admin_permissions_ips'))
1053 raise HTTPFound(h.route_path('admin_permissions_ips'))
1054 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1054 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1055
1055
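UserModel.parse_ip_range is not shown in this diff; as a rough illustration only (not RhodeCode's implementation), expanding a dash-separated range or a CIDR block can be done with the Python 3 stdlib ipaddress module:

# Rough illustration only -- not UserModel.parse_ip_range.
import ipaddress


def parse_ip_input(value):
    value = value.strip()
    if '-' in value:
        start, end = (ipaddress.ip_address(v.strip()) for v in value.split('-', 1))
        # collapse the inclusive range into the minimal set of networks
        return [str(net) for net in ipaddress.summarize_address_range(start, end)]
    # a single address or a CIDR network
    return [str(ipaddress.ip_network(value, strict=False))]


print(parse_ip_input('192.168.0.10 - 192.168.0.12'))
print(parse_ip_input('10.0.0.0/24'))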
1056 @LoginRequired()
1056 @LoginRequired()
1057 @HasPermissionAllDecorator('hg.admin')
1057 @HasPermissionAllDecorator('hg.admin')
1058 @CSRFRequired()
1058 @CSRFRequired()
1059 @view_config(
1059 @view_config(
1060 route_name='edit_user_ips_delete', request_method='POST')
1060 route_name='edit_user_ips_delete', request_method='POST')
1061 # NOTE(marcink): this view is allowed for default users, as we can
1061 # NOTE(marcink): this view is allowed for default users, as we can
1062 # edit their IP white list
1062 # edit their IP white list
1063 def ips_delete(self):
1063 def ips_delete(self):
1064 _ = self.request.translate
1064 _ = self.request.translate
1065 c = self.load_default_context()
1065 c = self.load_default_context()
1066
1066
1067 user_id = self.db_user_id
1067 user_id = self.db_user_id
1068 c.user = self.db_user
1068 c.user = self.db_user
1069
1069
1070 ip_id = self.request.POST.get('del_ip_id')
1070 ip_id = self.request.POST.get('del_ip_id')
1071 user_model = UserModel()
1071 user_model = UserModel()
1072 user_data = c.user.get_api_data()
1072 user_data = c.user.get_api_data()
1073 ip = UserIpMap.query().get(ip_id).ip_addr
1073 ip = UserIpMap.query().get(ip_id).ip_addr
1074 user_model.delete_extra_ip(c.user.user_id, ip_id)
1074 user_model.delete_extra_ip(c.user.user_id, ip_id)
1075 audit_logger.store_web(
1075 audit_logger.store_web(
1076 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
1076 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
1077 user=self._rhodecode_user)
1077 user=self._rhodecode_user)
1078 Session().commit()
1078 Session().commit()
1079 h.flash(_("Removed ip address from user whitelist"), category='success')
1079 h.flash(_("Removed ip address from user whitelist"), category='success')
1080
1080
1081 if 'default_user' in self.request.POST:
1081 if 'default_user' in self.request.POST:
1082 # case for editing global IP list we do it for 'DEFAULT' user
1082 # case for editing global IP list we do it for 'DEFAULT' user
1083 raise HTTPFound(h.route_path('admin_permissions_ips'))
1083 raise HTTPFound(h.route_path('admin_permissions_ips'))
1084 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1084 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1085
1085
1086 @LoginRequired()
1086 @LoginRequired()
1087 @HasPermissionAllDecorator('hg.admin')
1087 @HasPermissionAllDecorator('hg.admin')
1088 @view_config(
1088 @view_config(
1089 route_name='edit_user_groups_management', request_method='GET',
1089 route_name='edit_user_groups_management', request_method='GET',
1090 renderer='rhodecode:templates/admin/users/user_edit.mako')
1090 renderer='rhodecode:templates/admin/users/user_edit.mako')
1091 def groups_management(self):
1091 def groups_management(self):
1092 c = self.load_default_context()
1092 c = self.load_default_context()
1093 c.user = self.db_user
1093 c.user = self.db_user
1094 c.data = c.user.group_member
1094 c.data = c.user.group_member
1095
1095
1096 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
1096 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
1097 for group in c.user.group_member]
1097 for group in c.user.group_member]
1098 c.groups = json.dumps(groups)
1098 c.groups = json.dumps(groups)
1099 c.active = 'groups'
1099 c.active = 'groups'
1100
1100
1101 return self._get_template_context(c)
1101 return self._get_template_context(c)
1102
1102
1103 @LoginRequired()
1103 @LoginRequired()
1104 @HasPermissionAllDecorator('hg.admin')
1104 @HasPermissionAllDecorator('hg.admin')
1105 @CSRFRequired()
1105 @CSRFRequired()
1106 @view_config(
1106 @view_config(
1107 route_name='edit_user_groups_management_updates', request_method='POST')
1107 route_name='edit_user_groups_management_updates', request_method='POST')
1108 def groups_management_updates(self):
1108 def groups_management_updates(self):
1109 _ = self.request.translate
1109 _ = self.request.translate
1110 c = self.load_default_context()
1110 c = self.load_default_context()
1111
1111
1112 user_id = self.db_user_id
1112 user_id = self.db_user_id
1113 c.user = self.db_user
1113 c.user = self.db_user
1114
1114
1115 user_groups = set(self.request.POST.getall('users_group_id'))
1115 user_groups = set(self.request.POST.getall('users_group_id'))
1116 user_groups_objects = []
1116 user_groups_objects = []
1117
1117
1118 for ugid in user_groups:
1118 for ugid in user_groups:
1119 user_groups_objects.append(
1119 user_groups_objects.append(
1120 UserGroupModel().get_group(safe_int(ugid)))
1120 UserGroupModel().get_group(safe_int(ugid)))
1121 user_group_model = UserGroupModel()
1121 user_group_model = UserGroupModel()
1122 added_to_groups, removed_from_groups = \
1122 added_to_groups, removed_from_groups = \
1123 user_group_model.change_groups(c.user, user_groups_objects)
1123 user_group_model.change_groups(c.user, user_groups_objects)
1124
1124
1125 user_data = c.user.get_api_data()
1125 user_data = c.user.get_api_data()
1126 for user_group_id in added_to_groups:
1126 for user_group_id in added_to_groups:
1127 user_group = UserGroup.get(user_group_id)
1127 user_group = UserGroup.get(user_group_id)
1128 old_values = user_group.get_api_data()
1128 old_values = user_group.get_api_data()
1129 audit_logger.store_web(
1129 audit_logger.store_web(
1130 'user_group.edit.member.add',
1130 'user_group.edit.member.add',
1131 action_data={'user': user_data, 'old_data': old_values},
1131 action_data={'user': user_data, 'old_data': old_values},
1132 user=self._rhodecode_user)
1132 user=self._rhodecode_user)
1133
1133
1134 for user_group_id in removed_from_groups:
1134 for user_group_id in removed_from_groups:
1135 user_group = UserGroup.get(user_group_id)
1135 user_group = UserGroup.get(user_group_id)
1136 old_values = user_group.get_api_data()
1136 old_values = user_group.get_api_data()
1137 audit_logger.store_web(
1137 audit_logger.store_web(
1138 'user_group.edit.member.delete',
1138 'user_group.edit.member.delete',
1139 action_data={'user': user_data, 'old_data': old_values},
1139 action_data={'user': user_data, 'old_data': old_values},
1140 user=self._rhodecode_user)
1140 user=self._rhodecode_user)
1141
1141
1142 Session().commit()
1142 Session().commit()
1143 c.active = 'user_groups_management'
1143 c.active = 'user_groups_management'
1144 h.flash(_("Groups successfully changed"), category='success')
1144 h.flash(_("Groups successfully changed"), category='success')
1145
1145
1146 return HTTPFound(h.route_path(
1146 return HTTPFound(h.route_path(
1147 'edit_user_groups_management', user_id=user_id))
1147 'edit_user_groups_management', user_id=user_id))
1148
1148
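The audit entries above depend on change_groups() reporting which groups the user was added to and removed from. A minimal sketch of that set arithmetic, reduced to plain ids (assumed behaviour, not the actual UserGroupModel code):

# Assumed behaviour of change_groups(), reduced to set arithmetic on ids.
def diff_group_membership(current_group_ids, desired_group_ids):
    current, desired = set(current_group_ids), set(desired_group_ids)
    added_to_groups = sorted(desired - current)
    removed_from_groups = sorted(current - desired)
    return added_to_groups, removed_from_groups


print(diff_group_membership([1, 2, 3], [2, 3, 4]))   # ([4], [1])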
1149 @LoginRequired()
1149 @LoginRequired()
1150 @HasPermissionAllDecorator('hg.admin')
1150 @HasPermissionAllDecorator('hg.admin')
1151 @view_config(
1151 @view_config(
1152 route_name='edit_user_audit_logs', request_method='GET',
1152 route_name='edit_user_audit_logs', request_method='GET',
1153 renderer='rhodecode:templates/admin/users/user_edit.mako')
1153 renderer='rhodecode:templates/admin/users/user_edit.mako')
1154 def user_audit_logs(self):
1154 def user_audit_logs(self):
1155 _ = self.request.translate
1155 _ = self.request.translate
1156 c = self.load_default_context()
1156 c = self.load_default_context()
1157 c.user = self.db_user
1157 c.user = self.db_user
1158
1158
1159 c.active = 'audit'
1159 c.active = 'audit'
1160
1160
1161 p = safe_int(self.request.GET.get('page', 1), 1)
1161 p = safe_int(self.request.GET.get('page', 1), 1)
1162
1162
1163 filter_term = self.request.GET.get('filter')
1163 filter_term = self.request.GET.get('filter')
1164 user_log = UserModel().get_user_log(c.user, filter_term)
1164 user_log = UserModel().get_user_log(c.user, filter_term)
1165
1165
1166 def url_generator(**kw):
1166 def url_generator(**kw):
1167 if filter_term:
1167 if filter_term:
1168 kw['filter'] = filter_term
1168 kw['filter'] = filter_term
1169 return self.request.current_route_path(_query=kw)
1169 return self.request.current_route_path(_query=kw)
1170
1170
1171 c.audit_logs = h.Page(
1171 c.audit_logs = h.Page(
1172 user_log, page=p, items_per_page=10, url=url_generator)
1172 user_log, page=p, items_per_page=10, url=url_generator)
1173 c.filter_term = filter_term
1173 c.filter_term = filter_term
1174 return self._get_template_context(c)
1174 return self._get_template_context(c)
1175
1175
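The url_generator closure above exists so that page links keep the active filter term; otherwise paging would drop the search. A standalone sketch of the same idea (the route path and query building are made-up stand-ins, not h.Page's API):

# Standalone sketch: page links that preserve the current filter term.
def make_url_generator(base_path, filter_term=None):
    def url_generator(**kw):
        if filter_term:
            kw['filter'] = filter_term
        query = '&'.join('%s=%s' % (k, v) for k, v in sorted(kw.items()))
        return '%s?%s' % (base_path, query)
    return url_generator


page_url = make_url_generator('/_admin/users/2/edit/audit', filter_term='repo.create')
print(page_url(page=3))   # /_admin/users/2/edit/audit?filter=repo.create&page=3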
1176 @LoginRequired()
1176 @LoginRequired()
1177 @HasPermissionAllDecorator('hg.admin')
1177 @HasPermissionAllDecorator('hg.admin')
1178 @view_config(
1178 @view_config(
1179 route_name='edit_user_perms_summary', request_method='GET',
1179 route_name='edit_user_perms_summary', request_method='GET',
1180 renderer='rhodecode:templates/admin/users/user_edit.mako')
1180 renderer='rhodecode:templates/admin/users/user_edit.mako')
1181 def user_perms_summary(self):
1181 def user_perms_summary(self):
1182 _ = self.request.translate
1182 _ = self.request.translate
1183 c = self.load_default_context()
1183 c = self.load_default_context()
1184 c.user = self.db_user
1184 c.user = self.db_user
1185
1185
1186 c.active = 'perms_summary'
1186 c.active = 'perms_summary'
1187 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1187 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1188
1188
1189 return self._get_template_context(c)
1189 return self._get_template_context(c)
1190
1190
1191 @LoginRequired()
1191 @LoginRequired()
1192 @HasPermissionAllDecorator('hg.admin')
1192 @HasPermissionAllDecorator('hg.admin')
1193 @view_config(
1193 @view_config(
1194 route_name='edit_user_perms_summary_json', request_method='GET',
1194 route_name='edit_user_perms_summary_json', request_method='GET',
1195 renderer='json_ext')
1195 renderer='json_ext')
1196 def user_perms_summary_json(self):
1196 def user_perms_summary_json(self):
1197 self.load_default_context()
1197 self.load_default_context()
1198 perm_user = self.db_user.AuthUser(ip_addr=self.request.remote_addr)
1198 perm_user = self.db_user.AuthUser(ip_addr=self.request.remote_addr)
1199
1199
1200 return perm_user.permissions
1200 return perm_user.permissions
1201
1201
1202 def _get_user_cache_keys(self, cache_namespace_uid, keys):
1203 user_keys = []
1204 for k in sorted(keys):
1205 if k.startswith(cache_namespace_uid):
1206 user_keys.append(k)
1207 return user_keys
1208
1209 @LoginRequired()
1202 @LoginRequired()
1210 @HasPermissionAllDecorator('hg.admin')
1203 @HasPermissionAllDecorator('hg.admin')
1211 @view_config(
1204 @view_config(
1212 route_name='edit_user_caches', request_method='GET',
1205 route_name='edit_user_caches', request_method='GET',
1213 renderer='rhodecode:templates/admin/users/user_edit.mako')
1206 renderer='rhodecode:templates/admin/users/user_edit.mako')
1214 def user_caches(self):
1207 def user_caches(self):
1215 _ = self.request.translate
1208 _ = self.request.translate
1216 c = self.load_default_context()
1209 c = self.load_default_context()
1217 c.user = self.db_user
1210 c.user = self.db_user
1218
1211
1219 c.active = 'caches'
1212 c.active = 'caches'
1220 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1213 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1221
1214
1222 cache_namespace_uid = 'cache_user_auth.{}'.format(self.db_user.user_id)
1215 cache_namespace_uid = 'cache_user_auth.{}'.format(self.db_user.user_id)
1223 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1216 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1224 c.backend = c.region.backend
1217 c.backend = c.region.backend
1225 c.user_keys = self._get_user_cache_keys(
1226 cache_namespace_uid, c.region.backend.list_keys())
1227 
1218 c.user_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
1219 
1228 return self._get_template_context(c)
1220 return self._get_template_context(c)
1229
1221
1230 @LoginRequired()
1222 @LoginRequired()
1231 @HasPermissionAllDecorator('hg.admin')
1223 @HasPermissionAllDecorator('hg.admin')
1232 @CSRFRequired()
1224 @CSRFRequired()
1233 @view_config(
1225 @view_config(
1234 route_name='edit_user_caches_update', request_method='POST')
1226 route_name='edit_user_caches_update', request_method='POST')
1235 def user_caches_update(self):
1227 def user_caches_update(self):
1236 _ = self.request.translate
1228 _ = self.request.translate
1237 c = self.load_default_context()
1229 c = self.load_default_context()
1238 c.user = self.db_user
1230 c.user = self.db_user
1239
1231
1240 c.active = 'caches'
1232 c.active = 'caches'
1241 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1233 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1242
1234
1243 cache_namespace_uid = 'cache_user_auth.{}'.format(self.db_user.user_id)
1244 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1245 
1246 c.user_keys = self._get_user_cache_keys(
1247 cache_namespace_uid, c.region.backend.list_keys())
1248 for k in c.user_keys:
1249 c.region.delete(k)
1250 
1251 h.flash(_("Deleted {} cache keys").format(len(c.user_keys)), category='success')
1252 
1235 cache_namespace_uid = 'cache_user_auth.{}'.format(self.db_user.user_id)
1236 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid)
1237 
1238 h.flash(_("Deleted {} cache keys").format(del_keys), category='success')
1239 
1253 return HTTPFound(h.route_path(
1240 return HTTPFound(h.route_path(
1254 'edit_user_caches', user_id=c.user.user_id))
1241 'edit_user_caches', user_id=c.user.user_id))
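The new code above swaps manual key iteration for rc_cache.clear_cache_namespace(). Underneath, rc_cache regions are dogpile.cache regions; a minimal standalone sketch of the namespace idea, using the in-memory backend (a hypothetical user id, not RhodeCode's actual backends or helper API):

# dogpile.cache sketch: cache under a namespace, then invalidate the region.
from dogpile.cache import make_region

region = make_region().configure('dogpile.cache.memory')
cache_namespace_uid = 'cache_user_auth.42'   # hypothetical user id


@region.cache_on_arguments(namespace=cache_namespace_uid)
def compute_perm_tree(user_id):
    print('computing permissions for user %s' % user_id)
    return {'user_id': user_id, 'global': ['hg.admin']}


compute_perm_tree(42)    # computed and stored under the namespace
compute_perm_tree(42)    # served from the cache, no recompute
region.invalidate()      # marks everything in the region stale
compute_perm_tree(42)    # recomputed on the next call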
@@ -1,80 +1,88 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import logging
22 import logging
23
23
24 from pyramid.httpexceptions import HTTPFound
24 from pyramid.httpexceptions import HTTPFound
25 from pyramid.view import view_config
25 from pyramid.view import view_config
26
26
27 from rhodecode.apps._base import RepoAppView
27 from rhodecode.apps._base import RepoAppView
28 from rhodecode.lib.auth import (
28 from rhodecode.lib.auth import (
29 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
29 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
30 from rhodecode.lib import helpers as h
30 from rhodecode.lib import helpers as h, rc_cache
31 from rhodecode.lib import system_info
31 from rhodecode.lib import system_info
32 from rhodecode.model.meta import Session
32 from rhodecode.model.meta import Session
33 from rhodecode.model.scm import ScmModel
33 from rhodecode.model.scm import ScmModel
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37
37
38 class RepoCachesView(RepoAppView):
38 class RepoCachesView(RepoAppView):
39 def load_default_context(self):
39 def load_default_context(self):
40 c = self._get_local_tmpl_context()
40 c = self._get_local_tmpl_context()
41 return c
41 return c
42
42
43 @LoginRequired()
43 @LoginRequired()
44 @HasRepoPermissionAnyDecorator('repository.admin')
44 @HasRepoPermissionAnyDecorator('repository.admin')
45 @view_config(
45 @view_config(
46 route_name='edit_repo_caches', request_method='GET',
46 route_name='edit_repo_caches', request_method='GET',
47 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
47 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
48 def repo_caches(self):
48 def repo_caches(self):
49 c = self.load_default_context()
49 c = self.load_default_context()
50 c.active = 'caches'
50 c.active = 'caches'
51 cached_diffs_dir = c.rhodecode_db_repo.cached_diffs_dir
51 cached_diffs_dir = c.rhodecode_db_repo.cached_diffs_dir
52 c.cached_diff_count = len(c.rhodecode_db_repo.cached_diffs())
52 c.cached_diff_count = len(c.rhodecode_db_repo.cached_diffs())
53 c.cached_diff_size = 0
53 c.cached_diff_size = 0
54 if os.path.isdir(cached_diffs_dir):
54 if os.path.isdir(cached_diffs_dir):
55 c.cached_diff_size = system_info.get_storage_size(cached_diffs_dir)
55 c.cached_diff_size = system_info.get_storage_size(cached_diffs_dir)
56 c.shadow_repos = c.rhodecode_db_repo.shadow_repos()
56 c.shadow_repos = c.rhodecode_db_repo.shadow_repos()
57
58 cache_namespace_uid = 'cache_repo.{}'.format(self.db_repo.repo_id)
59 c.region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
60 c.backend = c.region.backend
61 c.repo_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
62
57 return self._get_template_context(c)
63 return self._get_template_context(c)
58
64
59 @LoginRequired()
65 @LoginRequired()
60 @HasRepoPermissionAnyDecorator('repository.admin')
66 @HasRepoPermissionAnyDecorator('repository.admin')
61 @CSRFRequired()
67 @CSRFRequired()
62 @view_config(
68 @view_config(
63 route_name='edit_repo_caches', request_method='POST')
69 route_name='edit_repo_caches', request_method='POST')
64 def repo_caches_purge(self):
70 def repo_caches_purge(self):
65 _ = self.request.translate
71 _ = self.request.translate
66 c = self.load_default_context()
72 c = self.load_default_context()
67 c.active = 'caches'
73 c.active = 'caches'
68
74
69 try:
75 try:
70 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
76 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
77
71 Session().commit()
78 Session().commit()
79
72 h.flash(_('Cache invalidation successful'),
80 h.flash(_('Cache invalidation successful'),
73 category='success')
81 category='success')
74 except Exception:
82 except Exception:
75 log.exception("Exception during cache invalidation")
83 log.exception("Exception during cache invalidation")
76 h.flash(_('An error occurred during cache invalidation'),
84 h.flash(_('An error occurred during cache invalidation'),
77 category='error')
85 category='error')
78
86
79 raise HTTPFound(h.route_path(
87 raise HTTPFound(h.route_path(
80 'edit_repo_caches', repo_name=self.db_repo_name))
\ No newline at end of file
88 'edit_repo_caches', repo_name=self.db_repo_name))
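region.backend.list_keys(prefix=...) above is an rc_cache helper used to show one repository's keys on the caches page. As a rough illustration of the prefix idea only (the key format and the dict store below are made up, not a real backend):

# Illustration of prefix-scoped cache keys; store and key format are made up.
cache_store = {
    'cache_repo.1:compute_file_tree|abc123|/': '<tree html>',
    'cache_repo.1:compute_file_tree|def456|/': '<tree html>',
    'cache_repo.2:compute_file_tree|abc123|/': '<tree html>',
}


def list_keys(store, prefix=''):
    # every key that belongs to the given namespace
    return sorted(k for k in store if k.startswith(prefix))


print(list_keys(cache_store, prefix='cache_repo.1'))   # only repo id 1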
@@ -1,1289 +1,1278 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import itertools
21 import itertools
22 import logging
22 import logging
23 import os
23 import os
24 import shutil
24 import shutil
25 import tempfile
25 import tempfile
26 import collections
26 import collections
27
27
28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31 from pyramid.response import Response
31 from pyramid.response import Response
32
32
33 from rhodecode.apps._base import RepoAppView
33 from rhodecode.apps._base import RepoAppView
34
34
35 from rhodecode.controllers.utils import parse_path_ref
35 from rhodecode.controllers.utils import parse_path_ref
36 from rhodecode.lib import diffs, helpers as h, caches
36 from rhodecode.lib import diffs, helpers as h, caches, rc_cache
37 from rhodecode.lib import audit_logger
37 from rhodecode.lib import audit_logger
38 from rhodecode.lib.exceptions import NonRelativePathError
38 from rhodecode.lib.exceptions import NonRelativePathError
39 from rhodecode.lib.codeblocks import (
39 from rhodecode.lib.codeblocks import (
40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
41 from rhodecode.lib.utils2 import (
41 from rhodecode.lib.utils2 import (
42 convert_line_endings, detect_mode, safe_str, str2bool)
42 convert_line_endings, detect_mode, safe_str, str2bool)
43 from rhodecode.lib.auth import (
43 from rhodecode.lib.auth import (
44 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
44 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
45 from rhodecode.lib.vcs import path as vcspath
45 from rhodecode.lib.vcs import path as vcspath
46 from rhodecode.lib.vcs.backends.base import EmptyCommit
46 from rhodecode.lib.vcs.backends.base import EmptyCommit
47 from rhodecode.lib.vcs.conf import settings
47 from rhodecode.lib.vcs.conf import settings
48 from rhodecode.lib.vcs.nodes import FileNode
48 from rhodecode.lib.vcs.nodes import FileNode
49 from rhodecode.lib.vcs.exceptions import (
49 from rhodecode.lib.vcs.exceptions import (
50 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
50 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
51 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
51 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
52 NodeDoesNotExistError, CommitError, NodeError)
52 NodeDoesNotExistError, CommitError, NodeError)
53
53
54 from rhodecode.model.scm import ScmModel
54 from rhodecode.model.scm import ScmModel
55 from rhodecode.model.db import Repository
55 from rhodecode.model.db import Repository
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 class RepoFilesView(RepoAppView):
60 class RepoFilesView(RepoAppView):
61
61
62 @staticmethod
62 @staticmethod
63 def adjust_file_path_for_svn(f_path, repo):
63 def adjust_file_path_for_svn(f_path, repo):
64 """
64 """
65 Computes the relative path of `f_path`.
65 Computes the relative path of `f_path`.
66
66
67 This is mainly based on prefix matching of the recognized tags and
67 This is mainly based on prefix matching of the recognized tags and
68 branches in the underlying repository.
68 branches in the underlying repository.
69 """
69 """
70 tags_and_branches = itertools.chain(
70 tags_and_branches = itertools.chain(
71 repo.branches.iterkeys(),
71 repo.branches.iterkeys(),
72 repo.tags.iterkeys())
72 repo.tags.iterkeys())
73 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
73 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
74
74
75 for name in tags_and_branches:
75 for name in tags_and_branches:
76 if f_path.startswith('{}/'.format(name)):
76 if f_path.startswith('{}/'.format(name)):
77 f_path = vcspath.relpath(f_path, name)
77 f_path = vcspath.relpath(f_path, name)
78 break
78 break
79 return f_path
79 return f_path
80
80
81 def load_default_context(self):
81 def load_default_context(self):
82 c = self._get_local_tmpl_context(include_app_defaults=True)
82 c = self._get_local_tmpl_context(include_app_defaults=True)
83 c.rhodecode_repo = self.rhodecode_vcs_repo
83 c.rhodecode_repo = self.rhodecode_vcs_repo
84 return c
84 return c
85
85
86 def _ensure_not_locked(self):
86 def _ensure_not_locked(self):
87 _ = self.request.translate
87 _ = self.request.translate
88
88
89 repo = self.db_repo
89 repo = self.db_repo
90 if repo.enable_locking and repo.locked[0]:
90 if repo.enable_locking and repo.locked[0]:
91 h.flash(_('This repository has been locked by %s on %s')
91 h.flash(_('This repository has been locked by %s on %s')
92 % (h.person_by_id(repo.locked[0]),
92 % (h.person_by_id(repo.locked[0]),
93 h.format_date(h.time_to_datetime(repo.locked[1]))),
93 h.format_date(h.time_to_datetime(repo.locked[1]))),
94 'warning')
94 'warning')
95 files_url = h.route_path(
95 files_url = h.route_path(
96 'repo_files:default_path',
96 'repo_files:default_path',
97 repo_name=self.db_repo_name, commit_id='tip')
97 repo_name=self.db_repo_name, commit_id='tip')
98 raise HTTPFound(files_url)
98 raise HTTPFound(files_url)
99
99
100 def _get_commit_and_path(self):
100 def _get_commit_and_path(self):
101 default_commit_id = self.db_repo.landing_rev[1]
101 default_commit_id = self.db_repo.landing_rev[1]
102 default_f_path = '/'
102 default_f_path = '/'
103
103
104 commit_id = self.request.matchdict.get(
104 commit_id = self.request.matchdict.get(
105 'commit_id', default_commit_id)
105 'commit_id', default_commit_id)
106 f_path = self._get_f_path(self.request.matchdict, default_f_path)
106 f_path = self._get_f_path(self.request.matchdict, default_f_path)
107 return commit_id, f_path
107 return commit_id, f_path
108
108
109 def _get_default_encoding(self, c):
109 def _get_default_encoding(self, c):
110 enc_list = getattr(c, 'default_encodings', [])
110 enc_list = getattr(c, 'default_encodings', [])
111 return enc_list[0] if enc_list else 'UTF-8'
111 return enc_list[0] if enc_list else 'UTF-8'
112
112
113 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
113 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
114 """
114 """
115 This is a safe way to get commit. If an error occurs it redirects to
115 This is a safe way to get commit. If an error occurs it redirects to
116 tip with proper message
116 tip with proper message
117
117
118 :param commit_id: id of commit to fetch
118 :param commit_id: id of commit to fetch
119 :param redirect_after: toggle redirection
119 :param redirect_after: toggle redirection
120 """
120 """
121 _ = self.request.translate
121 _ = self.request.translate
122
122
123 try:
123 try:
124 return self.rhodecode_vcs_repo.get_commit(commit_id)
124 return self.rhodecode_vcs_repo.get_commit(commit_id)
125 except EmptyRepositoryError:
125 except EmptyRepositoryError:
126 if not redirect_after:
126 if not redirect_after:
127 return None
127 return None
128
128
129 _url = h.route_path(
129 _url = h.route_path(
130 'repo_files_add_file',
130 'repo_files_add_file',
131 repo_name=self.db_repo_name, commit_id=0, f_path='',
131 repo_name=self.db_repo_name, commit_id=0, f_path='',
132 _anchor='edit')
132 _anchor='edit')
133
133
134 if h.HasRepoPermissionAny(
134 if h.HasRepoPermissionAny(
135 'repository.write', 'repository.admin')(self.db_repo_name):
135 'repository.write', 'repository.admin')(self.db_repo_name):
136 add_new = h.link_to(
136 add_new = h.link_to(
137 _('Click here to add a new file.'), _url, class_="alert-link")
137 _('Click here to add a new file.'), _url, class_="alert-link")
138 else:
138 else:
139 add_new = ""
139 add_new = ""
140
140
141 h.flash(h.literal(
141 h.flash(h.literal(
142 _('There are no files yet. %s') % add_new), category='warning')
142 _('There are no files yet. %s') % add_new), category='warning')
143 raise HTTPFound(
143 raise HTTPFound(
144 h.route_path('repo_summary', repo_name=self.db_repo_name))
144 h.route_path('repo_summary', repo_name=self.db_repo_name))
145
145
146 except (CommitDoesNotExistError, LookupError):
146 except (CommitDoesNotExistError, LookupError):
147 msg = _('No such commit exists for this repository')
147 msg = _('No such commit exists for this repository')
148 h.flash(msg, category='error')
148 h.flash(msg, category='error')
149 raise HTTPNotFound()
149 raise HTTPNotFound()
150 except RepositoryError as e:
150 except RepositoryError as e:
151 h.flash(safe_str(h.escape(e)), category='error')
151 h.flash(safe_str(h.escape(e)), category='error')
152 raise HTTPNotFound()
152 raise HTTPNotFound()
153
153
154 def _get_filenode_or_redirect(self, commit_obj, path):
154 def _get_filenode_or_redirect(self, commit_obj, path):
155 """
155 """
156 Returns file_node, if error occurs or given path is directory,
156 Returns file_node, if error occurs or given path is directory,
157 it'll redirect to top level path
157 it'll redirect to top level path
158 """
158 """
159 _ = self.request.translate
159 _ = self.request.translate
160
160
161 try:
161 try:
162 file_node = commit_obj.get_node(path)
162 file_node = commit_obj.get_node(path)
163 if file_node.is_dir():
163 if file_node.is_dir():
164 raise RepositoryError('The given path is a directory')
164 raise RepositoryError('The given path is a directory')
165 except CommitDoesNotExistError:
165 except CommitDoesNotExistError:
166 log.exception('No such commit exists for this repository')
166 log.exception('No such commit exists for this repository')
167 h.flash(_('No such commit exists for this repository'), category='error')
167 h.flash(_('No such commit exists for this repository'), category='error')
168 raise HTTPNotFound()
168 raise HTTPNotFound()
169 except RepositoryError as e:
169 except RepositoryError as e:
170 log.warning('Repository error while fetching '
170 log.warning('Repository error while fetching '
171 'filenode `%s`. Err:%s', path, e)
171 'filenode `%s`. Err:%s', path, e)
172 h.flash(safe_str(h.escape(e)), category='error')
172 h.flash(safe_str(h.escape(e)), category='error')
173 raise HTTPNotFound()
173 raise HTTPNotFound()
174
174
175 return file_node
175 return file_node
176
176
177 def _is_valid_head(self, commit_id, repo):
177 def _is_valid_head(self, commit_id, repo):
178 # check if commit is a branch identifier- basically we cannot
178 # check if commit is a branch identifier- basically we cannot
179 # create multiple heads via file editing
179 # create multiple heads via file editing
180 valid_heads = repo.branches.keys() + repo.branches.values()
180 valid_heads = repo.branches.keys() + repo.branches.values()
181
181
182 if h.is_svn(repo) and not repo.is_empty():
182 if h.is_svn(repo) and not repo.is_empty():
183 # Note: Subversion only has one head, we add it here in case there
183 # Note: Subversion only has one head, we add it here in case there
184 # is no branch matched.
184 # is no branch matched.
185 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
185 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
186
186
187 # check if commit is a branch name or branch hash
187 # check if commit is a branch name or branch hash
188 return commit_id in valid_heads
188 return commit_id in valid_heads
189
189
190 def _get_tree_cache_manager(self, namespace_type):
191 _namespace = caches.get_repo_namespace_key(
192 namespace_type, self.db_repo_name)
193 return caches.get_cache_manager('repo_cache_long', _namespace)
194 
195 def _get_tree_at_commit(
196 self, c, commit_id, f_path, full_load=False, force=False):
197 def _cached_tree():
198 log.debug('Generating cached file tree for %s, %s, %s',
199 self.db_repo_name, commit_id, f_path)
200 
201 c.full_load = full_load
202 return render(
203 'rhodecode:templates/files/files_browser_tree.mako',
204 self._get_template_context(c), self.request)
205 
206 cache_manager = self._get_tree_cache_manager(caches.FILE_TREE)
207 
208 cache_key = caches.compute_key_from_params(
209 self.db_repo_name, commit_id, f_path)
210 
211 if force:
212 # we want to force recompute of caches
213 cache_manager.remove_value(cache_key)
214 
215 return cache_manager.get(cache_key, createfunc=_cached_tree)
216 
190 def _get_tree_at_commit(
191 self, c, commit_id, f_path, full_load=False):
192 
193 repo_id = self.db_repo.repo_id
194 
195 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
196 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
197 
198 @region.cache_on_arguments(namespace=cache_namespace_uid)
199 def compute_file_tree(repo_id, commit_id, f_path, full_load):
200 log.debug('Generating cached file tree for repo_id: %s, %s, %s',
201 repo_id, commit_id, f_path)
202 
203 c.full_load = full_load
204 return render(
205 'rhodecode:templates/files/files_browser_tree.mako',
206 self._get_template_context(c), self.request)
207 
208 return compute_file_tree(self.db_repo.repo_id, commit_id, f_path, full_load)
209
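The rewrite above drops the hand-built compute_key_from_params key: with cache_on_arguments the decorated function's arguments become the cache key, and single entries can be dropped through the decorator itself. A minimal sketch with the in-memory backend (the rendering is faked, the namespace is a placeholder):

# Sketch: dogpile.cache derives the cache key from the function arguments.
from dogpile.cache import make_region

region = make_region().configure('dogpile.cache.memory')


@region.cache_on_arguments(namespace='cache_repo.1')
def compute_file_tree(repo_id, commit_id, f_path, full_load):
    # stand-in for rendering the (expensive) file browser tree
    return 'tree(%s, %s, %s, %s)' % (repo_id, commit_id, f_path, full_load)


compute_file_tree(1, 'abc123', '/', False)              # computed, then cached
compute_file_tree(1, 'abc123', '/', False)              # cache hit: same arguments
compute_file_tree(1, 'def456', '/', False)              # new commit id, new cache entry
compute_file_tree.invalidate(1, 'abc123', '/', False)   # drop just that one entry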
217 def _get_archive_spec(self, fname):
210 def _get_archive_spec(self, fname):
218 log.debug('Detecting archive spec for: `%s`', fname)
211 log.debug('Detecting archive spec for: `%s`', fname)
219
212
220 fileformat = None
213 fileformat = None
221 ext = None
214 ext = None
222 content_type = None
215 content_type = None
223 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
216 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
224 content_type, extension = ext_data
217 content_type, extension = ext_data
225
218
226 if fname.endswith(extension):
219 if fname.endswith(extension):
227 fileformat = a_type
220 fileformat = a_type
228 log.debug('archive is of type: %s', fileformat)
221 log.debug('archive is of type: %s', fileformat)
229 ext = extension
222 ext = extension
230 break
223 break
231
224
232 if not fileformat:
225 if not fileformat:
233 raise ValueError()
226 raise ValueError()
234
227
235 # left over part of whole fname is the commit
228 # left over part of whole fname is the commit
236 commit_id = fname[:-len(ext)]
229 commit_id = fname[:-len(ext)]
237
230
238 return commit_id, ext, fileformat, content_type
231 return commit_id, ext, fileformat, content_type
239
232
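A standalone sketch of the filename parsing above. The mapping mirrors the shape of settings.ARCHIVE_SPECS, type mapped to (content type, extension), but the concrete entries here are assumptions:

# Simplified stand-in for settings.ARCHIVE_SPECS: {type: (content_type, extension)}
ARCHIVE_SPECS = {
    'tgz': ('application/x-gzip', '.tar.gz'),
    'tbz2': ('application/x-bzip2', '.tar.bz2'),
    'zip': ('application/zip', '.zip'),
}


def get_archive_spec(fname):
    for a_type, (content_type, extension) in ARCHIVE_SPECS.items():
        if fname.endswith(extension):
            # leftover part of the filename is the commit id
            return fname[:-len(extension)], extension, a_type, content_type
    raise ValueError('Unknown archive type for: `%s`' % fname)


print(get_archive_spec('deadbeef1234.tar.gz'))
# ('deadbeef1234', '.tar.gz', 'tgz', 'application/x-gzip')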
240 @LoginRequired()
233 @LoginRequired()
241 @HasRepoPermissionAnyDecorator(
234 @HasRepoPermissionAnyDecorator(
242 'repository.read', 'repository.write', 'repository.admin')
235 'repository.read', 'repository.write', 'repository.admin')
243 @view_config(
236 @view_config(
244 route_name='repo_archivefile', request_method='GET',
237 route_name='repo_archivefile', request_method='GET',
245 renderer=None)
238 renderer=None)
246 def repo_archivefile(self):
239 def repo_archivefile(self):
247 # archive cache config
240 # archive cache config
248 from rhodecode import CONFIG
241 from rhodecode import CONFIG
249 _ = self.request.translate
242 _ = self.request.translate
250 self.load_default_context()
243 self.load_default_context()
251
244
252 fname = self.request.matchdict['fname']
245 fname = self.request.matchdict['fname']
253 subrepos = self.request.GET.get('subrepos') == 'true'
246 subrepos = self.request.GET.get('subrepos') == 'true'
254
247
255 if not self.db_repo.enable_downloads:
248 if not self.db_repo.enable_downloads:
256 return Response(_('Downloads disabled'))
249 return Response(_('Downloads disabled'))
257
250
258 try:
251 try:
259 commit_id, ext, fileformat, content_type = \
252 commit_id, ext, fileformat, content_type = \
260 self._get_archive_spec(fname)
253 self._get_archive_spec(fname)
261 except ValueError:
254 except ValueError:
262 return Response(_('Unknown archive type for: `{}`').format(
255 return Response(_('Unknown archive type for: `{}`').format(
263 h.escape(fname)))
256 h.escape(fname)))
264
257
265 try:
258 try:
266 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
259 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
267 except CommitDoesNotExistError:
260 except CommitDoesNotExistError:
268 return Response(_('Unknown commit_id {}').format(
261 return Response(_('Unknown commit_id {}').format(
269 h.escape(commit_id)))
262 h.escape(commit_id)))
270 except EmptyRepositoryError:
263 except EmptyRepositoryError:
271 return Response(_('Empty repository'))
264 return Response(_('Empty repository'))
272
265
273 archive_name = '%s-%s%s%s' % (
266 archive_name = '%s-%s%s%s' % (
274 safe_str(self.db_repo_name.replace('/', '_')),
267 safe_str(self.db_repo_name.replace('/', '_')),
275 '-sub' if subrepos else '',
268 '-sub' if subrepos else '',
276 safe_str(commit.short_id), ext)
269 safe_str(commit.short_id), ext)
277
270
278 use_cached_archive = False
271 use_cached_archive = False
279 archive_cache_enabled = CONFIG.get(
272 archive_cache_enabled = CONFIG.get(
280 'archive_cache_dir') and not self.request.GET.get('no_cache')
273 'archive_cache_dir') and not self.request.GET.get('no_cache')
281
274
282 if archive_cache_enabled:
275 if archive_cache_enabled:
283 # check if it's ok to write
276 # check if it's ok to write
284 if not os.path.isdir(CONFIG['archive_cache_dir']):
277 if not os.path.isdir(CONFIG['archive_cache_dir']):
285 os.makedirs(CONFIG['archive_cache_dir'])
278 os.makedirs(CONFIG['archive_cache_dir'])
286 cached_archive_path = os.path.join(
279 cached_archive_path = os.path.join(
287 CONFIG['archive_cache_dir'], archive_name)
280 CONFIG['archive_cache_dir'], archive_name)
288 if os.path.isfile(cached_archive_path):
281 if os.path.isfile(cached_archive_path):
289 log.debug('Found cached archive in %s', cached_archive_path)
282 log.debug('Found cached archive in %s', cached_archive_path)
290 fd, archive = None, cached_archive_path
283 fd, archive = None, cached_archive_path
291 use_cached_archive = True
284 use_cached_archive = True
292 else:
285 else:
293 log.debug('Archive %s is not yet cached', archive_name)
286 log.debug('Archive %s is not yet cached', archive_name)
294
287
295 if not use_cached_archive:
288 if not use_cached_archive:
296 # generate new archive
289 # generate new archive
297 fd, archive = tempfile.mkstemp()
290 fd, archive = tempfile.mkstemp()
298 log.debug('Creating new temp archive in %s', archive)
291 log.debug('Creating new temp archive in %s', archive)
299 try:
292 try:
300 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
293 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
301 except ImproperArchiveTypeError:
294 except ImproperArchiveTypeError:
302 return _('Unknown archive type')
295 return _('Unknown archive type')
303 if archive_cache_enabled:
296 if archive_cache_enabled:
304 # if we generated the archive and we have cache enabled
297 # if we generated the archive and we have cache enabled
305 # let's use this for future
298 # let's use this for future
306 log.debug('Storing new archive in %s', cached_archive_path)
299 log.debug('Storing new archive in %s', cached_archive_path)
307 shutil.move(archive, cached_archive_path)
300 shutil.move(archive, cached_archive_path)
308 archive = cached_archive_path
301 archive = cached_archive_path
309
302
310 # store download action
303 # store download action
311 audit_logger.store_web(
304 audit_logger.store_web(
312 'repo.archive.download', action_data={
305 'repo.archive.download', action_data={
313 'user_agent': self.request.user_agent,
306 'user_agent': self.request.user_agent,
314 'archive_name': archive_name,
307 'archive_name': archive_name,
315 'archive_spec': fname,
308 'archive_spec': fname,
316 'archive_cached': use_cached_archive},
309 'archive_cached': use_cached_archive},
317 user=self._rhodecode_user,
310 user=self._rhodecode_user,
318 repo=self.db_repo,
311 repo=self.db_repo,
319 commit=True
312 commit=True
320 )
313 )
321
314
322 def get_chunked_archive(archive):
315 def get_chunked_archive(archive):
323 with open(archive, 'rb') as stream:
316 with open(archive, 'rb') as stream:
324 while True:
317 while True:
325 data = stream.read(16 * 1024)
318 data = stream.read(16 * 1024)
326 if not data:
319 if not data:
327 if fd: # fd means we used temporary file
320 if fd: # fd means we used temporary file
328 os.close(fd)
321 os.close(fd)
329 if not archive_cache_enabled:
322 if not archive_cache_enabled:
330 log.debug('Destroying temp archive %s', archive)
323 log.debug('Destroying temp archive %s', archive)
331 os.remove(archive)
324 os.remove(archive)
332 break
325 break
333 yield data
326 yield data
334
327
335 response = Response(app_iter=get_chunked_archive(archive))
328 response = Response(app_iter=get_chunked_archive(archive))
336 response.content_disposition = str(
329 response.content_disposition = str(
337 'attachment; filename=%s' % archive_name)
330 'attachment; filename=%s' % archive_name)
338 response.content_type = str(content_type)
331 response.content_type = str(content_type)
339
332
340 return response
333 return response
341
334
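The archive download above streams through a generator so the file is never read into memory at once. A self-contained sketch of that pattern with a Pyramid Response (the file path is hypothetical):

# Stream a file to the client in 16kB chunks via a generator app_iter.
from pyramid.response import Response


def iter_file_chunks(path, chunk_size=16 * 1024):
    with open(path, 'rb') as stream:
        while True:
            data = stream.read(chunk_size)
            if not data:
                break
            yield data


def download_view(request):
    archive_path = '/tmp/example-archive.zip'   # hypothetical path
    response = Response(app_iter=iter_file_chunks(archive_path))
    response.content_type = 'application/zip'
    response.content_disposition = 'attachment; filename=example-archive.zip'
    return response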
342 def _get_file_node(self, commit_id, f_path):
335 def _get_file_node(self, commit_id, f_path):
343 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
336 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
344 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
337 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
345 try:
338 try:
346 node = commit.get_node(f_path)
339 node = commit.get_node(f_path)
347 if node.is_dir():
340 if node.is_dir():
348 raise NodeError('%s path is a %s not a file'
341 raise NodeError('%s path is a %s not a file'
349 % (node, type(node)))
342 % (node, type(node)))
350 except NodeDoesNotExistError:
343 except NodeDoesNotExistError:
351 commit = EmptyCommit(
344 commit = EmptyCommit(
352 commit_id=commit_id,
345 commit_id=commit_id,
353 idx=commit.idx,
346 idx=commit.idx,
354 repo=commit.repository,
347 repo=commit.repository,
355 alias=commit.repository.alias,
348 alias=commit.repository.alias,
356 message=commit.message,
349 message=commit.message,
357 author=commit.author,
350 author=commit.author,
358 date=commit.date)
351 date=commit.date)
359 node = FileNode(f_path, '', commit=commit)
352 node = FileNode(f_path, '', commit=commit)
360 else:
353 else:
361 commit = EmptyCommit(
354 commit = EmptyCommit(
362 repo=self.rhodecode_vcs_repo,
355 repo=self.rhodecode_vcs_repo,
363 alias=self.rhodecode_vcs_repo.alias)
356 alias=self.rhodecode_vcs_repo.alias)
364 node = FileNode(f_path, '', commit=commit)
357 node = FileNode(f_path, '', commit=commit)
365 return node
358 return node
366
359
367 @LoginRequired()
360 @LoginRequired()
368 @HasRepoPermissionAnyDecorator(
361 @HasRepoPermissionAnyDecorator(
369 'repository.read', 'repository.write', 'repository.admin')
362 'repository.read', 'repository.write', 'repository.admin')
370 @view_config(
363 @view_config(
371 route_name='repo_files_diff', request_method='GET',
364 route_name='repo_files_diff', request_method='GET',
372 renderer=None)
365 renderer=None)
373 def repo_files_diff(self):
366 def repo_files_diff(self):
374 c = self.load_default_context()
367 c = self.load_default_context()
375 f_path = self._get_f_path(self.request.matchdict)
368 f_path = self._get_f_path(self.request.matchdict)
376 diff1 = self.request.GET.get('diff1', '')
369 diff1 = self.request.GET.get('diff1', '')
377 diff2 = self.request.GET.get('diff2', '')
370 diff2 = self.request.GET.get('diff2', '')
378
371
379 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
372 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
380
373
381 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
374 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
382 line_context = self.request.GET.get('context', 3)
375 line_context = self.request.GET.get('context', 3)
383
376
384 if not any((diff1, diff2)):
377 if not any((diff1, diff2)):
385 h.flash(
378 h.flash(
386 'Need query parameter "diff1" or "diff2" to generate a diff.',
379 'Need query parameter "diff1" or "diff2" to generate a diff.',
387 category='error')
380 category='error')
388 raise HTTPBadRequest()
381 raise HTTPBadRequest()
389
382
390 c.action = self.request.GET.get('diff')
383 c.action = self.request.GET.get('diff')
391 if c.action not in ['download', 'raw']:
384 if c.action not in ['download', 'raw']:
392 compare_url = h.route_path(
385 compare_url = h.route_path(
393 'repo_compare',
386 'repo_compare',
394 repo_name=self.db_repo_name,
387 repo_name=self.db_repo_name,
395 source_ref_type='rev',
388 source_ref_type='rev',
396 source_ref=diff1,
389 source_ref=diff1,
397 target_repo=self.db_repo_name,
390 target_repo=self.db_repo_name,
398 target_ref_type='rev',
391 target_ref_type='rev',
399 target_ref=diff2,
392 target_ref=diff2,
400 _query=dict(f_path=f_path))
393 _query=dict(f_path=f_path))
401 # when rendering the diff in the browser, redirect to the newer compare view
394 # when rendering the diff in the browser, redirect to the newer compare view
402 raise HTTPFound(compare_url)
395 raise HTTPFound(compare_url)
403
396
404 try:
397 try:
405 node1 = self._get_file_node(diff1, path1)
398 node1 = self._get_file_node(diff1, path1)
406 node2 = self._get_file_node(diff2, f_path)
399 node2 = self._get_file_node(diff2, f_path)
407 except (RepositoryError, NodeError):
400 except (RepositoryError, NodeError):
408 log.exception("Exception while trying to get node from repository")
401 log.exception("Exception while trying to get node from repository")
409 raise HTTPFound(
402 raise HTTPFound(
410 h.route_path('repo_files', repo_name=self.db_repo_name,
403 h.route_path('repo_files', repo_name=self.db_repo_name,
411 commit_id='tip', f_path=f_path))
404 commit_id='tip', f_path=f_path))
412
405
413 if all(isinstance(node.commit, EmptyCommit)
406 if all(isinstance(node.commit, EmptyCommit)
414 for node in (node1, node2)):
407 for node in (node1, node2)):
415 raise HTTPNotFound()
408 raise HTTPNotFound()
416
409
417 c.commit_1 = node1.commit
410 c.commit_1 = node1.commit
418 c.commit_2 = node2.commit
411 c.commit_2 = node2.commit
419
412
420 if c.action == 'download':
413 if c.action == 'download':
421 _diff = diffs.get_gitdiff(node1, node2,
414 _diff = diffs.get_gitdiff(node1, node2,
422 ignore_whitespace=ignore_whitespace,
415 ignore_whitespace=ignore_whitespace,
423 context=line_context)
416 context=line_context)
424 diff = diffs.DiffProcessor(_diff, format='gitdiff')
417 diff = diffs.DiffProcessor(_diff, format='gitdiff')
425
418
426 response = Response(self.path_filter.get_raw_patch(diff))
419 response = Response(self.path_filter.get_raw_patch(diff))
427 response.content_type = 'text/plain'
420 response.content_type = 'text/plain'
428 response.content_disposition = (
421 response.content_disposition = (
429 'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
422 'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
430 )
423 )
431 charset = self._get_default_encoding(c)
424 charset = self._get_default_encoding(c)
432 if charset:
425 if charset:
433 response.charset = charset
426 response.charset = charset
434 return response
427 return response
435
428
436 elif c.action == 'raw':
429 elif c.action == 'raw':
437 _diff = diffs.get_gitdiff(node1, node2,
430 _diff = diffs.get_gitdiff(node1, node2,
438 ignore_whitespace=ignore_whitespace,
431 ignore_whitespace=ignore_whitespace,
439 context=line_context)
432 context=line_context)
440 diff = diffs.DiffProcessor(_diff, format='gitdiff')
433 diff = diffs.DiffProcessor(_diff, format='gitdiff')
441
434
442 response = Response(self.path_filter.get_raw_patch(diff))
435 response = Response(self.path_filter.get_raw_patch(diff))
443 response.content_type = 'text/plain'
436 response.content_type = 'text/plain'
444 charset = self._get_default_encoding(c)
437 charset = self._get_default_encoding(c)
445 if charset:
438 if charset:
446 response.charset = charset
439 response.charset = charset
447 return response
440 return response
448
441
449 # in case we ever end up here
442 # in case we ever end up here
450 raise HTTPNotFound()
443 raise HTTPNotFound()
451
444
452 @LoginRequired()
445 @LoginRequired()
453 @HasRepoPermissionAnyDecorator(
446 @HasRepoPermissionAnyDecorator(
454 'repository.read', 'repository.write', 'repository.admin')
447 'repository.read', 'repository.write', 'repository.admin')
455 @view_config(
448 @view_config(
456 route_name='repo_files_diff_2way_redirect', request_method='GET',
449 route_name='repo_files_diff_2way_redirect', request_method='GET',
457 renderer=None)
450 renderer=None)
458 def repo_files_diff_2way_redirect(self):
451 def repo_files_diff_2way_redirect(self):
459 """
452 """
460 Kept only to make OLD links work
453 Kept only to make OLD links work
461 """
454 """
462 f_path = self._get_f_path_unchecked(self.request.matchdict)
455 f_path = self._get_f_path_unchecked(self.request.matchdict)
463 diff1 = self.request.GET.get('diff1', '')
456 diff1 = self.request.GET.get('diff1', '')
464 diff2 = self.request.GET.get('diff2', '')
457 diff2 = self.request.GET.get('diff2', '')
465
458
466 if not any((diff1, diff2)):
459 if not any((diff1, diff2)):
467 h.flash(
460 h.flash(
468 'Need query parameter "diff1" or "diff2" to generate a diff.',
461 'Need query parameter "diff1" or "diff2" to generate a diff.',
469 category='error')
462 category='error')
470 raise HTTPBadRequest()
463 raise HTTPBadRequest()
471
464
472 compare_url = h.route_path(
465 compare_url = h.route_path(
473 'repo_compare',
466 'repo_compare',
474 repo_name=self.db_repo_name,
467 repo_name=self.db_repo_name,
475 source_ref_type='rev',
468 source_ref_type='rev',
476 source_ref=diff1,
469 source_ref=diff1,
477 target_ref_type='rev',
470 target_ref_type='rev',
478 target_ref=diff2,
471 target_ref=diff2,
479 _query=dict(f_path=f_path, diffmode='sideside',
472 _query=dict(f_path=f_path, diffmode='sideside',
480 target_repo=self.db_repo_name,))
473 target_repo=self.db_repo_name,))
481 raise HTTPFound(compare_url)
474 raise HTTPFound(compare_url)
482
475
483 @LoginRequired()
476 @LoginRequired()
484 @HasRepoPermissionAnyDecorator(
477 @HasRepoPermissionAnyDecorator(
485 'repository.read', 'repository.write', 'repository.admin')
478 'repository.read', 'repository.write', 'repository.admin')
486 @view_config(
479 @view_config(
487 route_name='repo_files', request_method='GET',
480 route_name='repo_files', request_method='GET',
488 renderer=None)
481 renderer=None)
489 @view_config(
482 @view_config(
490 route_name='repo_files:default_path', request_method='GET',
483 route_name='repo_files:default_path', request_method='GET',
491 renderer=None)
484 renderer=None)
492 @view_config(
485 @view_config(
493 route_name='repo_files:default_commit', request_method='GET',
486 route_name='repo_files:default_commit', request_method='GET',
494 renderer=None)
487 renderer=None)
495 @view_config(
488 @view_config(
496 route_name='repo_files:rendered', request_method='GET',
489 route_name='repo_files:rendered', request_method='GET',
497 renderer=None)
490 renderer=None)
498 @view_config(
491 @view_config(
499 route_name='repo_files:annotated', request_method='GET',
492 route_name='repo_files:annotated', request_method='GET',
500 renderer=None)
493 renderer=None)
501 def repo_files(self):
494 def repo_files(self):
502 c = self.load_default_context()
495 c = self.load_default_context()
503
496
504 view_name = getattr(self.request.matched_route, 'name', None)
497 view_name = getattr(self.request.matched_route, 'name', None)
505
498
506 c.annotate = view_name == 'repo_files:annotated'
499 c.annotate = view_name == 'repo_files:annotated'
507 # default is False, but .rst/.md files are auto-rendered later; we can
500 # default is False, but .rst/.md files are auto-rendered later; we can
508 # override the auto rendering by setting this GET flag
501 # override the auto rendering by setting this GET flag
509 c.renderer = view_name == 'repo_files:rendered' or \
502 c.renderer = view_name == 'repo_files:rendered' or \
510 not self.request.GET.get('no-render', False)
503 not self.request.GET.get('no-render', False)
511
504
512 # redirect to the commit_id provided by the form, if any
505 # redirect to the commit_id provided by the form, if any
513 get_commit_id = self.request.GET.get('at_rev', None)
506 get_commit_id = self.request.GET.get('at_rev', None)
514 if get_commit_id:
507 if get_commit_id:
515 self._get_commit_or_redirect(get_commit_id)
508 self._get_commit_or_redirect(get_commit_id)
516
509
517 commit_id, f_path = self._get_commit_and_path()
510 commit_id, f_path = self._get_commit_and_path()
518 c.commit = self._get_commit_or_redirect(commit_id)
511 c.commit = self._get_commit_or_redirect(commit_id)
519 c.branch = self.request.GET.get('branch', None)
512 c.branch = self.request.GET.get('branch', None)
520 c.f_path = f_path
513 c.f_path = f_path
521
514
522 # prev link
515 # prev link
523 try:
516 try:
524 prev_commit = c.commit.prev(c.branch)
517 prev_commit = c.commit.prev(c.branch)
525 c.prev_commit = prev_commit
518 c.prev_commit = prev_commit
526 c.url_prev = h.route_path(
519 c.url_prev = h.route_path(
527 'repo_files', repo_name=self.db_repo_name,
520 'repo_files', repo_name=self.db_repo_name,
528 commit_id=prev_commit.raw_id, f_path=f_path)
521 commit_id=prev_commit.raw_id, f_path=f_path)
529 if c.branch:
522 if c.branch:
530 c.url_prev += '?branch=%s' % c.branch
523 c.url_prev += '?branch=%s' % c.branch
531 except (CommitDoesNotExistError, VCSError):
524 except (CommitDoesNotExistError, VCSError):
532 c.url_prev = '#'
525 c.url_prev = '#'
533 c.prev_commit = EmptyCommit()
526 c.prev_commit = EmptyCommit()
534
527
535 # next link
528 # next link
536 try:
529 try:
537 next_commit = c.commit.next(c.branch)
530 next_commit = c.commit.next(c.branch)
538 c.next_commit = next_commit
531 c.next_commit = next_commit
539 c.url_next = h.route_path(
532 c.url_next = h.route_path(
540 'repo_files', repo_name=self.db_repo_name,
533 'repo_files', repo_name=self.db_repo_name,
541 commit_id=next_commit.raw_id, f_path=f_path)
534 commit_id=next_commit.raw_id, f_path=f_path)
542 if c.branch:
535 if c.branch:
543 c.url_next += '?branch=%s' % c.branch
536 c.url_next += '?branch=%s' % c.branch
544 except (CommitDoesNotExistError, VCSError):
537 except (CommitDoesNotExistError, VCSError):
545 c.url_next = '#'
538 c.url_next = '#'
546 c.next_commit = EmptyCommit()
539 c.next_commit = EmptyCommit()
547
540
548 # files or dirs
541 # files or dirs
549 try:
542 try:
550 c.file = c.commit.get_node(f_path)
543 c.file = c.commit.get_node(f_path)
551 c.file_author = True
544 c.file_author = True
552 c.file_tree = ''
545 c.file_tree = ''
553
546
554 # load file content
547 # load file content
555 if c.file.is_file():
548 if c.file.is_file():
556 c.lf_node = c.file.get_largefile_node()
549 c.lf_node = c.file.get_largefile_node()
557
550
558 c.file_source_page = 'true'
551 c.file_source_page = 'true'
559 c.file_last_commit = c.file.last_commit
552 c.file_last_commit = c.file.last_commit
560 if c.file.size < c.visual.cut_off_limit_diff:
553 if c.file.size < c.visual.cut_off_limit_diff:
561 if c.annotate: # annotation has precedence over renderer
554 if c.annotate: # annotation has precedence over renderer
562 c.annotated_lines = filenode_as_annotated_lines_tokens(
555 c.annotated_lines = filenode_as_annotated_lines_tokens(
563 c.file
556 c.file
564 )
557 )
565 else:
558 else:
566 c.renderer = (
559 c.renderer = (
567 c.renderer and h.renderer_from_filename(c.file.path)
560 c.renderer and h.renderer_from_filename(c.file.path)
568 )
561 )
569 if not c.renderer:
562 if not c.renderer:
570 c.lines = filenode_as_lines_tokens(c.file)
563 c.lines = filenode_as_lines_tokens(c.file)
571
564
572 c.on_branch_head = self._is_valid_head(
565 c.on_branch_head = self._is_valid_head(
573 commit_id, self.rhodecode_vcs_repo)
566 commit_id, self.rhodecode_vcs_repo)
574
567
575 branch = c.commit.branch if (
568 branch = c.commit.branch if (
576 c.commit.branch and '/' not in c.commit.branch) else None
569 c.commit.branch and '/' not in c.commit.branch) else None
577 c.branch_or_raw_id = branch or c.commit.raw_id
570 c.branch_or_raw_id = branch or c.commit.raw_id
578 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
571 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
579
572
580 author = c.file_last_commit.author
573 author = c.file_last_commit.author
581 c.authors = [[
574 c.authors = [[
582 h.email(author),
575 h.email(author),
583 h.person(author, 'username_or_name_or_email'),
576 h.person(author, 'username_or_name_or_email'),
584 1
577 1
585 ]]
578 ]]
586
579
587 else: # load tree content at path
580 else: # load tree content at path
588 c.file_source_page = 'false'
581 c.file_source_page = 'false'
589 c.authors = []
582 c.authors = []
590 # this loads a simple tree without metadata to speed things up
583 # this loads a simple tree without metadata to speed things up
591 # later, via ajax, we call repo_nodetree_full and fetch the whole tree
584 # later, via ajax, we call repo_nodetree_full and fetch the whole tree
592 c.file_tree = self._get_tree_at_commit(
585 c.file_tree = self._get_tree_at_commit(
593 c, c.commit.raw_id, f_path)
586 c, c.commit.raw_id, f_path)
594
587
595 except RepositoryError as e:
588 except RepositoryError as e:
596 h.flash(safe_str(h.escape(e)), category='error')
589 h.flash(safe_str(h.escape(e)), category='error')
597 raise HTTPNotFound()
590 raise HTTPNotFound()
598
591
599 if self.request.environ.get('HTTP_X_PJAX'):
592 if self.request.environ.get('HTTP_X_PJAX'):
600 html = render('rhodecode:templates/files/files_pjax.mako',
593 html = render('rhodecode:templates/files/files_pjax.mako',
601 self._get_template_context(c), self.request)
594 self._get_template_context(c), self.request)
602 else:
595 else:
603 html = render('rhodecode:templates/files/files.mako',
596 html = render('rhodecode:templates/files/files.mako',
604 self._get_template_context(c), self.request)
597 self._get_template_context(c), self.request)
605 return Response(html)
598 return Response(html)
606
599
607 @HasRepoPermissionAnyDecorator(
600 @HasRepoPermissionAnyDecorator(
608 'repository.read', 'repository.write', 'repository.admin')
601 'repository.read', 'repository.write', 'repository.admin')
609 @view_config(
602 @view_config(
610 route_name='repo_files:annotated_previous', request_method='GET',
603 route_name='repo_files:annotated_previous', request_method='GET',
611 renderer=None)
604 renderer=None)
612 def repo_files_annotated_previous(self):
605 def repo_files_annotated_previous(self):
613 self.load_default_context()
606 self.load_default_context()
614
607
615 commit_id, f_path = self._get_commit_and_path()
608 commit_id, f_path = self._get_commit_and_path()
616 commit = self._get_commit_or_redirect(commit_id)
609 commit = self._get_commit_or_redirect(commit_id)
617 prev_commit_id = commit.raw_id
610 prev_commit_id = commit.raw_id
618 line_anchor = self.request.GET.get('line_anchor')
611 line_anchor = self.request.GET.get('line_anchor')
619 is_file = False
612 is_file = False
620 try:
613 try:
621 _file = commit.get_node(f_path)
614 _file = commit.get_node(f_path)
622 is_file = _file.is_file()
615 is_file = _file.is_file()
623 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
616 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
624 pass
617 pass
625
618
626 if is_file:
619 if is_file:
627 history = commit.get_file_history(f_path)
620 history = commit.get_file_history(f_path)
628 prev_commit_id = history[1].raw_id \
621 prev_commit_id = history[1].raw_id \
629 if len(history) > 1 else prev_commit_id
622 if len(history) > 1 else prev_commit_id
630 prev_url = h.route_path(
623 prev_url = h.route_path(
631 'repo_files:annotated', repo_name=self.db_repo_name,
624 'repo_files:annotated', repo_name=self.db_repo_name,
632 commit_id=prev_commit_id, f_path=f_path,
625 commit_id=prev_commit_id, f_path=f_path,
633 _anchor='L{}'.format(line_anchor))
626 _anchor='L{}'.format(line_anchor))
634
627
635 raise HTTPFound(prev_url)
628 raise HTTPFound(prev_url)
636
629
637 @LoginRequired()
630 @LoginRequired()
638 @HasRepoPermissionAnyDecorator(
631 @HasRepoPermissionAnyDecorator(
639 'repository.read', 'repository.write', 'repository.admin')
632 'repository.read', 'repository.write', 'repository.admin')
640 @view_config(
633 @view_config(
641 route_name='repo_nodetree_full', request_method='GET',
634 route_name='repo_nodetree_full', request_method='GET',
642 renderer=None, xhr=True)
635 renderer=None, xhr=True)
643 @view_config(
636 @view_config(
644 route_name='repo_nodetree_full:default_path', request_method='GET',
637 route_name='repo_nodetree_full:default_path', request_method='GET',
645 renderer=None, xhr=True)
638 renderer=None, xhr=True)
646 def repo_nodetree_full(self):
639 def repo_nodetree_full(self):
647 """
640 """
648 Returns rendered HTML of the file tree, including commit date,
641 Returns rendered HTML of the file tree, including commit date,
649 author and commit_id, for the specified combination of
642 author and commit_id, for the specified combination of
650 repo, commit_id and file path
643 repo, commit_id and file path
651 """
644 """
652 c = self.load_default_context()
645 c = self.load_default_context()
653
646
654 commit_id, f_path = self._get_commit_and_path()
647 commit_id, f_path = self._get_commit_and_path()
655 commit = self._get_commit_or_redirect(commit_id)
648 commit = self._get_commit_or_redirect(commit_id)
656 try:
649 try:
657 dir_node = commit.get_node(f_path)
650 dir_node = commit.get_node(f_path)
658 except RepositoryError as e:
651 except RepositoryError as e:
659 return Response('error: {}'.format(h.escape(safe_str(e))))
652 return Response('error: {}'.format(h.escape(safe_str(e))))
660
653
661 if dir_node.is_file():
654 if dir_node.is_file():
662 return Response('')
655 return Response('')
663
656
664 c.file = dir_node
657 c.file = dir_node
665 c.commit = commit
658 c.commit = commit
666
659
667 # using force=True here is a little trick: we flush the cache and
668 # recompute it under the same key as without full_load, so the
669 # fully loaded tree is returned instead of the partial one,
670 # and we store it in the cache
671 html = self._get_tree_at_commit(
660 html = self._get_tree_at_commit(
672 c, commit.raw_id, dir_node.path, full_load=True, force=True)
661 c, commit.raw_id, dir_node.path, full_load=True)
673
662
674 return Response(html)
663 return Response(html)
675
664
676 def _get_attachement_disposition(self, f_path):
665 def _get_attachement_disposition(self, f_path):
677 return 'attachment; filename=%s' % \
666 return 'attachment; filename=%s' % \
678 safe_str(f_path.split(Repository.NAME_SEP)[-1])
667 safe_str(f_path.split(Repository.NAME_SEP)[-1])
679
668
680 @LoginRequired()
669 @LoginRequired()
681 @HasRepoPermissionAnyDecorator(
670 @HasRepoPermissionAnyDecorator(
682 'repository.read', 'repository.write', 'repository.admin')
671 'repository.read', 'repository.write', 'repository.admin')
683 @view_config(
672 @view_config(
684 route_name='repo_file_raw', request_method='GET',
673 route_name='repo_file_raw', request_method='GET',
685 renderer=None)
674 renderer=None)
686 def repo_file_raw(self):
675 def repo_file_raw(self):
687 """
676 """
688 Action for "show as raw"; some mimetypes are "rendered" inline,
677 Action for "show as raw"; some mimetypes are "rendered" inline,
689 for example images and icons.
678 for example images and icons.
690 """
679 """
691 c = self.load_default_context()
680 c = self.load_default_context()
692
681
693 commit_id, f_path = self._get_commit_and_path()
682 commit_id, f_path = self._get_commit_and_path()
694 commit = self._get_commit_or_redirect(commit_id)
683 commit = self._get_commit_or_redirect(commit_id)
695 file_node = self._get_filenode_or_redirect(commit, f_path)
684 file_node = self._get_filenode_or_redirect(commit, f_path)
696
685
697 raw_mimetype_mapping = {
686 raw_mimetype_mapping = {
698 # map original mimetype to a mimetype used for "show as raw"
687 # map original mimetype to a mimetype used for "show as raw"
699 # you can also provide a content-disposition to override the
688 # you can also provide a content-disposition to override the
700 # default "attachment" disposition.
689 # default "attachment" disposition.
701 # orig_type: (new_type, new_dispo)
690 # orig_type: (new_type, new_dispo)
702
691
703 # show images inline:
692 # show images inline:
704 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
693 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
705 # for example render an SVG with javascript inside or even render
694 # for example render an SVG with javascript inside or even render
706 # HTML.
695 # HTML.
707 'image/x-icon': ('image/x-icon', 'inline'),
696 'image/x-icon': ('image/x-icon', 'inline'),
708 'image/png': ('image/png', 'inline'),
697 'image/png': ('image/png', 'inline'),
709 'image/gif': ('image/gif', 'inline'),
698 'image/gif': ('image/gif', 'inline'),
710 'image/jpeg': ('image/jpeg', 'inline'),
699 'image/jpeg': ('image/jpeg', 'inline'),
711 'application/pdf': ('application/pdf', 'inline'),
700 'application/pdf': ('application/pdf', 'inline'),
712 }
701 }
713
702
714 mimetype = file_node.mimetype
703 mimetype = file_node.mimetype
715 try:
704 try:
716 mimetype, disposition = raw_mimetype_mapping[mimetype]
705 mimetype, disposition = raw_mimetype_mapping[mimetype]
717 except KeyError:
706 except KeyError:
718 # we don't know anything special about this, handle it safely
707 # we don't know anything special about this, handle it safely
719 if file_node.is_binary:
708 if file_node.is_binary:
720 # treat binary files the same as a raw download
709 # treat binary files the same as a raw download
721 mimetype, disposition = 'application/octet-stream', 'attachment'
710 mimetype, disposition = 'application/octet-stream', 'attachment'
722 else:
711 else:
723 # do not just use the original mimetype, but force text/plain,
712 # do not just use the original mimetype, but force text/plain,
724 # otherwise it would serve text/html and that might be unsafe.
713 # otherwise it would serve text/html and that might be unsafe.
725 # Note: underlying vcs library fakes text/plain mimetype if the
714 # Note: underlying vcs library fakes text/plain mimetype if the
726 # mimetype cannot be determined and it thinks it is not
715 # mimetype cannot be determined and it thinks it is not
727 # binary. This might lead to erroneous text display in some
716 # binary. This might lead to erroneous text display in some
728 # cases, but helps in other cases, like with text files
717 # cases, but helps in other cases, like with text files
729 # without extension.
718 # without extension.
730 mimetype, disposition = 'text/plain', 'inline'
719 mimetype, disposition = 'text/plain', 'inline'
731
720
732 if disposition == 'attachment':
721 if disposition == 'attachment':
733 disposition = self._get_attachement_disposition(f_path)
722 disposition = self._get_attachement_disposition(f_path)
734
723
735 def stream_node():
724 def stream_node():
736 yield file_node.raw_bytes
725 yield file_node.raw_bytes
737
726
738 response = Response(app_iter=stream_node())
727 response = Response(app_iter=stream_node())
739 response.content_disposition = disposition
728 response.content_disposition = disposition
740 response.content_type = mimetype
729 response.content_type = mimetype
741
730
742 charset = self._get_default_encoding(c)
731 charset = self._get_default_encoding(c)
743 if charset:
732 if charset:
744 response.charset = charset
733 response.charset = charset
745
734
746 return response
735 return response
747
736
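
A minimal standalone sketch of the "show as raw" safety rules implemented above, for illustration only; the helper name resolve_raw_type and the sample assertions are assumptions, not part of RhodeCode's API.

# Sketch only: mirrors the mapping + fallback logic of repo_file_raw above.
RAW_MIMETYPE_MAPPING = {
    # known-safe types are served inline with their real mimetype
    'image/x-icon': ('image/x-icon', 'inline'),
    'image/png': ('image/png', 'inline'),
    'image/gif': ('image/gif', 'inline'),
    'image/jpeg': ('image/jpeg', 'inline'),
    'application/pdf': ('application/pdf', 'inline'),
}

def resolve_raw_type(mimetype, is_binary):
    if mimetype in RAW_MIMETYPE_MAPPING:
        return RAW_MIMETYPE_MAPPING[mimetype]
    if is_binary:
        # unknown binary content is always downloaded as an attachment
        return 'application/octet-stream', 'attachment'
    # anything else is forced to text/plain so the browser never interprets
    # repository content as text/html (or SVG), which could enable XSS
    return 'text/plain', 'inline'

assert resolve_raw_type('image/png', is_binary=True) == ('image/png', 'inline')
assert resolve_raw_type('text/html', is_binary=False) == ('text/plain', 'inline')
assert resolve_raw_type('application/zip', is_binary=True) == (
    'application/octet-stream', 'attachment')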
748 @LoginRequired()
737 @LoginRequired()
749 @HasRepoPermissionAnyDecorator(
738 @HasRepoPermissionAnyDecorator(
750 'repository.read', 'repository.write', 'repository.admin')
739 'repository.read', 'repository.write', 'repository.admin')
751 @view_config(
740 @view_config(
752 route_name='repo_file_download', request_method='GET',
741 route_name='repo_file_download', request_method='GET',
753 renderer=None)
742 renderer=None)
754 @view_config(
743 @view_config(
755 route_name='repo_file_download:legacy', request_method='GET',
744 route_name='repo_file_download:legacy', request_method='GET',
756 renderer=None)
745 renderer=None)
757 def repo_file_download(self):
746 def repo_file_download(self):
758 c = self.load_default_context()
747 c = self.load_default_context()
759
748
760 commit_id, f_path = self._get_commit_and_path()
749 commit_id, f_path = self._get_commit_and_path()
761 commit = self._get_commit_or_redirect(commit_id)
750 commit = self._get_commit_or_redirect(commit_id)
762 file_node = self._get_filenode_or_redirect(commit, f_path)
751 file_node = self._get_filenode_or_redirect(commit, f_path)
763
752
764 if self.request.GET.get('lf'):
753 if self.request.GET.get('lf'):
765 # only if the 'lf' GET flag is passed do we download this file
754 # only if the 'lf' GET flag is passed do we download this file
766 # as an LFS/largefile
755 # as an LFS/largefile
767 lf_node = file_node.get_largefile_node()
756 lf_node = file_node.get_largefile_node()
768 if lf_node:
757 if lf_node:
769 # overwrite our pointer with the REAL large-file
758 # overwrite our pointer with the REAL large-file
770 file_node = lf_node
759 file_node = lf_node
771
760
772 disposition = self._get_attachement_disposition(f_path)
761 disposition = self._get_attachement_disposition(f_path)
773
762
774 def stream_node():
763 def stream_node():
775 yield file_node.raw_bytes
764 yield file_node.raw_bytes
776
765
777 response = Response(app_iter=stream_node())
766 response = Response(app_iter=stream_node())
778 response.content_disposition = disposition
767 response.content_disposition = disposition
779 response.content_type = file_node.mimetype
768 response.content_type = file_node.mimetype
780
769
781 charset = self._get_default_encoding(c)
770 charset = self._get_default_encoding(c)
782 if charset:
771 if charset:
783 response.charset = charset
772 response.charset = charset
784
773
785 return response
774 return response
786
775
787 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
776 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
788 def _cached_nodes():
777
789 log.debug('Generating cached nodelist for %s, %s, %s',
778 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
790 repo_name, commit_id, f_path)
779 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
780
781 @region.cache_on_arguments(namespace=cache_namespace_uid)
782 def compute_file_search(repo_id, commit_id, f_path):
783 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
784 repo_id, commit_id, f_path)
791 try:
785 try:
792 _d, _f = ScmModel().get_nodes(
786 _d, _f = ScmModel().get_nodes(
793 repo_name, commit_id, f_path, flat=False)
787 repo_name, commit_id, f_path, flat=False)
794 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
788 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
795 log.exception(safe_str(e))
789 log.exception(safe_str(e))
796 h.flash(safe_str(h.escape(e)), category='error')
790 h.flash(safe_str(h.escape(e)), category='error')
797 raise HTTPFound(h.route_path(
791 raise HTTPFound(h.route_path(
798 'repo_files', repo_name=self.db_repo_name,
792 'repo_files', repo_name=self.db_repo_name,
799 commit_id='tip', f_path='/'))
793 commit_id='tip', f_path='/'))
800 return _d + _f
794 return _d + _f
801
795
802 cache_manager = self._get_tree_cache_manager(
796 return compute_file_search(self.db_repo.repo_id, commit_id, f_path)
803 caches.FILE_SEARCH_TREE_META)
804
805 cache_key = caches.compute_key_from_params(
806 repo_name, commit_id, f_path)
807 return cache_manager.get(cache_key, createfunc=_cached_nodes)
808
797
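
This hunk is the heart of the change: beaker's cache_manager.get is replaced with an rc_cache region plus cache_on_arguments. Below is a minimal sketch of the same pattern built directly on dogpile.cache, which rc_cache builds on; the memory backend, expiration time and the made-up repo_id are assumptions for illustration, not RhodeCode's configuration.

from dogpile.cache import make_region

# Sketch: a per-repository cache region, keyed by a namespace derived from
# repo_id, so invalidating one repo's cache does not touch another's.
region = make_region().configure(
    'dogpile.cache.memory',      # rc_cache would configure a real backend here
    expiration_time=300,
)

cache_namespace_uid = 'cache_repo.{}'.format(42)   # repo_id=42 is made up

@region.cache_on_arguments(namespace=cache_namespace_uid)
def compute_file_search(repo_id, commit_id, f_path):
    # the expensive tree walk runs only on a cache miss; the decorator keys
    # the result on the namespace plus the function arguments
    return ['{}/file-{}.txt'.format(f_path.rstrip('/'), i) for i in range(3)]

first = compute_file_search(42, 'abc123', '/')    # computed and stored
second = compute_file_search(42, 'abc123', '/')   # served from the cache
assert first == second

# explicit invalidation for exactly these arguments, no force= flag needed
compute_file_search.invalidate(42, 'abc123', '/')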
809 @LoginRequired()
798 @LoginRequired()
810 @HasRepoPermissionAnyDecorator(
799 @HasRepoPermissionAnyDecorator(
811 'repository.read', 'repository.write', 'repository.admin')
800 'repository.read', 'repository.write', 'repository.admin')
812 @view_config(
801 @view_config(
813 route_name='repo_files_nodelist', request_method='GET',
802 route_name='repo_files_nodelist', request_method='GET',
814 renderer='json_ext', xhr=True)
803 renderer='json_ext', xhr=True)
815 def repo_nodelist(self):
804 def repo_nodelist(self):
816 self.load_default_context()
805 self.load_default_context()
817
806
818 commit_id, f_path = self._get_commit_and_path()
807 commit_id, f_path = self._get_commit_and_path()
819 commit = self._get_commit_or_redirect(commit_id)
808 commit = self._get_commit_or_redirect(commit_id)
820
809
821 metadata = self._get_nodelist_at_commit(
810 metadata = self._get_nodelist_at_commit(
822 self.db_repo_name, commit.raw_id, f_path)
811 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
823 return {'nodes': metadata}
812 return {'nodes': metadata}
824
813
825 def _create_references(
814 def _create_references(
826 self, branches_or_tags, symbolic_reference, f_path):
815 self, branches_or_tags, symbolic_reference, f_path):
827 items = []
816 items = []
828 for name, commit_id in branches_or_tags.items():
817 for name, commit_id in branches_or_tags.items():
829 sym_ref = symbolic_reference(commit_id, name, f_path)
818 sym_ref = symbolic_reference(commit_id, name, f_path)
830 items.append((sym_ref, name))
819 items.append((sym_ref, name))
831 return items
820 return items
832
821
833 def _symbolic_reference(self, commit_id, name, f_path):
822 def _symbolic_reference(self, commit_id, name, f_path):
834 return commit_id
823 return commit_id
835
824
836 def _symbolic_reference_svn(self, commit_id, name, f_path):
825 def _symbolic_reference_svn(self, commit_id, name, f_path):
837 new_f_path = vcspath.join(name, f_path)
826 new_f_path = vcspath.join(name, f_path)
838 return u'%s@%s' % (new_f_path, commit_id)
827 return u'%s@%s' % (new_f_path, commit_id)
839
828
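
A tiny worked example of the SVN-specific symbolic reference built above; posixpath stands in for RhodeCode's vcspath helper (an assumption), and the commit id and paths are made up.

import posixpath as vcspath   # assumption: stand-in for the vcspath helper

def symbolic_reference_svn(commit_id, name, f_path):
    # the branch/tag name is prefixed to the file path, the commit appended
    new_f_path = vcspath.join(name, f_path)
    return u'%s@%s' % (new_f_path, commit_id)

assert symbolic_reference_svn('1234', 'branches/stable', 'docs/index.rst') == \
    u'branches/stable/docs/index.rst@1234'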
840 def _get_node_history(self, commit_obj, f_path, commits=None):
829 def _get_node_history(self, commit_obj, f_path, commits=None):
841 """
830 """
842 get commit history for given node
831 get commit history for given node
843
832
844 :param commit_obj: commit to calculate history
833 :param commit_obj: commit to calculate history
845 :param f_path: path for node to calculate history for
834 :param f_path: path for node to calculate history for
846 :param commits: if passed don't calculate history and take
835 :param commits: if passed don't calculate history and take
847 commits defined in this list
836 commits defined in this list
848 """
837 """
849 _ = self.request.translate
838 _ = self.request.translate
850
839
851 # calculate history based on tip
840 # calculate history based on tip
852 tip = self.rhodecode_vcs_repo.get_commit()
841 tip = self.rhodecode_vcs_repo.get_commit()
853 if commits is None:
842 if commits is None:
854 pre_load = ["author", "branch"]
843 pre_load = ["author", "branch"]
855 try:
844 try:
856 commits = tip.get_file_history(f_path, pre_load=pre_load)
845 commits = tip.get_file_history(f_path, pre_load=pre_load)
857 except (NodeDoesNotExistError, CommitError):
846 except (NodeDoesNotExistError, CommitError):
858 # this node is not present at tip!
847 # this node is not present at tip!
859 commits = commit_obj.get_file_history(f_path, pre_load=pre_load)
848 commits = commit_obj.get_file_history(f_path, pre_load=pre_load)
860
849
861 history = []
850 history = []
862 commits_group = ([], _("Changesets"))
851 commits_group = ([], _("Changesets"))
863 for commit in commits:
852 for commit in commits:
864 branch = ' (%s)' % commit.branch if commit.branch else ''
853 branch = ' (%s)' % commit.branch if commit.branch else ''
865 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
854 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
866 commits_group[0].append((commit.raw_id, n_desc,))
855 commits_group[0].append((commit.raw_id, n_desc,))
867 history.append(commits_group)
856 history.append(commits_group)
868
857
869 symbolic_reference = self._symbolic_reference
858 symbolic_reference = self._symbolic_reference
870
859
871 if self.rhodecode_vcs_repo.alias == 'svn':
860 if self.rhodecode_vcs_repo.alias == 'svn':
872 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
861 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
873 f_path, self.rhodecode_vcs_repo)
862 f_path, self.rhodecode_vcs_repo)
874 if adjusted_f_path != f_path:
863 if adjusted_f_path != f_path:
875 log.debug(
864 log.debug(
876 'Recognized svn tag or branch in file "%s", using svn '
865 'Recognized svn tag or branch in file "%s", using svn '
877 'specific symbolic references', f_path)
866 'specific symbolic references', f_path)
878 f_path = adjusted_f_path
867 f_path = adjusted_f_path
879 symbolic_reference = self._symbolic_reference_svn
868 symbolic_reference = self._symbolic_reference_svn
880
869
881 branches = self._create_references(
870 branches = self._create_references(
882 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path)
871 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path)
883 branches_group = (branches, _("Branches"))
872 branches_group = (branches, _("Branches"))
884
873
885 tags = self._create_references(
874 tags = self._create_references(
886 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path)
875 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path)
887 tags_group = (tags, _("Tags"))
876 tags_group = (tags, _("Tags"))
888
877
889 history.append(branches_group)
878 history.append(branches_group)
890 history.append(tags_group)
879 history.append(tags_group)
891
880
892 return history, commits
881 return history, commits
893
882
894 @LoginRequired()
883 @LoginRequired()
895 @HasRepoPermissionAnyDecorator(
884 @HasRepoPermissionAnyDecorator(
896 'repository.read', 'repository.write', 'repository.admin')
885 'repository.read', 'repository.write', 'repository.admin')
897 @view_config(
886 @view_config(
898 route_name='repo_file_history', request_method='GET',
887 route_name='repo_file_history', request_method='GET',
899 renderer='json_ext')
888 renderer='json_ext')
900 def repo_file_history(self):
889 def repo_file_history(self):
901 self.load_default_context()
890 self.load_default_context()
902
891
903 commit_id, f_path = self._get_commit_and_path()
892 commit_id, f_path = self._get_commit_and_path()
904 commit = self._get_commit_or_redirect(commit_id)
893 commit = self._get_commit_or_redirect(commit_id)
905 file_node = self._get_filenode_or_redirect(commit, f_path)
894 file_node = self._get_filenode_or_redirect(commit, f_path)
906
895
907 if file_node.is_file():
896 if file_node.is_file():
908 file_history, _hist = self._get_node_history(commit, f_path)
897 file_history, _hist = self._get_node_history(commit, f_path)
909
898
910 res = []
899 res = []
911 for obj in file_history:
900 for obj in file_history:
912 res.append({
901 res.append({
913 'text': obj[1],
902 'text': obj[1],
914 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
903 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
915 })
904 })
916
905
917 data = {
906 data = {
918 'more': False,
907 'more': False,
919 'results': res
908 'results': res
920 }
909 }
921 return data
910 return data
922
911
923 log.warning('Cannot fetch history for directory')
912 log.warning('Cannot fetch history for directory')
924 raise HTTPBadRequest()
913 raise HTTPBadRequest()
925
914
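
For reference, the JSON payload this history endpoint hands to the select2-style widget looks roughly like the structure below; the commit ids, branch name and labels are invented.

# Illustrative payload only -- ids and labels are made up.
data = {
    'more': False,
    'results': [
        {'text': 'Changesets',
         'children': [{'id': 'aa11bb22cc33', 'text': 'r12:aa11bb22cc33 (default)'}]},
        {'text': 'Branches',
         'children': [{'id': 'aa11bb22cc33', 'text': 'default'}]},
        {'text': 'Tags', 'children': []},
    ],
}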
926 @LoginRequired()
915 @LoginRequired()
927 @HasRepoPermissionAnyDecorator(
916 @HasRepoPermissionAnyDecorator(
928 'repository.read', 'repository.write', 'repository.admin')
917 'repository.read', 'repository.write', 'repository.admin')
929 @view_config(
918 @view_config(
930 route_name='repo_file_authors', request_method='GET',
919 route_name='repo_file_authors', request_method='GET',
931 renderer='rhodecode:templates/files/file_authors_box.mako')
920 renderer='rhodecode:templates/files/file_authors_box.mako')
932 def repo_file_authors(self):
921 def repo_file_authors(self):
933 c = self.load_default_context()
922 c = self.load_default_context()
934
923
935 commit_id, f_path = self._get_commit_and_path()
924 commit_id, f_path = self._get_commit_and_path()
936 commit = self._get_commit_or_redirect(commit_id)
925 commit = self._get_commit_or_redirect(commit_id)
937 file_node = self._get_filenode_or_redirect(commit, f_path)
926 file_node = self._get_filenode_or_redirect(commit, f_path)
938
927
939 if not file_node.is_file():
928 if not file_node.is_file():
940 raise HTTPBadRequest()
929 raise HTTPBadRequest()
941
930
942 c.file_last_commit = file_node.last_commit
931 c.file_last_commit = file_node.last_commit
943 if self.request.GET.get('annotate') == '1':
932 if self.request.GET.get('annotate') == '1':
944 # use _hist from annotation if annotation mode is on
933 # use _hist from annotation if annotation mode is on
945 commit_ids = set(x[1] for x in file_node.annotate)
934 commit_ids = set(x[1] for x in file_node.annotate)
946 _hist = (
935 _hist = (
947 self.rhodecode_vcs_repo.get_commit(commit_id)
936 self.rhodecode_vcs_repo.get_commit(commit_id)
948 for commit_id in commit_ids)
937 for commit_id in commit_ids)
949 else:
938 else:
950 _f_history, _hist = self._get_node_history(commit, f_path)
939 _f_history, _hist = self._get_node_history(commit, f_path)
951 c.file_author = False
940 c.file_author = False
952
941
953 unique = collections.OrderedDict()
942 unique = collections.OrderedDict()
954 for commit in _hist:
943 for commit in _hist:
955 author = commit.author
944 author = commit.author
956 if author not in unique:
945 if author not in unique:
957 unique[commit.author] = [
946 unique[commit.author] = [
958 h.email(author),
947 h.email(author),
959 h.person(author, 'username_or_name_or_email'),
948 h.person(author, 'username_or_name_or_email'),
960 1 # counter
949 1 # counter
961 ]
950 ]
962
951
963 else:
952 else:
964 # increase counter
953 # increase counter
965 unique[commit.author][2] += 1
954 unique[commit.author][2] += 1
966
955
967 c.authors = [val for val in unique.values()]
956 c.authors = [val for val in unique.values()]
968
957
969 return self._get_template_context(c)
958 return self._get_template_context(c)
970
959
971 @LoginRequired()
960 @LoginRequired()
972 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
961 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
973 @view_config(
962 @view_config(
974 route_name='repo_files_remove_file', request_method='GET',
963 route_name='repo_files_remove_file', request_method='GET',
975 renderer='rhodecode:templates/files/files_delete.mako')
964 renderer='rhodecode:templates/files/files_delete.mako')
976 def repo_files_remove_file(self):
965 def repo_files_remove_file(self):
977 _ = self.request.translate
966 _ = self.request.translate
978 c = self.load_default_context()
967 c = self.load_default_context()
979 commit_id, f_path = self._get_commit_and_path()
968 commit_id, f_path = self._get_commit_and_path()
980
969
981 self._ensure_not_locked()
970 self._ensure_not_locked()
982
971
983 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
972 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
984 h.flash(_('You can only delete files when the commit '
973 h.flash(_('You can only delete files when the commit '
985 'is a valid branch head'), category='warning')
974 'is a valid branch head'), category='warning')
986 raise HTTPFound(
975 raise HTTPFound(
987 h.route_path('repo_files',
976 h.route_path('repo_files',
988 repo_name=self.db_repo_name, commit_id='tip',
977 repo_name=self.db_repo_name, commit_id='tip',
989 f_path=f_path))
978 f_path=f_path))
990
979
991 c.commit = self._get_commit_or_redirect(commit_id)
980 c.commit = self._get_commit_or_redirect(commit_id)
992 c.file = self._get_filenode_or_redirect(c.commit, f_path)
981 c.file = self._get_filenode_or_redirect(c.commit, f_path)
993
982
994 c.default_message = _(
983 c.default_message = _(
995 'Deleted file {} via RhodeCode Enterprise').format(f_path)
984 'Deleted file {} via RhodeCode Enterprise').format(f_path)
996 c.f_path = f_path
985 c.f_path = f_path
997
986
998 return self._get_template_context(c)
987 return self._get_template_context(c)
999
988
1000 @LoginRequired()
989 @LoginRequired()
1001 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
990 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1002 @CSRFRequired()
991 @CSRFRequired()
1003 @view_config(
992 @view_config(
1004 route_name='repo_files_delete_file', request_method='POST',
993 route_name='repo_files_delete_file', request_method='POST',
1005 renderer=None)
994 renderer=None)
1006 def repo_files_delete_file(self):
995 def repo_files_delete_file(self):
1007 _ = self.request.translate
996 _ = self.request.translate
1008
997
1009 c = self.load_default_context()
998 c = self.load_default_context()
1010 commit_id, f_path = self._get_commit_and_path()
999 commit_id, f_path = self._get_commit_and_path()
1011
1000
1012 self._ensure_not_locked()
1001 self._ensure_not_locked()
1013
1002
1014 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1003 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1015 h.flash(_('You can only delete files when the commit '
1004 h.flash(_('You can only delete files when the commit '
1016 'is a valid branch head'), category='warning')
1005 'is a valid branch head'), category='warning')
1017 raise HTTPFound(
1006 raise HTTPFound(
1018 h.route_path('repo_files',
1007 h.route_path('repo_files',
1019 repo_name=self.db_repo_name, commit_id='tip',
1008 repo_name=self.db_repo_name, commit_id='tip',
1020 f_path=f_path))
1009 f_path=f_path))
1021
1010
1022 c.commit = self._get_commit_or_redirect(commit_id)
1011 c.commit = self._get_commit_or_redirect(commit_id)
1023 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1012 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1024
1013
1025 c.default_message = _(
1014 c.default_message = _(
1026 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1015 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1027 c.f_path = f_path
1016 c.f_path = f_path
1028 node_path = f_path
1017 node_path = f_path
1029 author = self._rhodecode_db_user.full_contact
1018 author = self._rhodecode_db_user.full_contact
1030 message = self.request.POST.get('message') or c.default_message
1019 message = self.request.POST.get('message') or c.default_message
1031 try:
1020 try:
1032 nodes = {
1021 nodes = {
1033 node_path: {
1022 node_path: {
1034 'content': ''
1023 'content': ''
1035 }
1024 }
1036 }
1025 }
1037 ScmModel().delete_nodes(
1026 ScmModel().delete_nodes(
1038 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1027 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1039 message=message,
1028 message=message,
1040 nodes=nodes,
1029 nodes=nodes,
1041 parent_commit=c.commit,
1030 parent_commit=c.commit,
1042 author=author,
1031 author=author,
1043 )
1032 )
1044
1033
1045 h.flash(
1034 h.flash(
1046 _('Successfully deleted file `{}`').format(
1035 _('Successfully deleted file `{}`').format(
1047 h.escape(f_path)), category='success')
1036 h.escape(f_path)), category='success')
1048 except Exception:
1037 except Exception:
1049 log.exception('Error during commit operation')
1038 log.exception('Error during commit operation')
1050 h.flash(_('Error occurred during commit'), category='error')
1039 h.flash(_('Error occurred during commit'), category='error')
1051 raise HTTPFound(
1040 raise HTTPFound(
1052 h.route_path('repo_commit', repo_name=self.db_repo_name,
1041 h.route_path('repo_commit', repo_name=self.db_repo_name,
1053 commit_id='tip'))
1042 commit_id='tip'))
1054
1043
1055 @LoginRequired()
1044 @LoginRequired()
1056 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1045 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1057 @view_config(
1046 @view_config(
1058 route_name='repo_files_edit_file', request_method='GET',
1047 route_name='repo_files_edit_file', request_method='GET',
1059 renderer='rhodecode:templates/files/files_edit.mako')
1048 renderer='rhodecode:templates/files/files_edit.mako')
1060 def repo_files_edit_file(self):
1049 def repo_files_edit_file(self):
1061 _ = self.request.translate
1050 _ = self.request.translate
1062 c = self.load_default_context()
1051 c = self.load_default_context()
1063 commit_id, f_path = self._get_commit_and_path()
1052 commit_id, f_path = self._get_commit_and_path()
1064
1053
1065 self._ensure_not_locked()
1054 self._ensure_not_locked()
1066
1055
1067 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1056 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1068 h.flash(_('You can only edit files when the commit '
1057 h.flash(_('You can only edit files when the commit '
1069 'is a valid branch head'), category='warning')
1058 'is a valid branch head'), category='warning')
1070 raise HTTPFound(
1059 raise HTTPFound(
1071 h.route_path('repo_files',
1060 h.route_path('repo_files',
1072 repo_name=self.db_repo_name, commit_id='tip',
1061 repo_name=self.db_repo_name, commit_id='tip',
1073 f_path=f_path))
1062 f_path=f_path))
1074
1063
1075 c.commit = self._get_commit_or_redirect(commit_id)
1064 c.commit = self._get_commit_or_redirect(commit_id)
1076 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1065 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1077
1066
1078 if c.file.is_binary:
1067 if c.file.is_binary:
1079 files_url = h.route_path(
1068 files_url = h.route_path(
1080 'repo_files',
1069 'repo_files',
1081 repo_name=self.db_repo_name,
1070 repo_name=self.db_repo_name,
1082 commit_id=c.commit.raw_id, f_path=f_path)
1071 commit_id=c.commit.raw_id, f_path=f_path)
1083 raise HTTPFound(files_url)
1072 raise HTTPFound(files_url)
1084
1073
1085 c.default_message = _(
1074 c.default_message = _(
1086 'Edited file {} via RhodeCode Enterprise').format(f_path)
1075 'Edited file {} via RhodeCode Enterprise').format(f_path)
1087 c.f_path = f_path
1076 c.f_path = f_path
1088
1077
1089 return self._get_template_context(c)
1078 return self._get_template_context(c)
1090
1079
1091 @LoginRequired()
1080 @LoginRequired()
1092 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1081 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1093 @CSRFRequired()
1082 @CSRFRequired()
1094 @view_config(
1083 @view_config(
1095 route_name='repo_files_update_file', request_method='POST',
1084 route_name='repo_files_update_file', request_method='POST',
1096 renderer=None)
1085 renderer=None)
1097 def repo_files_update_file(self):
1086 def repo_files_update_file(self):
1098 _ = self.request.translate
1087 _ = self.request.translate
1099 c = self.load_default_context()
1088 c = self.load_default_context()
1100 commit_id, f_path = self._get_commit_and_path()
1089 commit_id, f_path = self._get_commit_and_path()
1101
1090
1102 self._ensure_not_locked()
1091 self._ensure_not_locked()
1103
1092
1104 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1093 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1105 h.flash(_('You can only edit files when the commit '
1094 h.flash(_('You can only edit files when the commit '
1106 'is a valid branch head'), category='warning')
1095 'is a valid branch head'), category='warning')
1107 raise HTTPFound(
1096 raise HTTPFound(
1108 h.route_path('repo_files',
1097 h.route_path('repo_files',
1109 repo_name=self.db_repo_name, commit_id='tip',
1098 repo_name=self.db_repo_name, commit_id='tip',
1110 f_path=f_path))
1099 f_path=f_path))
1111
1100
1112 c.commit = self._get_commit_or_redirect(commit_id)
1101 c.commit = self._get_commit_or_redirect(commit_id)
1113 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1102 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1114
1103
1115 if c.file.is_binary:
1104 if c.file.is_binary:
1116 raise HTTPFound(
1105 raise HTTPFound(
1117 h.route_path('repo_files',
1106 h.route_path('repo_files',
1118 repo_name=self.db_repo_name,
1107 repo_name=self.db_repo_name,
1119 commit_id=c.commit.raw_id,
1108 commit_id=c.commit.raw_id,
1120 f_path=f_path))
1109 f_path=f_path))
1121
1110
1122 c.default_message = _(
1111 c.default_message = _(
1123 'Edited file {} via RhodeCode Enterprise').format(f_path)
1112 'Edited file {} via RhodeCode Enterprise').format(f_path)
1124 c.f_path = f_path
1113 c.f_path = f_path
1125 old_content = c.file.content
1114 old_content = c.file.content
1126 sl = old_content.splitlines(1)
1115 sl = old_content.splitlines(1)
1127 first_line = sl[0] if sl else ''
1116 first_line = sl[0] if sl else ''
1128
1117
1129 r_post = self.request.POST
1118 r_post = self.request.POST
1130 # modes: 0 - Unix, 1 - Mac, 2 - DOS
1119 # modes: 0 - Unix, 1 - Mac, 2 - DOS
1131 mode = detect_mode(first_line, 0)
1120 mode = detect_mode(first_line, 0)
1132 content = convert_line_endings(r_post.get('content', ''), mode)
1121 content = convert_line_endings(r_post.get('content', ''), mode)
1133
1122
1134 message = r_post.get('message') or c.default_message
1123 message = r_post.get('message') or c.default_message
1135 org_f_path = c.file.unicode_path
1124 org_f_path = c.file.unicode_path
1136 filename = r_post['filename']
1125 filename = r_post['filename']
1137 org_filename = c.file.name
1126 org_filename = c.file.name
1138
1127
1139 if content == old_content and filename == org_filename:
1128 if content == old_content and filename == org_filename:
1140 h.flash(_('No changes'), category='warning')
1129 h.flash(_('No changes'), category='warning')
1141 raise HTTPFound(
1130 raise HTTPFound(
1142 h.route_path('repo_commit', repo_name=self.db_repo_name,
1131 h.route_path('repo_commit', repo_name=self.db_repo_name,
1143 commit_id='tip'))
1132 commit_id='tip'))
1144 try:
1133 try:
1145 mapping = {
1134 mapping = {
1146 org_f_path: {
1135 org_f_path: {
1147 'org_filename': org_f_path,
1136 'org_filename': org_f_path,
1148 'filename': os.path.join(c.file.dir_path, filename),
1137 'filename': os.path.join(c.file.dir_path, filename),
1149 'content': content,
1138 'content': content,
1150 'lexer': '',
1139 'lexer': '',
1151 'op': 'mod',
1140 'op': 'mod',
1152 }
1141 }
1153 }
1142 }
1154
1143
1155 ScmModel().update_nodes(
1144 ScmModel().update_nodes(
1156 user=self._rhodecode_db_user.user_id,
1145 user=self._rhodecode_db_user.user_id,
1157 repo=self.db_repo,
1146 repo=self.db_repo,
1158 message=message,
1147 message=message,
1159 nodes=mapping,
1148 nodes=mapping,
1160 parent_commit=c.commit,
1149 parent_commit=c.commit,
1161 )
1150 )
1162
1151
1163 h.flash(
1152 h.flash(
1164 _('Successfully committed changes to file `{}`').format(
1153 _('Successfully committed changes to file `{}`').format(
1165 h.escape(f_path)), category='success')
1154 h.escape(f_path)), category='success')
1166 except Exception:
1155 except Exception:
1167 log.exception('Error occurred during commit')
1156 log.exception('Error occurred during commit')
1168 h.flash(_('Error occurred during commit'), category='error')
1157 h.flash(_('Error occurred during commit'), category='error')
1169 raise HTTPFound(
1158 raise HTTPFound(
1170 h.route_path('repo_commit', repo_name=self.db_repo_name,
1159 h.route_path('repo_commit', repo_name=self.db_repo_name,
1171 commit_id='tip'))
1160 commit_id='tip'))
1172
1161
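
The edit path above normalizes line endings before committing. The following is a rough, self-contained sketch of what detect_mode / convert_line_endings plausibly do (mode 0=Unix, 1=Mac, 2=DOS); it is an assumption about their behaviour, not RhodeCode's actual implementation.

def detect_mode(line, default):
    # guess the EOL convention from the first line of the existing file
    if line.endswith('\r\n'):
        return 2          # DOS
    if line.endswith('\r'):
        return 1          # old Mac
    if line.endswith('\n'):
        return 0          # Unix
    return default

def convert_line_endings(text, mode):
    eol = {0: '\n', 1: '\r', 2: '\r\n'}[mode]
    # normalize everything to \n first, then apply the requested EOL
    return text.replace('\r\n', '\n').replace('\r', '\n').replace('\n', eol)

assert detect_mode('first line\r\n', 0) == 2
assert convert_line_endings('a\nb\r\nc', 2) == 'a\r\nb\r\nc'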
1173 @LoginRequired()
1162 @LoginRequired()
1174 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1163 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1175 @view_config(
1164 @view_config(
1176 route_name='repo_files_add_file', request_method='GET',
1165 route_name='repo_files_add_file', request_method='GET',
1177 renderer='rhodecode:templates/files/files_add.mako')
1166 renderer='rhodecode:templates/files/files_add.mako')
1178 def repo_files_add_file(self):
1167 def repo_files_add_file(self):
1179 _ = self.request.translate
1168 _ = self.request.translate
1180 c = self.load_default_context()
1169 c = self.load_default_context()
1181 commit_id, f_path = self._get_commit_and_path()
1170 commit_id, f_path = self._get_commit_and_path()
1182
1171
1183 self._ensure_not_locked()
1172 self._ensure_not_locked()
1184
1173
1185 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1174 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1186 if c.commit is None:
1175 if c.commit is None:
1187 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1176 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1188 c.default_message = (_('Added file via RhodeCode Enterprise'))
1177 c.default_message = (_('Added file via RhodeCode Enterprise'))
1189 c.f_path = f_path.lstrip('/') # ensure not relative path
1178 c.f_path = f_path.lstrip('/') # ensure not relative path
1190
1179
1191 return self._get_template_context(c)
1180 return self._get_template_context(c)
1192
1181
1193 @LoginRequired()
1182 @LoginRequired()
1194 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1183 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1195 @CSRFRequired()
1184 @CSRFRequired()
1196 @view_config(
1185 @view_config(
1197 route_name='repo_files_create_file', request_method='POST',
1186 route_name='repo_files_create_file', request_method='POST',
1198 renderer=None)
1187 renderer=None)
1199 def repo_files_create_file(self):
1188 def repo_files_create_file(self):
1200 _ = self.request.translate
1189 _ = self.request.translate
1201 c = self.load_default_context()
1190 c = self.load_default_context()
1202 commit_id, f_path = self._get_commit_and_path()
1191 commit_id, f_path = self._get_commit_and_path()
1203
1192
1204 self._ensure_not_locked()
1193 self._ensure_not_locked()
1205
1194
1206 r_post = self.request.POST
1195 r_post = self.request.POST
1207
1196
1208 c.commit = self._get_commit_or_redirect(
1197 c.commit = self._get_commit_or_redirect(
1209 commit_id, redirect_after=False)
1198 commit_id, redirect_after=False)
1210 if c.commit is None:
1199 if c.commit is None:
1211 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1200 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1212 c.default_message = (_('Added file via RhodeCode Enterprise'))
1201 c.default_message = (_('Added file via RhodeCode Enterprise'))
1213 c.f_path = f_path
1202 c.f_path = f_path
1214 unix_mode = 0
1203 unix_mode = 0
1215 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1204 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1216
1205
1217 message = r_post.get('message') or c.default_message
1206 message = r_post.get('message') or c.default_message
1218 filename = r_post.get('filename')
1207 filename = r_post.get('filename')
1219 location = r_post.get('location', '') # dir location
1208 location = r_post.get('location', '') # dir location
1220 file_obj = r_post.get('upload_file', None)
1209 file_obj = r_post.get('upload_file', None)
1221
1210
1222 if file_obj is not None and hasattr(file_obj, 'filename'):
1211 if file_obj is not None and hasattr(file_obj, 'filename'):
1223 filename = r_post.get('filename_upload')
1212 filename = r_post.get('filename_upload')
1224 content = file_obj.file
1213 content = file_obj.file
1225
1214
1226 if hasattr(content, 'file'):
1215 if hasattr(content, 'file'):
1227 # non-POSIX systems store the real file under the .file attribute
1216 # non-POSIX systems store the real file under the .file attribute
1228 content = content.file
1217 content = content.file
1229
1218
1230 if self.rhodecode_vcs_repo.is_empty:
1219 if self.rhodecode_vcs_repo.is_empty:
1231 default_redirect_url = h.route_path(
1220 default_redirect_url = h.route_path(
1232 'repo_summary', repo_name=self.db_repo_name)
1221 'repo_summary', repo_name=self.db_repo_name)
1233 else:
1222 else:
1234 default_redirect_url = h.route_path(
1223 default_redirect_url = h.route_path(
1235 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1224 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1236
1225
1237 # If there's no commit, redirect to repo summary
1226 # If there's no commit, redirect to repo summary
1238 if type(c.commit) is EmptyCommit:
1227 if type(c.commit) is EmptyCommit:
1239 redirect_url = h.route_path(
1228 redirect_url = h.route_path(
1240 'repo_summary', repo_name=self.db_repo_name)
1229 'repo_summary', repo_name=self.db_repo_name)
1241 else:
1230 else:
1242 redirect_url = default_redirect_url
1231 redirect_url = default_redirect_url
1243
1232
1244 if not filename:
1233 if not filename:
1245 h.flash(_('No filename'), category='warning')
1234 h.flash(_('No filename'), category='warning')
1246 raise HTTPFound(redirect_url)
1235 raise HTTPFound(redirect_url)
1247
1236
1248 # extract the location from filename,
1237 # extract the location from filename,
1249 # allows using foo/bar.txt syntax to create subdirectories
1238 # allows using foo/bar.txt syntax to create subdirectories
1250 subdir_loc = filename.rsplit('/', 1)
1239 subdir_loc = filename.rsplit('/', 1)
1251 if len(subdir_loc) == 2:
1240 if len(subdir_loc) == 2:
1252 location = os.path.join(location, subdir_loc[0])
1241 location = os.path.join(location, subdir_loc[0])
1253
1242
1254 # strip any path components from the filename, keep just the basename
1243 # strip any path components from the filename, keep just the basename
1255 filename = os.path.basename(filename)
1244 filename = os.path.basename(filename)
1256 node_path = os.path.join(location, filename)
1245 node_path = os.path.join(location, filename)
1257 author = self._rhodecode_db_user.full_contact
1246 author = self._rhodecode_db_user.full_contact
1258
1247
1259 try:
1248 try:
1260 nodes = {
1249 nodes = {
1261 node_path: {
1250 node_path: {
1262 'content': content
1251 'content': content
1263 }
1252 }
1264 }
1253 }
1265 ScmModel().create_nodes(
1254 ScmModel().create_nodes(
1266 user=self._rhodecode_db_user.user_id,
1255 user=self._rhodecode_db_user.user_id,
1267 repo=self.db_repo,
1256 repo=self.db_repo,
1268 message=message,
1257 message=message,
1269 nodes=nodes,
1258 nodes=nodes,
1270 parent_commit=c.commit,
1259 parent_commit=c.commit,
1271 author=author,
1260 author=author,
1272 )
1261 )
1273
1262
1274 h.flash(
1263 h.flash(
1275 _('Successfully committed new file `{}`').format(
1264 _('Successfully committed new file `{}`').format(
1276 h.escape(node_path)), category='success')
1265 h.escape(node_path)), category='success')
1277 except NonRelativePathError:
1266 except NonRelativePathError:
1278 log.exception('Non Relative path found')
1267 log.exception('Non Relative path found')
1279 h.flash(_(
1268 h.flash(_(
1280 'The location specified must be a relative path and must not '
1269 'The location specified must be a relative path and must not '
1281 'contain .. in the path'), category='warning')
1270 'contain .. in the path'), category='warning')
1282 raise HTTPFound(default_redirect_url)
1271 raise HTTPFound(default_redirect_url)
1283 except (NodeError, NodeAlreadyExistsError) as e:
1272 except (NodeError, NodeAlreadyExistsError) as e:
1284 h.flash(_(h.escape(e)), category='error')
1273 h.flash(_(h.escape(e)), category='error')
1285 except Exception:
1274 except Exception:
1286 log.exception('Error occurred during commit')
1275 log.exception('Error occurred during commit')
1287 h.flash(_('Error occurred during commit'), category='error')
1276 h.flash(_('Error occurred during commit'), category='error')
1288
1277
1289 raise HTTPFound(default_redirect_url)
1278 raise HTTPFound(default_redirect_url)
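Everything above is the tail of the file-create handler: it normalises the filename, builds node_path and hands ScmModel().create_nodes a plain mapping of repository paths to node data. A minimal, hypothetical illustration of that mapping's shape (paths and content are made up; multiple entries appear to be allowed, since the API takes a dict keyed by path):

nodes = {
    'docs/readme.rst': {'content': 'hello world'},
    'docs/changelog.rst': {'content': ''},
}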
@@ -1,372 +1,369 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22 import string
23
24 from pyramid.view import view_config
25 from beaker.cache import cache_region
26
27 from rhodecode.controllers import utils
28 from rhodecode.apps._base import RepoAppView
29 from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
- 30 from rhodecode.lib import caches, helpers as h
+ 30 from rhodecode.lib import helpers as h, rc_cache
- 31 from rhodecode.lib.helpers import RepoPage
31 from rhodecode.lib.utils2 import safe_str, safe_int
32 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
33 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
34 from rhodecode.lib.ext_json import json
35 from rhodecode.lib.vcs.backends.base import EmptyCommit
- 37 from rhodecode.lib.vcs.exceptions import CommitError, EmptyRepositoryError, \
- 38 CommitDoesNotExistError
+ 36 from rhodecode.lib.vcs.exceptions import (
+ 37 CommitError, EmptyRepositoryError, CommitDoesNotExistError)
38 from rhodecode.model.db import Statistics, CacheKey, User
39 from rhodecode.model.meta import Session
40 from rhodecode.model.repo import ReadmeFinder
41 from rhodecode.model.scm import ScmModel
42
43 log = logging.getLogger(__name__)
44
45
47 class RepoSummaryView(RepoAppView):
46 class RepoSummaryView(RepoAppView):
48
47
49 def load_default_context(self):
48 def load_default_context(self):
50 c = self._get_local_tmpl_context(include_app_defaults=True)
49 c = self._get_local_tmpl_context(include_app_defaults=True)
51 c.rhodecode_repo = None
50 c.rhodecode_repo = None
52 if not c.repository_requirements_missing:
51 if not c.repository_requirements_missing:
53 c.rhodecode_repo = self.rhodecode_vcs_repo
52 c.rhodecode_repo = self.rhodecode_vcs_repo
54 return c
53 return c
55
54
56 def _get_readme_data(self, db_repo, default_renderer):
55 def _get_readme_data(self, db_repo, default_renderer):
57 repo_name = db_repo.repo_name
56 repo_name = db_repo.repo_name
58 log.debug('Looking for README file')
57 log.debug('Looking for README file')
59
58
60 @cache_region('long_term')
59 @cache_region('long_term')
61 def _generate_readme(cache_key):
60 def _generate_readme(cache_key):
62 readme_data = None
61 readme_data = None
63 readme_node = None
62 readme_node = None
64 readme_filename = None
63 readme_filename = None
65 commit = self._get_landing_commit_or_none(db_repo)
64 commit = self._get_landing_commit_or_none(db_repo)
66 if commit:
65 if commit:
67 log.debug("Searching for a README file.")
66 log.debug("Searching for a README file.")
68 readme_node = ReadmeFinder(default_renderer).search(commit)
67 readme_node = ReadmeFinder(default_renderer).search(commit)
69 if readme_node:
68 if readme_node:
70 relative_urls = {
69 relative_urls = {
71 'raw': h.route_path(
70 'raw': h.route_path(
72 'repo_file_raw', repo_name=repo_name,
71 'repo_file_raw', repo_name=repo_name,
73 commit_id=commit.raw_id, f_path=readme_node.path),
72 commit_id=commit.raw_id, f_path=readme_node.path),
74 'standard': h.route_path(
73 'standard': h.route_path(
75 'repo_files', repo_name=repo_name,
74 'repo_files', repo_name=repo_name,
76 commit_id=commit.raw_id, f_path=readme_node.path),
75 commit_id=commit.raw_id, f_path=readme_node.path),
77 }
76 }
78 readme_data = self._render_readme_or_none(
77 readme_data = self._render_readme_or_none(
79 commit, readme_node, relative_urls)
78 commit, readme_node, relative_urls)
80 readme_filename = readme_node.path
79 readme_filename = readme_node.path
81 return readme_data, readme_filename
80 return readme_data, readme_filename
82
81
83 invalidator_context = CacheKey.repo_context_cache(
82 invalidator_context = CacheKey.repo_context_cache(
84 _generate_readme, repo_name, CacheKey.CACHE_TYPE_README)
83 _generate_readme, repo_name, CacheKey.CACHE_TYPE_README)
85
84
86 with invalidator_context as context:
85 with invalidator_context as context:
87 context.invalidate()
86 context.invalidate()
88 computed = context.compute()
87 computed = context.compute()
89
88
90 return computed
89 return computed
91
90
92 def _get_landing_commit_or_none(self, db_repo):
91 def _get_landing_commit_or_none(self, db_repo):
93 log.debug("Getting the landing commit.")
92 log.debug("Getting the landing commit.")
94 try:
93 try:
95 commit = db_repo.get_landing_commit()
94 commit = db_repo.get_landing_commit()
96 if not isinstance(commit, EmptyCommit):
95 if not isinstance(commit, EmptyCommit):
97 return commit
96 return commit
98 else:
97 else:
99 log.debug("Repository is empty, no README to render.")
98 log.debug("Repository is empty, no README to render.")
100 except CommitError:
99 except CommitError:
101 log.exception(
100 log.exception(
102 "Problem getting commit when trying to render the README.")
101 "Problem getting commit when trying to render the README.")
103
102
104 def _render_readme_or_none(self, commit, readme_node, relative_urls):
103 def _render_readme_or_none(self, commit, readme_node, relative_urls):
105 log.debug(
104 log.debug(
106 'Found README file `%s` rendering...', readme_node.path)
105 'Found README file `%s` rendering...', readme_node.path)
107 renderer = MarkupRenderer()
106 renderer = MarkupRenderer()
108 try:
107 try:
109 html_source = renderer.render(
108 html_source = renderer.render(
110 readme_node.content, filename=readme_node.path)
109 readme_node.content, filename=readme_node.path)
111 if relative_urls:
110 if relative_urls:
112 return relative_links(html_source, relative_urls)
111 return relative_links(html_source, relative_urls)
113 return html_source
112 return html_source
114 except Exception:
113 except Exception:
115 log.exception(
114 log.exception(
116 "Exception while trying to render the README")
115 "Exception while trying to render the README")
117
116
118 def _load_commits_context(self, c):
117 def _load_commits_context(self, c):
119 p = safe_int(self.request.GET.get('page'), 1)
118 p = safe_int(self.request.GET.get('page'), 1)
120 size = safe_int(self.request.GET.get('size'), 10)
119 size = safe_int(self.request.GET.get('size'), 10)
121
120
122 def url_generator(**kw):
121 def url_generator(**kw):
123 query_params = {
122 query_params = {
124 'size': size
123 'size': size
125 }
124 }
126 query_params.update(kw)
125 query_params.update(kw)
127 return h.route_path(
126 return h.route_path(
128 'repo_summary_commits',
127 'repo_summary_commits',
129 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
128 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
130
129
131 pre_load = ['author', 'branch', 'date', 'message']
130 pre_load = ['author', 'branch', 'date', 'message']
132 try:
131 try:
133 collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load)
132 collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load)
134 except EmptyRepositoryError:
133 except EmptyRepositoryError:
135 collection = self.rhodecode_vcs_repo
134 collection = self.rhodecode_vcs_repo
136
135
- 137 c.repo_commits = RepoPage(
+ 136 c.repo_commits = h.RepoPage(
137 collection, page=p, items_per_page=size, url=url_generator)
139 page_ids = [x.raw_id for x in c.repo_commits]
138 page_ids = [x.raw_id for x in c.repo_commits]
140 c.comments = self.db_repo.get_comments(page_ids)
139 c.comments = self.db_repo.get_comments(page_ids)
141 c.statuses = self.db_repo.statuses(page_ids)
140 c.statuses = self.db_repo.statuses(page_ids)
142
141
143 @LoginRequired()
142 @LoginRequired()
144 @HasRepoPermissionAnyDecorator(
143 @HasRepoPermissionAnyDecorator(
145 'repository.read', 'repository.write', 'repository.admin')
144 'repository.read', 'repository.write', 'repository.admin')
146 @view_config(
145 @view_config(
147 route_name='repo_summary_commits', request_method='GET',
146 route_name='repo_summary_commits', request_method='GET',
148 renderer='rhodecode:templates/summary/summary_commits.mako')
147 renderer='rhodecode:templates/summary/summary_commits.mako')
149 def summary_commits(self):
148 def summary_commits(self):
150 c = self.load_default_context()
149 c = self.load_default_context()
151 self._load_commits_context(c)
150 self._load_commits_context(c)
152 return self._get_template_context(c)
151 return self._get_template_context(c)
153
152
154 @LoginRequired()
153 @LoginRequired()
155 @HasRepoPermissionAnyDecorator(
154 @HasRepoPermissionAnyDecorator(
156 'repository.read', 'repository.write', 'repository.admin')
155 'repository.read', 'repository.write', 'repository.admin')
157 @view_config(
156 @view_config(
158 route_name='repo_summary', request_method='GET',
157 route_name='repo_summary', request_method='GET',
159 renderer='rhodecode:templates/summary/summary.mako')
158 renderer='rhodecode:templates/summary/summary.mako')
160 @view_config(
159 @view_config(
161 route_name='repo_summary_slash', request_method='GET',
160 route_name='repo_summary_slash', request_method='GET',
162 renderer='rhodecode:templates/summary/summary.mako')
161 renderer='rhodecode:templates/summary/summary.mako')
163 @view_config(
162 @view_config(
164 route_name='repo_summary_explicit', request_method='GET',
163 route_name='repo_summary_explicit', request_method='GET',
165 renderer='rhodecode:templates/summary/summary.mako')
164 renderer='rhodecode:templates/summary/summary.mako')
166 def summary(self):
165 def summary(self):
167 c = self.load_default_context()
166 c = self.load_default_context()
168
167
169 # Prepare the clone URL
168 # Prepare the clone URL
170 username = ''
169 username = ''
171 if self._rhodecode_user.username != User.DEFAULT_USER:
170 if self._rhodecode_user.username != User.DEFAULT_USER:
172 username = safe_str(self._rhodecode_user.username)
171 username = safe_str(self._rhodecode_user.username)
173
172
174 _def_clone_uri = _def_clone_uri_id = c.clone_uri_tmpl
173 _def_clone_uri = _def_clone_uri_id = c.clone_uri_tmpl
175 _def_clone_uri_ssh = c.clone_uri_ssh_tmpl
174 _def_clone_uri_ssh = c.clone_uri_ssh_tmpl
176
175
177 if '{repo}' in _def_clone_uri:
176 if '{repo}' in _def_clone_uri:
178 _def_clone_uri_id = _def_clone_uri.replace(
177 _def_clone_uri_id = _def_clone_uri.replace(
179 '{repo}', '_{repoid}')
178 '{repo}', '_{repoid}')
180 elif '{repoid}' in _def_clone_uri:
179 elif '{repoid}' in _def_clone_uri:
181 _def_clone_uri_id = _def_clone_uri.replace(
180 _def_clone_uri_id = _def_clone_uri.replace(
182 '_{repoid}', '{repo}')
181 '_{repoid}', '{repo}')
183
182
184 c.clone_repo_url = self.db_repo.clone_url(
183 c.clone_repo_url = self.db_repo.clone_url(
185 user=username, uri_tmpl=_def_clone_uri)
184 user=username, uri_tmpl=_def_clone_uri)
186 c.clone_repo_url_id = self.db_repo.clone_url(
185 c.clone_repo_url_id = self.db_repo.clone_url(
187 user=username, uri_tmpl=_def_clone_uri_id)
186 user=username, uri_tmpl=_def_clone_uri_id)
188 c.clone_repo_url_ssh = self.db_repo.clone_url(
187 c.clone_repo_url_ssh = self.db_repo.clone_url(
189 uri_tmpl=_def_clone_uri_ssh, ssh=True)
188 uri_tmpl=_def_clone_uri_ssh, ssh=True)
190
189
191 # If enabled, get statistics data
190 # If enabled, get statistics data
192
191
193 c.show_stats = bool(self.db_repo.enable_statistics)
192 c.show_stats = bool(self.db_repo.enable_statistics)
194
193
195 stats = Session().query(Statistics) \
194 stats = Session().query(Statistics) \
196 .filter(Statistics.repository == self.db_repo) \
195 .filter(Statistics.repository == self.db_repo) \
197 .scalar()
196 .scalar()
198
197
199 c.stats_percentage = 0
198 c.stats_percentage = 0
200
199
201 if stats and stats.languages:
200 if stats and stats.languages:
202 c.no_data = False is self.db_repo.enable_statistics
201 c.no_data = False is self.db_repo.enable_statistics
203 lang_stats_d = json.loads(stats.languages)
202 lang_stats_d = json.loads(stats.languages)
204
203
205 # Sort first by decreasing count and second by the file extension,
204 # Sort first by decreasing count and second by the file extension,
206 # so we have a consistent output.
205 # so we have a consistent output.
207 lang_stats_items = sorted(lang_stats_d.iteritems(),
206 lang_stats_items = sorted(lang_stats_d.iteritems(),
208 key=lambda k: (-k[1], k[0]))[:10]
207 key=lambda k: (-k[1], k[0]))[:10]
209 lang_stats = [(x, {"count": y,
208 lang_stats = [(x, {"count": y,
210 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
209 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
211 for x, y in lang_stats_items]
210 for x, y in lang_stats_items]
212
211
213 c.trending_languages = json.dumps(lang_stats)
212 c.trending_languages = json.dumps(lang_stats)
214 else:
213 else:
215 c.no_data = True
214 c.no_data = True
216 c.trending_languages = json.dumps({})
215 c.trending_languages = json.dumps({})
217
216
218 scm_model = ScmModel()
217 scm_model = ScmModel()
219 c.enable_downloads = self.db_repo.enable_downloads
218 c.enable_downloads = self.db_repo.enable_downloads
220 c.repository_followers = scm_model.get_followers(self.db_repo)
219 c.repository_followers = scm_model.get_followers(self.db_repo)
221 c.repository_forks = scm_model.get_forks(self.db_repo)
220 c.repository_forks = scm_model.get_forks(self.db_repo)
222 c.repository_is_user_following = scm_model.is_following_repo(
221 c.repository_is_user_following = scm_model.is_following_repo(
223 self.db_repo_name, self._rhodecode_user.user_id)
222 self.db_repo_name, self._rhodecode_user.user_id)
224
223
225 # first interaction with the VCS instance after here...
224 # first interaction with the VCS instance after here...
226 if c.repository_requirements_missing:
225 if c.repository_requirements_missing:
227 self.request.override_renderer = \
226 self.request.override_renderer = \
228 'rhodecode:templates/summary/missing_requirements.mako'
227 'rhodecode:templates/summary/missing_requirements.mako'
229 return self._get_template_context(c)
228 return self._get_template_context(c)
230
229
231 c.readme_data, c.readme_file = \
230 c.readme_data, c.readme_file = \
232 self._get_readme_data(self.db_repo, c.visual.default_renderer)
231 self._get_readme_data(self.db_repo, c.visual.default_renderer)
233
232
234 # loads the summary commits template context
233 # loads the summary commits template context
235 self._load_commits_context(c)
234 self._load_commits_context(c)
236
235
237 return self._get_template_context(c)
236 return self._get_template_context(c)
238
237
239 def get_request_commit_id(self):
238 def get_request_commit_id(self):
240 return self.request.matchdict['commit_id']
239 return self.request.matchdict['commit_id']
241
240
242 @LoginRequired()
241 @LoginRequired()
243 @HasRepoPermissionAnyDecorator(
242 @HasRepoPermissionAnyDecorator(
244 'repository.read', 'repository.write', 'repository.admin')
243 'repository.read', 'repository.write', 'repository.admin')
245 @view_config(
244 @view_config(
246 route_name='repo_stats', request_method='GET',
245 route_name='repo_stats', request_method='GET',
247 renderer='json_ext')
246 renderer='json_ext')
248 def repo_stats(self):
247 def repo_stats(self):
248 commit_id = self.get_request_commit_id()
+ 249 show_stats = bool(self.db_repo.enable_statistics)
+ 250 repo_id = self.db_repo.repo_id
251
- 251 _namespace = caches.get_repo_namespace_key(
- 252 caches.SUMMARY_STATS, self.db_repo_name)
- 253 show_stats = bool(self.db_repo.enable_statistics)
- 254 cache_manager = caches.get_cache_manager(
- 255 'repo_cache_long', _namespace)
- 256 _cache_key = caches.compute_key_from_params(
- 257 self.db_repo_name, commit_id, show_stats)
+ 252 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
+ 253 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
254
- 259 def compute_stats():
+ 255 @region.cache_on_arguments(namespace=cache_namespace_uid)
+ 256 def compute_stats(repo_id, commit_id, show_stats):
257 code_stats = {}
258 size = 0
259 try:
260 scm_instance = self.db_repo.scm_instance()
261 commit = scm_instance.get_commit(commit_id)
262
263 for node in commit.get_filenodes_generator():
264 size += node.size
265 if not show_stats:
266 continue
267 ext = string.lower(node.extension)
268 ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext)
269 if ext_info:
270 if ext in code_stats:
271 code_stats[ext]['count'] += 1
272 else:
273 code_stats[ext] = {"count": 1, "desc": ext_info}
274 except (EmptyRepositoryError, CommitDoesNotExistError):
275 pass
276 return {'size': h.format_byte_size_binary(size),
277 'code_stats': code_stats}
278
- 282 stats = cache_manager.get(_cache_key, createfunc=compute_stats)
+ 279 stats = compute_stats(self.db_repo.repo_id, commit_id, show_stats)
280 return stats
284
281
285 @LoginRequired()
282 @LoginRequired()
286 @HasRepoPermissionAnyDecorator(
283 @HasRepoPermissionAnyDecorator(
287 'repository.read', 'repository.write', 'repository.admin')
284 'repository.read', 'repository.write', 'repository.admin')
288 @view_config(
285 @view_config(
289 route_name='repo_refs_data', request_method='GET',
286 route_name='repo_refs_data', request_method='GET',
290 renderer='json_ext')
287 renderer='json_ext')
291 def repo_refs_data(self):
288 def repo_refs_data(self):
292 _ = self.request.translate
289 _ = self.request.translate
293 self.load_default_context()
290 self.load_default_context()
294
291
295 repo = self.rhodecode_vcs_repo
292 repo = self.rhodecode_vcs_repo
296 refs_to_create = [
293 refs_to_create = [
297 (_("Branch"), repo.branches, 'branch'),
294 (_("Branch"), repo.branches, 'branch'),
298 (_("Tag"), repo.tags, 'tag'),
295 (_("Tag"), repo.tags, 'tag'),
299 (_("Bookmark"), repo.bookmarks, 'book'),
296 (_("Bookmark"), repo.bookmarks, 'book'),
300 ]
297 ]
301 res = self._create_reference_data(
298 res = self._create_reference_data(
302 repo, self.db_repo_name, refs_to_create)
299 repo, self.db_repo_name, refs_to_create)
303 data = {
300 data = {
304 'more': False,
301 'more': False,
305 'results': res
302 'results': res
306 }
303 }
307 return data
304 return data
308
305
309 @LoginRequired()
306 @LoginRequired()
310 @HasRepoPermissionAnyDecorator(
307 @HasRepoPermissionAnyDecorator(
311 'repository.read', 'repository.write', 'repository.admin')
308 'repository.read', 'repository.write', 'repository.admin')
312 @view_config(
309 @view_config(
313 route_name='repo_refs_changelog_data', request_method='GET',
310 route_name='repo_refs_changelog_data', request_method='GET',
314 renderer='json_ext')
311 renderer='json_ext')
315 def repo_refs_changelog_data(self):
312 def repo_refs_changelog_data(self):
316 _ = self.request.translate
313 _ = self.request.translate
317 self.load_default_context()
314 self.load_default_context()
318
315
319 repo = self.rhodecode_vcs_repo
316 repo = self.rhodecode_vcs_repo
320
317
321 refs_to_create = [
318 refs_to_create = [
322 (_("Branches"), repo.branches, 'branch'),
319 (_("Branches"), repo.branches, 'branch'),
323 (_("Closed branches"), repo.branches_closed, 'branch_closed'),
320 (_("Closed branches"), repo.branches_closed, 'branch_closed'),
324 # TODO: enable when vcs can handle bookmarks filters
321 # TODO: enable when vcs can handle bookmarks filters
325 # (_("Bookmarks"), repo.bookmarks, "book"),
322 # (_("Bookmarks"), repo.bookmarks, "book"),
326 ]
323 ]
327 res = self._create_reference_data(
324 res = self._create_reference_data(
328 repo, self.db_repo_name, refs_to_create)
325 repo, self.db_repo_name, refs_to_create)
329 data = {
326 data = {
330 'more': False,
327 'more': False,
331 'results': res
328 'results': res
332 }
329 }
333 return data
330 return data
334
331
335 def _create_reference_data(self, repo, full_repo_name, refs_to_create):
332 def _create_reference_data(self, repo, full_repo_name, refs_to_create):
336 format_ref_id = utils.get_format_ref_id(repo)
333 format_ref_id = utils.get_format_ref_id(repo)
337
334
338 result = []
335 result = []
339 for title, refs, ref_type in refs_to_create:
336 for title, refs, ref_type in refs_to_create:
340 if refs:
337 if refs:
341 result.append({
338 result.append({
342 'text': title,
339 'text': title,
343 'children': self._create_reference_items(
340 'children': self._create_reference_items(
344 repo, full_repo_name, refs, ref_type,
341 repo, full_repo_name, refs, ref_type,
345 format_ref_id),
342 format_ref_id),
346 })
343 })
347 return result
344 return result
348
345
349 def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
346 def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
350 format_ref_id):
347 format_ref_id):
351 result = []
348 result = []
352 is_svn = h.is_svn(repo)
349 is_svn = h.is_svn(repo)
353 for ref_name, raw_id in refs.iteritems():
350 for ref_name, raw_id in refs.iteritems():
354 files_url = self._create_files_url(
351 files_url = self._create_files_url(
355 repo, full_repo_name, ref_name, raw_id, is_svn)
352 repo, full_repo_name, ref_name, raw_id, is_svn)
356 result.append({
353 result.append({
357 'text': ref_name,
354 'text': ref_name,
358 'id': format_ref_id(ref_name, raw_id),
355 'id': format_ref_id(ref_name, raw_id),
359 'raw_id': raw_id,
356 'raw_id': raw_id,
360 'type': ref_type,
357 'type': ref_type,
361 'files_url': files_url,
358 'files_url': files_url,
362 })
359 })
363 return result
360 return result
364
361
365 def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
362 def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
366 use_commit_id = '/' in ref_name or is_svn
363 use_commit_id = '/' in ref_name or is_svn
367 return h.route_path(
364 return h.route_path(
368 'repo_files',
365 'repo_files',
369 repo_name=full_repo_name,
366 repo_name=full_repo_name,
370 f_path=ref_name if is_svn else '',
367 f_path=ref_name if is_svn else '',
371 commit_id=raw_id if use_commit_id else ref_name,
368 commit_id=raw_id if use_commit_id else ref_name,
372 _query=dict(at=ref_name))
369 _query=dict(at=ref_name))
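The repo_stats() hunk above replaces Beaker's cache_manager.get(_cache_key, createfunc=compute_stats) lookup with a dogpile.cache-style region and a cache_on_arguments decorator, so the cache key is now derived from the decorated function's arguments plus a per-repository namespace. The following is only a rough, self-contained sketch of that pattern using plain dogpile.cache with an in-memory backend; it is not RhodeCode's rc_cache wrapper, and the namespace string, expiration time and function body are placeholders:

from dogpile.cache import make_region

# hypothetical stand-in for rc_cache.get_or_create_region('cache_repo', ...);
# the real region is file-backed and configured from the rc_cache.* settings
region = make_region().configure(
    'dogpile.cache.memory',
    expiration_time=60,
)

@region.cache_on_arguments(namespace='cache_repo.1')
def compute_stats(repo_id, commit_id, show_stats):
    # expensive work runs only on a cache miss; dogpile builds the key from
    # the namespace plus the positional arguments
    return {'size': 0, 'code_stats': {}}

compute_stats(1, 'abc123', True)             # computed and stored
compute_stats(1, 'abc123', True)             # served from the cache
compute_stats.invalidate(1, 'abc123', True)  # drops the cached entry

One practical consequence visible in the diff is that anything meant to influence the cache key (repo_id, commit_id, show_stats) now has to be passed as an argument rather than closed over, which is why compute_stats gained parameters.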
@@ -1,476 +1,483 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import os
21 import os
22 import logging
22 import logging
23 import traceback
23 import traceback
24 import collections
24 import collections
25 import tempfile
25 import tempfile
26
26
27 from paste.gzipper import make_gzip_middleware
27 from paste.gzipper import make_gzip_middleware
28 from pyramid.wsgi import wsgiapp
28 from pyramid.wsgi import wsgiapp
29 from pyramid.authorization import ACLAuthorizationPolicy
29 from pyramid.authorization import ACLAuthorizationPolicy
30 from pyramid.config import Configurator
30 from pyramid.config import Configurator
31 from pyramid.settings import asbool, aslist
31 from pyramid.settings import asbool, aslist
32 from pyramid.httpexceptions import (
32 from pyramid.httpexceptions import (
33 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
33 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
34 from pyramid.events import ApplicationCreated
34 from pyramid.events import ApplicationCreated
35 from pyramid.renderers import render_to_response
35 from pyramid.renderers import render_to_response
36
36
37 from rhodecode.model import meta
37 from rhodecode.model import meta
38 from rhodecode.config import patches
38 from rhodecode.config import patches
39 from rhodecode.config import utils as config_utils
39 from rhodecode.config import utils as config_utils
40 from rhodecode.config.environment import load_pyramid_environment
40 from rhodecode.config.environment import load_pyramid_environment
41
41
42 from rhodecode.lib.middleware.vcs import VCSMiddleware
42 from rhodecode.lib.middleware.vcs import VCSMiddleware
43 from rhodecode.lib.request import Request
43 from rhodecode.lib.request import Request
44 from rhodecode.lib.vcs import VCSCommunicationError
44 from rhodecode.lib.vcs import VCSCommunicationError
45 from rhodecode.lib.exceptions import VCSServerUnavailable
45 from rhodecode.lib.exceptions import VCSServerUnavailable
46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
46 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 from rhodecode.lib.celerylib.loader import configure_celery
48 from rhodecode.lib.celerylib.loader import configure_celery
49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
50 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
51 from rhodecode.subscribers import (
51 from rhodecode.subscribers import (
52 scan_repositories_if_enabled, write_js_routes_if_enabled,
52 scan_repositories_if_enabled, write_js_routes_if_enabled,
53 write_metadata_if_needed, inject_app_settings)
53 write_metadata_if_needed, inject_app_settings)
54
54
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 def is_http_error(response):
59 def is_http_error(response):
60 # error which should have traceback
60 # error which should have traceback
61 return response.status_code > 499
61 return response.status_code > 499
62
62
63
63
64 def make_pyramid_app(global_config, **settings):
64 def make_pyramid_app(global_config, **settings):
65 """
65 """
66 Constructs the WSGI application based on Pyramid.
66 Constructs the WSGI application based on Pyramid.
67
67
68 Specials:
68 Specials:
69
69
70 * The application can also be integrated like a plugin via the call to
70 * The application can also be integrated like a plugin via the call to
71 `includeme`. This is accompanied with the other utility functions which
71 `includeme`. This is accompanied with the other utility functions which
72 are called. Changing this should be done with great care to not break
72 are called. Changing this should be done with great care to not break
73 cases when these fragments are assembled from another place.
73 cases when these fragments are assembled from another place.
74
74
75 """
75 """
76
76
77 # Allows to use format style "{ENV_NAME}" placeholders in the configuration. It
77 # Allows to use format style "{ENV_NAME}" placeholders in the configuration. It
78 # will be replaced by the value of the environment variable "NAME" in this case.
78 # will be replaced by the value of the environment variable "NAME" in this case.
79 environ = {
79 environ = {
80 'ENV_{}'.format(key): value for key, value in os.environ.items()}
80 'ENV_{}'.format(key): value for key, value in os.environ.items()}
81
81
82 global_config = _substitute_values(global_config, environ)
82 global_config = _substitute_values(global_config, environ)
83 settings = _substitute_values(settings, environ)
83 settings = _substitute_values(settings, environ)
84
84
85 sanitize_settings_and_apply_defaults(settings)
85 sanitize_settings_and_apply_defaults(settings)
86
86
87 config = Configurator(settings=settings)
87 config = Configurator(settings=settings)
88
88
89 # Apply compatibility patches
89 # Apply compatibility patches
90 patches.inspect_getargspec()
90 patches.inspect_getargspec()
91
91
92 load_pyramid_environment(global_config, settings)
92 load_pyramid_environment(global_config, settings)
93
93
94 # Static file view comes first
94 # Static file view comes first
95 includeme_first(config)
95 includeme_first(config)
96
96
97 includeme(config)
97 includeme(config)
98
98
99 pyramid_app = config.make_wsgi_app()
99 pyramid_app = config.make_wsgi_app()
100 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
100 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
101 pyramid_app.config = config
101 pyramid_app.config = config
102
102
103 config.configure_celery(global_config['__file__'])
103 config.configure_celery(global_config['__file__'])
104 # creating the app uses a connection - return it after we are done
104 # creating the app uses a connection - return it after we are done
105 meta.Session.remove()
105 meta.Session.remove()
106
106
107 log.info('Pyramid app %s created and configured.', pyramid_app)
107 log.info('Pyramid app %s created and configured.', pyramid_app)
108 return pyramid_app
108 return pyramid_app
109
109
110
110
111 def not_found_view(request):
111 def not_found_view(request):
112 """
112 """
113 This creates the view which should be registered as not-found-view to
113 This creates the view which should be registered as not-found-view to
114 pyramid.
114 pyramid.
115 """
115 """
116
116
117 if not getattr(request, 'vcs_call', None):
117 if not getattr(request, 'vcs_call', None):
118 # handle like regular case with our error_handler
118 # handle like regular case with our error_handler
119 return error_handler(HTTPNotFound(), request)
119 return error_handler(HTTPNotFound(), request)
120
120
121 # handle not found view as a vcs call
121 # handle not found view as a vcs call
122 settings = request.registry.settings
122 settings = request.registry.settings
123 ae_client = getattr(request, 'ae_client', None)
123 ae_client = getattr(request, 'ae_client', None)
124 vcs_app = VCSMiddleware(
124 vcs_app = VCSMiddleware(
125 HTTPNotFound(), request.registry, settings,
125 HTTPNotFound(), request.registry, settings,
126 appenlight_client=ae_client)
126 appenlight_client=ae_client)
127
127
128 return wsgiapp(vcs_app)(None, request)
128 return wsgiapp(vcs_app)(None, request)
129
129
130
130
131 def error_handler(exception, request):
131 def error_handler(exception, request):
132 import rhodecode
132 import rhodecode
133 from rhodecode.lib import helpers
133 from rhodecode.lib import helpers
134
134
135 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
135 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
136
136
137 base_response = HTTPInternalServerError()
137 base_response = HTTPInternalServerError()
138 # prefer original exception for the response since it may have headers set
138 # prefer original exception for the response since it may have headers set
139 if isinstance(exception, HTTPException):
139 if isinstance(exception, HTTPException):
140 base_response = exception
140 base_response = exception
141 elif isinstance(exception, VCSCommunicationError):
141 elif isinstance(exception, VCSCommunicationError):
142 base_response = VCSServerUnavailable()
142 base_response = VCSServerUnavailable()
143
143
144 if is_http_error(base_response):
144 if is_http_error(base_response):
145 log.exception(
145 log.exception(
146 'error occurred handling this request for path: %s', request.path)
146 'error occurred handling this request for path: %s', request.path)
147
147
148 error_explanation = base_response.explanation or str(base_response)
148 error_explanation = base_response.explanation or str(base_response)
149 if base_response.status_code == 404:
149 if base_response.status_code == 404:
150 error_explanation += " Or you don't have permission to access it."
150 error_explanation += " Or you don't have permission to access it."
151 c = AttributeDict()
151 c = AttributeDict()
152 c.error_message = base_response.status
152 c.error_message = base_response.status
153 c.error_explanation = error_explanation
153 c.error_explanation = error_explanation
154 c.visual = AttributeDict()
154 c.visual = AttributeDict()
155
155
156 c.visual.rhodecode_support_url = (
156 c.visual.rhodecode_support_url = (
157 request.registry.settings.get('rhodecode_support_url') or
157 request.registry.settings.get('rhodecode_support_url') or
158 request.route_url('rhodecode_support')
158 request.route_url('rhodecode_support')
159 )
159 )
160 c.redirect_time = 0
160 c.redirect_time = 0
161 c.rhodecode_name = rhodecode_title
161 c.rhodecode_name = rhodecode_title
162 if not c.rhodecode_name:
162 if not c.rhodecode_name:
163 c.rhodecode_name = 'Rhodecode'
163 c.rhodecode_name = 'Rhodecode'
164
164
165 c.causes = []
165 c.causes = []
166 if is_http_error(base_response):
166 if is_http_error(base_response):
167 c.causes.append('Server is overloaded.')
167 c.causes.append('Server is overloaded.')
168 c.causes.append('Server database connection is lost.')
168 c.causes.append('Server database connection is lost.')
169 c.causes.append('Server expected unhandled error.')
169 c.causes.append('Server expected unhandled error.')
170
170
171 if hasattr(base_response, 'causes'):
171 if hasattr(base_response, 'causes'):
172 c.causes = base_response.causes
172 c.causes = base_response.causes
173
173
174 c.messages = helpers.flash.pop_messages(request=request)
174 c.messages = helpers.flash.pop_messages(request=request)
175 c.traceback = traceback.format_exc()
175 c.traceback = traceback.format_exc()
176 response = render_to_response(
176 response = render_to_response(
177 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
177 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
178 response=base_response)
178 response=base_response)
179
179
180 return response
180 return response
181
181
182
182
183 def includeme_first(config):
183 def includeme_first(config):
184 # redirect automatic browser favicon.ico requests to correct place
184 # redirect automatic browser favicon.ico requests to correct place
185 def favicon_redirect(context, request):
185 def favicon_redirect(context, request):
186 return HTTPFound(
186 return HTTPFound(
187 request.static_path('rhodecode:public/images/favicon.ico'))
187 request.static_path('rhodecode:public/images/favicon.ico'))
188
188
189 config.add_view(favicon_redirect, route_name='favicon')
189 config.add_view(favicon_redirect, route_name='favicon')
190 config.add_route('favicon', '/favicon.ico')
190 config.add_route('favicon', '/favicon.ico')
191
191
192 def robots_redirect(context, request):
192 def robots_redirect(context, request):
193 return HTTPFound(
193 return HTTPFound(
194 request.static_path('rhodecode:public/robots.txt'))
194 request.static_path('rhodecode:public/robots.txt'))
195
195
196 config.add_view(robots_redirect, route_name='robots')
196 config.add_view(robots_redirect, route_name='robots')
197 config.add_route('robots', '/robots.txt')
197 config.add_route('robots', '/robots.txt')
198
198
199 config.add_static_view(
199 config.add_static_view(
200 '_static/deform', 'deform:static')
200 '_static/deform', 'deform:static')
201 config.add_static_view(
201 config.add_static_view(
202 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
202 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
203
203
204
204
205 def includeme(config):
205 def includeme(config):
206 settings = config.registry.settings
206 settings = config.registry.settings
207 config.set_request_factory(Request)
207 config.set_request_factory(Request)
208
208
209 # plugin information
209 # plugin information
210 config.registry.rhodecode_plugins = collections.OrderedDict()
210 config.registry.rhodecode_plugins = collections.OrderedDict()
211
211
212 config.add_directive(
212 config.add_directive(
213 'register_rhodecode_plugin', register_rhodecode_plugin)
213 'register_rhodecode_plugin', register_rhodecode_plugin)
214
214
215 config.add_directive('configure_celery', configure_celery)
215 config.add_directive('configure_celery', configure_celery)
216
216
217 if asbool(settings.get('appenlight', 'false')):
217 if asbool(settings.get('appenlight', 'false')):
218 config.include('appenlight_client.ext.pyramid_tween')
218 config.include('appenlight_client.ext.pyramid_tween')
219
219
220 # Includes which are required. The application would fail without them.
220 # Includes which are required. The application would fail without them.
221 config.include('pyramid_mako')
221 config.include('pyramid_mako')
222 config.include('pyramid_beaker')
222 config.include('pyramid_beaker')
223 config.include('rhodecode.lib.caches')
223 config.include('rhodecode.lib.caches')
224 config.include('rhodecode.lib.rc_cache')
224 config.include('rhodecode.lib.rc_cache')
225
225
226 config.include('rhodecode.authentication')
226 config.include('rhodecode.authentication')
227 config.include('rhodecode.integrations')
227 config.include('rhodecode.integrations')
228
228
229 # apps
229 # apps
230 config.include('rhodecode.apps._base')
230 config.include('rhodecode.apps._base')
231 config.include('rhodecode.apps.ops')
231 config.include('rhodecode.apps.ops')
232
232
233 config.include('rhodecode.apps.admin')
233 config.include('rhodecode.apps.admin')
234 config.include('rhodecode.apps.channelstream')
234 config.include('rhodecode.apps.channelstream')
235 config.include('rhodecode.apps.login')
235 config.include('rhodecode.apps.login')
236 config.include('rhodecode.apps.home')
236 config.include('rhodecode.apps.home')
237 config.include('rhodecode.apps.journal')
237 config.include('rhodecode.apps.journal')
238 config.include('rhodecode.apps.repository')
238 config.include('rhodecode.apps.repository')
239 config.include('rhodecode.apps.repo_group')
239 config.include('rhodecode.apps.repo_group')
240 config.include('rhodecode.apps.user_group')
240 config.include('rhodecode.apps.user_group')
241 config.include('rhodecode.apps.search')
241 config.include('rhodecode.apps.search')
242 config.include('rhodecode.apps.user_profile')
242 config.include('rhodecode.apps.user_profile')
243 config.include('rhodecode.apps.user_group_profile')
243 config.include('rhodecode.apps.user_group_profile')
244 config.include('rhodecode.apps.my_account')
244 config.include('rhodecode.apps.my_account')
245 config.include('rhodecode.apps.svn_support')
245 config.include('rhodecode.apps.svn_support')
246 config.include('rhodecode.apps.ssh_support')
246 config.include('rhodecode.apps.ssh_support')
247 config.include('rhodecode.apps.gist')
247 config.include('rhodecode.apps.gist')
248
248
249 config.include('rhodecode.apps.debug_style')
249 config.include('rhodecode.apps.debug_style')
250 config.include('rhodecode.tweens')
250 config.include('rhodecode.tweens')
251 config.include('rhodecode.api')
251 config.include('rhodecode.api')
252
252
253 config.add_route(
253 config.add_route(
254 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
254 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
255
255
256 config.add_translation_dirs('rhodecode:i18n/')
256 config.add_translation_dirs('rhodecode:i18n/')
257 settings['default_locale_name'] = settings.get('lang', 'en')
257 settings['default_locale_name'] = settings.get('lang', 'en')
258
258
259 # Add subscribers.
259 # Add subscribers.
260 config.add_subscriber(inject_app_settings, ApplicationCreated)
260 config.add_subscriber(inject_app_settings, ApplicationCreated)
261 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
261 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
262 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
262 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
263 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
263 config.add_subscriber(write_js_routes_if_enabled, ApplicationCreated)
264
264
265 # events
265 # events
266 # TODO(marcink): this should be done when pyramid migration is finished
266 # TODO(marcink): this should be done when pyramid migration is finished
267 # config.add_subscriber(
267 # config.add_subscriber(
268 # 'rhodecode.integrations.integrations_event_handler',
268 # 'rhodecode.integrations.integrations_event_handler',
269 # 'rhodecode.events.RhodecodeEvent')
269 # 'rhodecode.events.RhodecodeEvent')
270
270
271 # request custom methods
271 # request custom methods
272 config.add_request_method(
272 config.add_request_method(
273 'rhodecode.lib.partial_renderer.get_partial_renderer',
273 'rhodecode.lib.partial_renderer.get_partial_renderer',
274 'get_partial_renderer')
274 'get_partial_renderer')
275
275
276 # Set the authorization policy.
276 # Set the authorization policy.
277 authz_policy = ACLAuthorizationPolicy()
277 authz_policy = ACLAuthorizationPolicy()
278 config.set_authorization_policy(authz_policy)
278 config.set_authorization_policy(authz_policy)
279
279
280 # Set the default renderer for HTML templates to mako.
280 # Set the default renderer for HTML templates to mako.
281 config.add_mako_renderer('.html')
281 config.add_mako_renderer('.html')
282
282
283 config.add_renderer(
283 config.add_renderer(
284 name='json_ext',
284 name='json_ext',
285 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
285 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
286
286
287 # include RhodeCode plugins
287 # include RhodeCode plugins
288 includes = aslist(settings.get('rhodecode.includes', []))
288 includes = aslist(settings.get('rhodecode.includes', []))
289 for inc in includes:
289 for inc in includes:
290 config.include(inc)
290 config.include(inc)
291
291
292 # custom not found view, if our pyramid app doesn't know how to handle
292 # custom not found view, if our pyramid app doesn't know how to handle
293 # the request pass it to potential VCS handling ap
293 # the request pass it to potential VCS handling ap
294 config.add_notfound_view(not_found_view)
294 config.add_notfound_view(not_found_view)
295 if not settings.get('debugtoolbar.enabled', False):
295 if not settings.get('debugtoolbar.enabled', False):
296 # disabled debugtoolbar handle all exceptions via the error_handlers
296 # disabled debugtoolbar handle all exceptions via the error_handlers
297 config.add_view(error_handler, context=Exception)
297 config.add_view(error_handler, context=Exception)
298
298
299 # all errors including 403/404/50X
299 # all errors including 403/404/50X
300 config.add_view(error_handler, context=HTTPError)
300 config.add_view(error_handler, context=HTTPError)
301
301
302
302
303 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
303 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
304 """
304 """
305 Apply outer WSGI middlewares around the application.
305 Apply outer WSGI middlewares around the application.
306 """
306 """
307 settings = config.registry.settings
307 settings = config.registry.settings
308
308
309 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
309 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
310 pyramid_app = HttpsFixup(pyramid_app, settings)
310 pyramid_app = HttpsFixup(pyramid_app, settings)
311
311
312 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
312 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
313 pyramid_app, settings)
313 pyramid_app, settings)
314 config.registry.ae_client = _ae_client
314 config.registry.ae_client = _ae_client
315
315
316 if settings['gzip_responses']:
316 if settings['gzip_responses']:
317 pyramid_app = make_gzip_middleware(
317 pyramid_app = make_gzip_middleware(
318 pyramid_app, settings, compress_level=1)
318 pyramid_app, settings, compress_level=1)
319
319
320 # this should be the outer most middleware in the wsgi stack since
320 # this should be the outer most middleware in the wsgi stack since
321 # middleware like Routes make database calls
321 # middleware like Routes make database calls
322 def pyramid_app_with_cleanup(environ, start_response):
322 def pyramid_app_with_cleanup(environ, start_response):
323 try:
323 try:
324 return pyramid_app(environ, start_response)
324 return pyramid_app(environ, start_response)
325 finally:
325 finally:
326 # Dispose current database session and rollback uncommitted
326 # Dispose current database session and rollback uncommitted
327 # transactions.
327 # transactions.
328 meta.Session.remove()
328 meta.Session.remove()
329
329
330 # In a single threaded mode server, on non sqlite db we should have
330 # In a single threaded mode server, on non sqlite db we should have
331 # '0 Current Checked out connections' at the end of a request,
331 # '0 Current Checked out connections' at the end of a request,
332 # if not, then something, somewhere is leaving a connection open
332 # if not, then something, somewhere is leaving a connection open
333 pool = meta.Base.metadata.bind.engine.pool
333 pool = meta.Base.metadata.bind.engine.pool
334 log.debug('sa pool status: %s', pool.status())
334 log.debug('sa pool status: %s', pool.status())
335
335
336 return pyramid_app_with_cleanup
336 return pyramid_app_with_cleanup
337
337
338
338
339 def sanitize_settings_and_apply_defaults(settings):
339 def sanitize_settings_and_apply_defaults(settings):
340 """
340 """
341 Applies settings defaults and does all type conversion.
341 Applies settings defaults and does all type conversion.
342
342
343 We would move all settings parsing and preparation into this place, so that
343 We would move all settings parsing and preparation into this place, so that
344 we have only one place left which deals with this part. The remaining parts
344 we have only one place left which deals with this part. The remaining parts
345 of the application would start to rely fully on well prepared settings.
345 of the application would start to rely fully on well prepared settings.
346
346
347 This piece would later be split up per topic to avoid a big fat monster
347 This piece would later be split up per topic to avoid a big fat monster
348 function.
348 function.
349 """
349 """
350
350
351 settings.setdefault('rhodecode.edition', 'Community Edition')
351 settings.setdefault('rhodecode.edition', 'Community Edition')
352
352
353 if 'mako.default_filters' not in settings:
353 if 'mako.default_filters' not in settings:
354 # set custom default filters if we don't have it defined
354 # set custom default filters if we don't have it defined
355 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
355 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
356 settings['mako.default_filters'] = 'h_filter'
356 settings['mako.default_filters'] = 'h_filter'
357
357
358 if 'mako.directories' not in settings:
358 if 'mako.directories' not in settings:
359 mako_directories = settings.setdefault('mako.directories', [
359 mako_directories = settings.setdefault('mako.directories', [
360 # Base templates of the original application
360 # Base templates of the original application
361 'rhodecode:templates',
361 'rhodecode:templates',
362 ])
362 ])
363 log.debug(
363 log.debug(
364 "Using the following Mako template directories: %s",
364 "Using the following Mako template directories: %s",
365 mako_directories)
365 mako_directories)
366
366
367 # Default includes, possible to change as a user
367 # Default includes, possible to change as a user
368 pyramid_includes = settings.setdefault('pyramid.includes', [
368 pyramid_includes = settings.setdefault('pyramid.includes', [
369 'rhodecode.lib.middleware.request_wrapper',
369 'rhodecode.lib.middleware.request_wrapper',
370 ])
370 ])
371 log.debug(
371 log.debug(
372 "Using the following pyramid.includes: %s",
372 "Using the following pyramid.includes: %s",
373 pyramid_includes)
373 pyramid_includes)
374
374
375 # TODO: johbo: Re-think this, usually the call to config.include
375 # TODO: johbo: Re-think this, usually the call to config.include
376 # should allow to pass in a prefix.
376 # should allow to pass in a prefix.
377 settings.setdefault('rhodecode.api.url', '/_admin/api')
377 settings.setdefault('rhodecode.api.url', '/_admin/api')
378
378
379 # Sanitize generic settings.
379 # Sanitize generic settings.
380 _list_setting(settings, 'default_encoding', 'UTF-8')
380 _list_setting(settings, 'default_encoding', 'UTF-8')
381 _bool_setting(settings, 'is_test', 'false')
381 _bool_setting(settings, 'is_test', 'false')
382 _bool_setting(settings, 'gzip_responses', 'false')
382 _bool_setting(settings, 'gzip_responses', 'false')
383
383
384 # Call split out functions that sanitize settings for each topic.
384 # Call split out functions that sanitize settings for each topic.
385 _sanitize_appenlight_settings(settings)
385 _sanitize_appenlight_settings(settings)
386 _sanitize_vcs_settings(settings)
386 _sanitize_vcs_settings(settings)
387 _sanitize_cache_settings(settings)
387 _sanitize_cache_settings(settings)
388
388
389 # configure instance id
389 # configure instance id
390 config_utils.set_instance_id(settings)
390 config_utils.set_instance_id(settings)
391
391
392 return settings
392 return settings
393
393
394
394
395 def _sanitize_appenlight_settings(settings):
395 def _sanitize_appenlight_settings(settings):
396 _bool_setting(settings, 'appenlight', 'false')
396 _bool_setting(settings, 'appenlight', 'false')
397
397
398
398
399 def _sanitize_vcs_settings(settings):
399 def _sanitize_vcs_settings(settings):
400 """
400 """
401 Applies settings defaults and does type conversion for all VCS related
401 Applies settings defaults and does type conversion for all VCS related
402 settings.
402 settings.
403 """
403 """
404 _string_setting(settings, 'vcs.svn.compatible_version', '')
404 _string_setting(settings, 'vcs.svn.compatible_version', '')
405 _string_setting(settings, 'git_rev_filter', '--all')
405 _string_setting(settings, 'git_rev_filter', '--all')
406 _string_setting(settings, 'vcs.hooks.protocol', 'http')
406 _string_setting(settings, 'vcs.hooks.protocol', 'http')
407 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
407 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
408 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
408 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
409 _string_setting(settings, 'vcs.server', '')
409 _string_setting(settings, 'vcs.server', '')
410 _string_setting(settings, 'vcs.server.log_level', 'debug')
410 _string_setting(settings, 'vcs.server.log_level', 'debug')
411 _string_setting(settings, 'vcs.server.protocol', 'http')
411 _string_setting(settings, 'vcs.server.protocol', 'http')
412 _bool_setting(settings, 'startup.import_repos', 'false')
412 _bool_setting(settings, 'startup.import_repos', 'false')
413 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
413 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
414 _bool_setting(settings, 'vcs.server.enable', 'true')
414 _bool_setting(settings, 'vcs.server.enable', 'true')
415 _bool_setting(settings, 'vcs.start_server', 'false')
415 _bool_setting(settings, 'vcs.start_server', 'false')
416 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
416 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
417 _int_setting(settings, 'vcs.connection_timeout', 3600)
417 _int_setting(settings, 'vcs.connection_timeout', 3600)
418
418
419 # Support legacy values of vcs.scm_app_implementation. Legacy
419 # Support legacy values of vcs.scm_app_implementation. Legacy
420 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
420 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
421 # which is now mapped to 'http'.
421 # which is now mapped to 'http'.
422 scm_app_impl = settings['vcs.scm_app_implementation']
422 scm_app_impl = settings['vcs.scm_app_implementation']
423 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
423 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
424 settings['vcs.scm_app_implementation'] = 'http'
424 settings['vcs.scm_app_implementation'] = 'http'
425
425
426
426
427 def _sanitize_cache_settings(settings):
427 def _sanitize_cache_settings(settings):
428 _string_setting(settings, 'cache_dir',
428 _string_setting(settings, 'cache_dir',
429 os.path.join(tempfile.gettempdir(), 'rc_cache'))
429 os.path.join(tempfile.gettempdir(), 'rc_cache'))
430
430
431 _string_setting(settings, 'rc_cache.cache_perms.backend',
431 _string_setting(settings, 'rc_cache.cache_perms.backend',
432 'dogpile.cache.rc.file_namespace')
432 'dogpile.cache.rc.file_namespace')
433 _int_setting(settings, 'rc_cache.cache_perms.expiration_time',
433 _int_setting(settings, 'rc_cache.cache_perms.expiration_time',
434 60)
434 60)
435 _string_setting(settings, 'rc_cache.cache_perms.arguments.filename',
435 _string_setting(settings, 'rc_cache.cache_perms.arguments.filename',
436 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
436 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
437
437
438 _string_setting(settings, 'rc_cache.cache_repo.backend',
439 'dogpile.cache.rc.file_namespace')
440 _int_setting(settings, 'rc_cache.cache_repo.expiration_time',
441 60)
442 _string_setting(settings, 'rc_cache.cache_repo.arguments.filename',
443 os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
444
438
445
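
For orientation, a minimal sketch of what the rc_cache sanitization above yields when the .ini file defines no rc_cache.* keys at all. It assumes the module-private helpers shown here are in scope (e.g. executed inside this module), and the exact temp paths depend on the host:

    settings = {}   # as if the .ini file contained no rc_cache.* entries
    _sanitize_cache_settings(settings)

    settings['rc_cache.cache_repo.backend']
    # -> 'dogpile.cache.rc.file_namespace' (file-per-namespace dogpile backend)
    settings['rc_cache.cache_repo.expiration_time']
    # -> 60, coerced to int by _int_setting
    settings['rc_cache.cache_repo.arguments.filename']
    # -> '<tempdir>/rc_cache_2', unless overridden in the .ini file

Any rc_cache.* key that is present in the .ini file wins over these defaults; the helpers only fill in what is missing.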
439 def _int_setting(settings, name, default):
446 def _int_setting(settings, name, default):
440 settings[name] = int(settings.get(name, default))
447 settings[name] = int(settings.get(name, default))
441
448
442
449
443 def _bool_setting(settings, name, default):
450 def _bool_setting(settings, name, default):
444 input_val = settings.get(name, default)
451 input_val = settings.get(name, default)
445 if isinstance(input_val, unicode):
452 if isinstance(input_val, unicode):
446 input_val = input_val.encode('utf8')
453 input_val = input_val.encode('utf8')
447 settings[name] = asbool(input_val)
454 settings[name] = asbool(input_val)
448
455
449
456
450 def _list_setting(settings, name, default):
457 def _list_setting(settings, name, default):
451 raw_value = settings.get(name, default)
458 raw_value = settings.get(name, default)
452
459
453 old_separator = ','
460 old_separator = ','
454 if old_separator in raw_value:
461 if old_separator in raw_value:
455 # If we get a comma separated list, pass it to our own function.
462 # If we get a comma separated list, pass it to our own function.
456 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
463 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
457 else:
464 else:
458 # Otherwise we assume it uses Pyramid's space/newline separation.
465 # Otherwise we assume it uses Pyramid's space/newline separation.
459 settings[name] = aslist(raw_value)
466 settings[name] = aslist(raw_value)
460
467
461
468
462 def _string_setting(settings, name, default, lower=True):
469 def _string_setting(settings, name, default, lower=True):
463 value = settings.get(name, default)
470 value = settings.get(name, default)
464 if lower:
471 if lower:
465 value = value.lower()
472 value = value.lower()
466 settings[name] = value
473 settings[name] = value
467
474
468
475
469 def _substitute_values(mapping, substitutions):
476 def _substitute_values(mapping, substitutions):
470 result = {
477 result = {
471 # Note: Cannot use regular replacements, since they would clash
478 # Note: Cannot use regular replacements, since they would clash
472 # with the implementation of ConfigParser. Using "format" instead.
479 # with the implementation of ConfigParser. Using "format" instead.
473 key: value.format(**substitutions)
480 key: value.format(**substitutions)
474 for key, value in mapping.items()
481 for key, value in mapping.items()
475 }
482 }
476 return result
483 return result
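
The conversion helpers above are small but drive how .ini values become Python types; a rough sketch with illustrative values only, again assuming the module-private helpers are in scope:

    settings = {'vcs.backends': 'hg, git, svn', 'gzip_responses': 'False'}

    _list_setting(settings, 'vcs.backends', 'hg, git, svn')
    # comma present -> rhodecode_aslist splits and strips: ['hg', 'git', 'svn']

    _bool_setting(settings, 'gzip_responses', 'false')
    # asbool('False') -> False

    # _substitute_values uses str.format-style markers to avoid clashing
    # with ConfigParser's %(...)s interpolation
    _substitute_values({'cache_dir': '{here}/data'}, {'here': '/etc/rhodecode'})
    # -> {'cache_dir': '/etc/rhodecode/data'}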
@@ -1,295 +1,226 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2018 RhodeCode GmbH
3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import functools
20 import functools
21
21
22 import beaker
22 import beaker
23 import logging
23 import logging
24 import threading
24 import threading
25
25
26 from beaker.cache import _cache_decorate, cache_regions, region_invalidate
26 from beaker.cache import _cache_decorate, cache_regions, region_invalidate
27 from sqlalchemy.exc import IntegrityError
27 from sqlalchemy.exc import IntegrityError
28
28
29 from rhodecode.lib.utils import safe_str, sha1
29 from rhodecode.lib.utils import safe_str, sha1
30 from rhodecode.model.db import Session, CacheKey
30 from rhodecode.model.db import Session, CacheKey
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34 FILE_TREE = 'cache_file_tree'
35 FILE_TREE_META = 'cache_file_tree_metadata'
36 FILE_SEARCH_TREE_META = 'cache_file_search_metadata'
37 SUMMARY_STATS = 'cache_summary_stats'
38
39 # This list of caches gets purged when invalidation happens
40 USED_REPO_CACHES = (FILE_TREE, FILE_SEARCH_TREE_META)
41
34
42 DEFAULT_CACHE_MANAGER_CONFIG = {
35 DEFAULT_CACHE_MANAGER_CONFIG = {
43 'type': 'memorylru_base',
36 'type': 'memorylru_base',
44 'max_items': 10240,
37 'max_items': 10240,
45 'key_length': 256,
38 'key_length': 256,
46 'enabled': True
39 'enabled': True
47 }
40 }
48
41
49
42
50 def get_default_cache_settings(settings):
43 def get_default_cache_settings(settings):
51 cache_settings = {}
44 cache_settings = {}
52 for key in settings.keys():
45 for key in settings.keys():
53 for prefix in ['beaker.cache.', 'cache.']:
46 for prefix in ['beaker.cache.', 'cache.']:
54 if key.startswith(prefix):
47 if key.startswith(prefix):
55 name = key.split(prefix)[1].strip()
48 name = key.split(prefix)[1].strip()
56 cache_settings[name] = settings[key].strip()
49 cache_settings[name] = settings[key].strip()
57 return cache_settings
50 return cache_settings
58
51
59
52
60 # set cache regions for beaker so celery can utilise it
53 # set cache regions for beaker so celery can utilise it
61 def configure_caches(settings, default_region_settings=None):
54 def configure_caches(settings, default_region_settings=None):
62 cache_settings = {'regions': None}
55 cache_settings = {'regions': None}
63 # main cache settings used as default ...
56 # main cache settings used as default ...
64 cache_settings.update(get_default_cache_settings(settings))
57 cache_settings.update(get_default_cache_settings(settings))
65 default_region_settings = default_region_settings or \
58 default_region_settings = default_region_settings or \
66 {'type': DEFAULT_CACHE_MANAGER_CONFIG['type']}
59 {'type': DEFAULT_CACHE_MANAGER_CONFIG['type']}
67 if cache_settings['regions']:
60 if cache_settings['regions']:
68 for region in cache_settings['regions'].split(','):
61 for region in cache_settings['regions'].split(','):
69 region = region.strip()
62 region = region.strip()
70 region_settings = default_region_settings.copy()
63 region_settings = default_region_settings.copy()
71 for key, value in cache_settings.items():
64 for key, value in cache_settings.items():
72 if key.startswith(region):
65 if key.startswith(region):
73 region_settings[key.split(region + '.')[-1]] = value
66 region_settings[key.split(region + '.')[-1]] = value
74 log.debug('Configuring cache region `%s` with settings %s',
67 log.debug('Configuring cache region `%s` with settings %s',
75 region, region_settings)
68 region, region_settings)
76 configure_cache_region(
69 configure_cache_region(
77 region, region_settings, cache_settings)
70 region, region_settings, cache_settings)
78
71
79
72
80 def configure_cache_region(
73 def configure_cache_region(
81 region_name, region_settings, default_cache_kw, default_expire=60):
74 region_name, region_settings, default_cache_kw, default_expire=60):
82 default_type = default_cache_kw.get('type', 'memory')
75 default_type = default_cache_kw.get('type', 'memory')
83 default_lock_dir = default_cache_kw.get('lock_dir')
76 default_lock_dir = default_cache_kw.get('lock_dir')
84 default_data_dir = default_cache_kw.get('data_dir')
77 default_data_dir = default_cache_kw.get('data_dir')
85
78
86 region_settings['lock_dir'] = region_settings.get('lock_dir', default_lock_dir)
79 region_settings['lock_dir'] = region_settings.get('lock_dir', default_lock_dir)
87 region_settings['data_dir'] = region_settings.get('data_dir', default_data_dir)
80 region_settings['data_dir'] = region_settings.get('data_dir', default_data_dir)
88 region_settings['type'] = region_settings.get('type', default_type)
81 region_settings['type'] = region_settings.get('type', default_type)
89 region_settings['expire'] = int(region_settings.get('expire', default_expire))
82 region_settings['expire'] = int(region_settings.get('expire', default_expire))
90
83
91 beaker.cache.cache_regions[region_name] = region_settings
84 beaker.cache.cache_regions[region_name] = region_settings
92
85
93
86
94 def get_cache_manager(region_name, cache_name, custom_ttl=None):
87 def get_cache_manager(region_name, cache_name, custom_ttl=None):
95 """
88 """
96 Creates a Beaker cache manager. Such instance can be used like that::
89 Creates a Beaker cache manager. Such instance can be used like that::
97
90
98 _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
91 _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
99 cache_manager = caches.get_cache_manager('some_namespace_name', _namespace)
92 cache_manager = caches.get_cache_manager('some_namespace_name', _namespace)
100 _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
93 _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
101 def heavy_compute():
94 def heavy_compute():
102 ...
95 ...
103 result = cache_manager.get(_cache_key, createfunc=heavy_compute)
96 result = cache_manager.get(_cache_key, createfunc=heavy_compute)
104
97
105 :param region_name: region from ini file
98 :param region_name: region from ini file
106 :param cache_name: custom cache name, usually prefix+repo_name. eg
99 :param cache_name: custom cache name, usually prefix+repo_name. eg
107 file_switcher_repo1
100 file_switcher_repo1
108 :param custom_ttl: override .ini file timeout on this cache
101 :param custom_ttl: override .ini file timeout on this cache
109 :return: instance of cache manager
102 :return: instance of cache manager
110 """
103 """
111
104
112 cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
105 cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
113 if custom_ttl:
106 if custom_ttl:
114 log.debug('Updating region %s with custom ttl: %s',
107 log.debug('Updating region %s with custom ttl: %s',
115 region_name, custom_ttl)
108 region_name, custom_ttl)
116 cache_config.update({'expire': custom_ttl})
109 cache_config.update({'expire': custom_ttl})
117
110
118 return beaker.cache.Cache._get_cache(cache_name, cache_config)
111 return beaker.cache.Cache._get_cache(cache_name, cache_config)
119
112
120
113
121 def clear_cache_manager(cache_manager):
114 def clear_cache_manager(cache_manager):
122 """
115 """
123 namespace = 'foobar'
116 namespace = 'foobar'
124 cache_manager = get_cache_manager('some_namespace_name', namespace)
117 cache_manager = get_cache_manager('some_namespace_name', namespace)
125 clear_cache_manager(cache_manager)
118 clear_cache_manager(cache_manager)
126 """
119 """
127
120
128 log.debug('Clearing all values for cache manager %s', cache_manager)
121 log.debug('Clearing all values for cache manager %s', cache_manager)
129 cache_manager.clear()
122 cache_manager.clear()
130
123
131
124
132 def clear_repo_caches(repo_name):
133 # invalidate cache manager for this repo
134 for prefix in USED_REPO_CACHES:
135 namespace = get_repo_namespace_key(prefix, repo_name)
136 cache_manager = get_cache_manager('repo_cache_long', namespace)
137 clear_cache_manager(cache_manager)
138
139
140 def compute_key_from_params(*args):
125 def compute_key_from_params(*args):
141 """
126 """
142 Helper to compute key from given params to be used in cache manager
127 Helper to compute key from given params to be used in cache manager
143 """
128 """
144 return sha1("_".join(map(safe_str, args)))
129 return sha1("_".join(map(safe_str, args)))
145
130
146
131
147 def get_repo_namespace_key(prefix, repo_name):
132 def get_repo_namespace_key(prefix, repo_name):
148 return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))
133 return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))
149
134
150
135
151 def conditional_cache(region, cache_namespace, condition, func):
152 """
153 Conditional caching function use like::
154 def _c(arg):
155 # heavy computation function
156 return data
157
158 # depending on the condition the compute is wrapped in cache or not
159 compute = conditional_cache('short_term', 'cache_namespace_id',
160 condition=True, func=func)
161 return compute(arg)
162
163 :param region: name of cache region
164 :param cache_namespace: cache namespace
165 :param condition: condition for cache to be triggered, and
166 return data cached
167 :param func: wrapped heavy function to compute
168
169 """
170 wrapped = func
171 if condition:
172 log.debug('conditional_cache: True, wrapping call of '
173 'func: %s into %s region cache', region, func)
174
175 def _cache_wrap(region_name, cache_namespace):
176 """Return a caching wrapper"""
177
178 def decorate(func):
179 @functools.wraps(func)
180 def cached(*args, **kwargs):
181 if kwargs:
182 raise AttributeError(
183 'Usage of kwargs is not allowed. '
184 'Use only positional arguments in wrapped function')
185 manager = get_cache_manager(region_name, cache_namespace)
186 cache_key = compute_key_from_params(*args)
187
188 def go():
189 return func(*args, **kwargs)
190
191 # save org function name
192 go.__name__ = '_cached_%s' % (func.__name__,)
193
194 return manager.get(cache_key, createfunc=go)
195 return cached
196
197 return decorate
198
199 cached_region = _cache_wrap(region, cache_namespace)
200 wrapped = cached_region(func)
201
202 return wrapped
203
204
205 class ActiveRegionCache(object):
136 class ActiveRegionCache(object):
206 def __init__(self, context):
137 def __init__(self, context):
207 self.context = context
138 self.context = context
208
139
209 def invalidate(self, *args, **kwargs):
140 def invalidate(self, *args, **kwargs):
210 return False
141 return False
211
142
212 def compute(self):
143 def compute(self):
213 log.debug('Context cache: getting obj %s from cache', self.context)
144 log.debug('Context cache: getting obj %s from cache', self.context)
214 return self.context.compute_func(self.context.cache_key)
145 return self.context.compute_func(self.context.cache_key)
215
146
216
147
217 class FreshRegionCache(ActiveRegionCache):
148 class FreshRegionCache(ActiveRegionCache):
218 def invalidate(self):
149 def invalidate(self):
219 log.debug('Context cache: invalidating cache for %s', self.context)
150 log.debug('Context cache: invalidating cache for %s', self.context)
220 region_invalidate(
151 region_invalidate(
221 self.context.compute_func, None, self.context.cache_key)
152 self.context.compute_func, None, self.context.cache_key)
222 return True
153 return True
223
154
224
155
225 class InvalidationContext(object):
156 class InvalidationContext(object):
226 def __repr__(self):
157 def __repr__(self):
227 return '<InvalidationContext:{}[{}]>'.format(
158 return '<InvalidationContext:{}[{}]>'.format(
228 safe_str(self.repo_name), safe_str(self.cache_type))
159 safe_str(self.repo_name), safe_str(self.cache_type))
229
160
230 def __init__(self, compute_func, repo_name, cache_type,
161 def __init__(self, compute_func, repo_name, cache_type,
231 raise_exception=False, thread_scoped=False):
162 raise_exception=False, thread_scoped=False):
232 self.compute_func = compute_func
163 self.compute_func = compute_func
233 self.repo_name = repo_name
164 self.repo_name = repo_name
234 self.cache_type = cache_type
165 self.cache_type = cache_type
235 self.cache_key = compute_key_from_params(
166 self.cache_key = compute_key_from_params(
236 repo_name, cache_type)
167 repo_name, cache_type)
237 self.raise_exception = raise_exception
168 self.raise_exception = raise_exception
238
169
239 # Append the thread id to the cache key if this invalidation context
170 # Append the thread id to the cache key if this invalidation context
240 # should be scoped to the current thread.
171 # should be scoped to the current thread.
241 if thread_scoped:
172 if thread_scoped:
242 thread_id = threading.current_thread().ident
173 thread_id = threading.current_thread().ident
243 self.cache_key = '{cache_key}_{thread_id}'.format(
174 self.cache_key = '{cache_key}_{thread_id}'.format(
244 cache_key=self.cache_key, thread_id=thread_id)
175 cache_key=self.cache_key, thread_id=thread_id)
245
176
246 def get_cache_obj(self):
177 def get_cache_obj(self):
247 cache_key = CacheKey.get_cache_key(
178 cache_key = CacheKey.get_cache_key(
248 self.repo_name, self.cache_type)
179 self.repo_name, self.cache_type)
249 cache_obj = CacheKey.get_active_cache(cache_key)
180 cache_obj = CacheKey.get_active_cache(cache_key)
250 if not cache_obj:
181 if not cache_obj:
251 cache_obj = CacheKey(cache_key, self.repo_name)
182 cache_obj = CacheKey(cache_key, self.repo_name)
252 return cache_obj
183 return cache_obj
253
184
254 def __enter__(self):
185 def __enter__(self):
255 """
186 """
256 Test if current object is valid, and return CacheRegion function
187 Test if current object is valid, and return CacheRegion function
257 that does invalidation and calculation
188 that does invalidation and calculation
258 """
189 """
259
190
260 self.cache_obj = self.get_cache_obj()
191 self.cache_obj = self.get_cache_obj()
261 if self.cache_obj.cache_active:
192 if self.cache_obj.cache_active:
262 # means our cache obj exists and is marked as active, i.e. its
193 # means our cache obj exists and is marked as active, i.e. its
263 # cache is not outdated, so we return ActiveRegionCache
194 # cache is not outdated, so we return ActiveRegionCache
264 self.skip_cache_active_change = True
195 self.skip_cache_active_change = True
265 return ActiveRegionCache(self)
196 return ActiveRegionCache(self)
266
197
267 # the key is either not existing or set to False, we return
198 # the key is either not existing or set to False, we return
268 # the real invalidator which re-computes value. We additionally set
199 # the real invalidator which re-computes value. We additionally set
269 # the flag to actually update the Database objects
200 # the flag to actually update the Database objects
270 self.skip_cache_active_change = False
201 self.skip_cache_active_change = False
271 return FreshRegionCache(self)
202 return FreshRegionCache(self)
272
203
273 def __exit__(self, exc_type, exc_val, exc_tb):
204 def __exit__(self, exc_type, exc_val, exc_tb):
274
205
275 if self.skip_cache_active_change:
206 if self.skip_cache_active_change:
276 return
207 return
277
208
278 try:
209 try:
279 self.cache_obj.cache_active = True
210 self.cache_obj.cache_active = True
280 Session().add(self.cache_obj)
211 Session().add(self.cache_obj)
281 Session().commit()
212 Session().commit()
282 except IntegrityError:
213 except IntegrityError:
283 # if we catch an integrity error, it means this object was already
214 # if we catch an integrity error, it means this object was already
284 # inserted; the assumption is that's really an edge race-condition
215 # inserted; the assumption is that's really an edge race-condition
285 # case and it's safe to skip it
216 # case and it's safe to skip it
286 Session().rollback()
217 Session().rollback()
287 except Exception:
218 except Exception:
288 log.exception('Failed to commit on cache key update')
219 log.exception('Failed to commit on cache key update')
289 Session().rollback()
220 Session().rollback()
290 if self.raise_exception:
221 if self.raise_exception:
291 raise
222 raise
292
223
293
224
294 def includeme(config):
225 def includeme(config):
295 configure_caches(config.registry.settings)
226 configure_caches(config.registry.settings)
@@ -1,66 +1,68 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2018 RhodeCode GmbH
3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from dogpile.cache import register_backend
21 from dogpile.cache import register_backend
22 from dogpile.cache import make_region
22 from dogpile.cache import make_region
23
23
24 register_backend(
24 register_backend(
25 "dogpile.cache.rc.memory_lru", "rhodecode.lib.rc_cache.backends",
25 "dogpile.cache.rc.memory_lru", "rhodecode.lib.rc_cache.backends",
26 "LRUMemoryBackend")
26 "LRUMemoryBackend")
27
27
28 register_backend(
28 register_backend(
29 "dogpile.cache.rc.file_namespace", "rhodecode.lib.rc_cache.backends",
29 "dogpile.cache.rc.file_namespace", "rhodecode.lib.rc_cache.backends",
30 "FileNamespaceBackend")
30 "FileNamespaceBackend")
31
31
32 register_backend(
32 register_backend(
33 "dogpile.cache.rc.redis", "rhodecode.lib.rc_cache.backends",
33 "dogpile.cache.rc.redis", "rhodecode.lib.rc_cache.backends",
34 "RedisPickleBackend")
34 "RedisPickleBackend")
35
35
36
36
37 from . import region_meta
37 from . import region_meta
38 from .utils import get_default_cache_settings, key_generator, get_or_create_region
38 from .utils import (
39 get_default_cache_settings, key_generator, get_or_create_region,
40 clear_cache_namespace)
39
41
40
42
41 def configure_dogpile_cache(settings):
43 def configure_dogpile_cache(settings):
42 cache_dir = settings.get('cache_dir')
44 cache_dir = settings.get('cache_dir')
43 if cache_dir:
45 if cache_dir:
44 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
46 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
45
47
46 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
48 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
47
49
48 # inspect available namespaces
50 # inspect available namespaces
49 avail_regions = set()
51 avail_regions = set()
50 for key in rc_cache_data.keys():
52 for key in rc_cache_data.keys():
51 namespace_name = key.split('.', 1)[0]
53 namespace_name = key.split('.', 1)[0]
52 avail_regions.add(namespace_name)
54 avail_regions.add(namespace_name)
53
55
54 # register them into namespace
56 # register them into namespace
55 for region_name in avail_regions:
57 for region_name in avail_regions:
56 new_region = make_region(
58 new_region = make_region(
57 name=region_name,
59 name=region_name,
58 function_key_generator=key_generator
60 function_key_generator=key_generator
59 )
61 )
60
62
61 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
63 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
62 region_meta.dogpile_cache_regions[region_name] = new_region
64 region_meta.dogpile_cache_regions[region_name] = new_region
63
65
64
66
65 def includeme(config):
67 def includeme(config):
66 configure_dogpile_cache(config.registry.settings)
68 configure_dogpile_cache(config.registry.settings)
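
As a usage note (not part of the diff itself): once configure_dogpile_cache has run, callers obtain a region via rc_cache.get_or_create_region and cache expensive calls through dogpile's get_or_create. The namespace and function names below are purely illustrative; the only assumption is that a 'cache_repo' region is configured, as the new defaults above guarantee:

    from rhodecode.lib import rc_cache

    cache_namespace_uid = 'cache_repo.some-repo'   # illustrative namespace
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    def compute_file_tree():
        # stand-in for an expensive VCS operation
        return {'nodes': ['README.rst', 'setup.py']}

    # keys are prefixed with the namespace so clear_cache_namespace can find them
    cache_key = '{}:file_tree'.format(cache_namespace_uid)
    tree = region.get_or_create(cache_key, compute_file_tree)   # computed once
    tree = region.get_or_create(cache_key, compute_file_tree)   # read from the dbm file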
@@ -1,109 +1,120 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2018 RhodeCode GmbH
3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from dogpile.cache.backends import memory as memory_backend
21 from dogpile.cache.backends import memory as memory_backend
22 from dogpile.cache.backends import file as file_backend
22 from dogpile.cache.backends import file as file_backend
23 from dogpile.cache.backends import redis as redis_backend
23 from dogpile.cache.backends import redis as redis_backend
24 from dogpile.cache.backends.file import NO_VALUE, compat
24 from dogpile.cache.backends.file import NO_VALUE, compat
25
25
26 from rhodecode.lib.memory_lru_debug import LRUDict
26 from rhodecode.lib.memory_lru_debug import LRUDict
27
27
28 _default_max_size = 1024
28 _default_max_size = 1024
29
29
30
30
31 class LRUMemoryBackend(memory_backend.MemoryBackend):
31 class LRUMemoryBackend(memory_backend.MemoryBackend):
32
32
33 def __init__(self, arguments):
33 def __init__(self, arguments):
34 max_size = arguments.pop('max_size', _default_max_size)
34 max_size = arguments.pop('max_size', _default_max_size)
35 arguments['cache_dict'] = LRUDict(max_size)
35 arguments['cache_dict'] = LRUDict(max_size)
36 super(LRUMemoryBackend, self).__init__(arguments)
36 super(LRUMemoryBackend, self).__init__(arguments)
37
37
38
38
39 class Serializer(object):
39 class Serializer(object):
40 def _dumps(self, value):
40 def _dumps(self, value):
41 return compat.pickle.dumps(value)
41 return compat.pickle.dumps(value)
42
42
43 def _loads(self, value):
43 def _loads(self, value):
44 return compat.pickle.loads(value)
44 return compat.pickle.loads(value)
45
45
46
46
47 class FileNamespaceBackend(Serializer, file_backend.DBMBackend):
47 class FileNamespaceBackend(Serializer, file_backend.DBMBackend):
48
48
49 def __init__(self, arguments):
49 def __init__(self, arguments):
50 super(FileNamespaceBackend, self).__init__(arguments)
50 super(FileNamespaceBackend, self).__init__(arguments)
51
51
52 def list_keys(self):
52 def list_keys(self, prefix=''):
53 def cond(v):
54 if not prefix:
55 return True
56
57 if v.startswith(prefix):
58 return True
59 return False
60
53 with self._dbm_file(True) as dbm:
61 with self._dbm_file(True) as dbm:
54 return dbm.keys()
62
63 return filter(cond, dbm.keys())
55
64
56 def get_store(self):
65 def get_store(self):
57 return self.filename
66 return self.filename
58
67
59 def get(self, key):
68 def get(self, key):
60 with self._dbm_file(False) as dbm:
69 with self._dbm_file(False) as dbm:
61 if hasattr(dbm, 'get'):
70 if hasattr(dbm, 'get'):
62 value = dbm.get(key, NO_VALUE)
71 value = dbm.get(key, NO_VALUE)
63 else:
72 else:
64 # gdbm objects lack a .get method
73 # gdbm objects lack a .get method
65 try:
74 try:
66 value = dbm[key]
75 value = dbm[key]
67 except KeyError:
76 except KeyError:
68 value = NO_VALUE
77 value = NO_VALUE
69 if value is not NO_VALUE:
78 if value is not NO_VALUE:
70 value = self._loads(value)
79 value = self._loads(value)
71 return value
80 return value
72
81
73 def set(self, key, value):
82 def set(self, key, value):
74 with self._dbm_file(True) as dbm:
83 with self._dbm_file(True) as dbm:
75 dbm[key] = self._dumps(value)
84 dbm[key] = self._dumps(value)
76
85
77 def set_multi(self, mapping):
86 def set_multi(self, mapping):
78 with self._dbm_file(True) as dbm:
87 with self._dbm_file(True) as dbm:
79 for key, value in mapping.items():
88 for key, value in mapping.items():
80 dbm[key] = self._dumps(value)
89 dbm[key] = self._dumps(value)
81
90
82
91
83 class RedisPickleBackend(Serializer, redis_backend.RedisBackend):
92 class RedisPickleBackend(Serializer, redis_backend.RedisBackend):
84 def list_keys(self):
93 def list_keys(self, prefix=''):
85 return self.client.keys()
94 if prefix:
95 prefix = prefix + '*'
96 return self.client.keys(prefix)
86
97
87 def get_store(self):
98 def get_store(self):
88 return self.client.connection_pool
99 return self.client.connection_pool
89
100
90 def set(self, key, value):
101 def set(self, key, value):
91 if self.redis_expiration_time:
102 if self.redis_expiration_time:
92 self.client.setex(key, self.redis_expiration_time,
103 self.client.setex(key, self.redis_expiration_time,
93 self._dumps(value))
104 self._dumps(value))
94 else:
105 else:
95 self.client.set(key, self._dumps(value))
106 self.client.set(key, self._dumps(value))
96
107
97 def set_multi(self, mapping):
108 def set_multi(self, mapping):
98 mapping = dict(
109 mapping = dict(
99 (k, self._dumps(v))
110 (k, self._dumps(v))
100 for k, v in mapping.items()
111 for k, v in mapping.items()
101 )
112 )
102
113
103 if not self.redis_expiration_time:
114 if not self.redis_expiration_time:
104 self.client.mset(mapping)
115 self.client.mset(mapping)
105 else:
116 else:
106 pipe = self.client.pipeline()
117 pipe = self.client.pipeline()
107 for key, value in mapping.items():
118 for key, value in mapping.items():
108 pipe.setex(key, self.redis_expiration_time, value)
119 pipe.setex(key, self.redis_expiration_time, value)
109 pipe.execute()
120 pipe.execute()
@@ -1,99 +1,107 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2018 RhodeCode GmbH
3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import os
20 import os
21 import logging
21 import logging
22 from dogpile.cache import make_region
22 from dogpile.cache import make_region
23
23
24 from rhodecode.lib.utils import safe_str, sha1
24 from rhodecode.lib.utils import safe_str, sha1
25 from . import region_meta
25 from . import region_meta
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
30 def get_default_cache_settings(settings, prefixes=None):
30 def get_default_cache_settings(settings, prefixes=None):
31 prefixes = prefixes or []
31 prefixes = prefixes or []
32 cache_settings = {}
32 cache_settings = {}
33 for key in settings.keys():
33 for key in settings.keys():
34 for prefix in prefixes:
34 for prefix in prefixes:
35 if key.startswith(prefix):
35 if key.startswith(prefix):
36 name = key.split(prefix)[1].strip()
36 name = key.split(prefix)[1].strip()
37 val = settings[key]
37 val = settings[key]
38 if isinstance(val, basestring):
38 if isinstance(val, basestring):
39 val = val.strip()
39 val = val.strip()
40 cache_settings[name] = val
40 cache_settings[name] = val
41 return cache_settings
41 return cache_settings
42
42
43
43
44 def compute_key_from_params(*args):
44 def compute_key_from_params(*args):
45 """
45 """
46 Helper to compute key from given params to be used in cache manager
46 Helper to compute key from given params to be used in cache manager
47 """
47 """
48 return sha1("_".join(map(safe_str, args)))
48 return sha1("_".join(map(safe_str, args)))
49
49
50
50
51 def key_generator(namespace, fn):
51 def key_generator(namespace, fn):
52 fname = fn.__name__
52 fname = fn.__name__
53
53
54 def generate_key(*args):
54 def generate_key(*args):
55 namespace_pref = namespace or 'default'
55 namespace_pref = namespace or 'default'
56 arg_key = compute_key_from_params(*args)
56 arg_key = compute_key_from_params(*args)
57 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
57 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
58
58
59 return final_key
59 return final_key
60
60
61 return generate_key
61 return generate_key
62
62
63
63
64 def get_or_create_region(region_name, region_namespace=None):
64 def get_or_create_region(region_name, region_namespace=None):
65 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
65 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
66 region_obj = region_meta.dogpile_cache_regions.get(region_name)
66 region_obj = region_meta.dogpile_cache_regions.get(region_name)
67 if not region_obj:
67 if not region_obj:
68 raise EnvironmentError(
68 raise EnvironmentError(
69 'Region `{}` not in configured regions: {}.'.format(
69 'Region `{}` not in configured regions: {}.'.format(
70 region_name, region_meta.dogpile_cache_regions.keys()))
70 region_name, region_meta.dogpile_cache_regions.keys()))
71
71
72 region_uid_name = '{}:{}'.format(region_name, region_namespace)
72 region_uid_name = '{}:{}'.format(region_name, region_namespace)
73 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
73 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
74 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
74 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
75 if region_exist:
75 if region_exist:
76 log.debug('Using already configured region: %s', region_namespace)
76 log.debug('Using already configured region: %s', region_namespace)
77 return region_exist
77 return region_exist
78 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
78 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
79 expiration_time = region_obj.expiration_time
79 expiration_time = region_obj.expiration_time
80
80
81 if not os.path.isdir(cache_dir):
81 if not os.path.isdir(cache_dir):
82 os.makedirs(cache_dir)
82 os.makedirs(cache_dir)
83 new_region = make_region(
83 new_region = make_region(
84 name=region_uid_name, function_key_generator=key_generator
84 name=region_uid_name, function_key_generator=key_generator
85 )
85 )
86 namespace_filename = os.path.join(
86 namespace_filename = os.path.join(
87 cache_dir, "{}.cache.dbm".format(region_namespace))
87 cache_dir, "{}.cache.dbm".format(region_namespace))
88 # special type that allows 1db per namespace
88 # special type that allows 1db per namespace
89 new_region.configure(
89 new_region.configure(
90 backend='dogpile.cache.rc.file_namespace',
90 backend='dogpile.cache.rc.file_namespace',
91 expiration_time=expiration_time,
91 expiration_time=expiration_time,
92 arguments={"filename": namespace_filename}
92 arguments={"filename": namespace_filename}
93 )
93 )
94
94
95 # create and save in region caches
95 # create and save in region caches
96 log.debug('configuring new region: %s', region_uid_name)
96 log.debug('configuring new region: %s', region_uid_name)
97 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
97 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
98
98
99 return region_obj
99 return region_obj
100
101
102 def clear_cache_namespace(cache_region, cache_namespace_uid):
103 region = get_or_create_region(cache_region, cache_namespace_uid)
104 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
105 for k in cache_keys:
106 region.delete(k)
107 return len(cache_keys)
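
The new clear_cache_namespace helper ties the prefix-aware list_keys support in the backends to namespace invalidation; a minimal hedged usage sketch, where the namespace value is illustrative:

    from rhodecode.lib import rc_cache

    cache_namespace_uid = 'cache_repo.42'   # e.g. derived from a repository id
    removed = rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
    # removed == number of deleted keys that started with the namespace prefix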
@@ -1,812 +1,815 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import traceback
28 import traceback
29 import logging
29 import logging
30 import cStringIO
30 import cStringIO
31 import pkg_resources
31 import pkg_resources
32
32
33 from sqlalchemy import func
33 from sqlalchemy import func
34 from zope.cachedescriptors.property import Lazy as LazyProperty
34 from zope.cachedescriptors.property import Lazy as LazyProperty
35
35
36 import rhodecode
36 import rhodecode
37 from rhodecode.lib.vcs import get_backend
37 from rhodecode.lib.vcs import get_backend
38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 from rhodecode.lib.vcs.nodes import FileNode
39 from rhodecode.lib.vcs.nodes import FileNode
40 from rhodecode.lib.vcs.backends.base import EmptyCommit
40 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 from rhodecode.lib import helpers as h
41 from rhodecode.lib import helpers as h, rc_cache
42 from rhodecode.lib.auth import (
42 from rhodecode.lib.auth import (
43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
44 HasUserGroupPermissionAny)
44 HasUserGroupPermissionAny)
45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
46 from rhodecode.lib import hooks_utils, caches
46 from rhodecode.lib import hooks_utils, caches
47 from rhodecode.lib.utils import (
47 from rhodecode.lib.utils import (
48 get_filesystem_repos, make_db_config)
48 get_filesystem_repos, make_db_config)
49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
50 from rhodecode.lib.system_info import get_system_info
50 from rhodecode.lib.system_info import get_system_info
51 from rhodecode.model import BaseModel
51 from rhodecode.model import BaseModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
54 PullRequest)
54 PullRequest)
55 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.settings import VcsSettingsModel
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 class UserTemp(object):
60 class UserTemp(object):
61 def __init__(self, user_id):
61 def __init__(self, user_id):
62 self.user_id = user_id
62 self.user_id = user_id
63
63
64 def __repr__(self):
64 def __repr__(self):
65 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
65 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
66
66
67
67
68 class RepoTemp(object):
68 class RepoTemp(object):
69 def __init__(self, repo_id):
69 def __init__(self, repo_id):
70 self.repo_id = repo_id
70 self.repo_id = repo_id
71
71
72 def __repr__(self):
72 def __repr__(self):
73 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
73 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
74
74
75
75
76 class SimpleCachedRepoList(object):
76 class SimpleCachedRepoList(object):
77 """
77 """
78 Lighter version of iteration of repos without the scm initialisation,
78 Lighter version of iteration of repos without the scm initialisation,
79 and with cache usage
79 and with cache usage
80 """
80 """
81 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
82 self.db_repo_list = db_repo_list
82 self.db_repo_list = db_repo_list
83 self.repos_path = repos_path
83 self.repos_path = repos_path
84 self.order_by = order_by
84 self.order_by = order_by
85 self.reversed = (order_by or '').startswith('-')
85 self.reversed = (order_by or '').startswith('-')
86 if not perm_set:
86 if not perm_set:
87 perm_set = ['repository.read', 'repository.write',
87 perm_set = ['repository.read', 'repository.write',
88 'repository.admin']
88 'repository.admin']
89 self.perm_set = perm_set
89 self.perm_set = perm_set
90
90
91 def __len__(self):
91 def __len__(self):
92 return len(self.db_repo_list)
92 return len(self.db_repo_list)
93
93
94 def __repr__(self):
94 def __repr__(self):
95 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
96
96
97 def __iter__(self):
97 def __iter__(self):
98 for dbr in self.db_repo_list:
98 for dbr in self.db_repo_list:
99 # check permission at this level
99 # check permission at this level
100 has_perm = HasRepoPermissionAny(*self.perm_set)(
100 has_perm = HasRepoPermissionAny(*self.perm_set)(
101 dbr.repo_name, 'SimpleCachedRepoList check')
101 dbr.repo_name, 'SimpleCachedRepoList check')
102 if not has_perm:
102 if not has_perm:
103 continue
103 continue
104
104
105 tmp_d = {
105 tmp_d = {
106 'name': dbr.repo_name,
106 'name': dbr.repo_name,
107 'dbrepo': dbr.get_dict(),
107 'dbrepo': dbr.get_dict(),
108 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
108 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
109 }
109 }
110 yield tmp_d
110 yield tmp_d
111
111
112
112
113 class _PermCheckIterator(object):
113 class _PermCheckIterator(object):
114
114
115 def __init__(
115 def __init__(
116 self, obj_list, obj_attr, perm_set, perm_checker,
116 self, obj_list, obj_attr, perm_set, perm_checker,
117 extra_kwargs=None):
117 extra_kwargs=None):
118 """
118 """
119 Creates iterator from given list of objects, additionally
119 Creates iterator from given list of objects, additionally
120 checking permission for them from perm_set var
120 checking permission for them from perm_set var
121
121
122 :param obj_list: list of db objects
122 :param obj_list: list of db objects
123 :param obj_attr: attribute of object to pass into perm_checker
123 :param obj_attr: attribute of object to pass into perm_checker
124 :param perm_set: list of permissions to check
124 :param perm_set: list of permissions to check
125 :param perm_checker: callable to check permissions against
125 :param perm_checker: callable to check permissions against
126 """
126 """
127 self.obj_list = obj_list
127 self.obj_list = obj_list
128 self.obj_attr = obj_attr
128 self.obj_attr = obj_attr
129 self.perm_set = perm_set
129 self.perm_set = perm_set
130 self.perm_checker = perm_checker
130 self.perm_checker = perm_checker
131 self.extra_kwargs = extra_kwargs or {}
131 self.extra_kwargs = extra_kwargs or {}
132
132
133 def __len__(self):
133 def __len__(self):
134 return len(self.obj_list)
134 return len(self.obj_list)
135
135
136 def __repr__(self):
136 def __repr__(self):
137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
138
138
139 def __iter__(self):
139 def __iter__(self):
140 checker = self.perm_checker(*self.perm_set)
140 checker = self.perm_checker(*self.perm_set)
141 for db_obj in self.obj_list:
141 for db_obj in self.obj_list:
142 # check permission at this level
142 # check permission at this level
143 name = getattr(db_obj, self.obj_attr, None)
143 name = getattr(db_obj, self.obj_attr, None)
144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
145 continue
145 continue
146
146
147 yield db_obj
147 yield db_obj
148
148
149
149
150 class RepoList(_PermCheckIterator):
150 class RepoList(_PermCheckIterator):
151
151
152 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
153 if not perm_set:
153 if not perm_set:
154 perm_set = [
154 perm_set = [
155 'repository.read', 'repository.write', 'repository.admin']
155 'repository.read', 'repository.write', 'repository.admin']
156
156
157 super(RepoList, self).__init__(
157 super(RepoList, self).__init__(
158 obj_list=db_repo_list,
158 obj_list=db_repo_list,
159 obj_attr='repo_name', perm_set=perm_set,
159 obj_attr='repo_name', perm_set=perm_set,
160 perm_checker=HasRepoPermissionAny,
160 perm_checker=HasRepoPermissionAny,
161 extra_kwargs=extra_kwargs)
161 extra_kwargs=extra_kwargs)
162
162
163
163
164 class RepoGroupList(_PermCheckIterator):
164 class RepoGroupList(_PermCheckIterator):
165
165
166 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
166 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
167 if not perm_set:
167 if not perm_set:
168 perm_set = ['group.read', 'group.write', 'group.admin']
168 perm_set = ['group.read', 'group.write', 'group.admin']
169
169
170 super(RepoGroupList, self).__init__(
170 super(RepoGroupList, self).__init__(
171 obj_list=db_repo_group_list,
171 obj_list=db_repo_group_list,
172 obj_attr='group_name', perm_set=perm_set,
172 obj_attr='group_name', perm_set=perm_set,
173 perm_checker=HasRepoGroupPermissionAny,
173 perm_checker=HasRepoGroupPermissionAny,
174 extra_kwargs=extra_kwargs)
174 extra_kwargs=extra_kwargs)
175
175
176
176
177 class UserGroupList(_PermCheckIterator):
177 class UserGroupList(_PermCheckIterator):
178
178
179 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
179 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
180 if not perm_set:
180 if not perm_set:
181 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
181 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
182
182
183 super(UserGroupList, self).__init__(
183 super(UserGroupList, self).__init__(
184 obj_list=db_user_group_list,
184 obj_list=db_user_group_list,
185 obj_attr='users_group_name', perm_set=perm_set,
185 obj_attr='users_group_name', perm_set=perm_set,
186 perm_checker=HasUserGroupPermissionAny,
186 perm_checker=HasUserGroupPermissionAny,
187 extra_kwargs=extra_kwargs)
187 extra_kwargs=extra_kwargs)
188
188
189
189
190 class ScmModel(BaseModel):
190 class ScmModel(BaseModel):
191 """
191 """
192 Generic Scm Model
192 Generic Scm Model
193 """
193 """
194
194
195 @LazyProperty
195 @LazyProperty
196 def repos_path(self):
196 def repos_path(self):
197 """
197 """
198 Gets the repositories root path from database
198 Gets the repositories root path from database
199 """
199 """
200
200
201 settings_model = VcsSettingsModel(sa=self.sa)
201 settings_model = VcsSettingsModel(sa=self.sa)
202 return settings_model.get_repos_location()
202 return settings_model.get_repos_location()
203
203
204 def repo_scan(self, repos_path=None):
204 def repo_scan(self, repos_path=None):
205 """
205 """
206 Listing of repositories in given path. This path should not be a
206 Listing of repositories in given path. This path should not be a
207 repository itself. Return a dictionary of repository objects
207 repository itself. Return a dictionary of repository objects
208
208
209 :param repos_path: path to directory containing repositories
209 :param repos_path: path to directory containing repositories
210 """
210 """
211
211
212 if repos_path is None:
212 if repos_path is None:
213 repos_path = self.repos_path
213 repos_path = self.repos_path
214
214
215 log.info('scanning for repositories in %s', repos_path)
215 log.info('scanning for repositories in %s', repos_path)
216
216
217 config = make_db_config()
217 config = make_db_config()
218 config.set('extensions', 'largefiles', '')
218 config.set('extensions', 'largefiles', '')
219 repos = {}
219 repos = {}
220
220
221 for name, path in get_filesystem_repos(repos_path, recursive=True):
221 for name, path in get_filesystem_repos(repos_path, recursive=True):
222 # name needs to be decomposed and put back together using the /
222 # name needs to be decomposed and put back together using the /
223 # since this is the internal storage separator for rhodecode
223 # since this is the internal storage separator for rhodecode
224 name = Repository.normalize_repo_name(name)
224 name = Repository.normalize_repo_name(name)
225
225
226 try:
226 try:
227 if name in repos:
227 if name in repos:
228 raise RepositoryError('Duplicate repository name %s '
228 raise RepositoryError('Duplicate repository name %s '
229 'found in %s' % (name, path))
229 'found in %s' % (name, path))
230 elif path[0] in rhodecode.BACKENDS:
230 elif path[0] in rhodecode.BACKENDS:
231 klass = get_backend(path[0])
231 klass = get_backend(path[0])
232 repos[name] = klass(path[1], config=config)
232 repos[name] = klass(path[1], config=config)
233 except OSError:
233 except OSError:
234 continue
234 continue
235 log.debug('found %s paths with repositories', len(repos))
235 log.debug('found %s paths with repositories', len(repos))
236 return repos
236 return repos
237
237
238 def get_repos(self, all_repos=None, sort_key=None):
238 def get_repos(self, all_repos=None, sort_key=None):
239 """
239 """
240 Get all repositories from db and for each repo create its
240 Get all repositories from db and for each repo create its
241 backend instance and fill that backend with information from the database
241 backend instance and fill that backend with information from the database
242
242
243 :param all_repos: list of repository names as strings
243 :param all_repos: list of repository names as strings
244 used to give a specific list of repositories, good for filtering
244 used to give a specific list of repositories, good for filtering
245
245
246 :param sort_key: initial sorting of repositories
246 :param sort_key: initial sorting of repositories
247 """
247 """
248 if all_repos is None:
248 if all_repos is None:
249 all_repos = self.sa.query(Repository)\
249 all_repos = self.sa.query(Repository)\
250 .filter(Repository.group_id == None)\
250 .filter(Repository.group_id == None)\
251 .order_by(func.lower(Repository.repo_name)).all()
251 .order_by(func.lower(Repository.repo_name)).all()
252 repo_iter = SimpleCachedRepoList(
252 repo_iter = SimpleCachedRepoList(
253 all_repos, repos_path=self.repos_path, order_by=sort_key)
253 all_repos, repos_path=self.repos_path, order_by=sort_key)
254 return repo_iter
254 return repo_iter
255
255
256 def get_repo_groups(self, all_groups=None):
256 def get_repo_groups(self, all_groups=None):
257 if all_groups is None:
257 if all_groups is None:
258 all_groups = RepoGroup.query()\
258 all_groups = RepoGroup.query()\
259 .filter(RepoGroup.group_parent_id == None).all()
259 .filter(RepoGroup.group_parent_id == None).all()
260 return [x for x in RepoGroupList(all_groups)]
260 return [x for x in RepoGroupList(all_groups)]
261
261
262 def mark_for_invalidation(self, repo_name, delete=False):
262 def mark_for_invalidation(self, repo_name, delete=False):
263 """
263 """
264 Mark caches of this repo invalid in the database. `delete` flag
264 Mark caches of this repo invalid in the database. `delete` flag
265 removes the cache entries
265 removes the cache entries
266
266
267 :param repo_name: the repo_name for which caches should be marked
267 :param repo_name: the repo_name for which caches should be marked
268 invalid, or deleted
268 invalid, or deleted
269 :param delete: delete the entry keys instead of setting bool
269 :param delete: delete the entry keys instead of setting bool
270 flag on them
270 flag on them, and also purge the dogpile cache namespace for this repo
271 """
271 """
272 CacheKey.set_invalidate(repo_name, delete=delete)
272 CacheKey.set_invalidate(repo_name, delete=delete)
273 repo = Repository.get_by_repo_name(repo_name)
273 repo = Repository.get_by_repo_name(repo_name)
274
274
275 if repo:
275 if repo:
276 repo_id = repo.repo_id
276 config = repo._config
277 config = repo._config
277 config.set('extensions', 'largefiles', '')
278 config.set('extensions', 'largefiles', '')
278 repo.update_commit_cache(config=config, cs_cache=None)
279 repo.update_commit_cache(config=config, cs_cache=None)
279 caches.clear_repo_caches(repo_name)
280 if delete:
281 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
282 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
280
283
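This is the core of the beaker-to-dogpile switch in mark_for_invalidation(): instead of the old caches.clear_repo_caches() call, the delete flag now clears the per-repository dogpile namespace 'cache_repo.<repo_id>' via rc_cache.clear_cache_namespace(). A minimal sketch of that namespace idea using plain dogpile.cache (the key registry and helper names below are illustrative assumptions, not rc_cache internals):

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory')
    _keys_per_namespace = {}  # toy bookkeeping so a namespace can be purged later

    def cache_set(namespace, key, value):
        full_key = '{0}:{1}'.format(namespace, key)
        _keys_per_namespace.setdefault(namespace, set()).add(full_key)
        region.set(full_key, value)

    def clear_cache_namespace(namespace):
        # delete every key registered under the namespace, e.g. 'cache_repo.42'
        for full_key in _keys_per_namespace.pop(namespace, set()):
            region.delete(full_key)

    cache_set('cache_repo.42', 'readme_rendered', '<h1>hello</h1>')
    clear_cache_namespace('cache_repo.42')  # roughly what delete=True triggers above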
281 def toggle_following_repo(self, follow_repo_id, user_id):
284 def toggle_following_repo(self, follow_repo_id, user_id):
282
285
283 f = self.sa.query(UserFollowing)\
286 f = self.sa.query(UserFollowing)\
284 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
287 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
285 .filter(UserFollowing.user_id == user_id).scalar()
288 .filter(UserFollowing.user_id == user_id).scalar()
286
289
287 if f is not None:
290 if f is not None:
288 try:
291 try:
289 self.sa.delete(f)
292 self.sa.delete(f)
290 return
293 return
291 except Exception:
294 except Exception:
292 log.error(traceback.format_exc())
295 log.error(traceback.format_exc())
293 raise
296 raise
294
297
295 try:
298 try:
296 f = UserFollowing()
299 f = UserFollowing()
297 f.user_id = user_id
300 f.user_id = user_id
298 f.follows_repo_id = follow_repo_id
301 f.follows_repo_id = follow_repo_id
299 self.sa.add(f)
302 self.sa.add(f)
300 except Exception:
303 except Exception:
301 log.error(traceback.format_exc())
304 log.error(traceback.format_exc())
302 raise
305 raise
303
306
304 def toggle_following_user(self, follow_user_id, user_id):
307 def toggle_following_user(self, follow_user_id, user_id):
305 f = self.sa.query(UserFollowing)\
308 f = self.sa.query(UserFollowing)\
306 .filter(UserFollowing.follows_user_id == follow_user_id)\
309 .filter(UserFollowing.follows_user_id == follow_user_id)\
307 .filter(UserFollowing.user_id == user_id).scalar()
310 .filter(UserFollowing.user_id == user_id).scalar()
308
311
309 if f is not None:
312 if f is not None:
310 try:
313 try:
311 self.sa.delete(f)
314 self.sa.delete(f)
312 return
315 return
313 except Exception:
316 except Exception:
314 log.error(traceback.format_exc())
317 log.error(traceback.format_exc())
315 raise
318 raise
316
319
317 try:
320 try:
318 f = UserFollowing()
321 f = UserFollowing()
319 f.user_id = user_id
322 f.user_id = user_id
320 f.follows_user_id = follow_user_id
323 f.follows_user_id = follow_user_id
321 self.sa.add(f)
324 self.sa.add(f)
322 except Exception:
325 except Exception:
323 log.error(traceback.format_exc())
326 log.error(traceback.format_exc())
324 raise
327 raise
325
328
326 def is_following_repo(self, repo_name, user_id, cache=False):
329 def is_following_repo(self, repo_name, user_id, cache=False):
327 r = self.sa.query(Repository)\
330 r = self.sa.query(Repository)\
328 .filter(Repository.repo_name == repo_name).scalar()
331 .filter(Repository.repo_name == repo_name).scalar()
329
332
330 f = self.sa.query(UserFollowing)\
333 f = self.sa.query(UserFollowing)\
331 .filter(UserFollowing.follows_repository == r)\
334 .filter(UserFollowing.follows_repository == r)\
332 .filter(UserFollowing.user_id == user_id).scalar()
335 .filter(UserFollowing.user_id == user_id).scalar()
333
336
334 return f is not None
337 return f is not None
335
338
336 def is_following_user(self, username, user_id, cache=False):
339 def is_following_user(self, username, user_id, cache=False):
337 u = User.get_by_username(username)
340 u = User.get_by_username(username)
338
341
339 f = self.sa.query(UserFollowing)\
342 f = self.sa.query(UserFollowing)\
340 .filter(UserFollowing.follows_user == u)\
343 .filter(UserFollowing.follows_user == u)\
341 .filter(UserFollowing.user_id == user_id).scalar()
344 .filter(UserFollowing.user_id == user_id).scalar()
342
345
343 return f is not None
346 return f is not None
344
347
345 def get_followers(self, repo):
348 def get_followers(self, repo):
346 repo = self._get_repo(repo)
349 repo = self._get_repo(repo)
347
350
348 return self.sa.query(UserFollowing)\
351 return self.sa.query(UserFollowing)\
349 .filter(UserFollowing.follows_repository == repo).count()
352 .filter(UserFollowing.follows_repository == repo).count()
350
353
351 def get_forks(self, repo):
354 def get_forks(self, repo):
352 repo = self._get_repo(repo)
355 repo = self._get_repo(repo)
353 return self.sa.query(Repository)\
356 return self.sa.query(Repository)\
354 .filter(Repository.fork == repo).count()
357 .filter(Repository.fork == repo).count()
355
358
356 def get_pull_requests(self, repo):
359 def get_pull_requests(self, repo):
357 repo = self._get_repo(repo)
360 repo = self._get_repo(repo)
358 return self.sa.query(PullRequest)\
361 return self.sa.query(PullRequest)\
359 .filter(PullRequest.target_repo == repo)\
362 .filter(PullRequest.target_repo == repo)\
360 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
363 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
361
364
362 def mark_as_fork(self, repo, fork, user):
365 def mark_as_fork(self, repo, fork, user):
363 repo = self._get_repo(repo)
366 repo = self._get_repo(repo)
364 fork = self._get_repo(fork)
367 fork = self._get_repo(fork)
365 if fork and repo.repo_id == fork.repo_id:
368 if fork and repo.repo_id == fork.repo_id:
366 raise Exception("Cannot set repository as fork of itself")
369 raise Exception("Cannot set repository as fork of itself")
367
370
368 if fork and repo.repo_type != fork.repo_type:
371 if fork and repo.repo_type != fork.repo_type:
369 raise RepositoryError(
372 raise RepositoryError(
370 "Cannot set repository as fork of repository with other type")
373 "Cannot set repository as fork of repository with other type")
371
374
372 repo.fork = fork
375 repo.fork = fork
373 self.sa.add(repo)
376 self.sa.add(repo)
374 return repo
377 return repo
375
378
376 def pull_changes(self, repo, username, remote_uri=None):
379 def pull_changes(self, repo, username, remote_uri=None):
377 dbrepo = self._get_repo(repo)
380 dbrepo = self._get_repo(repo)
378 remote_uri = remote_uri or dbrepo.clone_uri
381 remote_uri = remote_uri or dbrepo.clone_uri
379 if not remote_uri:
382 if not remote_uri:
380 raise Exception("This repository doesn't have a clone uri")
383 raise Exception("This repository doesn't have a clone uri")
381
384
382 repo = dbrepo.scm_instance(cache=False)
385 repo = dbrepo.scm_instance(cache=False)
383 # TODO: marcink fix this and re-enable since we need common logic
386 # TODO: marcink fix this and re-enable since we need common logic
384 # for hg/git to remove hooks so we don't trigger them on fetching
387 # for hg/git to remove hooks so we don't trigger them on fetching
385 # commits from remote
388 # commits from remote
386 repo.config.clear_section('hooks')
389 repo.config.clear_section('hooks')
387
390
388 repo_name = dbrepo.repo_name
391 repo_name = dbrepo.repo_name
389 try:
392 try:
390 # TODO: we need to make sure those operations call proper hooks !
393 # TODO: we need to make sure those operations call proper hooks !
391 repo.pull(remote_uri)
394 repo.pull(remote_uri)
392
395
393 self.mark_for_invalidation(repo_name)
396 self.mark_for_invalidation(repo_name)
394 except Exception:
397 except Exception:
395 log.error(traceback.format_exc())
398 log.error(traceback.format_exc())
396 raise
399 raise
397
400
398 def push_changes(self, repo, username, remote_uri=None):
401 def push_changes(self, repo, username, remote_uri=None):
399 dbrepo = self._get_repo(repo)
402 dbrepo = self._get_repo(repo)
400 remote_uri = remote_uri or dbrepo.push_uri
403 remote_uri = remote_uri or dbrepo.push_uri
401 if not remote_uri:
404 if not remote_uri:
402 raise Exception("This repository doesn't have a clone uri")
405 raise Exception("This repository doesn't have a clone uri")
403
406
404 repo = dbrepo.scm_instance(cache=False)
407 repo = dbrepo.scm_instance(cache=False)
405 repo.config.clear_section('hooks')
408 repo.config.clear_section('hooks')
406
409
407 try:
410 try:
408 repo.push(remote_uri)
411 repo.push(remote_uri)
409 except Exception:
412 except Exception:
410 log.error(traceback.format_exc())
413 log.error(traceback.format_exc())
411 raise
414 raise
412
415
413 def commit_change(self, repo, repo_name, commit, user, author, message,
416 def commit_change(self, repo, repo_name, commit, user, author, message,
414 content, f_path):
417 content, f_path):
415 """
418 """
416 Commits changes
419 Commits changes
417
420
418 :param repo: SCM instance
421 :param repo: SCM instance
419
422
420 """
423 """
421 user = self._get_user(user)
424 user = self._get_user(user)
422
425
423 # decoding here will ensure that we have properly encoded values
426 # decoding here will ensure that we have properly encoded values
424 # in any other case this will throw exceptions and deny the commit
427 # in any other case this will throw exceptions and deny the commit
425 content = safe_str(content)
428 content = safe_str(content)
426 path = safe_str(f_path)
429 path = safe_str(f_path)
427 # message and author need to be unicode
430 # message and author need to be unicode
428 # the proper backend should then translate that into the required type
431 # the proper backend should then translate that into the required type
429 message = safe_unicode(message)
432 message = safe_unicode(message)
430 author = safe_unicode(author)
433 author = safe_unicode(author)
431 imc = repo.in_memory_commit
434 imc = repo.in_memory_commit
432 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
435 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
433 try:
436 try:
434 # TODO: handle pre-push action !
437 # TODO: handle pre-push action !
435 tip = imc.commit(
438 tip = imc.commit(
436 message=message, author=author, parents=[commit],
439 message=message, author=author, parents=[commit],
437 branch=commit.branch)
440 branch=commit.branch)
438 except Exception as e:
441 except Exception as e:
439 log.error(traceback.format_exc())
442 log.error(traceback.format_exc())
440 raise IMCCommitError(str(e))
443 raise IMCCommitError(str(e))
441 finally:
444 finally:
442 # always clear caches; if the commit fails we still want a fresh object
445 # always clear caches; if the commit fails we still want a fresh object
443 self.mark_for_invalidation(repo_name)
446 self.mark_for_invalidation(repo_name)
444
447
445 # We trigger the post-push action
448 # We trigger the post-push action
446 hooks_utils.trigger_post_push_hook(
449 hooks_utils.trigger_post_push_hook(
447 username=user.username, action='push_local', repo_name=repo_name,
450 username=user.username, action='push_local', repo_name=repo_name,
448 repo_alias=repo.alias, commit_ids=[tip.raw_id])
451 repo_alias=repo.alias, commit_ids=[tip.raw_id])
449 return tip
452 return tip
450
453
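commit_change() wraps the in-memory commit in try/except/finally so that mark_for_invalidation() runs whether or not the commit succeeds, as the comment above notes. A tiny generic sketch of that pattern (the function names are placeholders, not RhodeCode API):

    def commit_with_invalidation(do_commit, invalidate):
        try:
            return do_commit()
        finally:
            # runs on success *and* on failure, mirroring the finally block above
            invalidate()

    calls = []
    result = commit_with_invalidation(lambda: 'new-commit-id',
                                      lambda: calls.append('invalidated'))
    assert result == 'new-commit-id' and calls == ['invalidated']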
451 def _sanitize_path(self, f_path):
454 def _sanitize_path(self, f_path):
452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
455 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
453 raise NonRelativePathError('%s is not a relative path' % f_path)
456 raise NonRelativePathError('%s is not a relative path' % f_path)
454 if f_path:
457 if f_path:
455 f_path = os.path.normpath(f_path)
458 f_path = os.path.normpath(f_path)
456 return f_path
459 return f_path
457
460
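_sanitize_path() rejects absolute paths and parent-directory escapes before any file is touched. A standalone sketch of the same guard, with ValueError standing in for NonRelativePathError so the snippet has no RhodeCode imports:

    import os

    def sanitize_path(f_path):
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise ValueError('%s is not a relative path' % f_path)
        return os.path.normpath(f_path) if f_path else f_path

    assert sanitize_path('docs/readme.rst') == os.path.normpath('docs/readme.rst')
    try:
        sanitize_path('../../etc/passwd')
    except ValueError:
        pass  # traversal attempts are rejected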
458 def get_dirnode_metadata(self, request, commit, dir_node):
461 def get_dirnode_metadata(self, request, commit, dir_node):
459 if not dir_node.is_dir():
462 if not dir_node.is_dir():
460 return []
463 return []
461
464
462 data = []
465 data = []
463 for node in dir_node:
466 for node in dir_node:
464 if not node.is_file():
467 if not node.is_file():
465 # we skip file-nodes
468 # we skip file-nodes
466 continue
469 continue
467
470
468 last_commit = node.last_commit
471 last_commit = node.last_commit
469 last_commit_date = last_commit.date
472 last_commit_date = last_commit.date
470 data.append({
473 data.append({
471 'name': node.name,
474 'name': node.name,
472 'size': h.format_byte_size_binary(node.size),
475 'size': h.format_byte_size_binary(node.size),
473 'modified_at': h.format_date(last_commit_date),
476 'modified_at': h.format_date(last_commit_date),
474 'modified_ts': last_commit_date.isoformat(),
477 'modified_ts': last_commit_date.isoformat(),
475 'revision': last_commit.revision,
478 'revision': last_commit.revision,
476 'short_id': last_commit.short_id,
479 'short_id': last_commit.short_id,
477 'message': h.escape(last_commit.message),
480 'message': h.escape(last_commit.message),
478 'author': h.escape(last_commit.author),
481 'author': h.escape(last_commit.author),
479 'user_profile': h.gravatar_with_user(
482 'user_profile': h.gravatar_with_user(
480 request, last_commit.author),
483 request, last_commit.author),
481 })
484 })
482
485
483 return data
486 return data
484
487
485 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
488 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
486 extended_info=False, content=False, max_file_bytes=None):
489 extended_info=False, content=False, max_file_bytes=None):
487 """
490 """
488 recursively walk the root dir and return a set of all paths in that dir,
491 recursively walk the root dir and return a set of all paths in that dir,
489 based on the repository walk function
492 based on the repository walk function
490
493
491 :param repo_name: name of repository
494 :param repo_name: name of repository
492 :param commit_id: commit id for which to list nodes
495 :param commit_id: commit id for which to list nodes
493 :param root_path: root path to list
496 :param root_path: root path to list
494 :param flat: return as a list, if False returns a dict with description
497 :param flat: return as a list, if False returns a dict with description
495 :param max_file_bytes: will not return file contents over this limit
498 :param max_file_bytes: will not return file contents over this limit
496
499
497 """
500 """
498 _files = list()
501 _files = list()
499 _dirs = list()
502 _dirs = list()
500 try:
503 try:
501 _repo = self._get_repo(repo_name)
504 _repo = self._get_repo(repo_name)
502 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
505 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
503 root_path = root_path.lstrip('/')
506 root_path = root_path.lstrip('/')
504 for __, dirs, files in commit.walk(root_path):
507 for __, dirs, files in commit.walk(root_path):
505 for f in files:
508 for f in files:
506 _content = None
509 _content = None
507 _data = f.unicode_path
510 _data = f.unicode_path
508 over_size_limit = (max_file_bytes is not None
511 over_size_limit = (max_file_bytes is not None
509 and f.size > max_file_bytes)
512 and f.size > max_file_bytes)
510
513
511 if not flat:
514 if not flat:
512 _data = {
515 _data = {
513 "name": h.escape(f.unicode_path),
516 "name": h.escape(f.unicode_path),
514 "type": "file",
517 "type": "file",
515 }
518 }
516 if extended_info:
519 if extended_info:
517 _data.update({
520 _data.update({
518 "md5": f.md5,
521 "md5": f.md5,
519 "binary": f.is_binary,
522 "binary": f.is_binary,
520 "size": f.size,
523 "size": f.size,
521 "extension": f.extension,
524 "extension": f.extension,
522 "mimetype": f.mimetype,
525 "mimetype": f.mimetype,
523 "lines": f.lines()[0]
526 "lines": f.lines()[0]
524 })
527 })
525
528
526 if content:
529 if content:
527 full_content = None
530 full_content = None
528 if not f.is_binary and not over_size_limit:
531 if not f.is_binary and not over_size_limit:
529 full_content = safe_str(f.content)
532 full_content = safe_str(f.content)
530
533
531 _data.update({
534 _data.update({
532 "content": full_content,
535 "content": full_content,
533 })
536 })
534 _files.append(_data)
537 _files.append(_data)
535 for d in dirs:
538 for d in dirs:
536 _data = d.unicode_path
539 _data = d.unicode_path
537 if not flat:
540 if not flat:
538 _data = {
541 _data = {
539 "name": h.escape(d.unicode_path),
542 "name": h.escape(d.unicode_path),
540 "type": "dir",
543 "type": "dir",
541 }
544 }
542 if extended_info:
545 if extended_info:
543 _data.update({
546 _data.update({
544 "md5": None,
547 "md5": None,
545 "binary": None,
548 "binary": None,
546 "size": None,
549 "size": None,
547 "extension": None,
550 "extension": None,
548 })
551 })
549 if content:
552 if content:
550 _data.update({
553 _data.update({
551 "content": None
554 "content": None
552 })
555 })
553 _dirs.append(_data)
556 _dirs.append(_data)
554 except RepositoryError:
557 except RepositoryError:
555 log.debug("Exception in get_nodes", exc_info=True)
558 log.debug("Exception in get_nodes", exc_info=True)
556 raise
559 raise
557
560
558 return _dirs, _files
561 return _dirs, _files
559
562
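get_nodes() returns two collections, directories and files, and the flat flag switches between plain paths and per-node metadata dicts. A reduced sketch of those two output shapes (the sample tree below is invented):

    def describe_nodes(paths, flat=True):
        files, dirs = [], []
        for path, is_dir in paths:
            entry = path if flat else {'name': path, 'type': 'dir' if is_dir else 'file'}
            (dirs if is_dir else files).append(entry)
        return dirs, files

    tree = [('docs', True), ('docs/index.rst', False), ('setup.py', False)]
    print(describe_nodes(tree, flat=True))   # plain paths, split into dirs and files
    print(describe_nodes(tree, flat=False))  # dicts with 'name' and 'type' keys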
560 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
563 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
561 author=None, trigger_push_hook=True):
564 author=None, trigger_push_hook=True):
562 """
565 """
563 Commits the given nodes into the repo
566 Commits the given nodes into the repo
564
567
565 :param user: RhodeCode User object or user_id, the committer
568 :param user: RhodeCode User object or user_id, the committer
566 :param repo: RhodeCode Repository object
569 :param repo: RhodeCode Repository object
567 :param message: commit message
570 :param message: commit message
568 :param nodes: mapping {filename:{'content':content},...}
571 :param nodes: mapping {filename:{'content':content},...}
569 :param parent_commit: parent commit, can be empty, then it's the
572 :param parent_commit: parent commit, can be empty, then it's the
570 initial commit
573 initial commit
571 :param author: author of the commit, can be different than the committer,
574 :param author: author of the commit, can be different than the committer,
572 only for git
575 only for git
573 :param trigger_push_hook: trigger push hooks
576 :param trigger_push_hook: trigger push hooks
574
577
575 :returns: new committed commit
578 :returns: new committed commit
576 """
579 """
577
580
578 user = self._get_user(user)
581 user = self._get_user(user)
579 scm_instance = repo.scm_instance(cache=False)
582 scm_instance = repo.scm_instance(cache=False)
580
583
581 processed_nodes = []
584 processed_nodes = []
582 for f_path in nodes:
585 for f_path in nodes:
583 f_path = self._sanitize_path(f_path)
586 f_path = self._sanitize_path(f_path)
584 content = nodes[f_path]['content']
587 content = nodes[f_path]['content']
585 f_path = safe_str(f_path)
588 f_path = safe_str(f_path)
586 # decoding here will ensure that we have properly encoded values
589 # decoding here will ensure that we have properly encoded values
587 # in any other case this will throw exceptions and deny the commit
590 # in any other case this will throw exceptions and deny the commit
588 if isinstance(content, (basestring,)):
591 if isinstance(content, (basestring,)):
589 content = safe_str(content)
592 content = safe_str(content)
590 elif isinstance(content, (file, cStringIO.OutputType,)):
593 elif isinstance(content, (file, cStringIO.OutputType,)):
591 content = content.read()
594 content = content.read()
592 else:
595 else:
593 raise Exception('Content is of unrecognized type %s' % (
596 raise Exception('Content is of unrecognized type %s' % (
594 type(content)
597 type(content)
595 ))
598 ))
596 processed_nodes.append((f_path, content))
599 processed_nodes.append((f_path, content))
597
600
598 message = safe_unicode(message)
601 message = safe_unicode(message)
599 commiter = user.full_contact
602 commiter = user.full_contact
600 author = safe_unicode(author) if author else commiter
603 author = safe_unicode(author) if author else commiter
601
604
602 imc = scm_instance.in_memory_commit
605 imc = scm_instance.in_memory_commit
603
606
604 if not parent_commit:
607 if not parent_commit:
605 parent_commit = EmptyCommit(alias=scm_instance.alias)
608 parent_commit = EmptyCommit(alias=scm_instance.alias)
606
609
607 if isinstance(parent_commit, EmptyCommit):
610 if isinstance(parent_commit, EmptyCommit):
608 # EmptyCommit means we're editing an empty repository
611 # EmptyCommit means we're editing an empty repository
609 parents = None
612 parents = None
610 else:
613 else:
611 parents = [parent_commit]
614 parents = [parent_commit]
612 # add multiple nodes
615 # add multiple nodes
613 for path, content in processed_nodes:
616 for path, content in processed_nodes:
614 imc.add(FileNode(path, content=content))
617 imc.add(FileNode(path, content=content))
615 # TODO: handle pre push scenario
618 # TODO: handle pre push scenario
616 tip = imc.commit(message=message,
619 tip = imc.commit(message=message,
617 author=author,
620 author=author,
618 parents=parents,
621 parents=parents,
619 branch=parent_commit.branch)
622 branch=parent_commit.branch)
620
623
621 self.mark_for_invalidation(repo.repo_name)
624 self.mark_for_invalidation(repo.repo_name)
622 if trigger_push_hook:
625 if trigger_push_hook:
623 hooks_utils.trigger_post_push_hook(
626 hooks_utils.trigger_post_push_hook(
624 username=user.username, action='push_local',
627 username=user.username, action='push_local',
625 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
628 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
626 commit_ids=[tip.raw_id])
629 commit_ids=[tip.raw_id])
627 return tip
630 return tip
628
631
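create_nodes() accepts file content either as a string or as a file-like object and normalizes it before building FileNode objects. A simplified, Python 3 flavoured equivalent of that branch (the original uses basestring and cStringIO, which are Python 2 types):

    import io

    def normalize_content(content):
        if isinstance(content, (str, bytes)):
            return content
        if hasattr(content, 'read'):  # file-like objects: io.BytesIO, open files, ...
            return content.read()
        raise TypeError('Content is of unrecognized type %s' % type(content))

    assert normalize_content('hello') == 'hello'
    assert normalize_content(io.BytesIO(b'binary blob')) == b'binary blob'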
629 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
632 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
630 author=None, trigger_push_hook=True):
633 author=None, trigger_push_hook=True):
631 user = self._get_user(user)
634 user = self._get_user(user)
632 scm_instance = repo.scm_instance(cache=False)
635 scm_instance = repo.scm_instance(cache=False)
633
636
634 message = safe_unicode(message)
637 message = safe_unicode(message)
635 commiter = user.full_contact
638 commiter = user.full_contact
636 author = safe_unicode(author) if author else commiter
639 author = safe_unicode(author) if author else commiter
637
640
638 imc = scm_instance.in_memory_commit
641 imc = scm_instance.in_memory_commit
639
642
640 if not parent_commit:
643 if not parent_commit:
641 parent_commit = EmptyCommit(alias=scm_instance.alias)
644 parent_commit = EmptyCommit(alias=scm_instance.alias)
642
645
643 if isinstance(parent_commit, EmptyCommit):
646 if isinstance(parent_commit, EmptyCommit):
644 # EmptyCommit means we're editing an empty repository
647 # EmptyCommit means we're editing an empty repository
645 parents = None
648 parents = None
646 else:
649 else:
647 parents = [parent_commit]
650 parents = [parent_commit]
648
651
649 # add multiple nodes
652 # add multiple nodes
650 for _filename, data in nodes.items():
653 for _filename, data in nodes.items():
651 # new filename, can be renamed from the old one, also sanitize
654 # new filename, can be renamed from the old one, also sanitize
652 # the path for any hack around relative paths like ../../ etc.
655 # the path for any hack around relative paths like ../../ etc.
653 filename = self._sanitize_path(data['filename'])
656 filename = self._sanitize_path(data['filename'])
654 old_filename = self._sanitize_path(_filename)
657 old_filename = self._sanitize_path(_filename)
655 content = data['content']
658 content = data['content']
656
659
657 filenode = FileNode(old_filename, content=content)
660 filenode = FileNode(old_filename, content=content)
658 op = data['op']
661 op = data['op']
659 if op == 'add':
662 if op == 'add':
660 imc.add(filenode)
663 imc.add(filenode)
661 elif op == 'del':
664 elif op == 'del':
662 imc.remove(filenode)
665 imc.remove(filenode)
663 elif op == 'mod':
666 elif op == 'mod':
664 if filename != old_filename:
667 if filename != old_filename:
665 # TODO: handle renames more efficiently, needs vcs lib
668 # TODO: handle renames more efficiently, needs vcs lib
666 # changes
669 # changes
667 imc.remove(filenode)
670 imc.remove(filenode)
668 imc.add(FileNode(filename, content=content))
671 imc.add(FileNode(filename, content=content))
669 else:
672 else:
670 imc.change(filenode)
673 imc.change(filenode)
671
674
672 try:
675 try:
673 # TODO: handle pre push scenario
676 # TODO: handle pre push scenario
674 # commit changes
677 # commit changes
675 tip = imc.commit(message=message,
678 tip = imc.commit(message=message,
676 author=author,
679 author=author,
677 parents=parents,
680 parents=parents,
678 branch=parent_commit.branch)
681 branch=parent_commit.branch)
679 except NodeNotChangedError:
682 except NodeNotChangedError:
680 raise
683 raise
681 except Exception as e:
684 except Exception as e:
682 log.exception("Unexpected exception during call to imc.commit")
685 log.exception("Unexpected exception during call to imc.commit")
683 raise IMCCommitError(str(e))
686 raise IMCCommitError(str(e))
684 finally:
687 finally:
685 # always clear caches; if the commit fails we still want a fresh object
688 # always clear caches; if the commit fails we still want a fresh object
686 self.mark_for_invalidation(repo.repo_name)
689 self.mark_for_invalidation(repo.repo_name)
687
690
688 if trigger_push_hook:
691 if trigger_push_hook:
689 hooks_utils.trigger_post_push_hook(
692 hooks_utils.trigger_post_push_hook(
690 username=user.username, action='push_local',
693 username=user.username, action='push_local',
691 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
694 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
692 commit_ids=[tip.raw_id])
695 commit_ids=[tip.raw_id])
693
696
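update_nodes() dispatches on the per-file 'op' value and models a rename as remove-old-path plus add-new-path, as the TODO above notes. A compact sketch of that dispatch, with a plain dict standing in for the repository:

    def apply_ops(state, nodes):
        for old_name, data in nodes.items():
            op, new_name, content = data['op'], data['filename'], data.get('content')
            if op == 'add':
                state[new_name] = content
            elif op == 'del':
                state.pop(old_name, None)
            elif op == 'mod':
                if new_name != old_name:  # rename: drop the old path, write the new one
                    state.pop(old_name, None)
                state[new_name] = content
        return state

    state = apply_ops({'a.txt': 'old'},
                      {'a.txt': {'op': 'mod', 'filename': 'b.txt', 'content': 'new'}})
    assert state == {'b.txt': 'new'}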
694 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
697 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
695 author=None, trigger_push_hook=True):
698 author=None, trigger_push_hook=True):
696 """
699 """
697 Deletes the given nodes from `repo`
700 Deletes the given nodes from `repo`
698
701
699 :param user: RhodeCode User object or user_id, the committer
702 :param user: RhodeCode User object or user_id, the committer
700 :param repo: RhodeCode Repository object
703 :param repo: RhodeCode Repository object
701 :param message: commit message
704 :param message: commit message
702 :param nodes: mapping {filename:{'content':content},...}
705 :param nodes: mapping {filename:{'content':content},...}
703 :param parent_commit: parent commit, can be empty, then it's the initial
706 :param parent_commit: parent commit, can be empty, then it's the initial
704 commit
707 commit
705 :param author: author of the commit, can be different than the committer,
708 :param author: author of the commit, can be different than the committer,
706 only for git
709 only for git
707 :param trigger_push_hook: trigger push hooks
710 :param trigger_push_hook: trigger push hooks
708
711
709 :returns: new commit after deletion
712 :returns: new commit after deletion
710 """
713 """
711
714
712 user = self._get_user(user)
715 user = self._get_user(user)
713 scm_instance = repo.scm_instance(cache=False)
716 scm_instance = repo.scm_instance(cache=False)
714
717
715 processed_nodes = []
718 processed_nodes = []
716 for f_path in nodes:
719 for f_path in nodes:
717 f_path = self._sanitize_path(f_path)
720 f_path = self._sanitize_path(f_path)
718 # content can be empty but for compatibility it allows the same dict
721 # content can be empty but for compatibility it allows the same dict
719 # structure as add_nodes
722 # structure as add_nodes
720 content = nodes[f_path].get('content')
723 content = nodes[f_path].get('content')
721 processed_nodes.append((f_path, content))
724 processed_nodes.append((f_path, content))
722
725
723 message = safe_unicode(message)
726 message = safe_unicode(message)
724 commiter = user.full_contact
727 commiter = user.full_contact
725 author = safe_unicode(author) if author else commiter
728 author = safe_unicode(author) if author else commiter
726
729
727 imc = scm_instance.in_memory_commit
730 imc = scm_instance.in_memory_commit
728
731
729 if not parent_commit:
732 if not parent_commit:
730 parent_commit = EmptyCommit(alias=scm_instance.alias)
733 parent_commit = EmptyCommit(alias=scm_instance.alias)
731
734
732 if isinstance(parent_commit, EmptyCommit):
735 if isinstance(parent_commit, EmptyCommit):
733 # EmptyCommit means we're editing an empty repository
736 # EmptyCommit means we're editing an empty repository
734 parents = None
737 parents = None
735 else:
738 else:
736 parents = [parent_commit]
739 parents = [parent_commit]
737 # add multiple nodes
740 # add multiple nodes
738 for path, content in processed_nodes:
741 for path, content in processed_nodes:
739 imc.remove(FileNode(path, content=content))
742 imc.remove(FileNode(path, content=content))
740
743
741 # TODO: handle pre push scenario
744 # TODO: handle pre push scenario
742 tip = imc.commit(message=message,
745 tip = imc.commit(message=message,
743 author=author,
746 author=author,
744 parents=parents,
747 parents=parents,
745 branch=parent_commit.branch)
748 branch=parent_commit.branch)
746
749
747 self.mark_for_invalidation(repo.repo_name)
750 self.mark_for_invalidation(repo.repo_name)
748 if trigger_push_hook:
751 if trigger_push_hook:
749 hooks_utils.trigger_post_push_hook(
752 hooks_utils.trigger_post_push_hook(
750 username=user.username, action='push_local',
753 username=user.username, action='push_local',
751 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
754 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
752 commit_ids=[tip.raw_id])
755 commit_ids=[tip.raw_id])
753 return tip
756 return tip
754
757
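create_nodes(), update_nodes() and delete_nodes() all take the same {filename: {...}} mapping, with 'op' and 'filename' only needed for updates. A small illustration of what callers would build (the file names are invented):

    new_files = {
        'docs/intro.rst': {'content': 'Intro\n=====\n'},
    }
    updated_files = {
        'docs/intro.rst': {'op': 'mod', 'filename': 'docs/index.rst',
                           'content': 'Index\n=====\n'},
    }
    deleted_files = {
        'docs/index.rst': {'content': ''},  # content may stay empty for deletions
    }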
755 def strip(self, repo, commit_id, branch):
758 def strip(self, repo, commit_id, branch):
756 scm_instance = repo.scm_instance(cache=False)
759 scm_instance = repo.scm_instance(cache=False)
757 scm_instance.config.clear_section('hooks')
760 scm_instance.config.clear_section('hooks')
758 scm_instance.strip(commit_id, branch)
761 scm_instance.strip(commit_id, branch)
759 self.mark_for_invalidation(repo.repo_name)
762 self.mark_for_invalidation(repo.repo_name)
760
763
761 def get_unread_journal(self):
764 def get_unread_journal(self):
762 return self.sa.query(UserLog).count()
765 return self.sa.query(UserLog).count()
763
766
764 def get_repo_landing_revs(self, translator, repo=None):
767 def get_repo_landing_revs(self, translator, repo=None):
765 """
768 """
766 Generates select options with tags, branches and bookmarks (for hg only),
769 Generates select options with tags, branches and bookmarks (for hg only),
767 grouped by type
770 grouped by type
768
771
769 :param repo:
772 :param repo:
770 """
773 """
771 _ = translator
774 _ = translator
772 repo = self._get_repo(repo)
775 repo = self._get_repo(repo)
773
776
774 hist_l = [
777 hist_l = [
775 ['rev:tip', _('latest tip')]
778 ['rev:tip', _('latest tip')]
776 ]
779 ]
777 choices = [
780 choices = [
778 'rev:tip'
781 'rev:tip'
779 ]
782 ]
780
783
781 if not repo:
784 if not repo:
782 return choices, hist_l
785 return choices, hist_l
783
786
784 repo = repo.scm_instance()
787 repo = repo.scm_instance()
785
788
786 branches_group = (
789 branches_group = (
787 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
790 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
788 for b in repo.branches],
791 for b in repo.branches],
789 _("Branches"))
792 _("Branches"))
790 hist_l.append(branches_group)
793 hist_l.append(branches_group)
791 choices.extend([x[0] for x in branches_group[0]])
794 choices.extend([x[0] for x in branches_group[0]])
792
795
793 if repo.alias == 'hg':
796 if repo.alias == 'hg':
794 bookmarks_group = (
797 bookmarks_group = (
795 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
798 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
796 for b in repo.bookmarks],
799 for b in repo.bookmarks],
797 _("Bookmarks"))
800 _("Bookmarks"))
798 hist_l.append(bookmarks_group)
801 hist_l.append(bookmarks_group)
799 choices.extend([x[0] for x in bookmarks_group[0]])
802 choices.extend([x[0] for x in bookmarks_group[0]])
800
803
801 tags_group = (
804 tags_group = (
802 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
805 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
803 for t in repo.tags],
806 for t in repo.tags],
804 _("Tags"))
807 _("Tags"))
805 hist_l.append(tags_group)
808 hist_l.append(tags_group)
806 choices.extend([x[0] for x in tags_group[0]])
809 choices.extend([x[0] for x in tags_group[0]])
807
810
808 return choices, hist_l
811 return choices, hist_l
809
812
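get_repo_landing_revs() builds the landing-revision select as a list of (entries, group title) tuples behind the 'rev:tip' default, with bookmark entries added only for Mercurial. A standalone sketch of that grouping (the branch, tag and bookmark names are sample data):

    def landing_rev_choices(branches, tags, bookmarks=None):
        hist = [['rev:tip', 'latest tip']]
        choices = ['rev:tip']
        groups = [
            ([('branch:%s' % b, b) for b in branches], 'Branches'),
            ([('book:%s' % b, b) for b in (bookmarks or [])], 'Bookmarks'),
            ([('tag:%s' % t, t) for t in tags], 'Tags'),
        ]
        for entries, title in groups:
            hist.append((entries, title))
            choices.extend(value for value, _label in entries)
        return choices, hist

    choices, hist = landing_rev_choices(['default', 'stable'], ['v1.0'], bookmarks=['wip'])
    print(choices)  # ['rev:tip', 'branch:default', 'branch:stable', 'book:wip', 'tag:v1.0']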
810 def get_server_info(self, environ=None):
813 def get_server_info(self, environ=None):
811 server_info = get_system_info(environ)
814 server_info = get_system_info(environ)
812 return server_info
815 return server_info
@@ -1,98 +1,133 b''
1 <div class="panel panel-default">
1 <div class="panel panel-default">
2 <div class="panel-heading">
2 <div class="panel-heading">
3 <h3 class="panel-title">${_('Invalidate Cache for Repository')}</h3>
3 <h3 class="panel-title">${_('Invalidate Cache for Repository')}</h3>
4 </div>
4 </div>
5 <div class="panel-body">
5 <div class="panel-body">
6
6
7 <h4>${_('Manually invalidate the repository cache. On the next access a repository cache will be recreated.')}</h4>
7 <h4>${_('Manually invalidate the repository cache. On the next access a repository cache will be recreated.')}</h4>
8
8
9 <p>
9 <p>
10 ${_('Cache purge can be automated with an API call like the one below, and run periodically from crontab etc.')}
10 ${_('Cache purge can be automated with an API call like the one below, and run periodically from crontab etc.')}
11 <br/>
11 <br/>
12 <code>
12 <code>
13 ${h.api_call_example(method='invalidate_cache', args={"repoid": c.rhodecode_db_repo.repo_name})}
13 ${h.api_call_example(method='invalidate_cache', args={"repoid": c.rhodecode_db_repo.repo_name})}
14 </code>
14 </code>
15 </p>
15 </p>
16
16
17 ${h.secure_form(h.route_path('edit_repo_caches', repo_name=c.repo_name), request=request)}
17 ${h.secure_form(h.route_path('edit_repo_caches', repo_name=c.repo_name), request=request)}
18 <div class="form">
18 <div class="form">
19 <div class="fields">
19 <div class="fields">
20 ${h.submit('reset_cache_%s' % c.rhodecode_db_repo.repo_name,_('Invalidate repository cache'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to invalidate repository cache')+"');")}
20 ${h.submit('reset_cache_%s' % c.rhodecode_db_repo.repo_name,_('Invalidate repository cache'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to invalidate repository cache')+"');")}
21 </div>
21 </div>
22 </div>
22 </div>
23 ${h.end_form()}
23 ${h.end_form()}
24
24
25 </div>
25 </div>
26 </div>
26 </div>
27
27
28
28
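The panel above suggests automating the cache purge through the API; the exact call is rendered by h.api_call_example on the page itself. A hedged sketch of what such a cron-driven call could look like against the JSON-RPC endpoint (the host, the /_admin/api path from the ini file in this changeset, the auth token and the repository name are all placeholders):

    import json
    import requests  # third-party: pip install requests

    payload = {
        'id': 1,
        'auth_token': 'SECRET_AUTH_TOKEN',       # placeholder
        'method': 'invalidate_cache',
        'args': {'repoid': 'my-group/my-repo'},  # placeholder repository name
    }
    response = requests.post('https://rhodecode.example.com/_admin/api',
                             data=json.dumps(payload),
                             headers={'content-type': 'application/json'})
    print(response.json())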
29 <div class="panel panel-default">
29 <div class="panel panel-default">
30 <div class="panel-heading">
30 <div class="panel-heading">
31 <h3 class="panel-title">
31 <h3 class="panel-title">
32 ${(_ungettext('List of repository caches (%(count)s entry)', 'List of repository caches (%(count)s entries)' ,len(c.rhodecode_db_repo.cache_keys)) % {'count': len(c.rhodecode_db_repo.cache_keys)})}
32 ${(_ungettext('List of repository caches (%(count)s entry)', 'List of repository caches (%(count)s entries)' ,len(c.rhodecode_db_repo.cache_keys)) % {'count': len(c.rhodecode_db_repo.cache_keys)})}
33 </h3>
33 </h3>
34 </div>
34 </div>
35 <div class="panel-body">
35 <div class="panel-body">
36 <div class="field" >
36 <div class="field" >
37 <table class="rctable edit_cache">
37 <table class="rctable edit_cache">
38 <tr>
38 <tr>
39 <th>${_('Prefix')}</th>
39 <th>${_('Prefix')}</th>
40 <th>${_('Key')}</th>
40 <th>${_('Key')}</th>
41 <th>${_('Active')}</th>
41 <th>${_('Active')}</th>
42 </tr>
42 </tr>
43 %for cache in c.rhodecode_db_repo.cache_keys:
43 %for cache in c.rhodecode_db_repo.cache_keys:
44 <tr>
44 <tr>
45 <td class="td-prefix">${cache.get_prefix() or '-'}</td>
45 <td class="td-prefix">${cache.get_prefix() or '-'}</td>
46 <td class="td-cachekey">${cache.cache_key}</td>
46 <td class="td-cachekey">${cache.cache_key}</td>
47 <td class="td-active">${h.bool2icon(cache.cache_active)}</td>
47 <td class="td-active">${h.bool2icon(cache.cache_active)}</td>
48 </tr>
48 </tr>
49 %endfor
49 %endfor
50 </table>
50 </table>
51 </div>
51 </div>
52 </div>
52 </div>
53 </div>
53 </div>
54
54
55 <div class="panel panel-default">
55 <div class="panel panel-default">
56 <div class="panel-heading">
56 <div class="panel-heading">
57 <h3 class="panel-title">
58 ${_('Cache keys')}
59 </h3>
60 </div>
61 <div class="panel-body">
62 <p>
63 Cache keys used for storing cached values of repository stats,
64 file tree history and file tree search.
65 Invalidating the cache will remove those entries.
66 </p>
67 <pre>
68 region: ${c.region.name}
69 backend: ${c.region.actual_backend.__class__}
70 store: ${c.region.actual_backend.get_store()}
71
72
73 % if c.repo_keys:
74 ${len(c.repo_keys)} <a href="#showKeys" onclick="$('#show-keys').toggle()">${_('Show all')}</a>
75 <span id="show-keys" style="display: none">
76 % for k in c.repo_keys:
77 - ${k}
78 % endfor
79 </span>
80 % else:
81 NO KEYS FOUND
82 % endif
83
84 </pre>
85
86 </div>
87 </div>
88
89
90 <div class="panel panel-default">
91 <div class="panel-heading">
57 <h3 class="panel-title">${_('Shadow Repositories')}</h3>
92 <h3 class="panel-title">${_('Shadow Repositories')}</h3>
58 </div>
93 </div>
59 <div class="panel-body">
94 <div class="panel-body">
60 <table class="rctable edit_cache">
95 <table class="rctable edit_cache">
61 % if c.shadow_repos:
96 % if c.shadow_repos:
62 % for shadow_repo in c.shadow_repos:
97 % for shadow_repo in c.shadow_repos:
63 <tr>
98 <tr>
64 <td>${shadow_repo}</td>
99 <td>${shadow_repo}</td>
65 </tr>
100 </tr>
66 % endfor
101 % endfor
67 % else:
102 % else:
68 <tr>
103 <tr>
69 <td>${_('No Shadow repositories exist for this repository.')}</td>
104 <td>${_('No Shadow repositories exist for this repository.')}</td>
70 </tr>
105 </tr>
71 % endif
106 % endif
72
107
73 </table>
108 </table>
74 </div>
109 </div>
75 </div>
110 </div>
76
111
77
112
78 <div class="panel panel-default">
113 <div class="panel panel-default">
79 <div class="panel-heading">
114 <div class="panel-heading">
80 <h3 class="panel-title">${_('Diff Caches')}</h3>
115 <h3 class="panel-title">${_('Diff Caches')}</h3>
81 </div>
116 </div>
82 <div class="panel-body">
117 <div class="panel-body">
83 <table class="rctable edit_cache">
118 <table class="rctable edit_cache">
84 <tr>
119 <tr>
85 <td>${_('Cached diff name')}:</td>
120 <td>${_('Cached diff name')}:</td>
86 <td>${c.rhodecode_db_repo.cached_diffs_relative_dir}</td>
121 <td>${c.rhodecode_db_repo.cached_diffs_relative_dir}</td>
87 </tr>
122 </tr>
88 <tr>
123 <tr>
89 <td>${_('Cached diff files')}:</td>
124 <td>${_('Cached diff files')}:</td>
90 <td>${c.cached_diff_count}</td>
125 <td>${c.cached_diff_count}</td>
91 </tr>
126 </tr>
92 <tr>
127 <tr>
93 <td>${_('Cached diff size')}:</td>
128 <td>${_('Cached diff size')}:</td>
94 <td>${h.format_byte_size(c.cached_diff_size)}</td>
129 <td>${h.format_byte_size(c.cached_diff_size)}</td>
95 </tr>
130 </tr>
96 </table>
131 </table>
97 </div>
132 </div>
98 </div>
133 </div>
@@ -1,29 +1,41 b''
1 <%namespace name="base" file="/base/base.mako"/>
1 <%namespace name="base" file="/base/base.mako"/>
2
2
3 <div class="panel panel-default">
3 <div class="panel panel-default">
4 <div class="panel-heading">
4 <div class="panel-heading">
5 <h3 class="panel-title">${_('Caches')}</h3>
5 <h3 class="panel-title">${_('Caches')}</h3>
6 </div>
6 </div>
7 <div class="panel-body">
7 <div class="panel-body">
8 <pre>
8 <p>
9 Cache keys used for storing cached values of user permissions and authentication plugin cache.
10 Invalidating the cache will remove those entries.
11 </p>
12
13 <pre>
9 region: ${c.region.name}
14 region: ${c.region.name}
10 backend: ${c.region.actual_backend.__class__}
15 backend: ${c.region.actual_backend.__class__}
11 store: ${c.region.actual_backend.get_store()}
16 store: ${c.region.actual_backend.get_store()}
12
17
13 % for k in c.user_keys:
18 % if c.user_keys:
19 ${len(c.user_keys)} <a href="#showKeys" onclick="$('#show-keys').toggle()">${_('Show all')}</a>
20 <span id="show-keys" style="display: none">
21 % for k in c.user_keys:
14 - ${k}
22 - ${k}
15 % endfor
23 % endfor
16 </pre>
24 </span>
17
25 % else:
26 NO KEYS FOUND
27 % endif
28 </pre>
29 <p></p>
18 ${h.secure_form(h.route_path('edit_user_caches_update', user_id=c.user.user_id), request=request)}
30 ${h.secure_form(h.route_path('edit_user_caches_update', user_id=c.user.user_id), request=request)}
19 <div class="form">
31 <div class="form">
20 <div class="fields">
32 <div class="fields">
21 ${h.submit('reset_cache_%s' % c.user.user_id, _('Invalidate user cache'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to invalidate user cache')+"');")}
33 ${h.submit('reset_cache_%s' % c.user.user_id, _('Invalidate user cache'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to invalidate user cache')+"');")}
22 </div>
34 </div>
23 </div>
35 </div>
24 ${h.end_form()}
36 ${h.end_form()}
25
37
26 </div>
38 </div>
27 </div>
39 </div>
28
40
29
41
@@ -1,683 +1,671 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address all mails will be sent
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, sent an error email form
28 ## in case of Application errors, sent an error email form
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be send in case of server crash
31 ## additional error message to be send in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 0.0.0.0
46 host = 0.0.0.0
47 port = 5000
47 port = 5000
48
48
49 ##########################
49 ##########################
50 ## GUNICORN WSGI SERVER ##
50 ## GUNICORN WSGI SERVER ##
51 ##########################
51 ##########################
52 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
52 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
53
53
54 use = egg:gunicorn#main
54 use = egg:gunicorn#main
55 ## Sets the number of process workers. You must set `instance_id = *`
55 ## Sets the number of process workers. You must set `instance_id = *`
56 ## when this option is set to more than one worker, recommended
56 ## when this option is set to more than one worker, recommended
57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
58 ## The `instance_id = *` must be set in the [app:main] section below
58 ## The `instance_id = *` must be set in the [app:main] section below
59 #workers = 2
59 #workers = 2
60 ## number of threads for each worker, must be set to 1 for gevent
60 ## number of threads for each worker, must be set to 1 for gevent
61 ## generally recommended to be 1
61 ## generally recommended to be 1
62 #threads = 1
62 #threads = 1
63 ## process name
63 ## process name
64 #proc_name = rhodecode
64 #proc_name = rhodecode
65 ## type of worker class, one of sync, gevent
65 ## type of worker class, one of sync, gevent
66 ## for bigger setups, using a worker class other than sync is recommended
66 ## for bigger setups, using a worker class other than sync is recommended
67 #worker_class = sync
67 #worker_class = sync
68 ## The maximum number of simultaneous clients. Valid only for Gevent
68 ## The maximum number of simultaneous clients. Valid only for Gevent
69 #worker_connections = 10
69 #worker_connections = 10
70 ## max number of requests that a worker will handle before being gracefully
70 ## max number of requests that a worker will handle before being gracefully
71 ## restarted, this can help prevent memory leaks
71 ## restarted, this can help prevent memory leaks
72 #max_requests = 1000
72 #max_requests = 1000
73 #max_requests_jitter = 30
73 #max_requests_jitter = 30
74 ## amount of time a worker can spend handling a request before it
74 ## amount of time a worker can spend handling a request before it
75 ## gets killed and restarted. Set to 6hrs
75 ## gets killed and restarted. Set to 6hrs
76 #timeout = 21600
76 #timeout = 21600
77
77
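The worker guideline above, (2 * NUMBER_OF_CPUS + 1), as a quick calculation you could run when filling in the commented-out workers value:

    import multiprocessing

    workers = 2 * multiprocessing.cpu_count() + 1
    print('suggested gunicorn workers: %d' % workers)  # e.g. 5 on a 2-CPU machine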
78 ## prefix middleware for RhodeCode.
78 ## prefix middleware for RhodeCode.
79 ## recommended when using a proxy setup.
79 ## recommended when using a proxy setup.
80 ## allows setting RhodeCode under a prefix on the server.
80 ## allows setting RhodeCode under a prefix on the server.
81 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
81 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
82 ## And set your prefix like: `prefix = /custom_prefix`
82 ## And set your prefix like: `prefix = /custom_prefix`
83 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
83 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
84 ## to make your cookies only work on prefix url
84 ## to make your cookies only work on prefix url
85 [filter:proxy-prefix]
85 [filter:proxy-prefix]
86 use = egg:PasteDeploy#prefix
86 use = egg:PasteDeploy#prefix
87 prefix = /
87 prefix = /
88
88
89 [app:main]
89 [app:main]
90 is_test = True
90 is_test = True
91 use = egg:rhodecode-enterprise-ce
91 use = egg:rhodecode-enterprise-ce
92
92
93 ## enable proxy prefix middleware, defined above
93 ## enable proxy prefix middleware, defined above
94 #filter-with = proxy-prefix
94 #filter-with = proxy-prefix
95
95
96
96
97 ## RHODECODE PLUGINS ##
97 ## RHODECODE PLUGINS ##
98 rhodecode.includes = rhodecode.api
98 rhodecode.includes = rhodecode.api
99
99
100 # api prefix url
100 # api prefix url
101 rhodecode.api.url = /_admin/api
101 rhodecode.api.url = /_admin/api
102
102
103
103
104 ## END RHODECODE PLUGINS ##
104 ## END RHODECODE PLUGINS ##
105
105
106 ## encryption key used to encrypt social plugin tokens,
106 ## encryption key used to encrypt social plugin tokens,
107 ## remote_urls with credentials etc, if not set it defaults to
107 ## remote_urls with credentials etc, if not set it defaults to
108 ## `beaker.session.secret`
108 ## `beaker.session.secret`
109 #rhodecode.encrypted_values.secret =
109 #rhodecode.encrypted_values.secret =
110
110
111 ## decryption strict mode (enabled by default). It controls if decryption raises
111 ## decryption strict mode (enabled by default). It controls if decryption raises
112 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
112 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
113 #rhodecode.encrypted_values.strict = false
113 #rhodecode.encrypted_values.strict = false
114
114
115 ## return gzipped responses from Rhodecode (static files/application)
115 ## return gzipped responses from Rhodecode (static files/application)
116 gzip_responses = false
116 gzip_responses = false
117
117
118 ## autogenerate javascript routes file on startup
118 ## autogenerate javascript routes file on startup
119 generate_js_files = false
119 generate_js_files = false
120
120
121 ## Optional Languages
121 ## Optional Languages
122 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
122 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
123 lang = en
123 lang = en
124
124
125 ## perform a full repository scan on each server start, this should be
125 ## perform a full repository scan on each server start, this should be
126 ## set to false after first startup, to allow faster server restarts.
126 ## set to false after first startup, to allow faster server restarts.
127 startup.import_repos = true
127 startup.import_repos = true
128
128
129 ## Uncomment and set this path to use archive download cache.
129 ## Uncomment and set this path to use archive download cache.
130 ## Once enabled, generated archives will be cached at this location
130 ## Once enabled, generated archives will be cached at this location
131 ## and served from the cache during subsequent requests for the same archive of
131 ## and served from the cache during subsequent requests for the same archive of
132 ## the repository.
132 ## the repository.
133 #archive_cache_dir = /tmp/tarballcache
133 #archive_cache_dir = /tmp/tarballcache
134
134
135 ## URL at which the application is running. This is used for bootstrapping
135 ## URL at which the application is running. This is used for bootstrapping
136 ## requests in context when no web request is available. Used in ishell, or
136 ## requests in context when no web request is available. Used in ishell, or
137 ## SSH calls. Set this for events to receive proper url for SSH calls.
137 ## SSH calls. Set this for events to receive proper url for SSH calls.
138 app.base_url = http://rhodecode.local
138 app.base_url = http://rhodecode.local
139
139
140 ## change this to unique ID for security
140 ## change this to unique ID for security
141 app_instance_uuid = rc-production
141 app_instance_uuid = rc-production
142
142
143 ## cut off limit for large diffs (size in bytes)
143 ## cut off limit for large diffs (size in bytes)
144 cut_off_limit_diff = 1024000
144 cut_off_limit_diff = 1024000
145 cut_off_limit_file = 256000
145 cut_off_limit_file = 256000
146
146
147 ## use cache version of scm repo everywhere
147 ## use cache version of scm repo everywhere
148 vcs_full_cache = false
148 vcs_full_cache = false
149
149
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 ## Normally this is controlled by proper http flags sent from http server
151 ## Normally this is controlled by proper http flags sent from http server
152 force_https = false
152 force_https = false
153
153
154 ## use Strict-Transport-Security headers
154 ## use Strict-Transport-Security headers
155 use_htsts = false
155 use_htsts = false
156
156
157 ## git rev filter option, --all is the default filter, if you need to
157 ## git rev filter option, --all is the default filter, if you need to
158 ## hide all refs in changelog switch this to --branches --tags
158 ## hide all refs in changelog switch this to --branches --tags
159 git_rev_filter = --all
159 git_rev_filter = --all
160
160
161 # Set to true if your repos are exposed using the dumb protocol
161 # Set to true if your repos are exposed using the dumb protocol
162 git_update_server_info = false
162 git_update_server_info = false
163
163
164 ## RSS/ATOM feed options
164 ## RSS/ATOM feed options
165 rss_cut_off_limit = 256000
165 rss_cut_off_limit = 256000
166 rss_items_per_page = 10
166 rss_items_per_page = 10
167 rss_include_diff = false
167 rss_include_diff = false
168
168
169 ## gist URL alias, used to create nicer urls for gist. This should be an
169 ## gist URL alias, used to create nicer urls for gist. This should be an
170 ## url that does rewrites to _admin/gists/{gistid}.
170 ## url that does rewrites to _admin/gists/{gistid}.
171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
171 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
172 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
173 gist_alias_url =
173 gist_alias_url =
174
174
175 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
175 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
176 ## used for access.
176 ## used for access.
177 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
177 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
178 ## came from the logged-in user who owns this authentication token.
178 ## came from the logged-in user who owns this authentication token.
179 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
179 ## Additionally the @TOKEN syntax can be used to bind the view to a specific
180 ## authentication token. Such a view would only be accessible when used together
180 ## authentication token. Such a view would only be accessible when used together
181 ## with this authentication token
181 ## with this authentication token
182 ##
182 ##
183 ## list of all views can be found under `/_admin/permissions/auth_token_access`
183 ## list of all views can be found under `/_admin/permissions/auth_token_access`
184 ## The list should be "," separated and on a single line.
184 ## The list should be "," separated and on a single line.
185 ##
185 ##
186 ## Most common views to enable:
186 ## Most common views to enable:
187 # RepoCommitsView:repo_commit_download
187 # RepoCommitsView:repo_commit_download
188 # RepoCommitsView:repo_commit_patch
188 # RepoCommitsView:repo_commit_patch
189 # RepoCommitsView:repo_commit_raw
189 # RepoCommitsView:repo_commit_raw
190 # RepoCommitsView:repo_commit_raw@TOKEN
190 # RepoCommitsView:repo_commit_raw@TOKEN
191 # RepoFilesView:repo_files_diff
191 # RepoFilesView:repo_files_diff
192 # RepoFilesView:repo_archivefile
192 # RepoFilesView:repo_archivefile
193 # RepoFilesView:repo_file_raw
193 # RepoFilesView:repo_file_raw
194 # GistView:*
194 # GistView:*
195 api_access_controllers_whitelist =
195 api_access_controllers_whitelist =
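## Example (illustrative): allow token access to raw commits bound to a token,
## plus all gist views, using entries from the list above:
#api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw@TOKEN, GistView:*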

## default encoding used to convert from and to unicode
## can also be a comma separated list of encodings in case of mixed encodings
default_encoding = UTF-8
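## Example of a mixed-encodings setup; the fallback encoding is illustrative:
#default_encoding = UTF-8, ISO-8859-1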

## instance-id prefix
## a prefix key for this instance used for cache invalidation when running
## multiple instances of rhodecode, make sure it's globally unique for
## all running rhodecode instances. Leave empty if you don't use it
instance_id =
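## Example with a hypothetical, globally unique ID for this instance:
#instance_id = rc-prod-01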

## Fallback authentication plugin. Set this to a plugin ID to force the usage
## of an authentication plugin even if it is disabled by its settings.
## This could be useful if you are unable to log in to the system due to broken
## authentication settings. Then you can enable e.g. the internal rhodecode auth
## module to log in again and fix the settings.
##
## Available builtin plugin IDs (hash is part of the ID):
## egg:rhodecode-enterprise-ce#rhodecode
## egg:rhodecode-enterprise-ce#pam
## egg:rhodecode-enterprise-ce#ldap
## egg:rhodecode-enterprise-ce#jasig_cas
## egg:rhodecode-enterprise-ce#headers
## egg:rhodecode-enterprise-ce#crowd
#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode

## alternative HTTP return code for failed authentication. The default HTTP
## response is 401 HTTPUnauthorized. Currently HG clients have trouble
## handling that, causing a series of failed authentication calls.
## Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
## This will be served instead of the default 401 on bad authentication
auth_ret_code =
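## Example, returning HTTPForbidden instead of the default 401, per the note above:
#auth_ret_code = 403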

## use special detection method when serving auth_ret_code, instead of serving
## ret_code directly, use 401 initially (which triggers credentials prompt)
## and then serve auth_ret_code to clients
auth_ret_code_detection = false

## locking return code. When repository is locked return this HTTP code. 2XX
## codes don't break the transactions while 4XX codes do
lock_ret_code = 423

## allows changing the repository location on the settings page
allow_repo_location_change = true

## allows setting up custom hooks on the settings page
allow_custom_hooks_settings = true

## generated license token, go to the license page in RhodeCode settings to
## obtain a new token
license_token = abra-cada-bra1-rce3

## supervisor connection uri, for managing supervisor and logs.
supervisor.uri =
## supervisord group name/id we only want this RC instance to handle
supervisor.group_id = dev

## Display extended labs settings
labs_settings_active = true

####################################
### CELERY CONFIG ####
####################################
use_celery = false
broker.host = localhost
broker.vhost = rabbitmqhost
broker.port = 5672
broker.user = rabbitmq
broker.password = qweqwe
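## Example of pointing at a dedicated RabbitMQ host; hostname, vhost and
## credentials below are placeholders, not defaults:
#broker.host = rabbitmq.internal.example.com
#broker.vhost = rhodecode
#broker.user = rhodecode
#broker.password = change-me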

celery.imports = rhodecode.lib.celerylib.tasks

celery.result.backend = amqp
celery.result.dburi = amqp://
celery.result.serialier = json

#celery.send.task.error.emails = true
#celery.amqp.task.result.expires = 18000

celeryd.concurrency = 2
#celeryd.log.file = celeryd.log
celeryd.log.level = debug
celeryd.max.tasks.per.child = 1

## tasks will never be sent to the queue, but executed locally instead.
celery.always.eager = false

####################################
### BEAKER CACHE ####
####################################
## default cache dir for templates. Putting this into a ramdisk
## can boost performance, eg. %(here)s/data_ramdisk
cache_dir = %(here)s/data

## locking and default file storage for Beaker. Putting this into a ramdisk
## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock

beaker.cache.regions = long_term, sql_cache_short

beaker.cache.long_term.type = memory
beaker.cache.long_term.expire = 36000
beaker.cache.long_term.key_length = 256

beaker.cache.sql_cache_short.type = memory
beaker.cache.sql_cache_short.expire = 1
beaker.cache.sql_cache_short.key_length = 256

#####################################
### DOGPILE CACHE ####
#####################################

## permission tree cache settings
rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
rc_cache.cache_perms.expiration_time = 0
rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
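## Example with a 5 minute expiration instead of the value above; the number
## of seconds is illustrative only:
#rc_cache.cache_perms.expiration_time = 300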

####################################
### BEAKER SESSION ####
####################################

## .session.type is the type of storage used for the session; currently allowed
## types are file, ext:memcached, ext:database, and memory (default).
beaker.session.type = file
beaker.session.data_dir = %(here)s/rc/data/sessions/data
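## memcached based session, a sketch assuming a memcached server running on
## the default port (one of the allowed types listed above):
#beaker.session.type = ext:memcached
#beaker.session.url = 127.0.0.1:11211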

## db based session, fast, and allows easy management of logged in users
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = test-rc-uytcxaz
beaker.session.lock_dir = %(here)s/rc/data/sessions/lock

## Secure encrypted cookie. Requires AES python libraries
## you must disable beaker.session.secret to use this
#beaker.session.encrypt_key = key_for_encryption
#beaker.session.validate_key = validation_key

## sets the session as invalid (also logging out the user) if it has not been
## accessed for the given amount of time in seconds
beaker.session.timeout = 2592000
beaker.session.httponly = true
## Path to use for the cookie. Set to prefix if you use prefix middleware
#beaker.session.cookie_path = /custom_prefix

## uncomment for https secure cookie
beaker.session.secure = false

## auto save the session so that you don't have to call .save()
beaker.session.auto = false

## default cookie expiration time in seconds, set to `true` to set expire
## at browser close
#beaker.session.cookie_expires = 3600
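## or, per the note above, expire the cookie when the browser closes:
#beaker.session.cookie_expires = true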

###################################
## SEARCH INDEXING CONFIGURATION ##
###################################
## Full text search indexer is available in rhodecode-tools under
## `rhodecode-tools index` command

## WHOOSH Backend, doesn't require additional services to run
## it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index

########################################
### CHANNELSTREAM CONFIG ####
########################################
## channelstream enables persistent connections and live notifications
## in the system. It's also used by the chat system

channelstream.enabled = false

## server address for channelstream server on the backend
channelstream.server = 127.0.0.1:9800
## location of the channelstream server from the outside world
## use ws:// for http or wss:// for https. This address needs to be handled
## by external HTTP server such as Nginx or Apache
## see nginx/apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
channelstream.secret = secret
channelstream.history.location = %(here)s/channelstream_history

## Internal application path that Javascript uses to connect to.
## If you use proxy-prefix the prefix should be added before /_channelstream
channelstream.proxy_path = /_channelstream


###################################
## APPENLIGHT CONFIG ##
###################################

## Appenlight is tailored to work with RhodeCode, see
## http://appenlight.com for details on how to obtain an account

## appenlight integration enabled
appenlight = false

appenlight.server_url = https://api.appenlight.com
appenlight.api_key = YOUR_API_KEY
#appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5

# used for JS client
appenlight.api_public_key = YOUR_API_PUBLIC_KEY

## TWEAK AMOUNT OF INFO SENT HERE

## enables 404 error logging (default False)
appenlight.report_404 = false

## time in seconds after which a request is considered slow (default 1)
appenlight.slow_request_time = 1

## record slow requests in application
## (needs to be enabled for slow datastore recording and time tracking)
appenlight.slow_requests = true

## enable hooking to application loggers
appenlight.logging = true

## minimum log level for log capture
appenlight.logging.level = WARNING

## send logs only from erroneous/slow requests
## (saves API quota for intensive logging)
appenlight.logging_on_error = false

## list of additional keywords that should be grabbed from environ object
## can be string with comma separated list of words in lowercase
## (by default client will always send following info:
## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
## start with HTTP*); this list can be extended with additional keywords here
appenlight.environ_keys_whitelist =
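## Example with two illustrative environ keys (not defaults):
#appenlight.environ_keys_whitelist = server_port, query_string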

## list of keywords that should be blanked from request object
## can be string with comma separated list of words in lowercase
## (by default client will always blank keys that contain following words
## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf')
## this list can be extended with additional keywords set here
appenlight.request_keys_blacklist =

## list of namespaces that should be ignored when gathering log entries
## can be string with comma separated list of namespaces
## (by default the client ignores own entries: appenlight_client.client)
appenlight.log_namespace_blacklist =


################################################################################
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
## execute malicious code after an exception is raised. ##
################################################################################
set debug = false


##############
## STYLING ##
##############
debug_style = false

###########################################
### MAIN RHODECODE DATABASE CONFIG ###
###########################################
#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode_test
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode_test
sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.db?timeout=30

# see sqlalchemy docs for other advanced settings

## print the sql statements to output
sqlalchemy.db1.echo = false
## recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600
sqlalchemy.db1.convert_unicode = true

## the number of connections to keep open inside the connection pool.
## 0 indicates no limit
#sqlalchemy.db1.pool_size = 5

## the number of connections to allow in connection pool "overflow", that is
## connections that can be opened above and beyond the pool_size setting,
## which defaults to five.
#sqlalchemy.db1.max_overflow = 10


##################
### VCS CONFIG ###
##################
vcs.server.enable = true
vcs.server = localhost:9901

## Web server connectivity protocol, responsible for web based VCS operations
## Available protocols are:
## `http` - use http-rpc backend (default)
vcs.server.protocol = http

## Push/Pull operations protocol, available options are:
## `http` - use http-rpc backend (default)
## `vcsserver.scm_app` - internal app (EE only)
vcs.scm_app_implementation = http

## Push/Pull operations hooks protocol, available options are:
## `http` - use http-rpc backend (default)
vcs.hooks.protocol = http
vcs.hooks.host = 127.0.0.1

vcs.server.log_level = debug
## Start VCSServer with this instance as a subprocess, useful for development
vcs.start_server = false

## List of enabled VCS backends, available options are:
## `hg` - mercurial
## `git` - git
## `svn` - subversion
vcs.backends = hg, git, svn
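## Example limiting the instance to Mercurial and Git only, using the backend
## names listed above:
#vcs.backends = hg, git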

vcs.connection_timeout = 3600
## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible, pre-1.9-compatible
#vcs.svn.compatible_version = pre-1.8-compatible


############################################################
### Subversion proxy support (mod_dav_svn) ###
### Maps RhodeCode repo groups into SVN paths for Apache ###
############################################################
## Enable or disable the config file generation.
svn.proxy.generate_config = false
## Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true
## Set location and file name of generated config file.
svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
## Used as a prefix to the `Location` block in the generated config file.
## In most cases it should be set to `/`.
svn.proxy.location_root = /
## Command to reload the mod dav svn configuration on change.
## Example: `/etc/init.d/apache2 reload`
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload
## If the timeout expires before the reload command finishes, the command will
## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10

############################################################
### SSH Support Settings ###
############################################################

## Defines if the authorized_keys file should be written on any change of
## user ssh keys, setting this to false also disables the possibility of adding
## ssh keys for users from the web interface.
ssh.generate_authorized_keyfile = true

## Options for ssh, default is `no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding`
# ssh.authorized_keys_ssh_opts =
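## Example spelling out the default options mentioned in the comment above:
#ssh.authorized_keys_ssh_opts = no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding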

## File to generate the authorized keys together with options
## It is possible to have multiple key files specified in `sshd_config` e.g.
## AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
ssh.authorized_keys_file_path = %(here)s/rc/authorized_keys_rhodecode

## Command to execute the SSH wrapper. The binary is available in the
## rhodecode installation directory.
## e.g ~/.rccontrol/community-1/profile/bin/rc-ssh-wrapper
ssh.wrapper_cmd = ~/.rccontrol/community-1/rc-ssh-wrapper

## Allow shell when executing the ssh-wrapper command
ssh.wrapper_cmd_allow_shell = false

## Enables logging, and detailed output sent back to the client. Useful for
## debugging, shouldn't be used in production.
ssh.enable_debug_logging = false

## Paths to binary executables, by default they are just the names, but we can
## override them if we want to use custom ones
ssh.executable.hg = ~/.rccontrol/vcsserver-1/profile/bin/hg
ssh.executable.git = ~/.rccontrol/vcsserver-1/profile/bin/git
ssh.executable.svn = ~/.rccontrol/vcsserver-1/profile/bin/svnserve


## Dummy marker to add new entries after.
## Add any custom entries below. Please don't remove.
custom.conf = 1


################################
### LOGGING CONFIGURATION ####
################################
[loggers]
keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper

[handlers]
keys = console, console_sql

[formatters]
keys = generic, color_formatter, color_formatter_sql

#############
## LOGGERS ##
#############
[logger_root]
level = NOTSET
handlers = console

[logger_routes]
level = DEBUG
handlers =
qualname = routes.middleware
## "level = DEBUG" logs the route matched and routing variables.
propagate = 1

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_sqlalchemy]
level = ERROR
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

[logger_ssh_wrapper]
level = DEBUG
handlers =
qualname = ssh_wrapper
propagate = 1


##############
## HANDLERS ##
##############

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = DEBUG
formatter = generic
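## for colored console output, the color_formatter defined in the FORMATTERS
## section below could be used instead (illustrative alternative):
#formatter = color_formatter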

[handler_console_sql]
class = StreamHandler
args = (sys.stderr,)
level = WARN
formatter = generic

################
## FORMATTERS ##
################

[formatter_generic]
class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S