pyramid: make responses/exceptions from pyramid/pylons work
dan
r187:fc8c8497 default
@@ -1,608 +1,607 @@
################################################################################
################################################################################
# RhodeCode Enterprise - configuration file #
# Built-in functions and variables #
# The %(here)s variable will be replaced with the parent directory of this file#
# #
################################################################################

[DEFAULT]
debug = true
-pdebug = false
################################################################################
## Uncomment and replace with the email address which should receive ##
## any error reports after an application crash ##
## Additionally these settings will be used by the RhodeCode mailing system ##
################################################################################
#email_to = admin@localhost
#error_email_from = paste_error@localhost
#app_email_from = rhodecode-noreply@localhost
#error_message =
#email_prefix = [RhodeCode]

#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true
## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
#smtp_auth =

[server:main]
## COMMON ##
host = 127.0.0.1
port = 5000

##################################
## WAITRESS WSGI SERVER ##
## Recommended for Development ##
##################################
use = egg:waitress#main
## number of worker threads
threads = 5
## MAX BODY SIZE 100GB
max_request_body_size = 107374182400
## Use poll instead of select, fixes file descriptors limits problems.
## May not work on old windows systems.
asyncore_use_poll = true


##########################
## GUNICORN WSGI SERVER ##
##########################
## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini>
#use = egg:gunicorn#main
## Sets the number of process workers. You must set `instance_id = *`
## when this option is set to more than one worker, recommended
## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
## The `instance_id = *` must be set in the [app:main] section below
#workers = 2
## number of threads for each worker, must be set to 1 for gevent
## generally recommended to be at 1
#threads = 1
## process name
#proc_name = rhodecode
## type of worker class, one of sync, gevent
## for bigger setups it is recommended to use a worker class other than sync
#worker_class = sync
## The maximum number of simultaneous clients. Valid only for Gevent
#worker_connections = 10
## max number of requests that worker will handle before being gracefully
## restarted, could prevent memory leaks
#max_requests = 1000
#max_requests_jitter = 30
## amount of time a worker can spend handling a request before it
## gets killed and restarted. Set to 6hrs
#timeout = 21600

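A minimal sketch (not part of this commit) of how the [server:main] values above translate into code, assuming the file is saved as development.ini and wiring PasteDeploy and Waitress up by hand:

    from paste.deploy import loadapp
    from waitress import serve

    # Load the WSGI app defined by the [app:main] section below.
    # 'development.ini' is an assumed file name, not taken from this commit.
    app = loadapp('config:development.ini', relative_to='.')

    # Mirror the host/port/threads settings from [server:main] above.
    serve(app, host='127.0.0.1', port=5000, threads=5,
          max_request_body_size=107374182400)

In day-to-day use the same thing is done by pointing the chosen WSGI server at the ini file (waitress in development, or gunicorn as described in the comments above) rather than calling serve() by hand.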
## prefix middleware for RhodeCode, disables force_https flag.
## allows to set RhodeCode under a prefix in server.
## eg https://server.com/<prefix>. Enable `filter-with =` option below as well.
#[filter:proxy-prefix]
#use = egg:PasteDeploy#prefix
#prefix = /<your-prefix>

[app:main]
use = egg:rhodecode-enterprise-ce
## enable proxy prefix middleware, defined below
#filter-with = proxy-prefix

# During development we want to have the debug toolbar enabled
pyramid.includes =
    pyramid_debugtoolbar
    rhodecode.utils.debugtoolbar
    rhodecode.lib.middleware.request_wrapper

pyramid.reload_templates = true

debugtoolbar.hosts = 0.0.0.0/0
debugtoolbar.exclude_prefixes =
    /css
    /fonts
    /images
    /js

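For orientation, the pyramid.includes list above is roughly what Pyramid feeds to Configurator.include() while the app is assembled; a simplified, hypothetical factory (not RhodeCode's actual one) would look like:

    from pyramid.config import Configurator

    def make_app(global_config, **settings):
        # settings are parsed from the [app:main] section of the ini
        config = Configurator(settings=settings)
        # same effect as listing the package under pyramid.includes above
        config.include('pyramid_debugtoolbar')
        return config.make_wsgi_app()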
## RHODECODE PLUGINS ##
rhodecode.includes =
    rhodecode.api


# api prefix url
rhodecode.api.url = /_admin/api


## END RHODECODE PLUGINS ##

full_stack = true

## Serve static files via RhodeCode, disable to serve them via HTTP server
static_files = true

## Optional Languages
## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

## perform a full repository scan on each server start, this should be
## set to false after first startup, to allow faster server restarts.
startup.import_repos = false

## Uncomment and set this path to use archive download cache.
## Once enabled, generated archives will be cached at this location
## and served from the cache during subsequent requests for the same archive of
## the repository.
#archive_cache_dir = /tmp/tarballcache

## change this to unique ID for security
app_instance_uuid = rc-production

## cut off limit for large diffs (size in bytes)
cut_off_limit_diff = 1024000
cut_off_limit_file = 256000

## use cached version of scm repo everywhere
vcs_full_cache = true

## force https in RhodeCode, fixes https redirects, assumes it's always https
## Normally this is controlled by proper http flags sent from http server
force_https = false

## use Strict-Transport-Security headers
use_htsts = false

## number of commits stats will parse on each iteration
commit_parse_limit = 25

## git rev filter option, --all is the default filter, if you need to
## hide all refs in changelog switch this to --branches --tags
git_rev_filter = --branches --tags

# Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

## RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

## gist URL alias, used to create nicer urls for gist. This should be an
## url that does rewrites to _admin/gists/<gistid>.
## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid>
gist_alias_url =

## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
## used for access.
## Adding ?auth_token = <token> to the url authenticates this request as if it
## came from the logged in user who owns this authentication token.
##
## Syntax is <ControllerClass>:<function_pattern>.
## To enable access to raw_files put `FilesController:raw`.
## To enable access to patches add `ChangesetController:changeset_patch`.
## The list should be "," separated and on a single line.
##
## Recommended controllers to enable:
# ChangesetController:changeset_patch,
# ChangesetController:changeset_raw,
# FilesController:raw,
# FilesController:archivefile,
# GistsController:*,
api_access_controllers_whitelist =

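Once a controller:function pair is listed in api_access_controllers_whitelist, appending the token to the URL authenticates the request, as the comments above describe. A hedged example using the requests library (host, repository and token are placeholders):

    import requests

    # FilesController:raw must be whitelisted above for this to work.
    url = 'https://rhodecode.example.com/myrepo/raw/tip/README.rst'
    resp = requests.get(url, params={'auth_token': 'secret-token'})
    resp.raise_for_status()
    print(resp.text)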
## default encoding used to convert from and to unicode
## can be also a comma separated list of encoding in case of mixed encodings
default_encoding = UTF-8

## instance-id prefix
## a prefix key for this instance used for cache invalidation when running
## multiple instances of rhodecode, make sure it's globally unique for
## all running rhodecode instances. Leave empty if you don't use it
instance_id =

## Fallback authentication plugin. Set this to a plugin ID to force the usage
## of an authentication plugin even if it is disabled by its settings.
## This could be useful if you are unable to log in to the system due to broken
## authentication settings. Then you can enable e.g. the internal rhodecode auth
## module to log in again and fix the settings.
##
## Available builtin plugin IDs (hash is part of the ID):
## egg:rhodecode-enterprise-ce#rhodecode
## egg:rhodecode-enterprise-ce#pam
## egg:rhodecode-enterprise-ce#ldap
## egg:rhodecode-enterprise-ce#jasig_cas
## egg:rhodecode-enterprise-ce#headers
## egg:rhodecode-enterprise-ce#crowd
#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode

## alternative return HTTP header for failed authentication. Default HTTP
## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
## handling that, causing a series of failed authentication calls.
## Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
## This will be served instead of the default 401 on bad authentication.
auth_ret_code =

## use special detection method when serving auth_ret_code, instead of serving
## ret_code directly, use 401 initially (which triggers a credentials prompt)
## and then serve auth_ret_code to clients
auth_ret_code_detection = false

## locking return code. When repository is locked return this HTTP code. 2XX
## codes don't break the transactions while 4XX codes do
lock_ret_code = 423

## allows to change the repository location in settings page
allow_repo_location_change = true

## allows to setup custom hooks in settings page
allow_custom_hooks_settings = true

## generated license token, go to the license page in RhodeCode settings to
## obtain a new token
license_token =

## supervisor connection uri, for managing supervisor and logs.
supervisor.uri =
## supervisord group name/id we only want this RC instance to handle
supervisor.group_id = dev

## Display extended labs settings
labs_settings_active = true

####################################
### CELERY CONFIG ####
####################################
use_celery = false
broker.host = localhost
broker.vhost = rabbitmqhost
broker.port = 5672
broker.user = rabbitmq
broker.password = qweqwe

celery.imports = rhodecode.lib.celerylib.tasks

celery.result.backend = amqp
celery.result.dburi = amqp://
celery.result.serialier = json

#celery.send.task.error.emails = true
#celery.amqp.task.result.expires = 18000

celeryd.concurrency = 2
#celeryd.log.file = celeryd.log
celeryd.log.level = debug
celeryd.max.tasks.per.child = 1

## tasks will never be sent to the queue, but executed locally instead.
celery.always.eager = false

####################################
### BEAKER CACHE ####
####################################
## default cache dir for templates. Putting this into a ramdisk
## can boost performance, eg. %(here)s/data_ramdisk
cache_dir = %(here)s/data

## locking and default file storage for Beaker. Putting this into a ramdisk
## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
beaker.cache.data_dir = %(here)s/data/cache/beaker_data
beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock

beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long

beaker.cache.super_short_term.type = memory
beaker.cache.super_short_term.expire = 10
beaker.cache.super_short_term.key_length = 256

beaker.cache.short_term.type = memory
beaker.cache.short_term.expire = 60
beaker.cache.short_term.key_length = 256

beaker.cache.long_term.type = memory
beaker.cache.long_term.expire = 36000
beaker.cache.long_term.key_length = 256

beaker.cache.sql_cache_short.type = memory
beaker.cache.sql_cache_short.expire = 10
beaker.cache.sql_cache_short.key_length = 256

# default is memory cache, configure only if required
# using multi-node or multi-worker setup
#beaker.cache.auth_plugins.type = ext:database
#beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
#beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.cache.auth_plugins.sa.pool_recycle = 3600
#beaker.cache.auth_plugins.sa.pool_size = 10
#beaker.cache.auth_plugins.sa.max_overflow = 0

beaker.cache.repo_cache_long.type = memorylru_base
beaker.cache.repo_cache_long.max_items = 4096
beaker.cache.repo_cache_long.expire = 2592000

# default is memorylru_base cache, configure only if required
# using multi-node or multi-worker setup
#beaker.cache.repo_cache_long.type = ext:memcached
#beaker.cache.repo_cache_long.url = localhost:11211
#beaker.cache.repo_cache_long.expire = 1209600
#beaker.cache.repo_cache_long.key_length = 256

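The beaker.cache.* keys above follow Beaker's standard option format; a small standalone sketch of how such options are consumed (region names and values here are illustrative, trimmed from the config above):

    from beaker.cache import CacheManager
    from beaker.util import parse_cache_config_options

    cache_opts = {
        'cache.type': 'memory',
        'cache.regions': 'short_term, long_term',
        'cache.short_term.type': 'memory',
        'cache.short_term.expire': '60',
        'cache.long_term.type': 'memory',
        'cache.long_term.expire': '36000',
    }
    cache = CacheManager(**parse_cache_config_options(cache_opts))

    @cache.region('short_term', 'repo_scan')
    def expensive_repo_scan(path):
        # placeholder for work that is worth caching for 60 seconds
        return sorted(['repo-a', 'repo-b'])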
####################################
### BEAKER SESSION ####
####################################

## .session.type is the type of storage used for the session, currently allowed
## types are file, ext:memcached, ext:database, and memory (default).
beaker.session.type = file
beaker.session.data_dir = %(here)s/data/sessions/data

## db based session, fast, and allows easy management over logged in users ##
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = develop-rc-uytcxaz
beaker.session.lock_dir = %(here)s/data/sessions/lock

## Secure encrypted cookie. Requires AES and AES python libraries
## you must disable beaker.session.secret to use this
#beaker.session.encrypt_key = <key_for_encryption>
#beaker.session.validate_key = <validation_key>

## sets session as invalid (also logging out the user) if it has not been
## accessed for given amount of time in seconds
beaker.session.timeout = 2592000
beaker.session.httponly = true
#beaker.session.cookie_path = /<your-prefix>

## uncomment for https secure cookie
beaker.session.secure = false

## auto save the session so there is no need to call .save()
beaker.session.auto = false

## default cookie expiration time in seconds, set to `true` to set expire
## at browser close
#beaker.session.cookie_expires = 3600

###################################
## SEARCH INDEXING CONFIGURATION ##
###################################
## Full text search indexer is available in rhodecode-tools under
## `rhodecode-tools index` command

# WHOOSH Backend, doesn't require additional services to run
# it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index


###################################
## APPENLIGHT CONFIG ##
###################################

## Appenlight is tailored to work with RhodeCode, see
## http://appenlight.com for details on how to obtain an account

## appenlight integration enabled
appenlight = false

appenlight.server_url = https://api.appenlight.com
appenlight.api_key = YOUR_API_KEY
#appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5

# used for JS client
appenlight.api_public_key = YOUR_API_PUBLIC_KEY

## TWEAK AMOUNT OF INFO SENT HERE

## enables 404 error logging (default False)
appenlight.report_404 = false

## time in seconds after which a request is considered slow (default 1)
appenlight.slow_request_time = 1

## record slow requests in application
## (needs to be enabled for slow datastore recording and time tracking)
appenlight.slow_requests = true

## enable hooking to application loggers
appenlight.logging = true

## minimum log level for log capture
appenlight.logging.level = WARNING

## send logs only from erroneous/slow requests
## (saves API quota for intensive logging)
appenlight.logging_on_error = false

## list of additional keywords that should be grabbed from environ object
## can be string with comma separated list of words in lowercase
## (by default client will always send following info:
## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
## start with HTTP*); this list can be extended with additional keywords here
appenlight.environ_keys_whitelist =

## list of keywords that should be blanked from request object
## can be string with comma separated list of words in lowercase
## (by default client will always blank keys that contain following words
## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf')
## this list can be extended with additional keywords set here
appenlight.request_keys_blacklist =

## list of namespaces that should be ignored when gathering log entries
## can be string with comma separated list of namespaces
## (by default the client ignores own entries: appenlight_client.client)
appenlight.log_namespace_blacklist =


################################################################################
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
## execute malicious code after an exception is raised. ##
################################################################################
#set debug = false


##############
## STYLING ##
##############
debug_style = true

#########################################################
### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG ###
#########################################################
sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode

# see sqlalchemy docs for other advanced settings

## print the sql statements to output
sqlalchemy.db1.echo = false
## recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600
sqlalchemy.db1.convert_unicode = true

## the number of connections to keep open inside the connection pool.
## 0 indicates no limit
#sqlalchemy.db1.pool_size = 5

## the number of connections to allow in connection pool "overflow", that is
## connections that can be opened above and beyond the pool_size setting,
## which defaults to five.
#sqlalchemy.db1.max_overflow = 10


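The sqlalchemy.db1.* keys above are handed to SQLAlchemy when the engine is created; a rough hand-built equivalent (the SQLite path is an assumption mirroring the default URL):

    from sqlalchemy import create_engine, text

    # Mirrors sqlalchemy.db1.url / echo / pool_recycle from the section above.
    engine = create_engine(
        'sqlite:///rhodecode.db',
        echo=False,
        pool_recycle=3600,
    )

    with engine.connect() as conn:
        print(conn.execute(text('SELECT 1')).scalar())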
##################
### VCS CONFIG ###
##################
vcs.server.enable = true
vcs.server = localhost:9900

## Web server connectivity protocol, responsible for web based VCS operations
## Available protocols are:
## `pyro4` - using pyro4 server
## `http` - using http-rpc backend
#vcs.server.protocol = http

## Push/Pull operations protocol, available options are:
## `pyro4` - using pyro4 server
## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended
## `vcsserver.scm_app` - internal app (EE only)
#vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http

## Push/Pull operations hooks protocol, available options are:
## `pyro4` - using pyro4 server
## `http` - using http-rpc backend
#vcs.hooks.protocol = http

vcs.server.log_level = debug
## Start VCSServer with this instance as a subprocess, useful for development
vcs.start_server = true
vcs.backends = hg, git, svn
vcs.connection_timeout = 3600
## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
#vcs.svn.compatible_version = pre-1.8-compatible

################################
### LOGGING CONFIGURATION ####
################################
[loggers]
keys = root, routes, rhodecode, sqlalchemy, beaker, pyro4, templates, whoosh_indexer

[handlers]
keys = console, console_sql

[formatters]
keys = generic, color_formatter, color_formatter_sql

#############
## LOGGERS ##
#############
[logger_root]
level = NOTSET
handlers = console

[logger_routes]
level = DEBUG
handlers =
qualname = routes.middleware
## "level = DEBUG" logs the route matched and routing variables.
propagate = 1

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_pyro4]
level = DEBUG
handlers =
qualname = Pyro4
propagate = 1

[logger_templates]
level = INFO
handlers =
qualname = pylons.templating
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_sqlalchemy]
level = INFO
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

[logger_whoosh_indexer]
level = DEBUG
handlers =
qualname = whoosh_indexer
propagate = 1

##############
## HANDLERS ##
##############

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = DEBUG
formatter = color_formatter

[handler_console_sql]
class = StreamHandler
args = (sys.stderr,)
level = DEBUG
formatter = color_formatter_sql

################
## FORMATTERS ##
################

[formatter_generic]
class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S
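The [loggers]/[handlers]/[formatters] sections at the end use the stdlib logging.config file format, which is also what Pyramid's paster helpers read; a minimal sketch of loading both logging and the application from this file (the ini file name is an assumption):

    import logging

    from pyramid.paster import get_app, setup_logging

    ini_path = 'development.ini'  # assumed name for the file shown in this diff
    setup_logging(ini_path)       # applies [loggers]/[handlers]/[formatters]
    app = get_app(ini_path, 'main')  # builds the WSGI app from [app:main]

    logging.getLogger('rhodecode').debug('logging configured from %s', ini_path)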
@@ -1,577 +1,576 b''
1 ################################################################################
1 ################################################################################
2 ################################################################################
2 ################################################################################
3 # RhodeCode Enterprise - configuration file #
3 # RhodeCode Enterprise - configuration file #
4 # Built-in functions and variables #
4 # Built-in functions and variables #
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 # #
6 # #
7 ################################################################################
7 ################################################################################
8
8
9 [DEFAULT]
9 [DEFAULT]
10 debug = true
10 debug = true
11 pdebug = false
12 ################################################################################
11 ################################################################################
13 ## Uncomment and replace with the email address which should receive ##
12 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
13 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
14 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
15 ################################################################################
17 #email_to = admin@localhost
16 #email_to = admin@localhost
18 #error_email_from = paste_error@localhost
17 #error_email_from = paste_error@localhost
19 #app_email_from = rhodecode-noreply@localhost
18 #app_email_from = rhodecode-noreply@localhost
20 #error_message =
19 #error_message =
21 #email_prefix = [RhodeCode]
20 #email_prefix = [RhodeCode]
22
21
23 #smtp_server = mail.server.com
22 #smtp_server = mail.server.com
24 #smtp_username =
23 #smtp_username =
25 #smtp_password =
24 #smtp_password =
26 #smtp_port =
25 #smtp_port =
27 #smtp_use_tls = false
26 #smtp_use_tls = false
28 #smtp_use_ssl = true
27 #smtp_use_ssl = true
29 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
28 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
30 #smtp_auth =
29 #smtp_auth =
31
30
32 [server:main]
31 [server:main]
33 ## COMMON ##
32 ## COMMON ##
34 host = 127.0.0.1
33 host = 127.0.0.1
35 port = 5000
34 port = 5000
36
35
37 ##################################
36 ##################################
38 ## WAITRESS WSGI SERVER ##
37 ## WAITRESS WSGI SERVER ##
39 ## Recommended for Development ##
38 ## Recommended for Development ##
40 ##################################
39 ##################################
41 #use = egg:waitress#main
40 #use = egg:waitress#main
42 ## number of worker threads
41 ## number of worker threads
43 #threads = 5
42 #threads = 5
44 ## MAX BODY SIZE 100GB
43 ## MAX BODY SIZE 100GB
45 #max_request_body_size = 107374182400
44 #max_request_body_size = 107374182400
46 ## Use poll instead of select, fixes file descriptors limits problems.
45 ## Use poll instead of select, fixes file descriptors limits problems.
47 ## May not work on old windows systems.
46 ## May not work on old windows systems.
48 #asyncore_use_poll = true
47 #asyncore_use_poll = true
49
48
50
49
51 ##########################
50 ##########################
52 ## GUNICORN WSGI SERVER ##
51 ## GUNICORN WSGI SERVER ##
53 ##########################
52 ##########################
54 ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini>
53 ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini>
55 use = egg:gunicorn#main
54 use = egg:gunicorn#main
56 ## Sets the number of process workers. You must set `instance_id = *`
55 ## Sets the number of process workers. You must set `instance_id = *`
57 ## when this option is set to more than one worker, recommended
56 ## when this option is set to more than one worker, recommended
58 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
57 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
59 ## The `instance_id = *` must be set in the [app:main] section below
58 ## The `instance_id = *` must be set in the [app:main] section below
60 workers = 2
59 workers = 2
61 ## number of threads for each of the worker, must be set to 1 for gevent
60 ## number of threads for each of the worker, must be set to 1 for gevent
62 ## generally recommened to be at 1
61 ## generally recommened to be at 1
63 #threads = 1
62 #threads = 1
64 ## process name
63 ## process name
65 proc_name = rhodecode
64 proc_name = rhodecode
66 ## type of worker class, one of sync, gevent
65 ## type of worker class, one of sync, gevent
67 ## recommended for bigger setup is using of of other than sync one
66 ## recommended for bigger setup is using of of other than sync one
68 worker_class = sync
67 worker_class = sync
69 ## The maximum number of simultaneous clients. Valid only for Gevent
68 ## The maximum number of simultaneous clients. Valid only for Gevent
70 #worker_connections = 10
69 #worker_connections = 10
71 ## max number of requests that worker will handle before being gracefully
70 ## max number of requests that worker will handle before being gracefully
72 ## restarted, could prevent memory leaks
71 ## restarted, could prevent memory leaks
73 max_requests = 1000
72 max_requests = 1000
74 max_requests_jitter = 30
73 max_requests_jitter = 30
75 ## amount of time a worker can spend with handling a request before it
74 ## amount of time a worker can spend with handling a request before it
76 ## gets killed and restarted. Set to 6hrs
75 ## gets killed and restarted. Set to 6hrs
77 timeout = 21600
76 timeout = 21600
78
77
79
78
80 ## prefix middleware for RhodeCode, disables force_https flag.
79 ## prefix middleware for RhodeCode, disables force_https flag.
81 ## allows to set RhodeCode under a prefix in server.
80 ## allows to set RhodeCode under a prefix in server.
82 ## eg https://server.com/<prefix>. Enable `filter-with =` option below as well.
81 ## eg https://server.com/<prefix>. Enable `filter-with =` option below as well.
83 #[filter:proxy-prefix]
82 #[filter:proxy-prefix]
84 #use = egg:PasteDeploy#prefix
83 #use = egg:PasteDeploy#prefix
85 #prefix = /<your-prefix>
84 #prefix = /<your-prefix>
86
85
87 [app:main]
86 [app:main]
88 use = egg:rhodecode-enterprise-ce
87 use = egg:rhodecode-enterprise-ce
89 ## enable proxy prefix middleware, defined below
88 ## enable proxy prefix middleware, defined below
90 #filter-with = proxy-prefix
89 #filter-with = proxy-prefix
91
90
92 full_stack = true
91 full_stack = true
93
92
94 ## Serve static files via RhodeCode, disable to serve them via HTTP server
93 ## Serve static files via RhodeCode, disable to serve them via HTTP server
95 static_files = true
94 static_files = true
96
95
97 ## Optional Languages
96 ## Optional Languages
98 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
97 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
99 lang = en
98 lang = en
100
99
101 ## perform a full repository scan on each server start, this should be
100 ## perform a full repository scan on each server start, this should be
102 ## set to false after first startup, to allow faster server restarts.
101 ## set to false after first startup, to allow faster server restarts.
103 startup.import_repos = false
102 startup.import_repos = false
104
103
105 ## Uncomment and set this path to use archive download cache.
104 ## Uncomment and set this path to use archive download cache.
106 ## Once enabled, generated archives will be cached at this location
105 ## Once enabled, generated archives will be cached at this location
107 ## and served from the cache during subsequent requests for the same archive of
106 ## and served from the cache during subsequent requests for the same archive of
108 ## the repository.
107 ## the repository.
109 #archive_cache_dir = /tmp/tarballcache
108 #archive_cache_dir = /tmp/tarballcache
110
109
111 ## change this to unique ID for security
110 ## change this to unique ID for security
112 app_instance_uuid = rc-production
111 app_instance_uuid = rc-production
113
112
114 ## cut off limit for large diffs (size in bytes)
113 ## cut off limit for large diffs (size in bytes)
115 cut_off_limit_diff = 1024000
114 cut_off_limit_diff = 1024000
116 cut_off_limit_file = 256000
115 cut_off_limit_file = 256000
117
116
118 ## use cache version of scm repo everywhere
117 ## use cache version of scm repo everywhere
119 vcs_full_cache = true
118 vcs_full_cache = true
120
119
121 ## force https in RhodeCode, fixes https redirects, assumes it's always https
120 ## force https in RhodeCode, fixes https redirects, assumes it's always https
122 ## Normally this is controlled by proper http flags sent from http server
121 ## Normally this is controlled by proper http flags sent from http server
123 force_https = false
122 force_https = false
124
123
125 ## use Strict-Transport-Security headers
124 ## use Strict-Transport-Security headers
126 use_htsts = false
125 use_htsts = false
127
126
128 ## number of commits stats will parse on each iteration
127 ## number of commits stats will parse on each iteration
129 commit_parse_limit = 25
128 commit_parse_limit = 25
130
129
131 ## git rev filter option, --all is the default filter, if you need to
130 ## git rev filter option, --all is the default filter, if you need to
132 ## hide all refs in changelog switch this to --branches --tags
131 ## hide all refs in changelog switch this to --branches --tags
133 git_rev_filter = --branches --tags
132 git_rev_filter = --branches --tags
134
133
135 # Set to true if your repos are exposed using the dumb protocol
134 # Set to true if your repos are exposed using the dumb protocol
136 git_update_server_info = false
135 git_update_server_info = false
137
136
138 ## RSS/ATOM feed options
137 ## RSS/ATOM feed options
139 rss_cut_off_limit = 256000
138 rss_cut_off_limit = 256000
140 rss_items_per_page = 10
139 rss_items_per_page = 10
141 rss_include_diff = false
140 rss_include_diff = false
142
141
143 ## gist URL alias, used to create nicer urls for gist. This should be an
142 ## gist URL alias, used to create nicer urls for gist. This should be an
144 ## url that does rewrites to _admin/gists/<gistid>.
143 ## url that does rewrites to _admin/gists/<gistid>.
145 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
144 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
146 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid>
145 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid>
147 gist_alias_url =
146 gist_alias_url =
148
147
149 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
148 ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be
150 ## used for access.
149 ## used for access.
151 ## Adding ?auth_token = <token> to the url authenticates this request as if it
150 ## Adding ?auth_token = <token> to the url authenticates this request as if it
152 ## came from the the logged in user who own this authentication token.
151 ## came from the the logged in user who own this authentication token.
153 ##
152 ##
154 ## Syntax is <ControllerClass>:<function_pattern>.
153 ## Syntax is <ControllerClass>:<function_pattern>.
155 ## To enable access to raw_files put `FilesController:raw`.
154 ## To enable access to raw_files put `FilesController:raw`.
156 ## To enable access to patches add `ChangesetController:changeset_patch`.
155 ## To enable access to patches add `ChangesetController:changeset_patch`.
157 ## The list should be "," separated and on a single line.
156 ## The list should be "," separated and on a single line.
158 ##
157 ##
159 ## Recommended controllers to enable:
158 ## Recommended controllers to enable:
160 # ChangesetController:changeset_patch,
159 # ChangesetController:changeset_patch,
161 # ChangesetController:changeset_raw,
160 # ChangesetController:changeset_raw,
162 # FilesController:raw,
161 # FilesController:raw,
163 # FilesController:archivefile,
162 # FilesController:archivefile,
164 # GistsController:*,
163 # GistsController:*,
165 api_access_controllers_whitelist =
164 api_access_controllers_whitelist =
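## Example (illustrative only): enabling all of the recommended controllers
## listed above results in a single comma separated line like this:
#api_access_controllers_whitelist = ChangesetController:changeset_patch, ChangesetController:changeset_raw, FilesController:raw, FilesController:archivefile, GistsController:*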
166
165
167 ## default encoding used to convert from and to unicode
166 ## default encoding used to convert from and to unicode
168 ## can also be a comma separated list of encodings in case of mixed encodings
167 ## can also be a comma separated list of encodings in case of mixed encodings
169 default_encoding = UTF-8
168 default_encoding = UTF-8
170
169
171 ## instance-id prefix
170 ## instance-id prefix
172 ## a prefix key for this instance used for cache invalidation when running
171 ## a prefix key for this instance used for cache invalidation when running
173 ## multiple instances of rhodecode, make sure it's globally unique for
172 ## multiple instances of rhodecode, make sure it's globally unique for
174 ## all running rhodecode instances. Leave empty if you don't use it
173 ## all running rhodecode instances. Leave empty if you don't use it
175 instance_id =
174 instance_id =
176
175
177 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
176 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
178 ## of an authentication plugin even if it is disabled by its settings.
177 ## of an authentication plugin even if it is disabled by its settings.
179 ## This could be useful if you are unable to log in to the system due to broken
178 ## This could be useful if you are unable to log in to the system due to broken
180 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
179 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
181 ## module to log in again and fix the settings.
180 ## module to log in again and fix the settings.
182 ##
181 ##
183 ## Available builtin plugin IDs (hash is part of the ID):
182 ## Available builtin plugin IDs (hash is part of the ID):
184 ## egg:rhodecode-enterprise-ce#rhodecode
183 ## egg:rhodecode-enterprise-ce#rhodecode
185 ## egg:rhodecode-enterprise-ce#pam
184 ## egg:rhodecode-enterprise-ce#pam
186 ## egg:rhodecode-enterprise-ce#ldap
185 ## egg:rhodecode-enterprise-ce#ldap
187 ## egg:rhodecode-enterprise-ce#jasig_cas
186 ## egg:rhodecode-enterprise-ce#jasig_cas
188 ## egg:rhodecode-enterprise-ce#headers
187 ## egg:rhodecode-enterprise-ce#headers
189 ## egg:rhodecode-enterprise-ce#crowd
188 ## egg:rhodecode-enterprise-ce#crowd
190 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
189 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
191
190
192 ## alternative HTTP response code returned on failed authentication. The default
191 ## alternative HTTP response code returned on failed authentication. The default
193 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
192 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
194 ## handling that, causing a series of failed authentication calls.
193 ## handling that, causing a series of failed authentication calls.
195 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
194 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
196 ## This will be served instead of the default 401 on bad authentication
195 ## This will be served instead of the default 401 on bad authentication
197 auth_ret_code =
196 auth_ret_code =
198
197
199 ## use special detection method when serving auth_ret_code, instead of serving
198 ## use special detection method when serving auth_ret_code, instead of serving
200 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
199 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
201 ## and then serve auth_ret_code to clients
200 ## and then serve auth_ret_code to clients
202 auth_ret_code_detection = false
201 auth_ret_code_detection = false
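## Example (illustrative only): to work around the HG client issue described
## above, return 403 while keeping the initial 401 credentials prompt:
#auth_ret_code = 403
#auth_ret_code_detection = true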
203
202
204 ## locking return code. When repository is locked return this HTTP code. 2XX
203 ## locking return code. When repository is locked return this HTTP code. 2XX
205 ## codes don't break the transactions while 4XX codes do
204 ## codes don't break the transactions while 4XX codes do
206 lock_ret_code = 423
205 lock_ret_code = 423
207
206
208 ## allows changing the repository location in the settings page
207 ## allows changing the repository location in the settings page
209 allow_repo_location_change = true
208 allow_repo_location_change = true
210
209
211 ## allows setting up custom hooks in the settings page
210 ## allows setting up custom hooks in the settings page
212 allow_custom_hooks_settings = true
211 allow_custom_hooks_settings = true
213
212
214 ## generated license token; go to the license page in RhodeCode settings to obtain
213 ## generated license token; go to the license page in RhodeCode settings to obtain
215 ## a new token
214 ## a new token
216 license_token =
215 license_token =
217
216
218 ## supervisor connection uri, for managing supervisor and logs.
217 ## supervisor connection uri, for managing supervisor and logs.
219 supervisor.uri =
218 supervisor.uri =
220 ## supervisord group name/id we only want this RC instance to handle
219 ## supervisord group name/id we only want this RC instance to handle
221 supervisor.group_id = prod
220 supervisor.group_id = prod
222
221
223 ## Display extended labs settings
222 ## Display extended labs settings
224 labs_settings_active = true
223 labs_settings_active = true
225
224
226 ####################################
225 ####################################
227 ### CELERY CONFIG ####
226 ### CELERY CONFIG ####
228 ####################################
227 ####################################
229 use_celery = false
228 use_celery = false
230 broker.host = localhost
229 broker.host = localhost
231 broker.vhost = rabbitmqhost
230 broker.vhost = rabbitmqhost
232 broker.port = 5672
231 broker.port = 5672
233 broker.user = rabbitmq
232 broker.user = rabbitmq
234 broker.password = qweqwe
233 broker.password = qweqwe
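## Illustrative note (not part of this file): taken together, the broker.*
## values above describe a broker reachable at the standard AMQP url form
## amqp://<user>:<password>@<host>:<port>/<vhost>, e.g. for the defaults shown:
## amqp://rabbitmq:qweqwe@localhost:5672/rabbitmqhost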
235
234
236 celery.imports = rhodecode.lib.celerylib.tasks
235 celery.imports = rhodecode.lib.celerylib.tasks
237
236
238 celery.result.backend = amqp
237 celery.result.backend = amqp
239 celery.result.dburi = amqp://
238 celery.result.dburi = amqp://
240 celery.result.serialier = json
239 celery.result.serialier = json
241
240
242 #celery.send.task.error.emails = true
241 #celery.send.task.error.emails = true
243 #celery.amqp.task.result.expires = 18000
242 #celery.amqp.task.result.expires = 18000
244
243
245 celeryd.concurrency = 2
244 celeryd.concurrency = 2
246 #celeryd.log.file = celeryd.log
245 #celeryd.log.file = celeryd.log
247 celeryd.log.level = debug
246 celeryd.log.level = debug
248 celeryd.max.tasks.per.child = 1
247 celeryd.max.tasks.per.child = 1
249
248
250 ## tasks will never be sent to the queue, but executed locally instead.
249 ## tasks will never be sent to the queue, but executed locally instead.
251 celery.always.eager = false
250 celery.always.eager = false
252
251
253 ####################################
252 ####################################
254 ### BEAKER CACHE ####
253 ### BEAKER CACHE ####
255 ####################################
254 ####################################
256 ## default cache dir for templates. Putting this into a ramdisk
255 ## default cache dir for templates. Putting this into a ramdisk
257 ## can boost performance, eg. %(here)s/data_ramdisk
256 ## can boost performance, eg. %(here)s/data_ramdisk
258 cache_dir = %(here)s/data
257 cache_dir = %(here)s/data
259
258
260 ## locking and default file storage for Beaker. Putting this into a ramdisk
259 ## locking and default file storage for Beaker. Putting this into a ramdisk
261 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
260 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
262 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
261 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
263 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
262 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
264
263
265 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
264 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
266
265
267 beaker.cache.super_short_term.type = memory
266 beaker.cache.super_short_term.type = memory
268 beaker.cache.super_short_term.expire = 10
267 beaker.cache.super_short_term.expire = 10
269 beaker.cache.super_short_term.key_length = 256
268 beaker.cache.super_short_term.key_length = 256
270
269
271 beaker.cache.short_term.type = memory
270 beaker.cache.short_term.type = memory
272 beaker.cache.short_term.expire = 60
271 beaker.cache.short_term.expire = 60
273 beaker.cache.short_term.key_length = 256
272 beaker.cache.short_term.key_length = 256
274
273
275 beaker.cache.long_term.type = memory
274 beaker.cache.long_term.type = memory
276 beaker.cache.long_term.expire = 36000
275 beaker.cache.long_term.expire = 36000
277 beaker.cache.long_term.key_length = 256
276 beaker.cache.long_term.key_length = 256
278
277
279 beaker.cache.sql_cache_short.type = memory
278 beaker.cache.sql_cache_short.type = memory
280 beaker.cache.sql_cache_short.expire = 10
279 beaker.cache.sql_cache_short.expire = 10
281 beaker.cache.sql_cache_short.key_length = 256
280 beaker.cache.sql_cache_short.key_length = 256
282
281
283 # default is memory cache, configure only if required
282 # default is memory cache, configure only if required
284 # using multi-node or multi-worker setup
283 # using multi-node or multi-worker setup
285 #beaker.cache.auth_plugins.type = ext:database
284 #beaker.cache.auth_plugins.type = ext:database
286 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
285 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
287 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
286 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
288 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
287 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
289 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
288 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
290 #beaker.cache.auth_plugins.sa.pool_size = 10
289 #beaker.cache.auth_plugins.sa.pool_size = 10
291 #beaker.cache.auth_plugins.sa.max_overflow = 0
290 #beaker.cache.auth_plugins.sa.max_overflow = 0
292
291
293 beaker.cache.repo_cache_long.type = memorylru_base
292 beaker.cache.repo_cache_long.type = memorylru_base
294 beaker.cache.repo_cache_long.max_items = 4096
293 beaker.cache.repo_cache_long.max_items = 4096
295 beaker.cache.repo_cache_long.expire = 2592000
294 beaker.cache.repo_cache_long.expire = 2592000
296
295
297 # default is memorylru_base cache, configure only if required
296 # default is memorylru_base cache, configure only if required
298 # using multi-node or multi-worker setup
297 # using multi-node or multi-worker setup
299 #beaker.cache.repo_cache_long.type = ext:memcached
298 #beaker.cache.repo_cache_long.type = ext:memcached
300 #beaker.cache.repo_cache_long.url = localhost:11211
299 #beaker.cache.repo_cache_long.url = localhost:11211
301 #beaker.cache.repo_cache_long.expire = 1209600
300 #beaker.cache.repo_cache_long.expire = 1209600
302 #beaker.cache.repo_cache_long.key_length = 256
301 #beaker.cache.repo_cache_long.key_length = 256
303
302
304 ####################################
303 ####################################
305 ### BEAKER SESSION ####
304 ### BEAKER SESSION ####
306 ####################################
305 ####################################
307
306
308 ## .session.type is type of storage options for the session, current allowed
307 ## .session.type is type of storage options for the session, current allowed
309 ## types are file, ext:memcached, ext:database, and memory (default).
308 ## types are file, ext:memcached, ext:database, and memory (default).
310 beaker.session.type = file
309 beaker.session.type = file
311 beaker.session.data_dir = %(here)s/data/sessions/data
310 beaker.session.data_dir = %(here)s/data/sessions/data
312
311
313 ## db based session, fast, and allows easy management over logged in users ##
312 ## db based session, fast, and allows easy management over logged in users ##
314 #beaker.session.type = ext:database
313 #beaker.session.type = ext:database
315 #beaker.session.table_name = db_session
314 #beaker.session.table_name = db_session
316 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
315 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
317 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
316 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
318 #beaker.session.sa.pool_recycle = 3600
317 #beaker.session.sa.pool_recycle = 3600
319 #beaker.session.sa.echo = false
318 #beaker.session.sa.echo = false
320
319
321 beaker.session.key = rhodecode
320 beaker.session.key = rhodecode
322 beaker.session.secret = production-rc-uytcxaz
321 beaker.session.secret = production-rc-uytcxaz
323 beaker.session.lock_dir = %(here)s/data/sessions/lock
322 beaker.session.lock_dir = %(here)s/data/sessions/lock
324
323
325 ## Secure encrypted cookie. Requires AES and AES python libraries
324 ## Secure encrypted cookie. Requires AES and AES python libraries
326 ## you must disable beaker.session.secret to use this
325 ## you must disable beaker.session.secret to use this
327 #beaker.session.encrypt_key = <key_for_encryption>
326 #beaker.session.encrypt_key = <key_for_encryption>
328 #beaker.session.validate_key = <validation_key>
327 #beaker.session.validate_key = <validation_key>
329
328
330 ## sets session as invalid (also logging out the user) if it has not been
329 ## sets session as invalid (also logging out the user) if it has not been
331 ## accessed for the given amount of time in seconds
330 ## accessed for the given amount of time in seconds
332 beaker.session.timeout = 2592000
331 beaker.session.timeout = 2592000
333 beaker.session.httponly = true
332 beaker.session.httponly = true
334 #beaker.session.cookie_path = /<your-prefix>
333 #beaker.session.cookie_path = /<your-prefix>
335
334
336 ## uncomment for https secure cookie
335 ## uncomment for https secure cookie
337 beaker.session.secure = false
336 beaker.session.secure = false
338
337
339 ## auto save the session so that you do not have to call .save()
338 ## auto save the session so that you do not have to call .save()
340 beaker.session.auto = false
339 beaker.session.auto = false
341
340
342 ## default cookie expiration time in seconds, set to `true` to set expire
341 ## default cookie expiration time in seconds, set to `true` to set expire
343 ## at browser close
342 ## at browser close
344 #beaker.session.cookie_expires = 3600
343 #beaker.session.cookie_expires = 3600
345
344
346 ###################################
345 ###################################
347 ## SEARCH INDEXING CONFIGURATION ##
346 ## SEARCH INDEXING CONFIGURATION ##
348 ###################################
347 ###################################
349 ## Full text search indexer is available in rhodecode-tools under
348 ## Full text search indexer is available in rhodecode-tools under
350 ## `rhodecode-tools index` command
349 ## `rhodecode-tools index` command
351
350
352 # WHOOSH Backend, doesn't require additional services to run
351 # WHOOSH Backend, doesn't require additional services to run
353 # it works well with up to a few dozen repos
352 # it works well with up to a few dozen repos
354 search.module = rhodecode.lib.index.whoosh
353 search.module = rhodecode.lib.index.whoosh
355 search.location = %(here)s/data/index
354 search.location = %(here)s/data/index
356
355
357
356
358 ###################################
357 ###################################
359 ## APPENLIGHT CONFIG ##
358 ## APPENLIGHT CONFIG ##
360 ###################################
359 ###################################
361
360
362 ## Appenlight is tailored to work with RhodeCode, see
361 ## Appenlight is tailored to work with RhodeCode, see
363 ## http://appenlight.com for details how to obtain an account
362 ## http://appenlight.com for details how to obtain an account
364
363
365 ## appenlight integration enabled
364 ## appenlight integration enabled
366 appenlight = false
365 appenlight = false
367
366
368 appenlight.server_url = https://api.appenlight.com
367 appenlight.server_url = https://api.appenlight.com
369 appenlight.api_key = YOUR_API_KEY
368 appenlight.api_key = YOUR_API_KEY
370 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
369 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
371
370
372 # used for JS client
371 # used for JS client
373 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
372 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
374
373
375 ## TWEAK AMOUNT OF INFO SENT HERE
374 ## TWEAK AMOUNT OF INFO SENT HERE
376
375
377 ## enables 404 error logging (default False)
376 ## enables 404 error logging (default False)
378 appenlight.report_404 = false
377 appenlight.report_404 = false
379
378
380 ## time in seconds after request is considered being slow (default 1)
379 ## time in seconds after request is considered being slow (default 1)
381 appenlight.slow_request_time = 1
380 appenlight.slow_request_time = 1
382
381
383 ## record slow requests in application
382 ## record slow requests in application
384 ## (needs to be enabled for slow datastore recording and time tracking)
383 ## (needs to be enabled for slow datastore recording and time tracking)
385 appenlight.slow_requests = true
384 appenlight.slow_requests = true
386
385
387 ## enable hooking to application loggers
386 ## enable hooking to application loggers
388 appenlight.logging = true
387 appenlight.logging = true
389
388
390 ## minimum log level for log capture
389 ## minimum log level for log capture
391 appenlight.logging.level = WARNING
390 appenlight.logging.level = WARNING
392
391
393 ## send logs only from erroneous/slow requests
392 ## send logs only from erroneous/slow requests
394 ## (saves API quota for intensive logging)
393 ## (saves API quota for intensive logging)
395 appenlight.logging_on_error = false
394 appenlight.logging_on_error = false
396
395
397 ## list of additional keywords that should be grabbed from the environ object
396 ## list of additional keywords that should be grabbed from the environ object
398 ## can be string with comma separated list of words in lowercase
397 ## can be string with comma separated list of words in lowercase
399 ## (by default client will always send following info:
398 ## (by default client will always send following info:
400 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
399 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
401 ## start with HTTP*); this list can be extended with additional keywords here
400 ## start with HTTP*); this list can be extended with additional keywords here
402 appenlight.environ_keys_whitelist =
401 appenlight.environ_keys_whitelist =
403
402
404 ## list of keywords that should be blanked from request object
403 ## list of keywords that should be blanked from request object
405 ## can be string with comma separated list of words in lowercase
404 ## can be string with comma separated list of words in lowercase
406 ## (by default client will always blank keys that contain following words
405 ## (by default client will always blank keys that contain following words
407 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
406 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
408 ## this list can be extended with additional keywords set here)
407 ## this list can be extended with additional keywords set here)
409 appenlight.request_keys_blacklist =
408 appenlight.request_keys_blacklist =
410
409
411 ## list of namespaces that should be ignored when gathering log entries
410 ## list of namespaces that should be ignored when gathering log entries
412 ## can be string with comma separated list of namespaces
411 ## can be string with comma separated list of namespaces
413 ## (by default the client ignores own entries: appenlight_client.client)
412 ## (by default the client ignores own entries: appenlight_client.client)
414 appenlight.log_namespace_blacklist =
413 appenlight.log_namespace_blacklist =
415
414
416
415
417 ################################################################################
416 ################################################################################
418 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
417 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
419 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
418 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
420 ## execute malicious code after an exception is raised. ##
419 ## execute malicious code after an exception is raised. ##
421 ################################################################################
420 ################################################################################
422 set debug = false
421 set debug = false
423
422
424
423
425 #########################################################
424 #########################################################
426 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
425 ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
427 #########################################################
426 #########################################################
428 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
427 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
429 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
428 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
430 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
429 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
431
430
432 # see sqlalchemy docs for other advanced settings
431 # see sqlalchemy docs for other advanced settings
433
432
434 ## print the sql statements to output
433 ## print the sql statements to output
435 sqlalchemy.db1.echo = false
434 sqlalchemy.db1.echo = false
436 ## recycle the connections after this amount of seconds
435 ## recycle the connections after this amount of seconds
437 sqlalchemy.db1.pool_recycle = 3600
436 sqlalchemy.db1.pool_recycle = 3600
438 sqlalchemy.db1.convert_unicode = true
437 sqlalchemy.db1.convert_unicode = true
439
438
440 ## the number of connections to keep open inside the connection pool.
439 ## the number of connections to keep open inside the connection pool.
441 ## 0 indicates no limit
440 ## 0 indicates no limit
442 #sqlalchemy.db1.pool_size = 5
441 #sqlalchemy.db1.pool_size = 5
443
442
444 ## the number of connections to allow in connection pool "overflow", that is
443 ## the number of connections to allow in connection pool "overflow", that is
445 ## connections that can be opened above and beyond the pool_size setting,
444 ## connections that can be opened above and beyond the pool_size setting,
446 ## which defaults to five.
445 ## which defaults to five.
447 #sqlalchemy.db1.max_overflow = 10
446 #sqlalchemy.db1.max_overflow = 10
448
447
449
448
450 ##################
449 ##################
451 ### VCS CONFIG ###
450 ### VCS CONFIG ###
452 ##################
451 ##################
453 vcs.server.enable = true
452 vcs.server.enable = true
454 vcs.server = localhost:9900
453 vcs.server = localhost:9900
455
454
456 ## Web server connectivity protocol, responsible for web based VCS operations
455 ## Web server connectivity protocol, responsible for web based VCS operations
457 ## Available protocols are:
456 ## Available protocols are:
458 ## `pyro4` - using pyro4 server
457 ## `pyro4` - using pyro4 server
459 ## `http` - using http-rpc backend
458 ## `http` - using http-rpc backend
460 #vcs.server.protocol = http
459 #vcs.server.protocol = http
461
460
462 ## Push/Pull operations protocol, available options are:
461 ## Push/Pull operations protocol, available options are:
463 ## `pyro4` - using pyro4 server
462 ## `pyro4` - using pyro4 server
464 ## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended
463 ## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended
465 ## `vcsserver.scm_app` - internal app (EE only)
464 ## `vcsserver.scm_app` - internal app (EE only)
466 #vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http
465 #vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http
467
466
468 ## Push/Pull operations hooks protocol, available options are:
467 ## Push/Pull operations hooks protocol, available options are:
469 ## `pyro4` - using pyro4 server
468 ## `pyro4` - using pyro4 server
470 ## `http` - using http-rpc backend
469 ## `http` - using http-rpc backend
471 #vcs.hooks.protocol = http
470 #vcs.hooks.protocol = http
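## Example (illustrative only): to run all three channels over the http based
## backends shown above, uncomment the three settings as:
#vcs.server.protocol = http
#vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http
#vcs.hooks.protocol = http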
472
471
473 vcs.server.log_level = info
472 vcs.server.log_level = info
474 ## Start VCSServer with this instance as a subprocess, useful for development
473 ## Start VCSServer with this instance as a subprocess, useful for development
475 vcs.start_server = false
474 vcs.start_server = false
476 vcs.backends = hg, git, svn
475 vcs.backends = hg, git, svn
477 vcs.connection_timeout = 3600
476 vcs.connection_timeout = 3600
478 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
477 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
479 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
478 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
480 #vcs.svn.compatible_version = pre-1.8-compatible
479 #vcs.svn.compatible_version = pre-1.8-compatible
481
480
482 ################################
481 ################################
483 ### LOGGING CONFIGURATION ####
482 ### LOGGING CONFIGURATION ####
484 ################################
483 ################################
485 [loggers]
484 [loggers]
486 keys = root, routes, rhodecode, sqlalchemy, beaker, pyro4, templates, whoosh_indexer
485 keys = root, routes, rhodecode, sqlalchemy, beaker, pyro4, templates, whoosh_indexer
487
486
488 [handlers]
487 [handlers]
489 keys = console, console_sql
488 keys = console, console_sql
490
489
491 [formatters]
490 [formatters]
492 keys = generic, color_formatter, color_formatter_sql
491 keys = generic, color_formatter, color_formatter_sql
493
492
494 #############
493 #############
495 ## LOGGERS ##
494 ## LOGGERS ##
496 #############
495 #############
497 [logger_root]
496 [logger_root]
498 level = NOTSET
497 level = NOTSET
499 handlers = console
498 handlers = console
500
499
501 [logger_routes]
500 [logger_routes]
502 level = DEBUG
501 level = DEBUG
503 handlers =
502 handlers =
504 qualname = routes.middleware
503 qualname = routes.middleware
505 ## "level = DEBUG" logs the route matched and routing variables.
504 ## "level = DEBUG" logs the route matched and routing variables.
506 propagate = 1
505 propagate = 1
507
506
508 [logger_beaker]
507 [logger_beaker]
509 level = DEBUG
508 level = DEBUG
510 handlers =
509 handlers =
511 qualname = beaker.container
510 qualname = beaker.container
512 propagate = 1
511 propagate = 1
513
512
514 [logger_pyro4]
513 [logger_pyro4]
515 level = DEBUG
514 level = DEBUG
516 handlers =
515 handlers =
517 qualname = Pyro4
516 qualname = Pyro4
518 propagate = 1
517 propagate = 1
519
518
520 [logger_templates]
519 [logger_templates]
521 level = INFO
520 level = INFO
522 handlers =
521 handlers =
523 qualname = pylons.templating
522 qualname = pylons.templating
524 propagate = 1
523 propagate = 1
525
524
526 [logger_rhodecode]
525 [logger_rhodecode]
527 level = DEBUG
526 level = DEBUG
528 handlers =
527 handlers =
529 qualname = rhodecode
528 qualname = rhodecode
530 propagate = 1
529 propagate = 1
531
530
532 [logger_sqlalchemy]
531 [logger_sqlalchemy]
533 level = INFO
532 level = INFO
534 handlers = console_sql
533 handlers = console_sql
535 qualname = sqlalchemy.engine
534 qualname = sqlalchemy.engine
536 propagate = 0
535 propagate = 0
537
536
538 [logger_whoosh_indexer]
537 [logger_whoosh_indexer]
539 level = DEBUG
538 level = DEBUG
540 handlers =
539 handlers =
541 qualname = whoosh_indexer
540 qualname = whoosh_indexer
542 propagate = 1
541 propagate = 1
543
542
544 ##############
543 ##############
545 ## HANDLERS ##
544 ## HANDLERS ##
546 ##############
545 ##############
547
546
548 [handler_console]
547 [handler_console]
549 class = StreamHandler
548 class = StreamHandler
550 args = (sys.stderr,)
549 args = (sys.stderr,)
551 level = INFO
550 level = INFO
552 formatter = generic
551 formatter = generic
553
552
554 [handler_console_sql]
553 [handler_console_sql]
555 class = StreamHandler
554 class = StreamHandler
556 args = (sys.stderr,)
555 args = (sys.stderr,)
557 level = WARN
556 level = WARN
558 formatter = generic
557 formatter = generic
559
558
560 ################
559 ################
561 ## FORMATTERS ##
560 ## FORMATTERS ##
562 ################
561 ################
563
562
564 [formatter_generic]
563 [formatter_generic]
565 class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter
564 class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter
566 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
565 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
567 datefmt = %Y-%m-%d %H:%M:%S
566 datefmt = %Y-%m-%d %H:%M:%S
568
567
569 [formatter_color_formatter]
568 [formatter_color_formatter]
570 class = rhodecode.lib.logging_formatter.ColorFormatter
569 class = rhodecode.lib.logging_formatter.ColorFormatter
571 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
570 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
572 datefmt = %Y-%m-%d %H:%M:%S
571 datefmt = %Y-%m-%d %H:%M:%S
573
572
574 [formatter_color_formatter_sql]
573 [formatter_color_formatter_sql]
575 class = rhodecode.lib.logging_formatter.ColorFormatterSql
574 class = rhodecode.lib.logging_formatter.ColorFormatterSql
576 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
575 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
577 datefmt = %Y-%m-%d %H:%M:%S
576 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,315 +1,351 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Pylons middleware initialization
22 Pylons middleware initialization
23 """
23 """
24 import logging
24 import logging
25
25
26 from paste.registry import RegistryManager
26 from paste.registry import RegistryManager
27 from paste.gzipper import make_gzip_middleware
27 from paste.gzipper import make_gzip_middleware
28 from pylons.middleware import ErrorHandler, StatusCodeRedirect
29 from pylons.wsgiapp import PylonsApp
28 from pylons.wsgiapp import PylonsApp
30 from pyramid.authorization import ACLAuthorizationPolicy
29 from pyramid.authorization import ACLAuthorizationPolicy
31 from pyramid.config import Configurator
30 from pyramid.config import Configurator
32 from pyramid.static import static_view
31 from pyramid.static import static_view
33 from pyramid.settings import asbool, aslist
32 from pyramid.settings import asbool, aslist
34 from pyramid.wsgi import wsgiapp
33 from pyramid.wsgi import wsgiapp
34 from pyramid.httpexceptions import HTTPError
35 import pyramid.httpexceptions as httpexceptions
36 from pyramid.renderers import render_to_response
35 from routes.middleware import RoutesMiddleware
37 from routes.middleware import RoutesMiddleware
36 import routes.util
38 import routes.util
37
39
38 import rhodecode
40 import rhodecode
39 from rhodecode.config import patches
41 from rhodecode.config import patches
40 from rhodecode.config.environment import (
42 from rhodecode.config.environment import (
41 load_environment, load_pyramid_environment)
43 load_environment, load_pyramid_environment)
42 from rhodecode.lib.middleware import csrf
44 from rhodecode.lib.middleware import csrf
43 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
44 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
46 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
45 from rhodecode.lib.middleware.https_fixup import HttpsFixup
47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
46 from rhodecode.lib.middleware.vcs import VCSMiddleware
48 from rhodecode.lib.middleware.vcs import VCSMiddleware
47 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
48
50
49
51
50 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
51
53
52
54
53 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
55 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
54 """Create a Pylons WSGI application and return it
56 """Create a Pylons WSGI application and return it
55
57
56 ``global_conf``
58 ``global_conf``
57 The inherited configuration for this application. Normally from
59 The inherited configuration for this application. Normally from
58 the [DEFAULT] section of the Paste ini file.
60 the [DEFAULT] section of the Paste ini file.
59
61
60 ``full_stack``
62 ``full_stack``
61 Whether or not this application provides a full WSGI stack (by
63 Whether or not this application provides a full WSGI stack (by
62 default, meaning it handles its own exceptions and errors).
64 default, meaning it handles its own exceptions and errors).
63 Disable full_stack when this application is "managed" by
65 Disable full_stack when this application is "managed" by
64 another WSGI middleware.
66 another WSGI middleware.
65
67
66 ``app_conf``
68 ``app_conf``
67 The application's local configuration. Normally specified in
69 The application's local configuration. Normally specified in
68 the [app:<name>] section of the Paste ini file (where <name>
70 the [app:<name>] section of the Paste ini file (where <name>
69 defaults to main).
71 defaults to main).
70
72
71 """
73 """
72 # Apply compatibility patches
74 # Apply compatibility patches
73 patches.kombu_1_5_1_python_2_7_11()
75 patches.kombu_1_5_1_python_2_7_11()
74 patches.inspect_getargspec()
76 patches.inspect_getargspec()
75
77
76 # Configure the Pylons environment
78 # Configure the Pylons environment
77 config = load_environment(global_conf, app_conf)
79 config = load_environment(global_conf, app_conf)
78
80
79 # The Pylons WSGI app
81 # The Pylons WSGI app
80 app = PylonsApp(config=config)
82 app = PylonsApp(config=config)
81 if rhodecode.is_test:
83 if rhodecode.is_test:
82 app = csrf.CSRFDetector(app)
84 app = csrf.CSRFDetector(app)
83
85
84 expected_origin = config.get('expected_origin')
86 expected_origin = config.get('expected_origin')
85 if expected_origin:
87 if expected_origin:
86 # The API can be accessed from other Origins.
88 # The API can be accessed from other Origins.
87 app = csrf.OriginChecker(app, expected_origin,
89 app = csrf.OriginChecker(app, expected_origin,
88 skip_urls=[routes.util.url_for('api')])
90 skip_urls=[routes.util.url_for('api')])
89
91
90 # Add RoutesMiddleware. Currently we have two instances in the stack. This
91 # is the lower one to make the StatusCodeRedirect middleware happy.
92 # TODO: johbo: This is not optimal, search for a better solution.
93 app = RoutesMiddleware(app, config['routes.map'])
94
95 # CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)
96 if asbool(config['pdebug']):
97 from rhodecode.lib.profiler import ProfilingMiddleware
98 app = ProfilingMiddleware(app)
99
100 # Protect from VCS Server error related pages when server is not available
101 vcs_server_enabled = asbool(config.get('vcs.server.enable', 'true'))
102 if not vcs_server_enabled:
103 app = DisableVCSPagesWrapper(app)
104
92
105 if asbool(full_stack):
93 if asbool(full_stack):
106
94
107 # Appenlight monitoring and error handler
95 # Appenlight monitoring and error handler
108 app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)
96 app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)
109
97
110 # Handle Python exceptions
111 app = ErrorHandler(app, global_conf, **config['pylons.errorware'])
112
113 # we want our low level middleware to get to the request ASAP. We don't
98 # we want our low level middleware to get to the request ASAP. We don't
114 # need any pylons stack middleware in them
99 # need any pylons stack middleware in them
115 app = VCSMiddleware(app, config, appenlight_client)
100 app = VCSMiddleware(app, config, appenlight_client)
116 # Display error documents for 401, 403, 404 status codes (and
117 # 500 when debug is disabled)
118 if asbool(config['debug']):
119 app = StatusCodeRedirect(app)
120 else:
121 app = StatusCodeRedirect(app, [400, 401, 403, 404, 500])
122
101
123 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
102 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
124 app = HttpsFixup(app, config)
103 app = HttpsFixup(app, config)
125
104
126 # Establish the Registry for this application
105 # Establish the Registry for this application
127 app = RegistryManager(app)
106 app = RegistryManager(app)
128
107
129 app.config = config
108 app.config = config
130
109
131 return app
110 return app
132
111
133
112
134 def make_pyramid_app(global_config, **settings):
113 def make_pyramid_app(global_config, **settings):
135 """
114 """
136 Constructs the WSGI application based on Pyramid and wraps the Pylons based
115 Constructs the WSGI application based on Pyramid and wraps the Pylons based
137 application.
116 application.
138
117
139 Specials:
118 Specials:
140
119
141 * We migrate from Pylons to Pyramid. While doing this, we keep both
120 * We migrate from Pylons to Pyramid. While doing this, we keep both
142 frameworks functional. This involves moving some WSGI middlewares around
121 frameworks functional. This involves moving some WSGI middlewares around
143 and providing access to some data internals, so that the old code is
122 and providing access to some data internals, so that the old code is
144 still functional.
123 still functional.
145
124
146 * The application can also be integrated like a plugin via the call to
125 * The application can also be integrated like a plugin via the call to
147 `includeme`. This is accompanied with the other utility functions which
126 `includeme`. This is accompanied with the other utility functions which
148 are called. Changing this should be done with great care to not break
127 are called. Changing this should be done with great care to not break
149 cases when these fragments are assembled from another place.
128 cases when these fragments are assembled from another place.
150
129
151 """
130 """
152 # The edition string should be available in pylons too, so we add it here
131 # The edition string should be available in pylons too, so we add it here
153 # before copying the settings.
132 # before copying the settings.
154 settings.setdefault('rhodecode.edition', 'Community Edition')
133 settings.setdefault('rhodecode.edition', 'Community Edition')
155
134
156 # As long as our Pylons application does expect "unprepared" settings, make
135 # As long as our Pylons application does expect "unprepared" settings, make
157 # sure that we keep an unmodified copy. This avoids unintentional change of
136 # sure that we keep an unmodified copy. This avoids unintentional change of
158 # behavior in the old application.
137 # behavior in the old application.
159 settings_pylons = settings.copy()
138 settings_pylons = settings.copy()
160
139
161 sanitize_settings_and_apply_defaults(settings)
140 sanitize_settings_and_apply_defaults(settings)
162 config = Configurator(settings=settings)
141 config = Configurator(settings=settings)
163 add_pylons_compat_data(config.registry, global_config, settings_pylons)
142 add_pylons_compat_data(config.registry, global_config, settings_pylons)
164
143
165 load_pyramid_environment(global_config, settings)
144 load_pyramid_environment(global_config, settings)
166
145
167 includeme(config)
146 includeme(config)
168 includeme_last(config)
147 includeme_last(config)
169 pyramid_app = config.make_wsgi_app()
148 pyramid_app = config.make_wsgi_app()
170 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
149 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
171 return pyramid_app
150 return pyramid_app
172
151
173
152
174 def add_pylons_compat_data(registry, global_config, settings):
153 def add_pylons_compat_data(registry, global_config, settings):
175 """
154 """
176 Attach data to the registry to support the Pylons integration.
155 Attach data to the registry to support the Pylons integration.
177 """
156 """
178 registry._pylons_compat_global_config = global_config
157 registry._pylons_compat_global_config = global_config
179 registry._pylons_compat_settings = settings
158 registry._pylons_compat_settings = settings
180
159
181
160
161 def error_handler(exc, request):
162 # TODO: dan: replace the old pylons error controller with this
163 from rhodecode.model.settings import SettingsModel
164 from rhodecode.lib.utils2 import AttributeDict
165
166 try:
167 rc_config = SettingsModel().get_all_settings()
168 except Exception:
169 log.exception('failed to fetch settings')
170 rc_config = {}
171
172 c = AttributeDict()
173 c.error_message = exc.status
174 c.error_explanation = exc.explanation or str(exc)
175 c.visual = AttributeDict()
176
177 c.visual.rhodecode_support_url = (
178 request.registry.settings.get('rhodecode_support_url') or
179 request.route_url('rhodecode_support')
180 )
181 c.redirect_time = 0
182 c.rhodecode_name = rc_config.get('rhodecode_title')
183 if not c.rhodecode_name:
184 c.rhodecode_name = 'Rhodecode'
185
186 response = render_to_response(
187 '/errors/error_document.html', {'c': c}, request=request)
188 return response
189
190
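# Illustrative sketch (not part of this change): assuming a configured request,
# the view above can be exercised directly with any pyramid HTTP exception:
#
#   from pyramid.httpexceptions import HTTPNotFound
#   response = error_handler(HTTPNotFound(), request)
#
# which renders /errors/error_document.html with c.error_message set to the
# exception's status ('404 Not Found') and c.error_explanation taken from its
# explanation text.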
182 def includeme(config):
191 def includeme(config):
183 settings = config.registry.settings
192 settings = config.registry.settings
184
193
185 # Includes which are required. The application would fail without them.
194 # Includes which are required. The application would fail without them.
186 config.include('pyramid_mako')
195 config.include('pyramid_mako')
187 config.include('pyramid_beaker')
196 config.include('pyramid_beaker')
188 config.include('rhodecode.authentication')
197 config.include('rhodecode.authentication')
189 config.include('rhodecode.login')
198 config.include('rhodecode.login')
190 config.include('rhodecode.tweens')
199 config.include('rhodecode.tweens')
191 config.include('rhodecode.api')
200 config.include('rhodecode.api')
201 config.add_route(
202 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
192
203
193 # Set the authorization policy.
204 # Set the authorization policy.
194 authz_policy = ACLAuthorizationPolicy()
205 authz_policy = ACLAuthorizationPolicy()
195 config.set_authorization_policy(authz_policy)
206 config.set_authorization_policy(authz_policy)
196
207
197 # Set the default renderer for HTML templates to mako.
208 # Set the default renderer for HTML templates to mako.
198 config.add_mako_renderer('.html')
209 config.add_mako_renderer('.html')
199
210
200 # plugin information
211 # plugin information
201 config.registry.rhodecode_plugins = {}
212 config.registry.rhodecode_plugins = {}
202
213
203 config.add_directive(
214 config.add_directive(
204 'register_rhodecode_plugin', register_rhodecode_plugin)
215 'register_rhodecode_plugin', register_rhodecode_plugin)
205 # include RhodeCode plugins
216 # include RhodeCode plugins
206 includes = aslist(settings.get('rhodecode.includes', []))
217 includes = aslist(settings.get('rhodecode.includes', []))
207 for inc in includes:
218 for inc in includes:
208 config.include(inc)
219 config.include(inc)
209
220
221 pylons_app = make_app(
222 config.registry._pylons_compat_global_config,
223 **config.registry._pylons_compat_settings)
224 config.registry._pylons_compat_config = pylons_app.config
225
226 pylons_app_as_view = wsgiapp(pylons_app)
227
228 # Protect from VCS Server error related pages when server is not available
229 vcs_server_enabled = asbool(settings.get('vcs.server.enable', 'true'))
230 if not vcs_server_enabled:
231 pylons_app_as_view = DisableVCSPagesWrapper(pylons_app_as_view)
232
233
234 def pylons_app_with_error_handler(context, request):
235 """
236 Handle exceptions from rc pylons app:
237
238 - old webob type exceptions get converted to pyramid exceptions
239 - pyramid exceptions are passed to the error handler view
240 """
241 try:
242 response = pylons_app_as_view(context, request)
243 if 400 <= response.status_int <= 599: # webob type error responses
244 ExcClass = httpexceptions.status_map[response.status_int]
245 return error_handler(ExcClass(response.status), request)
246 except HTTPError as e: # pyramid type exceptions
247 return error_handler(e, request)
248
249 return response
250
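# Illustrative note (not part of this change): pyramid.httpexceptions.status_map
# maps integer status codes to exception classes, e.g.
#   httpexceptions.status_map[404] is httpexceptions.HTTPNotFound
# so a plain webob error response coming back from the Pylons app is converted
# into the matching pyramid exception before being handed to error_handler above.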
210 # This is the glue which allows us to migrate in chunks. By registering the
251 # This is the glue which allows us to migrate in chunks. By registering the
211 # pylons based application as the "Not Found" view in Pyramid, we will
252 # pylons based application as the "Not Found" view in Pyramid, we will
212 # fallback to the old application each time the new one does not yet know
253 # fallback to the old application each time the new one does not yet know
213 # how to handle a request.
254 # how to handle a request.
214 pylons_app = make_app(
255 config.add_notfound_view(pylons_app_with_error_handler)
215 config.registry._pylons_compat_global_config,
216 **config.registry._pylons_compat_settings)
217 config.registry._pylons_compat_config = pylons_app.config
218 pylons_app_as_view = wsgiapp(pylons_app)
219 config.add_notfound_view(pylons_app_as_view)
220
256
257 config.add_view(error_handler, context=HTTPError) # exceptions in rc pyramid
221
258
222 def includeme_last(config):
259 def includeme_last(config):
223 """
260 """
224 The static file catchall needs to be last in the view configuration.
261 The static file catchall needs to be last in the view configuration.
225 """
262 """
226 settings = config.registry.settings
263 settings = config.registry.settings
227
264
228 # Note: johbo: I would prefer to register a prefix for static files at some
265 # Note: johbo: I would prefer to register a prefix for static files at some
229 # point, e.g. move them under '_static/'. This would fully avoid that we
266 # point, e.g. move them under '_static/'. This would fully avoid that we
230 # can have name clashes with a repository name. Imagine someone calling his
267 # can have name clashes with a repository name. Imagine someone calling his
231 # repo "css" ;-) Also having an external web server to serve out the static
268 # repo "css" ;-) Also having an external web server to serve out the static
232 # files seems to be easier to set up if they have a common prefix.
269 # files seems to be easier to set up if they have a common prefix.
233 #
270 #
234 # Example: config.add_static_view('_static', path='rhodecode:public')
271 # Example: config.add_static_view('_static', path='rhodecode:public')
235 #
272 #
236 # It might be an option to register both paths for a while and then migrate
273 # It might be an option to register both paths for a while and then migrate
237 # over to the new location.
274 # over to the new location.
238
275
239 # Serving static files with a catchall.
276 # Serving static files with a catchall.
240 if settings['static_files']:
277 if settings['static_files']:
241 config.add_route('catchall_static', '/*subpath')
278 config.add_route('catchall_static', '/*subpath')
242 config.add_view(
279 config.add_view(
243 static_view('rhodecode:public'), route_name='catchall_static')
280 static_view('rhodecode:public'), route_name='catchall_static')
244
281
245
282
246 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
283 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
247 """
284 """
248 Apply outer WSGI middlewares around the application.
285 Apply outer WSGI middlewares around the application.
249
286
250 Part of this has been moved up from the Pylons layer, so that the
287 Part of this has been moved up from the Pylons layer, so that the
251 data is also available if old Pylons code is hit through an already ported
288 data is also available if old Pylons code is hit through an already ported
252 view.
289 view.
253 """
290 """
254 settings = config.registry.settings
291 settings = config.registry.settings
255
292
256 # Add RoutesMiddleware. Currently we have two instances in the stack. This
293 # Add RoutesMiddleware to support the pylons compatibility tween during
257 # is the upper one to support the pylons compatibility tween during
258 # migration to pyramid.
294 # migration to pyramid.
259 pyramid_app = RoutesMiddleware(
295 pyramid_app = RoutesMiddleware(
260 pyramid_app, config.registry._pylons_compat_config['routes.map'])
296 pyramid_app, config.registry._pylons_compat_config['routes.map'])
261
297
262 # TODO: johbo: Don't really see why we enable the gzip middleware when
298 # TODO: johbo: Don't really see why we enable the gzip middleware when
263 # serving static files, might be something that should have its own setting
299 # serving static files, might be something that should have its own setting
264 # as well?
300 # as well?
265 if settings['static_files']:
301 if settings['static_files']:
266 pyramid_app = make_gzip_middleware(
302 pyramid_app = make_gzip_middleware(
267 pyramid_app, settings, compress_level=1)
303 pyramid_app, settings, compress_level=1)
268
304
269 return pyramid_app
305 return pyramid_app
270
306
271
307
272 def sanitize_settings_and_apply_defaults(settings):
308 def sanitize_settings_and_apply_defaults(settings):
273 """
309 """
274 Applies settings defaults and does all type conversion.
310 Applies settings defaults and does all type conversion.
275
311
276 We would move all settings parsing and preparation into this place, so that
312 We would move all settings parsing and preparation into this place, so that
277 we have only one place left which deals with this part. The remaining parts
313 we have only one place left which deals with this part. The remaining parts
278 of the application would start to rely fully on well prepared settings.
314 of the application would start to rely fully on well prepared settings.
279
315
280 This piece would later be split up per topic to avoid a big fat monster
316 This piece would later be split up per topic to avoid a big fat monster
281 function.
317 function.
282 """
318 """
283
319
284 # Pyramid's mako renderer has to search in the templates folder so that the
320 # Pyramid's mako renderer has to search in the templates folder so that the
285 # old templates still work. Ported and new templates are expected to use
321 # old templates still work. Ported and new templates are expected to use
286 # real asset specifications for the includes.
322 # real asset specifications for the includes.
287 mako_directories = settings.setdefault('mako.directories', [
323 mako_directories = settings.setdefault('mako.directories', [
288 # Base templates of the original Pylons application
324 # Base templates of the original Pylons application
289 'rhodecode:templates',
325 'rhodecode:templates',
290 ])
326 ])
291 log.debug(
327 log.debug(
292 "Using the following Mako template directories: %s",
328 "Using the following Mako template directories: %s",
293 mako_directories)
329 mako_directories)
294
330
295 # Default includes, possible to change as a user
331 # Default includes, possible to change as a user
296 pyramid_includes = settings.setdefault('pyramid.includes', [
332 pyramid_includes = settings.setdefault('pyramid.includes', [
297 'rhodecode.lib.middleware.request_wrapper',
333 'rhodecode.lib.middleware.request_wrapper',
298 ])
334 ])
299 log.debug(
335 log.debug(
300 "Using the following pyramid.includes: %s",
336 "Using the following pyramid.includes: %s",
301 pyramid_includes)
337 pyramid_includes)
302
338
303 # TODO: johbo: Re-think this, usually the call to config.include
339 # TODO: johbo: Re-think this, usually the call to config.include
304 # should allow to pass in a prefix.
340 # should allow to pass in a prefix.
305 settings.setdefault('rhodecode.api.url', '/_admin/api')
341 settings.setdefault('rhodecode.api.url', '/_admin/api')
306
342
307 _bool_setting(settings, 'vcs.server.enable', 'true')
343 _bool_setting(settings, 'vcs.server.enable', 'true')
308 _bool_setting(settings, 'static_files', 'true')
344 _bool_setting(settings, 'static_files', 'true')
309 _bool_setting(settings, 'is_test', 'false')
345 _bool_setting(settings, 'is_test', 'false')
310
346
311 return settings
347 return settings
312
348
313
349
314 def _bool_setting(settings, name, default):
350 def _bool_setting(settings, name, default):
315 settings[name] = asbool(settings.get(name, default))
351 settings[name] = asbool(settings.get(name, default))
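# Illustrative note (not part of this change): pyramid.settings.asbool treats
# 'true', 'yes', 'on', 'y', 't' and '1' (in any case) as True, so for example
#   _bool_setting(settings, 'vcs.server.enable', 'true')
# yields settings['vcs.server.enable'] == True when the key is missing from the
# ini file, and False for a value such as 'false' or '0'.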
@@ -1,1149 +1,1145 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Routes configuration
22 Routes configuration
23
23
24 The more specific and detailed routes should be defined first so they
24 The more specific and detailed routes should be defined first so they
25 may take precedence over the more generic routes. For more information
25 may take precedence over the more generic routes. For more information
26 refer to the routes manual at http://routes.groovie.org/docs/
26 refer to the routes manual at http://routes.groovie.org/docs/
27
27
28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
29 and the _route_name variable, which uses some of the route names stored here to do redirects.
29 and the _route_name variable, which uses some of the route names stored here to do redirects.
30 """
30 """
31 import os
31 import os
32 import re
32 import re
33 from routes import Mapper
33 from routes import Mapper
34
34
35 from rhodecode.config import routing_links
35 from rhodecode.config import routing_links
36
36
37 # the prefix for non-repository-related links needs to start with `/`
37 # the prefix for non-repository-related links needs to start with `/`
38 ADMIN_PREFIX = '/_admin'
38 ADMIN_PREFIX = '/_admin'
39
39
40 # Default requirements for URL parts
40 # Default requirements for URL parts
41 URL_NAME_REQUIREMENTS = {
41 URL_NAME_REQUIREMENTS = {
42 # group names can have slashes in them, but they must not end with a slash
42 # group names can have slashes in them, but they must not end with a slash
43 'group_name': r'.*?[^/]',
43 'group_name': r'.*?[^/]',
44 # repo names can have slashes in them, but they must not end with a slash
44 # repo names can have slashes in them, but they must not end with a slash
45 'repo_name': r'.*?[^/]',
45 'repo_name': r'.*?[^/]',
46 # file path eats up everything at the end
46 # file path eats up everything at the end
47 'f_path': r'.*',
47 'f_path': r'.*',
48 # reference types
48 # reference types
49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
51 }
51 }
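
As a rough illustration of how these requirements are used (the route below is invented for the example): passing requirements=URL_NAME_REQUIREMENTS to connect() relaxes the default pattern for repo_name, so nested repository paths containing slashes still match a single route.

    from routes import Mapper

    m = Mapper()
    m.connect('summary_home', '/{repo_name}/summary', controller='summary',
              requirements=URL_NAME_REQUIREMENTS)
    # repo_name captures 'group/subgroup/repo' thanks to the relaxed pattern
    print(m.match('/group/subgroup/repo/summary'))
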
52
52
53
53
54 class JSRoutesMapper(Mapper):
54 class JSRoutesMapper(Mapper):
55 """
55 """
56 Wrapper around routes.Mapper that makes URL definitions compatible with pyroutes
56 Wrapper around routes.Mapper that makes URL definitions compatible with pyroutes
57 """
57 """
58 _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
58 _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
59 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
59 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
60 def __init__(self, *args, **kw):
60 def __init__(self, *args, **kw):
61 super(JSRoutesMapper, self).__init__(*args, **kw)
61 super(JSRoutesMapper, self).__init__(*args, **kw)
62 self._jsroutes = []
62 self._jsroutes = []
63
63
64 def connect(self, *args, **kw):
64 def connect(self, *args, **kw):
65 """
65 """
66 Wrapper around connect that accepts an extra argument, jsroute=True
66 Wrapper around connect that accepts an extra argument, jsroute=True
67
67
68 :param jsroute: boolean; if True, the route is added to the pyroutes list
68 :param jsroute: boolean; if True, the route is added to the pyroutes list
69 """
69 """
70 if kw.pop('jsroute', False):
70 if kw.pop('jsroute', False):
71 if not self._named_route_regex.match(args[0]):
71 if not self._named_route_regex.match(args[0]):
72 raise Exception('only named routes can be added to pyroutes')
72 raise Exception('only named routes can be added to pyroutes')
73 self._jsroutes.append(args[0])
73 self._jsroutes.append(args[0])
74
74
75 super(JSRoutesMapper, self).connect(*args, **kw)
75 super(JSRoutesMapper, self).connect(*args, **kw)
76
76
77 def _extract_route_information(self, route):
77 def _extract_route_information(self, route):
78 """
78 """
79 Convert a route into tuple(name, path, args), eg:
79 Convert a route into tuple(name, path, args), eg:
80 ('user_profile', '/profile/%(username)s', ['username'])
80 ('user_profile', '/profile/%(username)s', ['username'])
81 """
81 """
82 routepath = route.routepath
82 routepath = route.routepath
83 def replace(matchobj):
83 def replace(matchobj):
84 if matchobj.group(1):
84 if matchobj.group(1):
85 return "%%(%s)s" % matchobj.group(1).split(':')[0]
85 return "%%(%s)s" % matchobj.group(1).split(':')[0]
86 else:
86 else:
87 return "%%(%s)s" % matchobj.group(2)
87 return "%%(%s)s" % matchobj.group(2)
88
88
89 routepath = self._argument_prog.sub(replace, routepath)
89 routepath = self._argument_prog.sub(replace, routepath)
90 return (
90 return (
91 route.name,
91 route.name,
92 routepath,
92 routepath,
93 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
93 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
94 for arg in self._argument_prog.findall(route.routepath)]
94 for arg in self._argument_prog.findall(route.routepath)]
95 )
95 )
96
96
97 def jsroutes(self):
97 def jsroutes(self):
98 """
98 """
99 Return a list of pyroutes.js compatible routes
99 Return a list of pyroutes.js compatible routes
100 """
100 """
101 for route_name in self._jsroutes:
101 for route_name in self._jsroutes:
102 yield self._extract_route_information(self._routenames[route_name])
102 yield self._extract_route_information(self._routenames[route_name])
103
103
104
104
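
A small usage sketch of the class above (the registered route mirrors one defined later in make_map): only routes connected with jsroute=True are exposed to the client, and jsroutes() yields them in the (name, path, args) form documented in _extract_route_information.

    rmap = JSRoutesMapper()
    rmap.connect('user_profile', '/_profiles/{username}',
                 controller='users', action='user_profile', jsroute=True)
    rmap.connect('home', '/', controller='home', action='index')  # not exported

    # prints ('user_profile', '/_profiles/%(username)s', ['username'])
    for js_route in rmap.jsroutes():
        print(js_route)
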
105 def make_map(config):
105 def make_map(config):
106 """Create, configure and return the routes Mapper"""
106 """Create, configure and return the routes Mapper"""
107 rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'],
107 rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'],
108 always_scan=config['debug'])
108 always_scan=config['debug'])
109 rmap.minimization = False
109 rmap.minimization = False
110 rmap.explicit = False
110 rmap.explicit = False
111
111
112 from rhodecode.lib.utils2 import str2bool
112 from rhodecode.lib.utils2 import str2bool
113 from rhodecode.model import repo, repo_group
113 from rhodecode.model import repo, repo_group
114
114
115 def check_repo(environ, match_dict):
115 def check_repo(environ, match_dict):
116 """
116 """
117 check for a valid repository, for proper 404 handling
117 check for a valid repository, for proper 404 handling
118
118
119 :param environ:
119 :param environ:
120 :param match_dict:
120 :param match_dict:
121 """
121 """
122 repo_name = match_dict.get('repo_name')
122 repo_name = match_dict.get('repo_name')
123
123
124 if match_dict.get('f_path'):
124 if match_dict.get('f_path'):
125 # fix for multiple initial slashes that cause errors
125 # fix for multiple initial slashes that cause errors
126 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
126 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
127 repo_model = repo.RepoModel()
127 repo_model = repo.RepoModel()
128 by_name_match = repo_model.get_by_repo_name(repo_name)
128 by_name_match = repo_model.get_by_repo_name(repo_name)
129 # if we get a quick match from the database, short-circuit the operation
129 # if we get a quick match from the database, short-circuit the operation
130 # and validate the repo based on its type.
130 # and validate the repo based on its type.
131 if by_name_match:
131 if by_name_match:
132 return True
132 return True
133
133
134 by_id_match = repo_model.get_repo_by_id(repo_name)
134 by_id_match = repo_model.get_repo_by_id(repo_name)
135 if by_id_match:
135 if by_id_match:
136 repo_name = by_id_match.repo_name
136 repo_name = by_id_match.repo_name
137 match_dict['repo_name'] = repo_name
137 match_dict['repo_name'] = repo_name
138 return True
138 return True
139
139
140 return False
140 return False
141
141
142 def check_group(environ, match_dict):
142 def check_group(environ, match_dict):
143 """
143 """
144 check for a valid repository group path, for proper 404 handling
144 check for a valid repository group path, for proper 404 handling
145
145
146 :param environ:
146 :param environ:
147 :param match_dict:
147 :param match_dict:
148 """
148 """
149 repo_group_name = match_dict.get('group_name')
149 repo_group_name = match_dict.get('group_name')
150 repo_group_model = repo_group.RepoGroupModel()
150 repo_group_model = repo_group.RepoGroupModel()
151 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
151 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
152 if by_name_match:
152 if by_name_match:
153 return True
153 return True
154
154
155 return False
155 return False
156
156
157 def check_user_group(environ, match_dict):
157 def check_user_group(environ, match_dict):
158 """
158 """
159 check for a valid user group, for proper 404 handling
159 check for a valid user group, for proper 404 handling
160
160
161 :param environ:
161 :param environ:
162 :param match_dict:
162 :param match_dict:
163 """
163 """
164 return True
164 return True
165
165
166 def check_int(environ, match_dict):
166 def check_int(environ, match_dict):
167 return match_dict.get('id').isdigit()
167 return match_dict.get('id').isdigit()
168
168
169 # The ErrorController route (handles 404/500 error pages); it should
170 # likely stay at the top, ensuring it can always be resolved
171 rmap.connect('/error/{action}', controller='error')
172 rmap.connect('/error/{action}/{id}', controller='error')
173
169
174 #==========================================================================
170 #==========================================================================
175 # CUSTOM ROUTES HERE
171 # CUSTOM ROUTES HERE
176 #==========================================================================
172 #==========================================================================
177
173
178 # MAIN PAGE
174 # MAIN PAGE
179 rmap.connect('home', '/', controller='home', action='index', jsroute=True)
175 rmap.connect('home', '/', controller='home', action='index', jsroute=True)
180 rmap.connect('goto_switcher_data', '/_goto_data', controller='home',
176 rmap.connect('goto_switcher_data', '/_goto_data', controller='home',
181 action='goto_switcher_data')
177 action='goto_switcher_data')
182 rmap.connect('repo_list_data', '/_repos', controller='home',
178 rmap.connect('repo_list_data', '/_repos', controller='home',
183 action='repo_list_data')
179 action='repo_list_data')
184
180
185 rmap.connect('user_autocomplete_data', '/_users', controller='home',
181 rmap.connect('user_autocomplete_data', '/_users', controller='home',
186 action='user_autocomplete_data', jsroute=True)
182 action='user_autocomplete_data', jsroute=True)
187 rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home',
183 rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home',
188 action='user_group_autocomplete_data')
184 action='user_group_autocomplete_data')
189
185
190 rmap.connect(
186 rmap.connect(
191 'user_profile', '/_profiles/{username}', controller='users',
187 'user_profile', '/_profiles/{username}', controller='users',
192 action='user_profile')
188 action='user_profile')
193
189
194 # TODO: johbo: Static links, to be replaced by our redirection mechanism
190 # TODO: johbo: Static links, to be replaced by our redirection mechanism
195 rmap.connect('rst_help',
191 rmap.connect('rst_help',
196 'http://docutils.sourceforge.net/docs/user/rst/quickref.html',
192 'http://docutils.sourceforge.net/docs/user/rst/quickref.html',
197 _static=True)
193 _static=True)
198 rmap.connect('markdown_help',
194 rmap.connect('markdown_help',
199 'http://daringfireball.net/projects/markdown/syntax',
195 'http://daringfireball.net/projects/markdown/syntax',
200 _static=True)
196 _static=True)
201 rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True)
197 rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True)
202 rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True)
198 rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True)
203 rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True)
199 rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True)
204 # TODO: anderson - making this a static link since redirect won't play
200 # TODO: anderson - making this a static link since redirect won't play
205 # nice with POST requests
201 # nice with POST requests
206 rmap.connect('enterprise_license_convert_from_old',
202 rmap.connect('enterprise_license_convert_from_old',
207 'https://rhodecode.com/u/license-upgrade',
203 'https://rhodecode.com/u/license-upgrade',
208 _static=True)
204 _static=True)
209
205
210 routing_links.connect_redirection_links(rmap)
206 routing_links.connect_redirection_links(rmap)
211
207
212 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
208 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
213 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
209 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
214
210
215 # ADMIN REPOSITORY ROUTES
211 # ADMIN REPOSITORY ROUTES
216 with rmap.submapper(path_prefix=ADMIN_PREFIX,
212 with rmap.submapper(path_prefix=ADMIN_PREFIX,
217 controller='admin/repos') as m:
213 controller='admin/repos') as m:
218 m.connect('repos', '/repos',
214 m.connect('repos', '/repos',
219 action='create', conditions={'method': ['POST']})
215 action='create', conditions={'method': ['POST']})
220 m.connect('repos', '/repos',
216 m.connect('repos', '/repos',
221 action='index', conditions={'method': ['GET']})
217 action='index', conditions={'method': ['GET']})
222 m.connect('new_repo', '/create_repository', jsroute=True,
218 m.connect('new_repo', '/create_repository', jsroute=True,
223 action='create_repository', conditions={'method': ['GET']})
219 action='create_repository', conditions={'method': ['GET']})
224 m.connect('/repos/{repo_name}',
220 m.connect('/repos/{repo_name}',
225 action='update', conditions={'method': ['PUT'],
221 action='update', conditions={'method': ['PUT'],
226 'function': check_repo},
222 'function': check_repo},
227 requirements=URL_NAME_REQUIREMENTS)
223 requirements=URL_NAME_REQUIREMENTS)
228 m.connect('delete_repo', '/repos/{repo_name}',
224 m.connect('delete_repo', '/repos/{repo_name}',
229 action='delete', conditions={'method': ['DELETE']},
225 action='delete', conditions={'method': ['DELETE']},
230 requirements=URL_NAME_REQUIREMENTS)
226 requirements=URL_NAME_REQUIREMENTS)
231 m.connect('repo', '/repos/{repo_name}',
227 m.connect('repo', '/repos/{repo_name}',
232 action='show', conditions={'method': ['GET'],
228 action='show', conditions={'method': ['GET'],
233 'function': check_repo},
229 'function': check_repo},
234 requirements=URL_NAME_REQUIREMENTS)
230 requirements=URL_NAME_REQUIREMENTS)
235
231
236 # ADMIN REPOSITORY GROUPS ROUTES
232 # ADMIN REPOSITORY GROUPS ROUTES
237 with rmap.submapper(path_prefix=ADMIN_PREFIX,
233 with rmap.submapper(path_prefix=ADMIN_PREFIX,
238 controller='admin/repo_groups') as m:
234 controller='admin/repo_groups') as m:
239 m.connect('repo_groups', '/repo_groups',
235 m.connect('repo_groups', '/repo_groups',
240 action='create', conditions={'method': ['POST']})
236 action='create', conditions={'method': ['POST']})
241 m.connect('repo_groups', '/repo_groups',
237 m.connect('repo_groups', '/repo_groups',
242 action='index', conditions={'method': ['GET']})
238 action='index', conditions={'method': ['GET']})
243 m.connect('new_repo_group', '/repo_groups/new',
239 m.connect('new_repo_group', '/repo_groups/new',
244 action='new', conditions={'method': ['GET']})
240 action='new', conditions={'method': ['GET']})
245 m.connect('update_repo_group', '/repo_groups/{group_name}',
241 m.connect('update_repo_group', '/repo_groups/{group_name}',
246 action='update', conditions={'method': ['PUT'],
242 action='update', conditions={'method': ['PUT'],
247 'function': check_group},
243 'function': check_group},
248 requirements=URL_NAME_REQUIREMENTS)
244 requirements=URL_NAME_REQUIREMENTS)
249
245
250 # EXTRAS REPO GROUP ROUTES
246 # EXTRAS REPO GROUP ROUTES
251 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
247 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
252 action='edit',
248 action='edit',
253 conditions={'method': ['GET'], 'function': check_group},
249 conditions={'method': ['GET'], 'function': check_group},
254 requirements=URL_NAME_REQUIREMENTS)
250 requirements=URL_NAME_REQUIREMENTS)
255 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
251 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
256 action='edit',
252 action='edit',
257 conditions={'method': ['PUT'], 'function': check_group},
253 conditions={'method': ['PUT'], 'function': check_group},
258 requirements=URL_NAME_REQUIREMENTS)
254 requirements=URL_NAME_REQUIREMENTS)
259
255
260 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
256 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
261 action='edit_repo_group_advanced',
257 action='edit_repo_group_advanced',
262 conditions={'method': ['GET'], 'function': check_group},
258 conditions={'method': ['GET'], 'function': check_group},
263 requirements=URL_NAME_REQUIREMENTS)
259 requirements=URL_NAME_REQUIREMENTS)
264 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
260 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
265 action='edit_repo_group_advanced',
261 action='edit_repo_group_advanced',
266 conditions={'method': ['PUT'], 'function': check_group},
262 conditions={'method': ['PUT'], 'function': check_group},
267 requirements=URL_NAME_REQUIREMENTS)
263 requirements=URL_NAME_REQUIREMENTS)
268
264
269 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
265 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
270 action='edit_repo_group_perms',
266 action='edit_repo_group_perms',
271 conditions={'method': ['GET'], 'function': check_group},
267 conditions={'method': ['GET'], 'function': check_group},
272 requirements=URL_NAME_REQUIREMENTS)
268 requirements=URL_NAME_REQUIREMENTS)
273 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
269 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
274 action='update_perms',
270 action='update_perms',
275 conditions={'method': ['PUT'], 'function': check_group},
271 conditions={'method': ['PUT'], 'function': check_group},
276 requirements=URL_NAME_REQUIREMENTS)
272 requirements=URL_NAME_REQUIREMENTS)
277
273
278 m.connect('delete_repo_group', '/repo_groups/{group_name}',
274 m.connect('delete_repo_group', '/repo_groups/{group_name}',
279 action='delete', conditions={'method': ['DELETE'],
275 action='delete', conditions={'method': ['DELETE'],
280 'function': check_group},
276 'function': check_group},
281 requirements=URL_NAME_REQUIREMENTS)
277 requirements=URL_NAME_REQUIREMENTS)
282
278
283 # ADMIN USER ROUTES
279 # ADMIN USER ROUTES
284 with rmap.submapper(path_prefix=ADMIN_PREFIX,
280 with rmap.submapper(path_prefix=ADMIN_PREFIX,
285 controller='admin/users') as m:
281 controller='admin/users') as m:
286 m.connect('users', '/users',
282 m.connect('users', '/users',
287 action='create', conditions={'method': ['POST']})
283 action='create', conditions={'method': ['POST']})
288 m.connect('users', '/users',
284 m.connect('users', '/users',
289 action='index', conditions={'method': ['GET']})
285 action='index', conditions={'method': ['GET']})
290 m.connect('new_user', '/users/new',
286 m.connect('new_user', '/users/new',
291 action='new', conditions={'method': ['GET']})
287 action='new', conditions={'method': ['GET']})
292 m.connect('update_user', '/users/{user_id}',
288 m.connect('update_user', '/users/{user_id}',
293 action='update', conditions={'method': ['PUT']})
289 action='update', conditions={'method': ['PUT']})
294 m.connect('delete_user', '/users/{user_id}',
290 m.connect('delete_user', '/users/{user_id}',
295 action='delete', conditions={'method': ['DELETE']})
291 action='delete', conditions={'method': ['DELETE']})
296 m.connect('edit_user', '/users/{user_id}/edit',
292 m.connect('edit_user', '/users/{user_id}/edit',
297 action='edit', conditions={'method': ['GET']})
293 action='edit', conditions={'method': ['GET']})
298 m.connect('user', '/users/{user_id}',
294 m.connect('user', '/users/{user_id}',
299 action='show', conditions={'method': ['GET']})
295 action='show', conditions={'method': ['GET']})
300 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
296 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
301 action='reset_password', conditions={'method': ['POST']})
297 action='reset_password', conditions={'method': ['POST']})
302 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
298 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
303 action='create_personal_repo_group', conditions={'method': ['POST']})
299 action='create_personal_repo_group', conditions={'method': ['POST']})
304
300
305 # EXTRAS USER ROUTES
301 # EXTRAS USER ROUTES
306 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
302 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
307 action='edit_advanced', conditions={'method': ['GET']})
303 action='edit_advanced', conditions={'method': ['GET']})
308 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
304 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
309 action='update_advanced', conditions={'method': ['PUT']})
305 action='update_advanced', conditions={'method': ['PUT']})
310
306
311 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
307 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
312 action='edit_auth_tokens', conditions={'method': ['GET']})
308 action='edit_auth_tokens', conditions={'method': ['GET']})
313 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
309 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
314 action='add_auth_token', conditions={'method': ['PUT']})
310 action='add_auth_token', conditions={'method': ['PUT']})
315 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
311 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
316 action='delete_auth_token', conditions={'method': ['DELETE']})
312 action='delete_auth_token', conditions={'method': ['DELETE']})
317
313
318 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
314 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
319 action='edit_global_perms', conditions={'method': ['GET']})
315 action='edit_global_perms', conditions={'method': ['GET']})
320 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
316 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
321 action='update_global_perms', conditions={'method': ['PUT']})
317 action='update_global_perms', conditions={'method': ['PUT']})
322
318
323 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
319 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
324 action='edit_perms_summary', conditions={'method': ['GET']})
320 action='edit_perms_summary', conditions={'method': ['GET']})
325
321
326 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
322 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
327 action='edit_emails', conditions={'method': ['GET']})
323 action='edit_emails', conditions={'method': ['GET']})
328 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
324 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
329 action='add_email', conditions={'method': ['PUT']})
325 action='add_email', conditions={'method': ['PUT']})
330 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
326 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
331 action='delete_email', conditions={'method': ['DELETE']})
327 action='delete_email', conditions={'method': ['DELETE']})
332
328
333 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
329 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
334 action='edit_ips', conditions={'method': ['GET']})
330 action='edit_ips', conditions={'method': ['GET']})
335 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
331 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
336 action='add_ip', conditions={'method': ['PUT']})
332 action='add_ip', conditions={'method': ['PUT']})
337 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
333 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
338 action='delete_ip', conditions={'method': ['DELETE']})
334 action='delete_ip', conditions={'method': ['DELETE']})
339
335
340 # ADMIN USER GROUPS REST ROUTES
336 # ADMIN USER GROUPS REST ROUTES
341 with rmap.submapper(path_prefix=ADMIN_PREFIX,
337 with rmap.submapper(path_prefix=ADMIN_PREFIX,
342 controller='admin/user_groups') as m:
338 controller='admin/user_groups') as m:
343 m.connect('users_groups', '/user_groups',
339 m.connect('users_groups', '/user_groups',
344 action='create', conditions={'method': ['POST']})
340 action='create', conditions={'method': ['POST']})
345 m.connect('users_groups', '/user_groups',
341 m.connect('users_groups', '/user_groups',
346 action='index', conditions={'method': ['GET']})
342 action='index', conditions={'method': ['GET']})
347 m.connect('new_users_group', '/user_groups/new',
343 m.connect('new_users_group', '/user_groups/new',
348 action='new', conditions={'method': ['GET']})
344 action='new', conditions={'method': ['GET']})
349 m.connect('update_users_group', '/user_groups/{user_group_id}',
345 m.connect('update_users_group', '/user_groups/{user_group_id}',
350 action='update', conditions={'method': ['PUT']})
346 action='update', conditions={'method': ['PUT']})
351 m.connect('delete_users_group', '/user_groups/{user_group_id}',
347 m.connect('delete_users_group', '/user_groups/{user_group_id}',
352 action='delete', conditions={'method': ['DELETE']})
348 action='delete', conditions={'method': ['DELETE']})
353 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
349 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
354 action='edit', conditions={'method': ['GET']},
350 action='edit', conditions={'method': ['GET']},
355 function=check_user_group)
351 function=check_user_group)
356
352
357 # EXTRAS USER GROUP ROUTES
353 # EXTRAS USER GROUP ROUTES
358 m.connect('edit_user_group_global_perms',
354 m.connect('edit_user_group_global_perms',
359 '/user_groups/{user_group_id}/edit/global_permissions',
355 '/user_groups/{user_group_id}/edit/global_permissions',
360 action='edit_global_perms', conditions={'method': ['GET']})
356 action='edit_global_perms', conditions={'method': ['GET']})
361 m.connect('edit_user_group_global_perms',
357 m.connect('edit_user_group_global_perms',
362 '/user_groups/{user_group_id}/edit/global_permissions',
358 '/user_groups/{user_group_id}/edit/global_permissions',
363 action='update_global_perms', conditions={'method': ['PUT']})
359 action='update_global_perms', conditions={'method': ['PUT']})
364 m.connect('edit_user_group_perms_summary',
360 m.connect('edit_user_group_perms_summary',
365 '/user_groups/{user_group_id}/edit/permissions_summary',
361 '/user_groups/{user_group_id}/edit/permissions_summary',
366 action='edit_perms_summary', conditions={'method': ['GET']})
362 action='edit_perms_summary', conditions={'method': ['GET']})
367
363
368 m.connect('edit_user_group_perms',
364 m.connect('edit_user_group_perms',
369 '/user_groups/{user_group_id}/edit/permissions',
365 '/user_groups/{user_group_id}/edit/permissions',
370 action='edit_perms', conditions={'method': ['GET']})
366 action='edit_perms', conditions={'method': ['GET']})
371 m.connect('edit_user_group_perms',
367 m.connect('edit_user_group_perms',
372 '/user_groups/{user_group_id}/edit/permissions',
368 '/user_groups/{user_group_id}/edit/permissions',
373 action='update_perms', conditions={'method': ['PUT']})
369 action='update_perms', conditions={'method': ['PUT']})
374
370
375 m.connect('edit_user_group_advanced',
371 m.connect('edit_user_group_advanced',
376 '/user_groups/{user_group_id}/edit/advanced',
372 '/user_groups/{user_group_id}/edit/advanced',
377 action='edit_advanced', conditions={'method': ['GET']})
373 action='edit_advanced', conditions={'method': ['GET']})
378
374
379 m.connect('edit_user_group_members',
375 m.connect('edit_user_group_members',
380 '/user_groups/{user_group_id}/edit/members', jsroute=True,
376 '/user_groups/{user_group_id}/edit/members', jsroute=True,
381 action='edit_members', conditions={'method': ['GET']})
377 action='edit_members', conditions={'method': ['GET']})
382
378
383 # ADMIN PERMISSIONS ROUTES
379 # ADMIN PERMISSIONS ROUTES
384 with rmap.submapper(path_prefix=ADMIN_PREFIX,
380 with rmap.submapper(path_prefix=ADMIN_PREFIX,
385 controller='admin/permissions') as m:
381 controller='admin/permissions') as m:
386 m.connect('admin_permissions_application', '/permissions/application',
382 m.connect('admin_permissions_application', '/permissions/application',
387 action='permission_application_update', conditions={'method': ['POST']})
383 action='permission_application_update', conditions={'method': ['POST']})
388 m.connect('admin_permissions_application', '/permissions/application',
384 m.connect('admin_permissions_application', '/permissions/application',
389 action='permission_application', conditions={'method': ['GET']})
385 action='permission_application', conditions={'method': ['GET']})
390
386
391 m.connect('admin_permissions_global', '/permissions/global',
387 m.connect('admin_permissions_global', '/permissions/global',
392 action='permission_global_update', conditions={'method': ['POST']})
388 action='permission_global_update', conditions={'method': ['POST']})
393 m.connect('admin_permissions_global', '/permissions/global',
389 m.connect('admin_permissions_global', '/permissions/global',
394 action='permission_global', conditions={'method': ['GET']})
390 action='permission_global', conditions={'method': ['GET']})
395
391
396 m.connect('admin_permissions_object', '/permissions/object',
392 m.connect('admin_permissions_object', '/permissions/object',
397 action='permission_objects_update', conditions={'method': ['POST']})
393 action='permission_objects_update', conditions={'method': ['POST']})
398 m.connect('admin_permissions_object', '/permissions/object',
394 m.connect('admin_permissions_object', '/permissions/object',
399 action='permission_objects', conditions={'method': ['GET']})
395 action='permission_objects', conditions={'method': ['GET']})
400
396
401 m.connect('admin_permissions_ips', '/permissions/ips',
397 m.connect('admin_permissions_ips', '/permissions/ips',
402 action='permission_ips', conditions={'method': ['POST']})
398 action='permission_ips', conditions={'method': ['POST']})
403 m.connect('admin_permissions_ips', '/permissions/ips',
399 m.connect('admin_permissions_ips', '/permissions/ips',
404 action='permission_ips', conditions={'method': ['GET']})
400 action='permission_ips', conditions={'method': ['GET']})
405
401
406 m.connect('admin_permissions_overview', '/permissions/overview',
402 m.connect('admin_permissions_overview', '/permissions/overview',
407 action='permission_perms', conditions={'method': ['GET']})
403 action='permission_perms', conditions={'method': ['GET']})
408
404
409 # ADMIN DEFAULTS REST ROUTES
405 # ADMIN DEFAULTS REST ROUTES
410 with rmap.submapper(path_prefix=ADMIN_PREFIX,
406 with rmap.submapper(path_prefix=ADMIN_PREFIX,
411 controller='admin/defaults') as m:
407 controller='admin/defaults') as m:
412 m.connect('admin_defaults_repositories', '/defaults/repositories',
408 m.connect('admin_defaults_repositories', '/defaults/repositories',
413 action='update_repository_defaults', conditions={'method': ['POST']})
409 action='update_repository_defaults', conditions={'method': ['POST']})
414 m.connect('admin_defaults_repositories', '/defaults/repositories',
410 m.connect('admin_defaults_repositories', '/defaults/repositories',
415 action='index', conditions={'method': ['GET']})
411 action='index', conditions={'method': ['GET']})
416
412
417 # ADMIN DEBUG STYLE ROUTES
413 # ADMIN DEBUG STYLE ROUTES
418 if str2bool(config.get('debug_style')):
414 if str2bool(config.get('debug_style')):
419 with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
415 with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
420 controller='debug_style') as m:
416 controller='debug_style') as m:
421 m.connect('debug_style_home', '',
417 m.connect('debug_style_home', '',
422 action='index', conditions={'method': ['GET']})
418 action='index', conditions={'method': ['GET']})
423 m.connect('debug_style_template', '/t/{t_path}',
419 m.connect('debug_style_template', '/t/{t_path}',
424 action='template', conditions={'method': ['GET']})
420 action='template', conditions={'method': ['GET']})
425
421
426 # ADMIN SETTINGS ROUTES
422 # ADMIN SETTINGS ROUTES
427 with rmap.submapper(path_prefix=ADMIN_PREFIX,
423 with rmap.submapper(path_prefix=ADMIN_PREFIX,
428 controller='admin/settings') as m:
424 controller='admin/settings') as m:
429
425
430 # default
426 # default
431 m.connect('admin_settings', '/settings',
427 m.connect('admin_settings', '/settings',
432 action='settings_global_update',
428 action='settings_global_update',
433 conditions={'method': ['POST']})
429 conditions={'method': ['POST']})
434 m.connect('admin_settings', '/settings',
430 m.connect('admin_settings', '/settings',
435 action='settings_global', conditions={'method': ['GET']})
431 action='settings_global', conditions={'method': ['GET']})
436
432
437 m.connect('admin_settings_vcs', '/settings/vcs',
433 m.connect('admin_settings_vcs', '/settings/vcs',
438 action='settings_vcs_update',
434 action='settings_vcs_update',
439 conditions={'method': ['POST']})
435 conditions={'method': ['POST']})
440 m.connect('admin_settings_vcs', '/settings/vcs',
436 m.connect('admin_settings_vcs', '/settings/vcs',
441 action='settings_vcs',
437 action='settings_vcs',
442 conditions={'method': ['GET']})
438 conditions={'method': ['GET']})
443 m.connect('admin_settings_vcs', '/settings/vcs',
439 m.connect('admin_settings_vcs', '/settings/vcs',
444 action='delete_svn_pattern',
440 action='delete_svn_pattern',
445 conditions={'method': ['DELETE']})
441 conditions={'method': ['DELETE']})
446
442
447 m.connect('admin_settings_mapping', '/settings/mapping',
443 m.connect('admin_settings_mapping', '/settings/mapping',
448 action='settings_mapping_update',
444 action='settings_mapping_update',
449 conditions={'method': ['POST']})
445 conditions={'method': ['POST']})
450 m.connect('admin_settings_mapping', '/settings/mapping',
446 m.connect('admin_settings_mapping', '/settings/mapping',
451 action='settings_mapping', conditions={'method': ['GET']})
447 action='settings_mapping', conditions={'method': ['GET']})
452
448
453 m.connect('admin_settings_global', '/settings/global',
449 m.connect('admin_settings_global', '/settings/global',
454 action='settings_global_update',
450 action='settings_global_update',
455 conditions={'method': ['POST']})
451 conditions={'method': ['POST']})
456 m.connect('admin_settings_global', '/settings/global',
452 m.connect('admin_settings_global', '/settings/global',
457 action='settings_global', conditions={'method': ['GET']})
453 action='settings_global', conditions={'method': ['GET']})
458
454
459 m.connect('admin_settings_visual', '/settings/visual',
455 m.connect('admin_settings_visual', '/settings/visual',
460 action='settings_visual_update',
456 action='settings_visual_update',
461 conditions={'method': ['POST']})
457 conditions={'method': ['POST']})
462 m.connect('admin_settings_visual', '/settings/visual',
458 m.connect('admin_settings_visual', '/settings/visual',
463 action='settings_visual', conditions={'method': ['GET']})
459 action='settings_visual', conditions={'method': ['GET']})
464
460
465 m.connect('admin_settings_issuetracker',
461 m.connect('admin_settings_issuetracker',
466 '/settings/issue-tracker', action='settings_issuetracker',
462 '/settings/issue-tracker', action='settings_issuetracker',
467 conditions={'method': ['GET']})
463 conditions={'method': ['GET']})
468 m.connect('admin_settings_issuetracker_save',
464 m.connect('admin_settings_issuetracker_save',
469 '/settings/issue-tracker/save',
465 '/settings/issue-tracker/save',
470 action='settings_issuetracker_save',
466 action='settings_issuetracker_save',
471 conditions={'method': ['POST']})
467 conditions={'method': ['POST']})
472 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
468 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
473 action='settings_issuetracker_test',
469 action='settings_issuetracker_test',
474 conditions={'method': ['POST']})
470 conditions={'method': ['POST']})
475 m.connect('admin_issuetracker_delete',
471 m.connect('admin_issuetracker_delete',
476 '/settings/issue-tracker/delete',
472 '/settings/issue-tracker/delete',
477 action='settings_issuetracker_delete',
473 action='settings_issuetracker_delete',
478 conditions={'method': ['DELETE']})
474 conditions={'method': ['DELETE']})
479
475
480 m.connect('admin_settings_email', '/settings/email',
476 m.connect('admin_settings_email', '/settings/email',
481 action='settings_email_update',
477 action='settings_email_update',
482 conditions={'method': ['POST']})
478 conditions={'method': ['POST']})
483 m.connect('admin_settings_email', '/settings/email',
479 m.connect('admin_settings_email', '/settings/email',
484 action='settings_email', conditions={'method': ['GET']})
480 action='settings_email', conditions={'method': ['GET']})
485
481
486 m.connect('admin_settings_hooks', '/settings/hooks',
482 m.connect('admin_settings_hooks', '/settings/hooks',
487 action='settings_hooks_update',
483 action='settings_hooks_update',
488 conditions={'method': ['POST', 'DELETE']})
484 conditions={'method': ['POST', 'DELETE']})
489 m.connect('admin_settings_hooks', '/settings/hooks',
485 m.connect('admin_settings_hooks', '/settings/hooks',
490 action='settings_hooks', conditions={'method': ['GET']})
486 action='settings_hooks', conditions={'method': ['GET']})
491
487
492 m.connect('admin_settings_search', '/settings/search',
488 m.connect('admin_settings_search', '/settings/search',
493 action='settings_search', conditions={'method': ['GET']})
489 action='settings_search', conditions={'method': ['GET']})
494
490
495 m.connect('admin_settings_system', '/settings/system',
491 m.connect('admin_settings_system', '/settings/system',
496 action='settings_system', conditions={'method': ['GET']})
492 action='settings_system', conditions={'method': ['GET']})
497
493
498 m.connect('admin_settings_system_update', '/settings/system/updates',
494 m.connect('admin_settings_system_update', '/settings/system/updates',
499 action='settings_system_update', conditions={'method': ['GET']})
495 action='settings_system_update', conditions={'method': ['GET']})
500
496
501 m.connect('admin_settings_supervisor', '/settings/supervisor',
497 m.connect('admin_settings_supervisor', '/settings/supervisor',
502 action='settings_supervisor', conditions={'method': ['GET']})
498 action='settings_supervisor', conditions={'method': ['GET']})
503 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
499 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
504 action='settings_supervisor_log', conditions={'method': ['GET']})
500 action='settings_supervisor_log', conditions={'method': ['GET']})
505
501
506 m.connect('admin_settings_labs', '/settings/labs',
502 m.connect('admin_settings_labs', '/settings/labs',
507 action='settings_labs_update',
503 action='settings_labs_update',
508 conditions={'method': ['POST']})
504 conditions={'method': ['POST']})
509 m.connect('admin_settings_labs', '/settings/labs',
505 m.connect('admin_settings_labs', '/settings/labs',
510 action='settings_labs', conditions={'method': ['GET']})
506 action='settings_labs', conditions={'method': ['GET']})
511
507
512 m.connect('admin_settings_open_source', '/settings/open_source',
508 m.connect('admin_settings_open_source', '/settings/open_source',
513 action='settings_open_source',
509 action='settings_open_source',
514 conditions={'method': ['GET']})
510 conditions={'method': ['GET']})
515
511
516 # ADMIN MY ACCOUNT
512 # ADMIN MY ACCOUNT
517 with rmap.submapper(path_prefix=ADMIN_PREFIX,
513 with rmap.submapper(path_prefix=ADMIN_PREFIX,
518 controller='admin/my_account') as m:
514 controller='admin/my_account') as m:
519
515
520 m.connect('my_account', '/my_account',
516 m.connect('my_account', '/my_account',
521 action='my_account', conditions={'method': ['GET']})
517 action='my_account', conditions={'method': ['GET']})
522 m.connect('my_account_edit', '/my_account/edit',
518 m.connect('my_account_edit', '/my_account/edit',
523 action='my_account_edit', conditions={'method': ['GET']})
519 action='my_account_edit', conditions={'method': ['GET']})
524 m.connect('my_account', '/my_account',
520 m.connect('my_account', '/my_account',
525 action='my_account_update', conditions={'method': ['POST']})
521 action='my_account_update', conditions={'method': ['POST']})
526
522
527 m.connect('my_account_password', '/my_account/password',
523 m.connect('my_account_password', '/my_account/password',
528 action='my_account_password', conditions={'method': ['GET']})
524 action='my_account_password', conditions={'method': ['GET']})
529 m.connect('my_account_password', '/my_account/password',
525 m.connect('my_account_password', '/my_account/password',
530 action='my_account_password_update', conditions={'method': ['POST']})
526 action='my_account_password_update', conditions={'method': ['POST']})
531
527
532 m.connect('my_account_repos', '/my_account/repos',
528 m.connect('my_account_repos', '/my_account/repos',
533 action='my_account_repos', conditions={'method': ['GET']})
529 action='my_account_repos', conditions={'method': ['GET']})
534
530
535 m.connect('my_account_watched', '/my_account/watched',
531 m.connect('my_account_watched', '/my_account/watched',
536 action='my_account_watched', conditions={'method': ['GET']})
532 action='my_account_watched', conditions={'method': ['GET']})
537
533
538 m.connect('my_account_pullrequests', '/my_account/pull_requests',
534 m.connect('my_account_pullrequests', '/my_account/pull_requests',
539 action='my_account_pullrequests', conditions={'method': ['GET']})
535 action='my_account_pullrequests', conditions={'method': ['GET']})
540
536
541 m.connect('my_account_perms', '/my_account/perms',
537 m.connect('my_account_perms', '/my_account/perms',
542 action='my_account_perms', conditions={'method': ['GET']})
538 action='my_account_perms', conditions={'method': ['GET']})
543
539
544 m.connect('my_account_emails', '/my_account/emails',
540 m.connect('my_account_emails', '/my_account/emails',
545 action='my_account_emails', conditions={'method': ['GET']})
541 action='my_account_emails', conditions={'method': ['GET']})
546 m.connect('my_account_emails', '/my_account/emails',
542 m.connect('my_account_emails', '/my_account/emails',
547 action='my_account_emails_add', conditions={'method': ['POST']})
543 action='my_account_emails_add', conditions={'method': ['POST']})
548 m.connect('my_account_emails', '/my_account/emails',
544 m.connect('my_account_emails', '/my_account/emails',
549 action='my_account_emails_delete', conditions={'method': ['DELETE']})
545 action='my_account_emails_delete', conditions={'method': ['DELETE']})
550
546
551 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
547 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
552 action='my_account_auth_tokens', conditions={'method': ['GET']})
548 action='my_account_auth_tokens', conditions={'method': ['GET']})
553 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
549 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
554 action='my_account_auth_tokens_add', conditions={'method': ['POST']})
550 action='my_account_auth_tokens_add', conditions={'method': ['POST']})
555 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
551 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
556 action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']})
552 action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']})
557
553
558 # NOTIFICATION REST ROUTES
554 # NOTIFICATION REST ROUTES
559 with rmap.submapper(path_prefix=ADMIN_PREFIX,
555 with rmap.submapper(path_prefix=ADMIN_PREFIX,
560 controller='admin/notifications') as m:
556 controller='admin/notifications') as m:
561 m.connect('notifications', '/notifications',
557 m.connect('notifications', '/notifications',
562 action='index', conditions={'method': ['GET']})
558 action='index', conditions={'method': ['GET']})
563 m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
559 m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
564 action='mark_all_read', conditions={'method': ['POST']})
560 action='mark_all_read', conditions={'method': ['POST']})
565
561
566 m.connect('/notifications/{notification_id}',
562 m.connect('/notifications/{notification_id}',
567 action='update', conditions={'method': ['PUT']})
563 action='update', conditions={'method': ['PUT']})
568 m.connect('/notifications/{notification_id}',
564 m.connect('/notifications/{notification_id}',
569 action='delete', conditions={'method': ['DELETE']})
565 action='delete', conditions={'method': ['DELETE']})
570 m.connect('notification', '/notifications/{notification_id}',
566 m.connect('notification', '/notifications/{notification_id}',
571 action='show', conditions={'method': ['GET']})
567 action='show', conditions={'method': ['GET']})
572
568
573 # ADMIN GIST
569 # ADMIN GIST
574 with rmap.submapper(path_prefix=ADMIN_PREFIX,
570 with rmap.submapper(path_prefix=ADMIN_PREFIX,
575 controller='admin/gists') as m:
571 controller='admin/gists') as m:
576 m.connect('gists', '/gists',
572 m.connect('gists', '/gists',
577 action='create', conditions={'method': ['POST']})
573 action='create', conditions={'method': ['POST']})
578 m.connect('gists', '/gists', jsroute=True,
574 m.connect('gists', '/gists', jsroute=True,
579 action='index', conditions={'method': ['GET']})
575 action='index', conditions={'method': ['GET']})
580 m.connect('new_gist', '/gists/new', jsroute=True,
576 m.connect('new_gist', '/gists/new', jsroute=True,
581 action='new', conditions={'method': ['GET']})
577 action='new', conditions={'method': ['GET']})
582
578
583 m.connect('/gists/{gist_id}',
579 m.connect('/gists/{gist_id}',
584 action='delete', conditions={'method': ['DELETE']})
580 action='delete', conditions={'method': ['DELETE']})
585 m.connect('edit_gist', '/gists/{gist_id}/edit',
581 m.connect('edit_gist', '/gists/{gist_id}/edit',
586 action='edit_form', conditions={'method': ['GET']})
582 action='edit_form', conditions={'method': ['GET']})
587 m.connect('edit_gist', '/gists/{gist_id}/edit',
583 m.connect('edit_gist', '/gists/{gist_id}/edit',
588 action='edit', conditions={'method': ['POST']})
584 action='edit', conditions={'method': ['POST']})
589 m.connect(
585 m.connect(
590 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
586 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
591 action='check_revision', conditions={'method': ['GET']})
587 action='check_revision', conditions={'method': ['GET']})
592
588
593 m.connect('gist', '/gists/{gist_id}',
589 m.connect('gist', '/gists/{gist_id}',
594 action='show', conditions={'method': ['GET']})
590 action='show', conditions={'method': ['GET']})
595 m.connect('gist_rev', '/gists/{gist_id}/{revision}',
591 m.connect('gist_rev', '/gists/{gist_id}/{revision}',
596 revision='tip',
592 revision='tip',
597 action='show', conditions={'method': ['GET']})
593 action='show', conditions={'method': ['GET']})
598 m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
594 m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
599 revision='tip',
595 revision='tip',
600 action='show', conditions={'method': ['GET']})
596 action='show', conditions={'method': ['GET']})
601 m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
597 m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
602 revision='tip',
598 revision='tip',
603 action='show', conditions={'method': ['GET']},
599 action='show', conditions={'method': ['GET']},
604 requirements=URL_NAME_REQUIREMENTS)
600 requirements=URL_NAME_REQUIREMENTS)
605
601
606 # ADMIN MAIN PAGES
602 # ADMIN MAIN PAGES
607 with rmap.submapper(path_prefix=ADMIN_PREFIX,
603 with rmap.submapper(path_prefix=ADMIN_PREFIX,
608 controller='admin/admin') as m:
604 controller='admin/admin') as m:
609 m.connect('admin_home', '', action='index')
605 m.connect('admin_home', '', action='index')
610 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
606 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
611 action='add_repo')
607 action='add_repo')
612 m.connect(
608 m.connect(
613 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
609 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
614 action='pull_requests')
610 action='pull_requests')
615 m.connect(
611 m.connect(
616 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}',
612 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}',
617 action='pull_requests')
613 action='pull_requests')
618
614
619
615
620 # USER JOURNAL
616 # USER JOURNAL
621 rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
617 rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
622 controller='journal', action='index')
618 controller='journal', action='index')
623 rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
619 rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
624 controller='journal', action='journal_rss')
620 controller='journal', action='journal_rss')
625 rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
621 rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
626 controller='journal', action='journal_atom')
622 controller='journal', action='journal_atom')
627
623
628 rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
624 rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
629 controller='journal', action='public_journal')
625 controller='journal', action='public_journal')
630
626
631 rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
627 rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
632 controller='journal', action='public_journal_rss')
628 controller='journal', action='public_journal_rss')
633
629
634 rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
630 rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
635 controller='journal', action='public_journal_rss')
631 controller='journal', action='public_journal_rss')
636
632
637 rmap.connect('public_journal_atom',
633 rmap.connect('public_journal_atom',
638 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
634 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
639 action='public_journal_atom')
635 action='public_journal_atom')
640
636
641 rmap.connect('public_journal_atom_old',
637 rmap.connect('public_journal_atom_old',
642 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
638 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
643 action='public_journal_atom')
639 action='public_journal_atom')
644
640
645 rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
641 rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
646 controller='journal', action='toggle_following', jsroute=True,
642 controller='journal', action='toggle_following', jsroute=True,
647 conditions={'method': ['POST']})
643 conditions={'method': ['POST']})
648
644
649 # FULL TEXT SEARCH
645 # FULL TEXT SEARCH
650 rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
646 rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
651 controller='search')
647 controller='search')
652 rmap.connect('search_repo_home', '/{repo_name}/search',
648 rmap.connect('search_repo_home', '/{repo_name}/search',
653 controller='search',
649 controller='search',
                 action='index',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # FEEDS
    rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
                 controller='feed', action='rss',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
                 controller='feed', action='atom',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    #==========================================================================
    # REPOSITORY ROUTES
    #==========================================================================

    rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
                 controller='admin/repos', action='repo_creating',
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
                 controller='admin/repos', action='repo_check',
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
                 controller='summary', action='repo_stats',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
                 controller='summary', action='repo_refs_data', jsroute=True,
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
                 controller='summary', action='repo_refs_changelog_data',
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
                 controller='changeset', revision='tip', jsroute=True,
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
                 controller='changeset', revision='tip', action='changeset_children',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
                 controller='changeset', revision='tip', action='changeset_parents',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # repo edit options
    rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True,
                 controller='admin/repos', action='edit',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions',
                 jsroute=True,
                 controller='admin/repos', action='edit_permissions',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions',
                 controller='admin/repos', action='edit_permissions_update',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
                 controller='admin/repos', action='edit_fields',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
                 controller='admin/repos', action='create_repo_field',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
                 controller='admin/repos', action='delete_repo_field',
                 conditions={'method': ['DELETE'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced',
                 controller='admin/repos', action='edit_advanced',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking',
                 controller='admin/repos', action='edit_advanced_locking',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
                 controller='admin/repos', action='toggle_locking',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal',
                 controller='admin/repos', action='edit_advanced_journal',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork',
                 controller='admin/repos', action='edit_advanced_fork',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
                 controller='admin/repos', action='edit_caches_form',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
                 controller='admin/repos', action='edit_caches',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
                 controller='admin/repos', action='edit_remote_form',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
                 controller='admin/repos', action='edit_remote',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
                 controller='admin/repos', action='edit_statistics_form',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
                 controller='admin/repos', action='edit_statistics',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_settings_issuetracker',
                 '/{repo_name}/settings/issue-tracker',
                 controller='admin/repos', action='repo_issuetracker',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_test',
                 '/{repo_name}/settings/issue-tracker/test',
                 controller='admin/repos', action='repo_issuetracker_test',
                 conditions={'method': ['POST'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_delete',
                 '/{repo_name}/settings/issue-tracker/delete',
                 controller='admin/repos', action='repo_issuetracker_delete',
                 conditions={'method': ['DELETE'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_save',
                 '/{repo_name}/settings/issue-tracker/save',
                 controller='admin/repos', action='repo_issuetracker_save',
                 conditions={'method': ['POST'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
                 controller='admin/repos', action='repo_settings_vcs_update',
                 conditions={'method': ['POST'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
                 controller='admin/repos', action='repo_settings_vcs',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
                 controller='admin/repos', action='repo_delete_svn_pattern',
                 conditions={'method': ['DELETE'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # still working url for backward compat.
    rmap.connect('raw_changeset_home_depraced',
                 '/{repo_name}/raw-changeset/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # new URLs
    rmap.connect('changeset_raw_home',
                 '/{repo_name}/changeset-diff/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_patch_home',
                 '/{repo_name}/changeset-patch/{revision}',
                 controller='changeset', action='changeset_patch',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_download_home',
                 '/{repo_name}/changeset-download/{revision}',
                 controller='changeset', action='changeset_download',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment',
                 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
                 controller='changeset', revision='tip', action='comment',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment_preview',
                 '/{repo_name}/changeset/comment/preview', jsroute=True,
                 controller='changeset', action='preview_comment',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment_delete',
                 '/{repo_name}/changeset/comment/{comment_id}/delete',
                 controller='changeset', action='delete_comment',
                 conditions={'function': check_repo, 'method': ['DELETE']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}',
                 controller='changeset', action='changeset_info',
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('compare_home',
                 '/{repo_name}/compare',
                 controller='compare', action='index',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('compare_url',
                 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
                 controller='compare', action='compare',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_home',
                 '/{repo_name}/pull-request/new', controller='pullrequests',
                 action='index', conditions={'function': check_repo,
                                             'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest',
                 '/{repo_name}/pull-request/new', controller='pullrequests',
                 action='create', conditions={'function': check_repo,
                                              'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_repo_refs',
                 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
                 controller='pullrequests',
                 action='get_repo_refs',
                 conditions={'function': check_repo, 'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_repo_destinations',
                 '/{repo_name}/pull-request/repo-destinations',
                 controller='pullrequests',
                 action='get_repo_destinations',
                 conditions={'function': check_repo, 'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_show',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='show', conditions={'function': check_repo,
                                            'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_update',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='update', conditions={'function': check_repo,
                                              'method': ['PUT']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_merge',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='merge', conditions={'function': check_repo,
                                             'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_delete',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='delete', conditions={'function': check_repo,
                                              'method': ['DELETE']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_show_all',
                 '/{repo_name}/pull-request',
                 controller='pullrequests',
                 action='show_all', conditions={'function': check_repo,
                                                'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_comment',
                 '/{repo_name}/pull-request-comment/{pull_request_id}',
                 controller='pullrequests',
                 action='comment', conditions={'function': check_repo,
                                               'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_comment_delete',
                 '/{repo_name}/pull-request-comment/{comment_id}/delete',
                 controller='pullrequests', action='delete_comment',
                 conditions={'function': check_repo, 'method': ['DELETE']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('summary_home_explicit', '/{repo_name}/summary',
                 controller='summary', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('branches_home', '/{repo_name}/branches',
                 controller='branches', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('tags_home', '/{repo_name}/tags',
                 controller='tags', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('bookmarks_home', '/{repo_name}/bookmarks',
                 controller='bookmarks', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
                 controller='changelog', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
                 controller='changelog', action='changelog_summary',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_file_home',
                 '/{repo_name}/changelog/{revision}/{f_path}',
                 controller='changelog', f_path=None,
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}',
                 controller='changelog', action='changelog_details',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
                 controller='files', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_home_simple_catchrev',
                 '/{repo_name}/files/{revision}',
                 controller='files', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_home_simple_catchall',
                 '/{repo_name}/files',
                 controller='files', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_history_home',
                 '/{repo_name}/history/{revision}/{f_path}',
                 controller='files', action='history', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_authors_home',
                 '/{repo_name}/authors/{revision}/{f_path}',
                 controller='files', action='authors', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
                 controller='files', action='diff', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_diff_2way_home',
                 '/{repo_name}/diff-2way/{f_path}',
                 controller='files', action='diff_2way', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_rawfile_home',
                 '/{repo_name}/rawfile/{revision}/{f_path}',
                 controller='files', action='rawfile', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_raw_home',
                 '/{repo_name}/raw/{revision}/{f_path}',
                 controller='files', action='raw', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_render_home',
                 '/{repo_name}/render/{revision}/{f_path}',
                 controller='files', action='index', revision='tip', f_path='',
                 rendered=True, conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_annotate_home',
                 '/{repo_name}/annotate/{revision}/{f_path}',
                 controller='files', action='index', revision='tip',
                 f_path='', annotate=True, conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_edit',
                 '/{repo_name}/edit/{revision}/{f_path}',
                 controller='files', action='edit', revision='tip',
                 f_path='',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_edit_home',
                 '/{repo_name}/edit/{revision}/{f_path}',
                 controller='files', action='edit_home', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_add',
                 '/{repo_name}/add/{revision}/{f_path}',
                 controller='files', action='add', revision='tip',
                 f_path='',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_add_home',
                 '/{repo_name}/add/{revision}/{f_path}',
                 controller='files', action='add_home', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_delete',
                 '/{repo_name}/delete/{revision}/{f_path}',
                 controller='files', action='delete', revision='tip',
                 f_path='',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_delete_home',
                 '/{repo_name}/delete/{revision}/{f_path}',
                 controller='files', action='delete_home', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
                 controller='files', action='archivefile',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_nodelist_home',
                 '/{repo_name}/nodelist/{revision}/{f_path}',
                 controller='files', action='nodelist',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_metadata_list_home',
                 '/{repo_name}/metadata_list/{revision}/{f_path}',
                 controller='files', action='metadata_list',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
                 controller='forks', action='fork_create',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_fork_home', '/{repo_name}/fork',
                 controller='forks', action='fork',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_forks_home', '/{repo_name}/forks',
                 controller='forks', action='forks',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_followers_home', '/{repo_name}/followers',
                 controller='followers', action='followers',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # must be here for proper group/repo catching pattern
    _connect_with_slash(
        rmap, 'repo_group_home', '/{group_name}',
        controller='home', action='index_repo_group',
        conditions={'function': check_group},
        requirements=URL_NAME_REQUIREMENTS)

    # catch all, at the end
    _connect_with_slash(
        rmap, 'summary_home', '/{repo_name}', jsroute=True,
        controller='summary', action='index',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    return rmap


def _connect_with_slash(mapper, name, path, *args, **kwargs):
    """
    Connect a route with an optional trailing slash in `path`.
    """
    mapper.connect(name + '_slash', path + '/', *args, **kwargs)
    mapper.connect(name, path, *args, **kwargs)
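As a minimal sketch of what `_connect_with_slash` does, here it is applied to a standalone Routes mapper. The bare Mapper instance, the example repository name, and the printed results are illustrative assumptions, not part of this changeset:

    from routes import Mapper

    rmap = Mapper()
    # registers two named routes: 'summary_home' and 'summary_home_slash'
    _connect_with_slash(rmap, 'summary_home', '/{repo_name}',
                        controller='summary', action='index')

    # both URL forms resolve to the same controller/action
    print(rmap.match('/some-repo'))   # e.g. {'controller': 'summary', 'action': 'index', 'repo_name': 'some-repo'}
    print(rmap.match('/some-repo/'))  # matched via the '_slash' variant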
@@ -1,70 +1,76 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2015-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Disable VCS pages when VCS Server is not available
"""

import logging
import re
-
+from pyramid.httpexceptions import HTTPBadGateway

log = logging.getLogger(__name__)


+class VCSServerUnavailable(HTTPBadGateway):
+    """ HTTP Exception class for when VCS Server is unavailable """
+    code = 502
+    title = 'VCS Server Required'
+    explanation = 'A VCS Server is required for this action. There is currently no VCS Server configured.'
+
class DisableVCSPagesWrapper(object):
    """
-    Wrapper to disable all pages that require VCS Server to be running,
-    avoiding that errors explode to the user.
+    Pyramid view wrapper to disable all pages that require VCS Server to be
+    running, avoiding that errors explode to the user.

    This Wrapper should be enabled only in case VCS Server is not available
    for the instance.
    """

    VCS_NOT_REQUIRED = [
        '^/$',
        ('/_admin(?!/settings/mapping)(?!/my_account/repos)'
         '(?!/create_repository)(?!/gists)(?!/notifications/)'
        ),
    ]
    _REGEX_VCS_NOT_REQUIRED = [re.compile(path) for path in VCS_NOT_REQUIRED]

    def _check_vcs_requirement(self, path_info):
        """
        Tries to match the current path to one of the safe URLs to be rendered.
        Displays an error message in case
        """
        for regex in self._REGEX_VCS_NOT_REQUIRED:
            safe_url = regex.match(path_info)
            if safe_url:
                return True

        # Url is not safe to be rendered without VCS Server
        log.debug('accessing: `%s` with VCS Server disabled', path_info)
        return False

-    def __init__(self, app):
-        self.application = app
+    def __init__(self, handler):
+        self.handler = handler

-    def __call__(self, environ, start_response):
-        if not self._check_vcs_requirement(environ['PATH_INFO']):
-            environ['PATH_INFO'] = '/error/vcs_unavailable'
-
-        return self.application(environ, start_response)
+    def __call__(self, context, request):
+        if not self._check_vcs_requirement(request.environ['PATH_INFO']):
+            raise VCSServerUnavailable('VCS Server is not available')
+
+        return self.handler(context, request)
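The hunk above does not show how the wrapper is registered. As a rough sketch only, assuming `DisableVCSPagesWrapper` and `VCSServerUnavailable` are importable from the module shown here, the wrapper could sit around any Pyramid view callable; the view name and body below are made up for illustration:

    from pyramid.config import Configurator
    from pyramid.response import Response

    def repo_summary_view(context, request):
        # stand-in for a view that normally needs the VCS Server
        return Response('summary page')

    config = Configurator()
    # When the requested path is not matched by VCS_NOT_REQUIRED, the
    # wrapper raises VCSServerUnavailable; since HTTPBadGateway is both an
    # exception and a response, Pyramid's default exception view can turn
    # the raise into a 502 page instead of a low-level VCS error.
    config.add_view(DisableVCSPagesWrapper(repo_summary_view), name='summary')
    app = config.make_wsgi_app()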
@@ -1,87 +1,88 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import logging
import pylons
import rhodecode

from pylons.i18n.translation import _get_translator
from pylons.util import ContextObj
from routes.util import URLGenerator
+from pyramid.httpexceptions import HTTPInternalServerError, HTTPError, HTTPServiceUnavailable

from rhodecode.lib.base import attach_context_attributes, get_auth_user
from rhodecode.model import meta

log = logging.getLogger(__name__)


def pylons_compatibility_tween_factory(handler, registry):
    def pylons_compatibility_tween(request):
        """
        While migrating from pylons to pyramid we need to call some pylons code
        from pyramid. For example while rendering an old template that uses the
        'c' or 'h' objects. This tween sets up the needed pylons globals.
        """
        try:
            config = rhodecode.CONFIG
            environ = request.environ
            session = request.session
            session_key = (config['pylons.environ_config']
                           .get('session', 'beaker.session'))

            # Setup pylons globals.
            pylons.config._push_object(config)
            pylons.request._push_object(request)
            pylons.session._push_object(session)
            environ[session_key] = session
            pylons.url._push_object(URLGenerator(config['routes.map'],
                                                 environ))

            # TODO: Maybe we should use the language from pyramid.
            translator = _get_translator(config.get('lang'))
            pylons.translator._push_object(translator)

            # Get the rhodecode auth user object and make it available.
            auth_user = get_auth_user(environ)
            request.user = auth_user
            environ['rc_auth_user'] = auth_user

            # Setup the pylons context object ('c')
            context = ContextObj()
            context.rhodecode_user = auth_user
            attach_context_attributes(context)
            pylons.tmpl_context._push_object(context)
-
-            return handler(request)
+            response = handler(request)
+            return response
        finally:
            # Dispose current database session and rollback uncommitted
            # transactions.
            meta.Session.remove()

    return pylons_compatibility_tween


def includeme(config):
    config.add_subscriber('rhodecode.subscribers.add_renderer_globals',
                          'pyramid.events.BeforeRender')
    config.add_subscriber('rhodecode.subscribers.add_localizer',
                          'pyramid.events.NewRequest')
    config.add_tween('rhodecode.tweens.pylons_compatibility_tween_factory')
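To make the tween mechanics above easier to follow, here is a self-contained sketch of a tween factory and an HTTP-exception-raising view in a bare Pyramid app. The routes, views, and module layout are assumptions made for the example and are unrelated to RhodeCode's actual configuration:

    from pyramid.config import Configurator
    from pyramid.httpexceptions import HTTPBadGateway
    from pyramid.response import Response

    def example_tween_factory(handler, registry):
        def example_tween(request):
            # set up per-request state here, the way the pylons globals are
            # set up above, then call the downstream handler
            response = handler(request)
            return response
        return example_tween

    def failing_view(request):
        # raising an HTTP exception; Pyramid's exception view tween
        # (EXCVIEW), which by default sits between user tweens and the main
        # app handler, converts it into a finished 502 response
        raise HTTPBadGateway('VCS Server is not available')

    def ok_view(request):
        return Response('ok')

    config = Configurator()
    # add_tween takes a dotted name; this assumes the factory lives in an
    # importable module
    config.add_tween(__name__ + '.example_tween_factory')
    config.add_route('fail', '/fail')
    config.add_view(failing_view, route_name='fail')
    config.add_route('ok', '/ok')
    config.add_view(ok_view, route_name='ok')
    app = config.make_wsgi_app()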