@@ -1,608 +1,607 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # RhodeCode Enterprise - configuration file # |
|
4 | 4 | # Built-in functions and variables # |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | # # |
|
7 | 7 | ################################################################################ |
|
8 | 8 | |
|
9 | 9 | [DEFAULT] |
|
10 | 10 | debug = true |
|
11 | pdebug = false | |
|
12 | 11 | ################################################################################ |
|
13 | 12 | ## Uncomment and replace with the email address which should receive ## |
|
14 | 13 | ## any error reports after an application crash ## |
|
15 | 14 | ## Additionally these settings will be used by the RhodeCode mailing system ## |
|
16 | 15 | ################################################################################ |
|
17 | 16 | #email_to = admin@localhost |
|
18 | 17 | #error_email_from = paste_error@localhost |
|
19 | 18 | #app_email_from = rhodecode-noreply@localhost |
|
20 | 19 | #error_message = |
|
21 | 20 | #email_prefix = [RhodeCode] |
|
22 | 21 | |
|
23 | 22 | #smtp_server = mail.server.com |
|
24 | 23 | #smtp_username = |
|
25 | 24 | #smtp_password = |
|
26 | 25 | #smtp_port = |
|
27 | 26 | #smtp_use_tls = false |
|
28 | 27 | #smtp_use_ssl = true |
|
29 | 28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
30 | 29 | #smtp_auth = |
|
31 | 30 | |
|
32 | 31 | [server:main] |
|
33 | 32 | ## COMMON ## |
|
34 | 33 | host = 127.0.0.1 |
|
35 | 34 | port = 5000 |
|
36 | 35 | |
|
37 | 36 | ################################## |
|
38 | 37 | ## WAITRESS WSGI SERVER ## |
|
39 | 38 | ## Recommended for Development ## |
|
40 | 39 | ################################## |
|
41 | 40 | use = egg:waitress#main |
|
42 | 41 | ## number of worker threads |
|
43 | 42 | threads = 5 |
|
44 | 43 | ## MAX BODY SIZE 100GB |
|
45 | 44 | max_request_body_size = 107374182400 |
|
46 | 45 | ## Use poll instead of select, fixes file descriptors limits problems. |
|
47 | 46 | ## May not work on old windows systems. |
|
48 | 47 | asyncore_use_poll = true |
|
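The Waitress options above map directly onto keyword arguments of waitress.serve. A minimal sketch, assuming only that the waitress package is installed; the tiny WSGI app is hypothetical and stands in for the real application:

    # illustration of the Waitress settings used above; not part of RhodeCode itself
    from waitress import serve

    def app(environ, start_response):
        # placeholder WSGI app standing in for the RhodeCode application
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'hello']

    serve(
        app,
        host='127.0.0.1',
        port=5000,
        threads=5,                           # number of worker threads
        max_request_body_size=107374182400,  # 100GB max body size
        asyncore_use_poll=True,              # poll() instead of select()
    )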
49 | 48 | |
|
50 | 49 | |
|
51 | 50 | ########################## |
|
52 | 51 | ## GUNICORN WSGI SERVER ## |
|
53 | 52 | ########################## |
|
54 | 53 | ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini> |
|
55 | 54 | #use = egg:gunicorn#main |
|
56 | 55 | ## Sets the number of process workers. You must set `instance_id = *` |
|
57 | 56 | ## when this option is set to more than one worker, recommended |
|
58 | 57 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers |
|
59 | 58 | ## The `instance_id = *` must be set in the [app:main] section below |
|
60 | 59 | #workers = 2 |
|
61 | 60 | ## number of threads for each of the worker, must be set to 1 for gevent |
|
62 | 61 | ## generally recommended to be 1 |
|
63 | 62 | #threads = 1 |
|
64 | 63 | ## process name |
|
65 | 64 | #proc_name = rhodecode |
|
66 | 65 | ## type of worker class, one of sync, gevent |
|
67 | 66 | ## for bigger setups it is recommended to use a worker class other than sync |
|
68 | 67 | #worker_class = sync |
|
69 | 68 | ## The maximum number of simultaneous clients. Valid only for Gevent |
|
70 | 69 | #worker_connections = 10 |
|
71 | 70 | ## max number of requests that worker will handle before being gracefully |
|
72 | 71 | ## restarted, could prevent memory leaks |
|
73 | 72 | #max_requests = 1000 |
|
74 | 73 | #max_requests_jitter = 30 |
|
75 | 74 | ## amount of time a worker can spend handling a request before it |
|
76 | 75 | ## gets killed and restarted. Set to 6hrs |
|
77 | 76 | #timeout = 21600 |
|
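A small sketch of the (2 * NUMBER_OF_CPUS + 1) rule quoted above; it is not part of RhodeCode, just an illustration of how the recommended worker count is derived:

    import multiprocessing

    def recommended_workers(cpus=None):
        # gunicorn worker count suggested above: 2 * CPUs + 1
        cpus = cpus if cpus is not None else multiprocessing.cpu_count()
        return 2 * cpus + 1

    print(recommended_workers(2))  # -> 5, matching the "2CPU = 5 workers" example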
78 | 77 | |
|
79 | 78 | |
|
80 | 79 | ## prefix middleware for RhodeCode, disables force_https flag. |
|
81 | 80 | ## allows serving RhodeCode under a URL prefix on the server. |
|
82 | 81 | ## eg https://server.com/<prefix>. Enable `filter-with =` option below as well. |
|
83 | 82 | #[filter:proxy-prefix] |
|
84 | 83 | #use = egg:PasteDeploy#prefix |
|
85 | 84 | #prefix = /<your-prefix> |
|
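For illustration, the same filter with the placeholders filled in; `/rhodecode` is a hypothetical prefix, and `filter-with = proxy-prefix` must be enabled in [app:main] as noted above:

    [filter:proxy-prefix]
    use = egg:PasteDeploy#prefix
    # hypothetical example prefix
    prefix = /rhodecode

    [app:main]
    filter-with = proxy-prefix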
86 | 85 | |
|
87 | 86 | [app:main] |
|
88 | 87 | use = egg:rhodecode-enterprise-ce |
|
89 | 88 | ## enable proxy prefix middleware, defined below |
|
90 | 89 | #filter-with = proxy-prefix |
|
91 | 90 | |
|
92 | 91 | # During development we want to have the debug toolbar enabled |
|
93 | 92 | pyramid.includes = |
|
94 | 93 | pyramid_debugtoolbar |
|
95 | 94 | rhodecode.utils.debugtoolbar |
|
96 | 95 | rhodecode.lib.middleware.request_wrapper |
|
97 | 96 | |
|
98 | 97 | pyramid.reload_templates = true |
|
99 | 98 | |
|
100 | 99 | debugtoolbar.hosts = 0.0.0.0/0 |
|
101 | 100 | debugtoolbar.exclude_prefixes = |
|
102 | 101 | /css |
|
103 | 102 | /fonts |
|
104 | 103 | /images |
|
105 | 104 | /js |
|
106 | 105 | |
|
107 | 106 | ## RHODECODE PLUGINS ## |
|
108 | 107 | rhodecode.includes = |
|
109 | 108 | rhodecode.api |
|
110 | 109 | |
|
111 | 110 | |
|
112 | 111 | # api prefix url |
|
113 | 112 | rhodecode.api.url = /_admin/api |
|
114 | 113 | |
|
115 | 114 | |
|
116 | 115 | ## END RHODECODE PLUGINS ## |
|
117 | 116 | |
|
118 | 117 | full_stack = true |
|
119 | 118 | |
|
120 | 119 | ## Serve static files via RhodeCode, disable to serve them via HTTP server |
|
121 | 120 | static_files = true |
|
122 | 121 | |
|
123 | 122 | ## Optional Languages |
|
124 | 123 | ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
125 | 124 | lang = en |
|
126 | 125 | |
|
127 | 126 | ## perform a full repository scan on each server start, this should be |
|
128 | 127 | ## set to false after first startup, to allow faster server restarts. |
|
129 | 128 | startup.import_repos = false |
|
130 | 129 | |
|
131 | 130 | ## Uncomment and set this path to use archive download cache. |
|
132 | 131 | ## Once enabled, generated archives will be cached at this location |
|
133 | 132 | ## and served from the cache during subsequent requests for the same archive of |
|
134 | 133 | ## the repository. |
|
135 | 134 | #archive_cache_dir = /tmp/tarballcache |
|
136 | 135 | |
|
137 | 136 | ## change this to unique ID for security |
|
138 | 137 | app_instance_uuid = rc-production |
|
139 | 138 | |
|
140 | 139 | ## cut off limit for large diffs (size in bytes) |
|
141 | 140 | cut_off_limit_diff = 1024000 |
|
142 | 141 | cut_off_limit_file = 256000 |
|
143 | 142 | |
|
144 | 143 | ## use cache version of scm repo everywhere |
|
145 | 144 | vcs_full_cache = true |
|
146 | 145 | |
|
147 | 146 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
148 | 147 | ## Normally this is controlled by proper http flags sent from http server |
|
149 | 148 | force_https = false |
|
150 | 149 | |
|
151 | 150 | ## use Strict-Transport-Security headers |
|
152 | 151 | use_htsts = false |
|
153 | 152 | |
|
154 | 153 | ## number of commits stats will parse on each iteration |
|
155 | 154 | commit_parse_limit = 25 |
|
156 | 155 | |
|
157 | 156 | ## git rev filter option, --all is the default filter, if you need to |
|
158 | 157 | ## hide all refs in changelog switch this to --branches --tags |
|
159 | 158 | git_rev_filter = --branches --tags |
|
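For reference, the effect of the filter can be seen with plain git: `--all` walks every ref, while `--branches --tags` limits the walk to branch and tag heads, hiding other refs from the changelog:

    git rev-list --all               # every ref (branches, tags, remotes, ...)
    git rev-list --branches --tags   # only branch and tag heads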
160 | 159 | |
|
161 | 160 | # Set to true if your repos are exposed using the dumb protocol |
|
162 | 161 | git_update_server_info = false |
|
163 | 162 | |
|
164 | 163 | ## RSS/ATOM feed options |
|
165 | 164 | rss_cut_off_limit = 256000 |
|
166 | 165 | rss_items_per_page = 10 |
|
167 | 166 | rss_include_diff = false |
|
168 | 167 | |
|
169 | 168 | ## gist URL alias, used to create nicer urls for gist. This should be a |
|
170 | 169 | ## URL that rewrites to _admin/gists/<gistid>. |
|
171 | 170 | ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
172 | 171 | ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid> |
|
173 | 172 | gist_alias_url = |
|
174 | 173 | |
|
175 | 174 | ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be |
|
176 | 175 | ## used for access. |
|
177 | 176 | ## Adding ?auth_token = <token> to the url authenticates this request as if it |
|
178 | 177 | ## came from the logged-in user who owns this authentication token. |
|
179 | 178 | ## |
|
180 | 179 | ## Syntax is <ControllerClass>:<function_pattern>. |
|
181 | 180 | ## To enable access to raw_files put `FilesController:raw`. |
|
182 | 181 | ## To enable access to patches add `ChangesetController:changeset_patch`. |
|
183 | 182 | ## The list should be "," separated and on a single line. |
|
184 | 183 | ## |
|
185 | 184 | ## Recommended controllers to enable: |
|
186 | 185 | # ChangesetController:changeset_patch, |
|
187 | 186 | # ChangesetController:changeset_raw, |
|
188 | 187 | # FilesController:raw, |
|
189 | 188 | # FilesController:archivefile, |
|
190 | 189 | # GistsController:*, |
|
191 | 190 | api_access_controllers_whitelist = |
|
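A sketch of the whitelist populated with the recommended controllers from the comment above, plus a hypothetical raw-file URL showing where the token goes (host, repository, commit, path and token are placeholders):

    api_access_controllers_whitelist = ChangesetController:changeset_patch, ChangesetController:changeset_raw, FilesController:raw, FilesController:archivefile, GistsController:*

    # example request authenticated by token (values are placeholders):
    # https://rhodecode.example.com/<repo>/raw/<commit>/<path>?auth_token=<token>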
192 | 191 | |
|
193 | 192 | ## default encoding used to convert from and to unicode |
|
194 | 193 | ## can be also a comma separated list of encoding in case of mixed encodings |
|
195 | 194 | default_encoding = UTF-8 |
|
196 | 195 | |
|
197 | 196 | ## instance-id prefix |
|
198 | 197 | ## a prefix key for this instance used for cache invalidation when running |
|
199 | 198 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
200 | 199 | ## all running rhodecode instances. Leave empty if you don't use it |
|
201 | 200 | instance_id = |
|
202 | 201 | |
|
203 | 202 | ## Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
204 | 203 | ## of an authentication plugin even if it is disabled by its settings. |
|
205 | 204 | ## This could be useful if you are unable to log in to the system due to broken |
|
206 | 205 | ## authentication settings. Then you can enable e.g. the internal rhodecode auth |
|
207 | 206 | ## module to log in again and fix the settings. |
|
208 | 207 | ## |
|
209 | 208 | ## Available builtin plugin IDs (hash is part of the ID): |
|
210 | 209 | ## egg:rhodecode-enterprise-ce#rhodecode |
|
211 | 210 | ## egg:rhodecode-enterprise-ce#pam |
|
212 | 211 | ## egg:rhodecode-enterprise-ce#ldap |
|
213 | 212 | ## egg:rhodecode-enterprise-ce#jasig_cas |
|
214 | 213 | ## egg:rhodecode-enterprise-ce#headers |
|
215 | 214 | ## egg:rhodecode-enterprise-ce#crowd |
|
216 | 215 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
217 | 216 | |
|
218 | 217 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
219 | 218 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble |
|
220 | 219 | ## handling that, causing a series of failed authentication calls. |
|
221 | 220 | ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
222 | 221 | ## This will be served instead of the default 401 on bad authentication |
|
223 | 222 | auth_ret_code = |
|
224 | 223 | |
|
225 | 224 | ## use special detection method when serving auth_ret_code, instead of serving |
|
226 | 225 | ## ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
227 | 226 | ## and then serve auth_ret_code to clients |
|
228 | 227 | auth_ret_code_detection = false |
|
229 | 228 | |
|
230 | 229 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
231 | 230 | ## codes don't break the transactions while 4XX codes do |
|
232 | 231 | lock_ret_code = 423 |
|
233 | 232 | |
|
234 | 233 | ## allows changing the repository location in the settings page |
|
235 | 234 | allow_repo_location_change = true |
|
236 | 235 | |
|
238 | 237 | ## allows setting up custom hooks in the settings page |
|
238 | 237 | allow_custom_hooks_settings = true |
|
239 | 238 | |
|
240 | 239 | ## generated license token, go to the license page in RhodeCode settings to obtain |
|
241 | 240 | ## new token |
|
242 | 241 | license_token = |
|
243 | 242 | |
|
244 | 243 | ## supervisor connection uri, for managing supervisor and logs. |
|
245 | 244 | supervisor.uri = |
|
246 | 245 | ## supervisord group name/id we only want this RC instance to handle |
|
247 | 246 | supervisor.group_id = dev |
|
248 | 247 | |
|
249 | 248 | ## Display extended labs settings |
|
250 | 249 | labs_settings_active = true |
|
251 | 250 | |
|
252 | 251 | #################################### |
|
253 | 252 | ### CELERY CONFIG #### |
|
254 | 253 | #################################### |
|
255 | 254 | use_celery = false |
|
256 | 255 | broker.host = localhost |
|
257 | 256 | broker.vhost = rabbitmqhost |
|
258 | 257 | broker.port = 5672 |
|
259 | 258 | broker.user = rabbitmq |
|
260 | 259 | broker.password = qweqwe |
|
261 | 260 | |
|
262 | 261 | celery.imports = rhodecode.lib.celerylib.tasks |
|
263 | 262 | |
|
264 | 263 | celery.result.backend = amqp |
|
265 | 264 | celery.result.dburi = amqp:// |
|
266 | 265 | celery.result.serialier = json |
|
267 | 266 | |
|
268 | 267 | #celery.send.task.error.emails = true |
|
269 | 268 | #celery.amqp.task.result.expires = 18000 |
|
270 | 269 | |
|
271 | 270 | celeryd.concurrency = 2 |
|
272 | 271 | #celeryd.log.file = celeryd.log |
|
273 | 272 | celeryd.log.level = debug |
|
274 | 273 | celeryd.max.tasks.per.child = 1 |
|
275 | 274 | |
|
276 | 275 | ## tasks will never be sent to the queue, but executed locally instead. |
|
277 | 276 | celery.always.eager = false |
|
278 | 277 | |
|
279 | 278 | #################################### |
|
280 | 279 | ### BEAKER CACHE #### |
|
281 | 280 | #################################### |
|
282 | 281 | # default cache dir for templates. Putting this into a ramdisk |
|
283 | 282 | ## can boost performance, eg. %(here)s/data_ramdisk |
|
284 | 283 | cache_dir = %(here)s/data |
|
285 | 284 | |
|
286 | 285 | ## locking and default file storage for Beaker. Putting this into a ramdisk |
|
287 | 286 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data |
|
288 | 287 | beaker.cache.data_dir = %(here)s/data/cache/beaker_data |
|
289 | 288 | beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock |
|
290 | 289 | |
|
291 | 290 | beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long |
|
292 | 291 | |
|
293 | 292 | beaker.cache.super_short_term.type = memory |
|
294 | 293 | beaker.cache.super_short_term.expire = 10 |
|
295 | 294 | beaker.cache.super_short_term.key_length = 256 |
|
296 | 295 | |
|
297 | 296 | beaker.cache.short_term.type = memory |
|
298 | 297 | beaker.cache.short_term.expire = 60 |
|
299 | 298 | beaker.cache.short_term.key_length = 256 |
|
300 | 299 | |
|
301 | 300 | beaker.cache.long_term.type = memory |
|
302 | 301 | beaker.cache.long_term.expire = 36000 |
|
303 | 302 | beaker.cache.long_term.key_length = 256 |
|
304 | 303 | |
|
305 | 304 | beaker.cache.sql_cache_short.type = memory |
|
306 | 305 | beaker.cache.sql_cache_short.expire = 10 |
|
307 | 306 | beaker.cache.sql_cache_short.key_length = 256 |
|
308 | 307 | |
|
309 | 308 | # default is memory cache, configure only if required |
|
310 | 309 | # using multi-node or multi-worker setup |
|
311 | 310 | #beaker.cache.auth_plugins.type = ext:database |
|
312 | 311 | #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock |
|
313 | 312 | #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode |
|
314 | 313 | #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode |
|
315 | 314 | #beaker.cache.auth_plugins.sa.pool_recycle = 3600 |
|
316 | 315 | #beaker.cache.auth_plugins.sa.pool_size = 10 |
|
317 | 316 | #beaker.cache.auth_plugins.sa.max_overflow = 0 |
|
318 | 317 | |
|
319 | 318 | beaker.cache.repo_cache_long.type = memorylru_base |
|
320 | 319 | beaker.cache.repo_cache_long.max_items = 4096 |
|
321 | 320 | beaker.cache.repo_cache_long.expire = 2592000 |
|
322 | 321 | |
|
323 | 322 | # default is memorylru_base cache, configure only if required |
|
324 | 323 | # using multi-node or multi-worker setup |
|
325 | 324 | #beaker.cache.repo_cache_long.type = ext:memcached |
|
326 | 325 | #beaker.cache.repo_cache_long.url = localhost:11211 |
|
327 | 326 | #beaker.cache.repo_cache_long.expire = 1209600 |
|
328 | 327 | #beaker.cache.repo_cache_long.key_length = 256 |
|
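A hedged sketch of how Beaker consumes region settings like the ones above, using Beaker's CacheManager directly; the region selection, paths and the cached function are illustrative only:

    from beaker.cache import CacheManager
    from beaker.util import parse_cache_config_options

    settings = {
        'cache.data_dir': 'data/cache/beaker_data',
        'cache.lock_dir': 'data/cache/beaker_lock',
        'cache.regions': 'super_short_term, long_term',
        'cache.super_short_term.type': 'memory',
        'cache.super_short_term.expire': '10',
        'cache.long_term.type': 'memory',
        'cache.long_term.expire': '36000',
    }
    cache = CacheManager(**parse_cache_config_options(settings))

    @cache.region('long_term', 'demo_namespace')
    def expensive_lookup(key):
        # placeholder for a slow computation; the result is cached per region rules
        return key.upper()

    print(expensive_lookup('abc'))  # a repeat call within the expire window hits the cache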
329 | 328 | |
|
330 | 329 | #################################### |
|
331 | 330 | ### BEAKER SESSION #### |
|
332 | 331 | #################################### |
|
333 | 332 | |
|
334 | 333 | ## .session.type is the type of storage used for the session; currently allowed |
|
335 | 334 | ## types are file, ext:memcached, ext:database, and memory (default). |
|
336 | 335 | beaker.session.type = file |
|
337 | 336 | beaker.session.data_dir = %(here)s/data/sessions/data |
|
338 | 337 | |
|
339 | 338 | ## db based session, fast, and allows easy management over logged in users ## |
|
340 | 339 | #beaker.session.type = ext:database |
|
341 | 340 | #beaker.session.table_name = db_session |
|
342 | 341 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
343 | 342 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
344 | 343 | #beaker.session.sa.pool_recycle = 3600 |
|
345 | 344 | #beaker.session.sa.echo = false |
|
346 | 345 | |
|
347 | 346 | beaker.session.key = rhodecode |
|
348 | 347 | beaker.session.secret = develop-rc-uytcxaz |
|
349 | 348 | beaker.session.lock_dir = %(here)s/data/sessions/lock |
|
350 | 349 | |
|
351 | 350 | ## Secure encrypted cookie. Requires AES and AES python libraries |
|
352 | 351 | ## you must disable beaker.session.secret to use this |
|
353 | 352 | #beaker.session.encrypt_key = <key_for_encryption> |
|
354 | 353 | #beaker.session.validate_key = <validation_key> |
|
355 | 354 | |
|
356 | 355 | ## sets session as invalid (also logging out the user) if it has not been |
|
357 | 356 | ## accessed for given amount of time in seconds |
|
358 | 357 | beaker.session.timeout = 2592000 |
|
359 | 358 | beaker.session.httponly = true |
|
360 | 359 | #beaker.session.cookie_path = /<your-prefix> |
|
361 | 360 | |
|
362 | 361 | ## uncomment for https secure cookie |
|
363 | 362 | beaker.session.secure = false |
|
364 | 363 | |
|
365 | 364 | ## auto-save the session so that you do not have to call .save() |
|
366 | 365 | beaker.session.auto = false |
|
367 | 366 | |
|
368 | 367 | ## default cookie expiration time in seconds, set to `true` to set expire |
|
369 | 368 | ## at browser close |
|
370 | 369 | #beaker.session.cookie_expires = 3600 |
|
371 | 370 | |
|
372 | 371 | ################################### |
|
373 | 372 | ## SEARCH INDEXING CONFIGURATION ## |
|
374 | 373 | ################################### |
|
375 | 374 | ## Full text search indexer is available in rhodecode-tools under |
|
376 | 375 | ## `rhodecode-tools index` command |
|
377 | 376 | |
|
378 | 377 | # WHOOSH Backend, doesn't require additional services to run |
|
379 | 378 | # it works well with a few dozen repos |
|
380 | 379 | search.module = rhodecode.lib.index.whoosh |
|
381 | 380 | search.location = %(here)s/data/index |
|
382 | 381 | |
|
383 | 382 | |
|
384 | 383 | ################################### |
|
385 | 384 | ## APPENLIGHT CONFIG ## |
|
386 | 385 | ################################### |
|
387 | 386 | |
|
388 | 387 | ## Appenlight is tailored to work with RhodeCode, see |
|
389 | 388 | ## http://appenlight.com for details how to obtain an account |
|
390 | 389 | |
|
391 | 390 | ## appenlight integration enabled |
|
392 | 391 | appenlight = false |
|
393 | 392 | |
|
394 | 393 | appenlight.server_url = https://api.appenlight.com |
|
395 | 394 | appenlight.api_key = YOUR_API_KEY |
|
396 | 395 | #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 |
|
397 | 396 | |
|
398 | 397 | # used for JS client |
|
399 | 398 | appenlight.api_public_key = YOUR_API_PUBLIC_KEY |
|
400 | 399 | |
|
401 | 400 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
402 | 401 | |
|
403 | 402 | ## enables 404 error logging (default False) |
|
404 | 403 | appenlight.report_404 = false |
|
405 | 404 | |
|
406 | 405 | ## time in seconds after which a request is considered slow (default 1) |
|
407 | 406 | appenlight.slow_request_time = 1 |
|
408 | 407 | |
|
409 | 408 | ## record slow requests in application |
|
410 | 409 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
411 | 410 | appenlight.slow_requests = true |
|
412 | 411 | |
|
413 | 412 | ## enable hooking to application loggers |
|
414 | 413 | appenlight.logging = true |
|
415 | 414 | |
|
416 | 415 | ## minimum log level for log capture |
|
417 | 416 | appenlight.logging.level = WARNING |
|
418 | 417 | |
|
419 | 418 | ## send logs only from erroneous/slow requests |
|
420 | 419 | ## (saves API quota for intensive logging) |
|
421 | 420 | appenlight.logging_on_error = false |
|
422 | 421 | |
|
423 | 422 | ## list of additional keywords that should be grabbed from the environ object |
|
424 | 423 | ## can be string with comma separated list of words in lowercase |
|
425 | 424 | ## (by default client will always send following info: |
|
426 | 425 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
427 | 426 | ## start with HTTP*); this list can be extended with additional keywords here |
|
428 | 427 | appenlight.environ_keys_whitelist = |
|
429 | 428 | |
|
430 | 429 | ## list of keywords that should be blanked from request object |
|
431 | 430 | ## can be string with comma separated list of words in lowercase |
|
432 | 431 | ## (by default client will always blank keys that contain following words |
|
433 | 432 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
434 | 433 | ## this list can be extended with additional keywords set here |
|
435 | 434 | appenlight.request_keys_blacklist = |
|
436 | 435 | |
|
437 | 436 | ## list of namespaces that should be ignored when gathering log entries |
|
438 | 437 | ## can be string with comma separated list of namespaces |
|
439 | 438 | ## (by default the client ignores own entries: appenlight_client.client) |
|
440 | 439 | appenlight.log_namespace_blacklist = |
|
441 | 440 | |
|
442 | 441 | |
|
443 | 442 | ################################################################################ |
|
444 | 443 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
445 | 444 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
446 | 445 | ## execute malicious code after an exception is raised. ## |
|
447 | 446 | ################################################################################ |
|
448 | 447 | #set debug = false |
|
449 | 448 | |
|
450 | 449 | |
|
451 | 450 | ############## |
|
452 | 451 | ## STYLING ## |
|
453 | 452 | ############## |
|
454 | 453 | debug_style = true |
|
455 | 454 | |
|
456 | 455 | ######################################################### |
|
457 | 456 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
458 | 457 | ######################################################### |
|
459 | 458 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
460 | 459 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
461 | 460 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode |
|
462 | 461 | |
|
463 | 462 | # see sqlalchemy docs for other advanced settings |
|
464 | 463 | |
|
465 | 464 | ## print the sql statements to output |
|
466 | 465 | sqlalchemy.db1.echo = false |
|
467 | 466 | ## recycle the connections after this amount of seconds |
|
468 | 467 | sqlalchemy.db1.pool_recycle = 3600 |
|
469 | 468 | sqlalchemy.db1.convert_unicode = true |
|
470 | 469 | |
|
471 | 470 | ## the number of connections to keep open inside the connection pool. |
|
472 | 471 | ## 0 indicates no limit |
|
473 | 472 | #sqlalchemy.db1.pool_size = 5 |
|
474 | 473 | |
|
475 | 474 | ## the number of connections to allow in connection pool "overflow", that is |
|
476 | 475 | ## connections that can be opened above and beyond the pool_size setting, |
|
477 | 476 | ## which defaults to five. |
|
478 | 477 | #sqlalchemy.db1.max_overflow = 10 |
|
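The same pool knobs expressed through SQLAlchemy directly, as a rough sketch; the sqlite URL mirrors the default above, and QueuePool is forced explicitly so that pool_size/max_overflow apply:

    from sqlalchemy import create_engine, text
    from sqlalchemy.pool import QueuePool

    engine = create_engine(
        'sqlite:///rhodecode.db',   # stand-in for sqlalchemy.db1.url above
        poolclass=QueuePool,
        pool_size=5,                # connections kept open in the pool
        max_overflow=10,            # extra connections allowed beyond pool_size
        pool_recycle=3600,          # recycle connections after an hour
        echo=False,                 # set True to print SQL statements
    )

    with engine.connect() as conn:
        print(conn.execute(text('select 1')).scalar())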
479 | 478 | |
|
480 | 479 | |
|
481 | 480 | ################## |
|
482 | 481 | ### VCS CONFIG ### |
|
483 | 482 | ################## |
|
484 | 483 | vcs.server.enable = true |
|
485 | 484 | vcs.server = localhost:9900 |
|
486 | 485 | |
|
487 | 486 | ## Web server connectivity protocol, responsible for web based VCS operations |
|
488 | 487 | ## Available protocols are: |
|
489 | 488 | ## `pyro4` - using pyro4 server |
|
490 | 489 | ## `http` - using http-rpc backend |
|
491 | 490 | #vcs.server.protocol = http |
|
492 | 491 | |
|
493 | 492 | ## Push/Pull operations protocol, available options are: |
|
494 | 493 | ## `pyro4` - using pyro4 server |
|
495 | 494 | ## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended |
|
496 | 495 | ## `vcsserver.scm_app` - internal app (EE only) |
|
497 | 496 | #vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http |
|
498 | 497 | |
|
499 | 498 | ## Push/Pull operations hooks protocol, available options are: |
|
500 | 499 | ## `pyro4` - using pyro4 server |
|
501 | 500 | ## `http` - using http-rpc backend |
|
502 | 501 | #vcs.hooks.protocol = http |
|
503 | 502 | |
|
504 | 503 | vcs.server.log_level = debug |
|
505 | 504 | ## Start VCSServer with this instance as a subprocess, useful for development |
|
506 | 505 | vcs.start_server = true |
|
507 | 506 | vcs.backends = hg, git, svn |
|
508 | 507 | vcs.connection_timeout = 3600 |
|
509 | 508 | ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
510 | 509 | ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible |
|
511 | 510 | #vcs.svn.compatible_version = pre-1.8-compatible |
|
512 | 511 | |
|
513 | 512 | ################################ |
|
514 | 513 | ### LOGGING CONFIGURATION #### |
|
515 | 514 | ################################ |
|
516 | 515 | [loggers] |
|
517 | 516 | keys = root, routes, rhodecode, sqlalchemy, beaker, pyro4, templates, whoosh_indexer |
|
518 | 517 | |
|
519 | 518 | [handlers] |
|
520 | 519 | keys = console, console_sql |
|
521 | 520 | |
|
522 | 521 | [formatters] |
|
523 | 522 | keys = generic, color_formatter, color_formatter_sql |
|
524 | 523 | |
|
525 | 524 | ############# |
|
526 | 525 | ## LOGGERS ## |
|
527 | 526 | ############# |
|
528 | 527 | [logger_root] |
|
529 | 528 | level = NOTSET |
|
530 | 529 | handlers = console |
|
531 | 530 | |
|
532 | 531 | [logger_routes] |
|
533 | 532 | level = DEBUG |
|
534 | 533 | handlers = |
|
535 | 534 | qualname = routes.middleware |
|
536 | 535 | ## "level = DEBUG" logs the route matched and routing variables. |
|
537 | 536 | propagate = 1 |
|
538 | 537 | |
|
539 | 538 | [logger_beaker] |
|
540 | 539 | level = DEBUG |
|
541 | 540 | handlers = |
|
542 | 541 | qualname = beaker.container |
|
543 | 542 | propagate = 1 |
|
544 | 543 | |
|
545 | 544 | [logger_pyro4] |
|
546 | 545 | level = DEBUG |
|
547 | 546 | handlers = |
|
548 | 547 | qualname = Pyro4 |
|
549 | 548 | propagate = 1 |
|
550 | 549 | |
|
551 | 550 | [logger_templates] |
|
552 | 551 | level = INFO |
|
553 | 552 | handlers = |
|
554 | 553 | qualname = pylons.templating |
|
555 | 554 | propagate = 1 |
|
556 | 555 | |
|
557 | 556 | [logger_rhodecode] |
|
558 | 557 | level = DEBUG |
|
559 | 558 | handlers = |
|
560 | 559 | qualname = rhodecode |
|
561 | 560 | propagate = 1 |
|
562 | 561 | |
|
563 | 562 | [logger_sqlalchemy] |
|
564 | 563 | level = INFO |
|
565 | 564 | handlers = console_sql |
|
566 | 565 | qualname = sqlalchemy.engine |
|
567 | 566 | propagate = 0 |
|
568 | 567 | |
|
569 | 568 | [logger_whoosh_indexer] |
|
570 | 569 | level = DEBUG |
|
571 | 570 | handlers = |
|
572 | 571 | qualname = whoosh_indexer |
|
573 | 572 | propagate = 1 |
|
574 | 573 | |
|
575 | 574 | ############## |
|
576 | 575 | ## HANDLERS ## |
|
577 | 576 | ############## |
|
578 | 577 | |
|
579 | 578 | [handler_console] |
|
580 | 579 | class = StreamHandler |
|
581 | 580 | args = (sys.stderr,) |
|
582 | 581 | level = DEBUG |
|
583 | 582 | formatter = color_formatter |
|
584 | 583 | |
|
585 | 584 | [handler_console_sql] |
|
586 | 585 | class = StreamHandler |
|
587 | 586 | args = (sys.stderr,) |
|
588 | 587 | level = DEBUG |
|
589 | 588 | formatter = color_formatter_sql |
|
590 | 589 | |
|
591 | 590 | ################ |
|
592 | 591 | ## FORMATTERS ## |
|
593 | 592 | ################ |
|
594 | 593 | |
|
595 | 594 | [formatter_generic] |
|
596 | 595 | class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter |
|
597 | 596 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
598 | 597 | datefmt = %Y-%m-%d %H:%M:%S |
|
599 | 598 | |
|
600 | 599 | [formatter_color_formatter] |
|
601 | 600 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
602 | 601 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
603 | 602 | datefmt = %Y-%m-%d %H:%M:%S |
|
604 | 603 | |
|
605 | 604 | [formatter_color_formatter_sql] |
|
606 | 605 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
607 | 606 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
608 | 607 | datefmt = %Y-%m-%d %H:%M:%S |
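The [loggers]/[handlers]/[formatters] sections above follow the standard logging.config ini format, which is what `gunicorn --log-config <inifile.ini>` loads. A rough sketch of loading it by hand; the filename is assumed, and the custom rhodecode formatter classes must be importable for this to work:

    import logging
    import logging.config

    # assumed path to the ini shown above
    logging.config.fileConfig('development.ini', disable_existing_loggers=False)
    logging.getLogger('rhodecode').debug('logging configured from the ini file')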
@@ -1,577 +1,576 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # RhodeCode Enterprise - configuration file # |
|
4 | 4 | # Built-in functions and variables # |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | # # |
|
7 | 7 | ################################################################################ |
|
8 | 8 | |
|
9 | 9 | [DEFAULT] |
|
10 | 10 | debug = true |
|
11 | pdebug = false | |
|
12 | 11 | ################################################################################ |
|
13 | 12 | ## Uncomment and replace with the email address which should receive ## |
|
14 | 13 | ## any error reports after an application crash ## |
|
15 | 14 | ## Additionally these settings will be used by the RhodeCode mailing system ## |
|
16 | 15 | ################################################################################ |
|
17 | 16 | #email_to = admin@localhost |
|
18 | 17 | #error_email_from = paste_error@localhost |
|
19 | 18 | #app_email_from = rhodecode-noreply@localhost |
|
20 | 19 | #error_message = |
|
21 | 20 | #email_prefix = [RhodeCode] |
|
22 | 21 | |
|
23 | 22 | #smtp_server = mail.server.com |
|
24 | 23 | #smtp_username = |
|
25 | 24 | #smtp_password = |
|
26 | 25 | #smtp_port = |
|
27 | 26 | #smtp_use_tls = false |
|
28 | 27 | #smtp_use_ssl = true |
|
29 | 28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
30 | 29 | #smtp_auth = |
|
31 | 30 | |
|
32 | 31 | [server:main] |
|
33 | 32 | ## COMMON ## |
|
34 | 33 | host = 127.0.0.1 |
|
35 | 34 | port = 5000 |
|
36 | 35 | |
|
37 | 36 | ################################## |
|
38 | 37 | ## WAITRESS WSGI SERVER ## |
|
39 | 38 | ## Recommended for Development ## |
|
40 | 39 | ################################## |
|
41 | 40 | #use = egg:waitress#main |
|
42 | 41 | ## number of worker threads |
|
43 | 42 | #threads = 5 |
|
44 | 43 | ## MAX BODY SIZE 100GB |
|
45 | 44 | #max_request_body_size = 107374182400 |
|
46 | 45 | ## Use poll instead of select, fixes file descriptors limits problems. |
|
47 | 46 | ## May not work on old windows systems. |
|
48 | 47 | #asyncore_use_poll = true |
|
49 | 48 | |
|
50 | 49 | |
|
51 | 50 | ########################## |
|
52 | 51 | ## GUNICORN WSGI SERVER ## |
|
53 | 52 | ########################## |
|
54 | 53 | ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini> |
|
55 | 54 | use = egg:gunicorn#main |
|
56 | 55 | ## Sets the number of process workers. You must set `instance_id = *` |
|
57 | 56 | ## when this option is set to more than one worker, recommended |
|
58 | 57 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers |
|
59 | 58 | ## The `instance_id = *` must be set in the [app:main] section below |
|
60 | 59 | workers = 2 |
|
61 | 60 | ## number of threads for each of the worker, must be set to 1 for gevent |
|
62 | 61 | ## generally recommened to be at 1 |
|
63 | 62 | #threads = 1 |
|
64 | 63 | ## process name |
|
65 | 64 | proc_name = rhodecode |
|
66 | 65 | ## type of worker class, one of sync, gevent |
|
67 | 66 | ## recommended for bigger setup is using of of other than sync one |
|
68 | 67 | worker_class = sync |
|
69 | 68 | ## The maximum number of simultaneous clients. Valid only for Gevent |
|
70 | 69 | #worker_connections = 10 |
|
71 | 70 | ## max number of requests that worker will handle before being gracefully |
|
72 | 71 | ## restarted, could prevent memory leaks |
|
73 | 72 | max_requests = 1000 |
|
74 | 73 | max_requests_jitter = 30 |
|
75 | 74 | ## amount of time a worker can spend with handling a request before it |
|
76 | 75 | ## gets killed and restarted. Set to 6hrs |
|
77 | 76 | timeout = 21600 |
|
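With gunicorn enabled as above, the server is started the way the comment at the top of this section describes; the ini filename here is an assumption:

    gunicorn --log-config production.ini --paste production.ini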
78 | 77 | |
|
79 | 78 | |
|
80 | 79 | ## prefix middleware for RhodeCode, disables force_https flag. |
|
81 | 80 | ## allows to set RhodeCode under a prefix in server. |
|
82 | 81 | ## eg https://server.com/<prefix>. Enable `filter-with =` option below as well. |
|
83 | 82 | #[filter:proxy-prefix] |
|
84 | 83 | #use = egg:PasteDeploy#prefix |
|
85 | 84 | #prefix = /<your-prefix> |
|
86 | 85 | |
|
87 | 86 | [app:main] |
|
88 | 87 | use = egg:rhodecode-enterprise-ce |
|
89 | 88 | ## enable proxy prefix middleware, defined below |
|
90 | 89 | #filter-with = proxy-prefix |
|
91 | 90 | |
|
92 | 91 | full_stack = true |
|
93 | 92 | |
|
94 | 93 | ## Serve static files via RhodeCode, disable to serve them via HTTP server |
|
95 | 94 | static_files = true |
|
96 | 95 | |
|
97 | 96 | ## Optional Languages |
|
98 | 97 | ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
99 | 98 | lang = en |
|
100 | 99 | |
|
101 | 100 | ## perform a full repository scan on each server start, this should be |
|
102 | 101 | ## set to false after first startup, to allow faster server restarts. |
|
103 | 102 | startup.import_repos = false |
|
104 | 103 | |
|
105 | 104 | ## Uncomment and set this path to use archive download cache. |
|
106 | 105 | ## Once enabled, generated archives will be cached at this location |
|
107 | 106 | ## and served from the cache during subsequent requests for the same archive of |
|
108 | 107 | ## the repository. |
|
109 | 108 | #archive_cache_dir = /tmp/tarballcache |
|
110 | 109 | |
|
111 | 110 | ## change this to unique ID for security |
|
112 | 111 | app_instance_uuid = rc-production |
|
113 | 112 | |
|
114 | 113 | ## cut off limit for large diffs (size in bytes) |
|
115 | 114 | cut_off_limit_diff = 1024000 |
|
116 | 115 | cut_off_limit_file = 256000 |
|
117 | 116 | |
|
118 | 117 | ## use cache version of scm repo everywhere |
|
119 | 118 | vcs_full_cache = true |
|
120 | 119 | |
|
121 | 120 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
122 | 121 | ## Normally this is controlled by proper http flags sent from http server |
|
123 | 122 | force_https = false |
|
124 | 123 | |
|
125 | 124 | ## use Strict-Transport-Security headers |
|
126 | 125 | use_htsts = false |
|
127 | 126 | |
|
128 | 127 | ## number of commits stats will parse on each iteration |
|
129 | 128 | commit_parse_limit = 25 |
|
130 | 129 | |
|
131 | 130 | ## git rev filter option, --all is the default filter, if you need to |
|
132 | 131 | ## hide all refs in changelog switch this to --branches --tags |
|
133 | 132 | git_rev_filter = --branches --tags |
|
134 | 133 | |
|
135 | 134 | # Set to true if your repos are exposed using the dumb protocol |
|
136 | 135 | git_update_server_info = false |
|
137 | 136 | |
|
138 | 137 | ## RSS/ATOM feed options |
|
139 | 138 | rss_cut_off_limit = 256000 |
|
140 | 139 | rss_items_per_page = 10 |
|
141 | 140 | rss_include_diff = false |
|
142 | 141 | |
|
143 | 142 | ## gist URL alias, used to create nicer urls for gist. This should be an |
|
144 | 143 | ## url that does rewrites to _admin/gists/<gistid>. |
|
145 | 144 | ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |
|
146 | 145 | ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/<gistid> |
|
147 | 146 | gist_alias_url = |
|
148 | 147 | |
|
149 | 148 | ## List of controllers (using glob pattern syntax) that AUTH TOKENS could be |
|
150 | 149 | ## used for access. |
|
151 | 150 | ## Adding ?auth_token = <token> to the url authenticates this request as if it |
|
152 | 151 | ## came from the logged-in user who owns this authentication token. |
|
153 | 152 | ## |
|
154 | 153 | ## Syntax is <ControllerClass>:<function_pattern>. |
|
155 | 154 | ## To enable access to raw_files put `FilesController:raw`. |
|
156 | 155 | ## To enable access to patches add `ChangesetController:changeset_patch`. |
|
157 | 156 | ## The list should be "," separated and on a single line. |
|
158 | 157 | ## |
|
159 | 158 | ## Recommended controllers to enable: |
|
160 | 159 | # ChangesetController:changeset_patch, |
|
161 | 160 | # ChangesetController:changeset_raw, |
|
162 | 161 | # FilesController:raw, |
|
163 | 162 | # FilesController:archivefile, |
|
164 | 163 | # GistsController:*, |
|
165 | 164 | api_access_controllers_whitelist = |
|
166 | 165 | |
|
167 | 166 | ## default encoding used to convert from and to unicode |
|
168 | 167 | ## can be also a comma separated list of encoding in case of mixed encodings |
|
169 | 168 | default_encoding = UTF-8 |
|
170 | 169 | |
|
171 | 170 | ## instance-id prefix |
|
172 | 171 | ## a prefix key for this instance used for cache invalidation when running |
|
173 | 172 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
174 | 173 | ## all running rhodecode instances. Leave empty if you don't use it |
|
175 | 174 | instance_id = |
|
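Because this file enables two gunicorn workers, the comment in the [server:main] section above requires the wildcard instance id; a minimal sketch of that setting:

    instance_id = *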
176 | 175 | |
|
177 | 176 | ## Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
178 | 177 | ## of an authentication plugin even if it is disabled by its settings. |
|
179 | 178 | ## This could be useful if you are unable to log in to the system due to broken |
|
180 | 179 | ## authentication settings. Then you can enable e.g. the internal rhodecode auth |
|
181 | 180 | ## module to log in again and fix the settings. |
|
182 | 181 | ## |
|
183 | 182 | ## Available builtin plugin IDs (hash is part of the ID): |
|
184 | 183 | ## egg:rhodecode-enterprise-ce#rhodecode |
|
185 | 184 | ## egg:rhodecode-enterprise-ce#pam |
|
186 | 185 | ## egg:rhodecode-enterprise-ce#ldap |
|
187 | 186 | ## egg:rhodecode-enterprise-ce#jasig_cas |
|
188 | 187 | ## egg:rhodecode-enterprise-ce#headers |
|
189 | 188 | ## egg:rhodecode-enterprise-ce#crowd |
|
190 | 189 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
191 | 190 | |
|
192 | 191 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
193 | 192 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble |
|
194 | 193 | ## handling that, causing a series of failed authentication calls. |
|
195 | 194 | ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code |
|
196 | 195 | ## This will be served instead of the default 401 on bad authentication |
|
197 | 196 | auth_ret_code = |
|
198 | 197 | |
|
199 | 198 | ## use special detection method when serving auth_ret_code, instead of serving |
|
200 | 199 | ## ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
201 | 200 | ## and then serve auth_ret_code to clients |
|
202 | 201 | auth_ret_code_detection = false |
|
203 | 202 | |
|
204 | 203 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
205 | 204 | ## codes don't break the transactions while 4XX codes do |
|
206 | 205 | lock_ret_code = 423 |
|
207 | 206 | |
|
208 | 207 | ## allows changing the repository location in the settings page |
|
209 | 208 | allow_repo_location_change = true |
|
210 | 209 | |
|
212 | 211 | ## allows setting up custom hooks in the settings page |
|
212 | 211 | allow_custom_hooks_settings = true |
|
213 | 212 | |
|
214 | 213 | ## generated license token, go to the license page in RhodeCode settings to obtain |
|
215 | 214 | ## new token |
|
216 | 215 | license_token = |
|
217 | 216 | |
|
218 | 217 | ## supervisor connection uri, for managing supervisor and logs. |
|
219 | 218 | supervisor.uri = |
|
220 | 219 | ## supervisord group name/id we only want this RC instance to handle |
|
221 | 220 | supervisor.group_id = prod |
|
222 | 221 | |
|
223 | 222 | ## Display extended labs settings |
|
224 | 223 | labs_settings_active = true |
|
225 | 224 | |
|
226 | 225 | #################################### |
|
227 | 226 | ### CELERY CONFIG #### |
|
228 | 227 | #################################### |
|
229 | 228 | use_celery = false |
|
230 | 229 | broker.host = localhost |
|
231 | 230 | broker.vhost = rabbitmqhost |
|
232 | 231 | broker.port = 5672 |
|
233 | 232 | broker.user = rabbitmq |
|
234 | 233 | broker.password = qweqwe |
|
235 | 234 | |
|
236 | 235 | celery.imports = rhodecode.lib.celerylib.tasks |
|
237 | 236 | |
|
238 | 237 | celery.result.backend = amqp |
|
239 | 238 | celery.result.dburi = amqp:// |
|
240 | 239 | celery.result.serialier = json |
|
241 | 240 | |
|
242 | 241 | #celery.send.task.error.emails = true |
|
243 | 242 | #celery.amqp.task.result.expires = 18000 |
|
244 | 243 | |
|
245 | 244 | celeryd.concurrency = 2 |
|
246 | 245 | #celeryd.log.file = celeryd.log |
|
247 | 246 | celeryd.log.level = debug |
|
248 | 247 | celeryd.max.tasks.per.child = 1 |
|
249 | 248 | |
|
250 | 249 | ## tasks will never be sent to the queue, but executed locally instead. |
|
251 | 250 | celery.always.eager = false |
|
252 | 251 | |
|
253 | 252 | #################################### |
|
254 | 253 | ### BEAKER CACHE #### |
|
255 | 254 | #################################### |
|
256 | 255 | # default cache dir for templates. Putting this into a ramdisk |
|
257 | 256 | ## can boost performance, eg. %(here)s/data_ramdisk |
|
258 | 257 | cache_dir = %(here)s/data |
|
259 | 258 | |
|
260 | 259 | ## locking and default file storage for Beaker. Putting this into a ramdisk |
|
261 | 260 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data |
|
262 | 261 | beaker.cache.data_dir = %(here)s/data/cache/beaker_data |
|
263 | 262 | beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock |
|
264 | 263 | |
|
265 | 264 | beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long |
|
266 | 265 | |
|
267 | 266 | beaker.cache.super_short_term.type = memory |
|
268 | 267 | beaker.cache.super_short_term.expire = 10 |
|
269 | 268 | beaker.cache.super_short_term.key_length = 256 |
|
270 | 269 | |
|
271 | 270 | beaker.cache.short_term.type = memory |
|
272 | 271 | beaker.cache.short_term.expire = 60 |
|
273 | 272 | beaker.cache.short_term.key_length = 256 |
|
274 | 273 | |
|
275 | 274 | beaker.cache.long_term.type = memory |
|
276 | 275 | beaker.cache.long_term.expire = 36000 |
|
277 | 276 | beaker.cache.long_term.key_length = 256 |
|
278 | 277 | |
|
279 | 278 | beaker.cache.sql_cache_short.type = memory |
|
280 | 279 | beaker.cache.sql_cache_short.expire = 10 |
|
281 | 280 | beaker.cache.sql_cache_short.key_length = 256 |
|
282 | 281 | |
|
283 | 282 | # default is memory cache, configure only if required |
|
284 | 283 | # using multi-node or multi-worker setup |
|
285 | 284 | #beaker.cache.auth_plugins.type = ext:database |
|
286 | 285 | #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock |
|
287 | 286 | #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode |
|
288 | 287 | #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode |
|
289 | 288 | #beaker.cache.auth_plugins.sa.pool_recycle = 3600 |
|
290 | 289 | #beaker.cache.auth_plugins.sa.pool_size = 10 |
|
291 | 290 | #beaker.cache.auth_plugins.sa.max_overflow = 0 |
|
292 | 291 | |
|
293 | 292 | beaker.cache.repo_cache_long.type = memorylru_base |
|
294 | 293 | beaker.cache.repo_cache_long.max_items = 4096 |
|
295 | 294 | beaker.cache.repo_cache_long.expire = 2592000 |
|
296 | 295 | |
|
297 | 296 | # default is memorylru_base cache, configure only if required |
|
298 | 297 | # using multi-node or multi-worker setup |
|
299 | 298 | #beaker.cache.repo_cache_long.type = ext:memcached |
|
300 | 299 | #beaker.cache.repo_cache_long.url = localhost:11211 |
|
301 | 300 | #beaker.cache.repo_cache_long.expire = 1209600 |
|
302 | 301 | #beaker.cache.repo_cache_long.key_length = 256 |
|
303 | 302 | |
|
304 | 303 | #################################### |
|
305 | 304 | ### BEAKER SESSION #### |
|
306 | 305 | #################################### |
|
307 | 306 | |
|
308 | 307 | ## .session.type is the type of storage used for the session; currently allowed |
|
309 | 308 | ## types are file, ext:memcached, ext:database, and memory (default). |
|
310 | 309 | beaker.session.type = file |
|
311 | 310 | beaker.session.data_dir = %(here)s/data/sessions/data |
|
312 | 311 | |
|
313 | 312 | ## db based session, fast, and allows easy management over logged in users ## |
|
314 | 313 | #beaker.session.type = ext:database |
|
315 | 314 | #beaker.session.table_name = db_session |
|
316 | 315 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
317 | 316 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
318 | 317 | #beaker.session.sa.pool_recycle = 3600 |
|
319 | 318 | #beaker.session.sa.echo = false |
|
320 | 319 | |
|
321 | 320 | beaker.session.key = rhodecode |
|
322 | 321 | beaker.session.secret = production-rc-uytcxaz |
|
323 | 322 | beaker.session.lock_dir = %(here)s/data/sessions/lock |
|
324 | 323 | |
|
325 | 324 | ## Secure encrypted cookie. Requires AES and AES python libraries |
|
326 | 325 | ## you must disable beaker.session.secret to use this |
|
327 | 326 | #beaker.session.encrypt_key = <key_for_encryption> |
|
328 | 327 | #beaker.session.validate_key = <validation_key> |
|
329 | 328 | |
|
330 | 329 | ## sets session as invalid (also logging out the user) if it has not been |
|
331 | 330 | ## accessed for given amount of time in seconds |
|
332 | 331 | beaker.session.timeout = 2592000 |
|
333 | 332 | beaker.session.httponly = true |
|
334 | 333 | #beaker.session.cookie_path = /<your-prefix> |
|
335 | 334 | |
|
336 | 335 | ## uncomment for https secure cookie |
|
337 | 336 | beaker.session.secure = false |
|
338 | 337 | |
|
340 | 339 | ## auto-save the session so that you do not have to call .save() |
|
340 | 339 | beaker.session.auto = false |
|
341 | 340 | |
|
342 | 341 | ## default cookie expiration time in seconds, set to `true` to set expire |
|
343 | 342 | ## at browser close |
|
344 | 343 | #beaker.session.cookie_expires = 3600 |
|
345 | 344 | |
|
346 | 345 | ################################### |
|
347 | 346 | ## SEARCH INDEXING CONFIGURATION ## |
|
348 | 347 | ################################### |
|
349 | 348 | ## Full text search indexer is available in rhodecode-tools under |
|
350 | 349 | ## `rhodecode-tools index` command |
|
351 | 350 | |
|
352 | 351 | # WHOOSH Backend, doesn't require additional services to run |
|
353 | 352 | # it works well with a few dozen repos |
|
354 | 353 | search.module = rhodecode.lib.index.whoosh |
|
355 | 354 | search.location = %(here)s/data/index |
|
356 | 355 | |
|
357 | 356 | |
|
358 | 357 | ################################### |
|
359 | 358 | ## APPENLIGHT CONFIG ## |
|
360 | 359 | ################################### |
|
361 | 360 | |
|
362 | 361 | ## Appenlight is tailored to work with RhodeCode, see |
|
363 | 362 | ## http://appenlight.com for details how to obtain an account |
|
364 | 363 | |
|
365 | 364 | ## appenlight integration enabled |
|
366 | 365 | appenlight = false |
|
367 | 366 | |
|
368 | 367 | appenlight.server_url = https://api.appenlight.com |
|
369 | 368 | appenlight.api_key = YOUR_API_KEY |
|
370 | 369 | #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 |
|
371 | 370 | |
|
372 | 371 | # used for JS client |
|
373 | 372 | appenlight.api_public_key = YOUR_API_PUBLIC_KEY |
|
374 | 373 | |
|
375 | 374 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
376 | 375 | |
|
377 | 376 | ## enables 404 error logging (default False) |
|
378 | 377 | appenlight.report_404 = false |
|
379 | 378 | |
|
381 | 380 | ## time in seconds after which a request is considered slow (default 1) |
|
381 | 380 | appenlight.slow_request_time = 1 |
|
382 | 381 | |
|
383 | 382 | ## record slow requests in application |
|
384 | 383 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
385 | 384 | appenlight.slow_requests = true |
|
386 | 385 | |
|
387 | 386 | ## enable hooking to application loggers |
|
388 | 387 | appenlight.logging = true |
|
389 | 388 | |
|
390 | 389 | ## minimum log level for log capture |
|
391 | 390 | appenlight.logging.level = WARNING |
|
392 | 391 | |
|
393 | 392 | ## send logs only from erroneous/slow requests |
|
394 | 393 | ## (saves API quota for intensive logging) |
|
395 | 394 | appenlight.logging_on_error = false |
|
396 | 395 | |
|
397 | 396 | ## list of additional keywords that should be grabbed from the environ object |
|
398 | 397 | ## can be string with comma separated list of words in lowercase |
|
399 | 398 | ## (by default client will always send following info: |
|
400 | 399 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
401 | 400 | ## start with HTTP*); this list can be extended with additional keywords here |
|
402 | 401 | appenlight.environ_keys_whitelist = |
|
403 | 402 | |
|
404 | 403 | ## list of keywords that should be blanked from request object |
|
405 | 404 | ## can be string with comma separated list of words in lowercase |
|
406 | 405 | ## (by default client will always blank keys that contain following words |
|
407 | 406 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
408 | 407 | ## this list can be extended with additional keywords set here |
|
409 | 408 | appenlight.request_keys_blacklist = |
|
410 | 409 | |
|
411 | 410 | ## list of namespaces that should be ignored when gathering log entries |
|
412 | 411 | ## can be string with comma separated list of namespaces |
|
413 | 412 | ## (by default the client ignores own entries: appenlight_client.client) |
|
414 | 413 | appenlight.log_namespace_blacklist = |
|
415 | 414 | |
|
416 | 415 | |
|
417 | 416 | ################################################################################ |
|
418 | 417 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
419 | 418 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
420 | 419 | ## execute malicious code after an exception is raised. ## |
|
421 | 420 | ################################################################################ |
|
422 | 421 | set debug = false |
|
423 | 422 | |
|
424 | 423 | |
|
425 | 424 | ######################################################### |
|
426 | 425 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
427 | 426 | ######################################################### |
|
428 | 427 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
429 | 428 | sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
430 | 429 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode |
|
431 | 430 | |
|
432 | 431 | # see sqlalchemy docs for other advanced settings |
|
433 | 432 | |
|
434 | 433 | ## print the sql statements to output |
|
435 | 434 | sqlalchemy.db1.echo = false |
|
437 | 436 | ## recycle the connections after this amount of seconds |
|
437 | 436 | sqlalchemy.db1.pool_recycle = 3600 |
|
438 | 437 | sqlalchemy.db1.convert_unicode = true |
|
439 | 438 | |
|
440 | 439 | ## the number of connections to keep open inside the connection pool. |
|
441 | 440 | ## 0 indicates no limit |
|
442 | 441 | #sqlalchemy.db1.pool_size = 5 |
|
443 | 442 | |
|
444 | 443 | ## the number of connections to allow in connection pool "overflow", that is |
|
445 | 444 | ## connections that can be opened above and beyond the pool_size setting, |
|
446 | 445 | ## which defaults to five. |
|
447 | 446 | #sqlalchemy.db1.max_overflow = 10 |
|
448 | 447 | |
|
449 | 448 | |
|
450 | 449 | ################## |
|
451 | 450 | ### VCS CONFIG ### |
|
452 | 451 | ################## |
|
453 | 452 | vcs.server.enable = true |
|
454 | 453 | vcs.server = localhost:9900 |
|
455 | 454 | |
|
456 | 455 | ## Web server connectivity protocol, responsible for web based VCS operations |
|
457 | 456 | ## Available protocols are: |
|
458 | 457 | ## `pyro4` - using pyro4 server |
|
459 | 458 | ## `http` - using http-rpc backend |
|
460 | 459 | #vcs.server.protocol = http |
|
461 | 460 | |
|
462 | 461 | ## Push/Pull operations protocol, available options are: |
|
463 | 462 | ## `pyro4` - using pyro4 server |
|
464 | 463 | ## `rhodecode.lib.middleware.utils.scm_app_http` - Http based, recommended |
|
465 | 464 | ## `vcsserver.scm_app` - internal app (EE only) |
|
466 | 465 | #vcs.scm_app_implementation = rhodecode.lib.middleware.utils.scm_app_http |
|
467 | 466 | |
|
468 | 467 | ## Push/Pull operations hooks protocol, available options are: |
|
469 | 468 | ## `pyro4` - using pyro4 server |
|
470 | 469 | ## `http` - using http-rpc backend |
|
471 | 470 | #vcs.hooks.protocol = http |
|
472 | 471 | |
|
473 | 472 | vcs.server.log_level = info |
|
474 | 473 | ## Start VCSServer with this instance as a subprocess, useful for development |
|
475 | 474 | vcs.start_server = false |
|
476 | 475 | vcs.backends = hg, git, svn |
|
477 | 476 | vcs.connection_timeout = 3600 |
|
478 | 477 | ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
479 | 478 | ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible |
|
480 | 479 | #vcs.svn.compatible_version = pre-1.8-compatible |
|
481 | 480 | |
|
482 | 481 | ################################ |
|
483 | 482 | ### LOGGING CONFIGURATION #### |
|
484 | 483 | ################################ |
|
485 | 484 | [loggers] |
|
486 | 485 | keys = root, routes, rhodecode, sqlalchemy, beaker, pyro4, templates, whoosh_indexer |
|
487 | 486 | |
|
488 | 487 | [handlers] |
|
489 | 488 | keys = console, console_sql |
|
490 | 489 | |
|
491 | 490 | [formatters] |
|
492 | 491 | keys = generic, color_formatter, color_formatter_sql |
|
493 | 492 | |
|
494 | 493 | ############# |
|
495 | 494 | ## LOGGERS ## |
|
496 | 495 | ############# |
|
497 | 496 | [logger_root] |
|
498 | 497 | level = NOTSET |
|
499 | 498 | handlers = console |
|
500 | 499 | |
|
501 | 500 | [logger_routes] |
|
502 | 501 | level = DEBUG |
|
503 | 502 | handlers = |
|
504 | 503 | qualname = routes.middleware |
|
505 | 504 | ## "level = DEBUG" logs the route matched and routing variables. |
|
506 | 505 | propagate = 1 |
|
507 | 506 | |
|
508 | 507 | [logger_beaker] |
|
509 | 508 | level = DEBUG |
|
510 | 509 | handlers = |
|
511 | 510 | qualname = beaker.container |
|
512 | 511 | propagate = 1 |
|
513 | 512 | |
|
514 | 513 | [logger_pyro4] |
|
515 | 514 | level = DEBUG |
|
516 | 515 | handlers = |
|
517 | 516 | qualname = Pyro4 |
|
518 | 517 | propagate = 1 |
|
519 | 518 | |
|
520 | 519 | [logger_templates] |
|
521 | 520 | level = INFO |
|
522 | 521 | handlers = |
|
523 | 522 | qualname = pylons.templating |
|
524 | 523 | propagate = 1 |
|
525 | 524 | |
|
526 | 525 | [logger_rhodecode] |
|
527 | 526 | level = DEBUG |
|
528 | 527 | handlers = |
|
529 | 528 | qualname = rhodecode |
|
530 | 529 | propagate = 1 |
|
531 | 530 | |
|
532 | 531 | [logger_sqlalchemy] |
|
533 | 532 | level = INFO |
|
534 | 533 | handlers = console_sql |
|
535 | 534 | qualname = sqlalchemy.engine |
|
536 | 535 | propagate = 0 |
|
537 | 536 | |
|
538 | 537 | [logger_whoosh_indexer] |
|
539 | 538 | level = DEBUG |
|
540 | 539 | handlers = |
|
541 | 540 | qualname = whoosh_indexer |
|
542 | 541 | propagate = 1 |
|
543 | 542 | |
|
544 | 543 | ############## |
|
545 | 544 | ## HANDLERS ## |
|
546 | 545 | ############## |
|
547 | 546 | |
|
548 | 547 | [handler_console] |
|
549 | 548 | class = StreamHandler |
|
550 | 549 | args = (sys.stderr,) |
|
551 | 550 | level = INFO |
|
552 | 551 | formatter = generic |
|
553 | 552 | |
|
554 | 553 | [handler_console_sql] |
|
555 | 554 | class = StreamHandler |
|
556 | 555 | args = (sys.stderr,) |
|
557 | 556 | level = WARN |
|
558 | 557 | formatter = generic |
|
559 | 558 | |
|
560 | 559 | ################ |
|
561 | 560 | ## FORMATTERS ## |
|
562 | 561 | ################ |
|
563 | 562 | |
|
564 | 563 | [formatter_generic] |
|
565 | 564 | class = rhodecode.lib.logging_formatter.Pyro4AwareFormatter |
|
566 | 565 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
567 | 566 | datefmt = %Y-%m-%d %H:%M:%S |
|
568 | 567 | |
|
569 | 568 | [formatter_color_formatter] |
|
570 | 569 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
571 | 570 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
572 | 571 | datefmt = %Y-%m-%d %H:%M:%S |
|
573 | 572 | |
|
574 | 573 | [formatter_color_formatter_sql] |
|
575 | 574 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
576 | 575 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
577 | 576 | datefmt = %Y-%m-%d %H:%M:%S |
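
The [loggers], [handlers] and [formatters] sections above follow the standard Python logging.config ini layout. As a rough, hedged sketch of how such a layout is consumed (the 'production.ini' path below is only an illustrative placeholder, not taken from this file):

import logging
import logging.config

# Load the [loggers]/[handlers]/[formatters] sections from the ini file.
logging.config.fileConfig(
    'production.ini', disable_existing_loggers=False)

# After that, named loggers such as 'rhodecode' or 'sqlalchemy.engine'
# follow the levels and handlers configured above.
log = logging.getLogger('rhodecode')
log.debug('logging configured from the ini file')
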
@@ -1,315 +1,351 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Pylons middleware initialization |
|
23 | 23 | """ |
|
24 | 24 | import logging |
|
25 | 25 | |
|
26 | 26 | from paste.registry import RegistryManager |
|
27 | 27 | from paste.gzipper import make_gzip_middleware |
|
28 | from pylons.middleware import ErrorHandler, StatusCodeRedirect | |
|
29 | 28 | from pylons.wsgiapp import PylonsApp |
|
30 | 29 | from pyramid.authorization import ACLAuthorizationPolicy |
|
31 | 30 | from pyramid.config import Configurator |
|
32 | 31 | from pyramid.static import static_view |
|
33 | 32 | from pyramid.settings import asbool, aslist |
|
34 | 33 | from pyramid.wsgi import wsgiapp |
|
34 | from pyramid.httpexceptions import HTTPError | |
|
35 | import pyramid.httpexceptions as httpexceptions | |
|
36 | from pyramid.renderers import render_to_response | |
|
35 | 37 | from routes.middleware import RoutesMiddleware |
|
36 | 38 | import routes.util |
|
37 | 39 | |
|
38 | 40 | import rhodecode |
|
39 | 41 | from rhodecode.config import patches |
|
40 | 42 | from rhodecode.config.environment import ( |
|
41 | 43 | load_environment, load_pyramid_environment) |
|
42 | 44 | from rhodecode.lib.middleware import csrf |
|
43 | 45 | from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled |
|
44 | 46 | from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper |
|
45 | 47 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
46 | 48 | from rhodecode.lib.middleware.vcs import VCSMiddleware |
|
47 | 49 | from rhodecode.lib.plugins.utils import register_rhodecode_plugin |
|
48 | 50 | |
|
49 | 51 | |
|
50 | 52 | log = logging.getLogger(__name__) |
|
51 | 53 | |
|
52 | 54 | |
|
53 | 55 | def make_app(global_conf, full_stack=True, static_files=True, **app_conf): |
|
54 | 56 | """Create a Pylons WSGI application and return it |
|
55 | 57 | |
|
56 | 58 | ``global_conf`` |
|
57 | 59 | The inherited configuration for this application. Normally from |
|
58 | 60 | the [DEFAULT] section of the Paste ini file. |
|
59 | 61 | |
|
60 | 62 | ``full_stack`` |
|
61 | 63 | Whether or not this application provides a full WSGI stack (by |
|
62 | 64 | default, meaning it handles its own exceptions and errors). |
|
63 | 65 | Disable full_stack when this application is "managed" by |
|
64 | 66 | another WSGI middleware. |
|
65 | 67 | |
|
66 | 68 | ``app_conf`` |
|
67 | 69 | The application's local configuration. Normally specified in |
|
68 | 70 | the [app:<name>] section of the Paste ini file (where <name> |
|
69 | 71 | defaults to main). |
|
70 | 72 | |
|
71 | 73 | """ |
|
72 | 74 | # Apply compatibility patches |
|
73 | 75 | patches.kombu_1_5_1_python_2_7_11() |
|
74 | 76 | patches.inspect_getargspec() |
|
75 | 77 | |
|
76 | 78 | # Configure the Pylons environment |
|
77 | 79 | config = load_environment(global_conf, app_conf) |
|
78 | 80 | |
|
79 | 81 | # The Pylons WSGI app |
|
80 | 82 | app = PylonsApp(config=config) |
|
81 | 83 | if rhodecode.is_test: |
|
82 | 84 | app = csrf.CSRFDetector(app) |
|
83 | 85 | |
|
84 | 86 | expected_origin = config.get('expected_origin') |
|
85 | 87 | if expected_origin: |
|
86 | 88 | # The API can be accessed from other Origins. |
|
87 | 89 | app = csrf.OriginChecker(app, expected_origin, |
|
88 | 90 | skip_urls=[routes.util.url_for('api')]) |
|
89 | 91 | |
|
90 | # Add RoutesMiddleware. Currently we have two instances in the stack. This | |
|
91 | # is the lower one to make the StatusCodeRedirect middleware happy. | |
|
92 | # TODO: johbo: This is not optimal, search for a better solution. | |
|
93 | app = RoutesMiddleware(app, config['routes.map']) | |
|
94 | ||
|
95 | # CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares) | |
|
96 | if asbool(config['pdebug']): | |
|
97 | from rhodecode.lib.profiler import ProfilingMiddleware | |
|
98 | app = ProfilingMiddleware(app) | |
|
99 | ||
|
100 | # Protect from VCS Server error related pages when server is not available | |
|
101 | vcs_server_enabled = asbool(config.get('vcs.server.enable', 'true')) | |
|
102 | if not vcs_server_enabled: | |
|
103 | app = DisableVCSPagesWrapper(app) | |
|
104 | 92 | |
|
105 | 93 | if asbool(full_stack): |
|
106 | 94 | |
|
107 | 95 | # Appenlight monitoring and error handler |
|
108 | 96 | app, appenlight_client = wrap_in_appenlight_if_enabled(app, config) |
|
109 | 97 | |
|
110 | # Handle Python exceptions | |
|
111 | app = ErrorHandler(app, global_conf, **config['pylons.errorware']) | |
|
112 | ||
|
113 | 98 | # we want our low level middleware to get to the request ASAP. We don't |
|
114 | 99 | # need any pylons stack middleware in front of them
|
115 | 100 | app = VCSMiddleware(app, config, appenlight_client) |
|
116 | # Display error documents for 401, 403, 404 status codes (and | |
|
117 | # 500 when debug is disabled) | |
|
118 | if asbool(config['debug']): | |
|
119 | app = StatusCodeRedirect(app) | |
|
120 | else: | |
|
121 | app = StatusCodeRedirect(app, [400, 401, 403, 404, 500]) | |
|
122 | 101 | |
|
123 | 102 | # enable https redirects based on HTTP_X_URL_SCHEME set by proxy |
|
124 | 103 | app = HttpsFixup(app, config) |
|
125 | 104 | |
|
126 | 105 | # Establish the Registry for this application |
|
127 | 106 | app = RegistryManager(app) |
|
128 | 107 | |
|
129 | 108 | app.config = config |
|
130 | 109 | |
|
131 | 110 | return app |
|
132 | 111 | |
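
As the docstring notes, global_conf and app_conf come from the [DEFAULT] and [app:<name>] sections of the Paste ini file. A hedged sketch of resolving such a factory through Paste Deploy (the ini path is a placeholder, not part of this changeset):

from paste.deploy import loadapp

# loadapp reads the ini file, finds the application factory configured in
# [app:main] and calls it with the global_conf/app_conf split described above.
wsgi_app = loadapp('config:production.ini', relative_to='.')
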
|
133 | 112 | |
|
134 | 113 | def make_pyramid_app(global_config, **settings): |
|
135 | 114 | """ |
|
136 | 115 | Constructs the WSGI application based on Pyramid and wraps the Pylons based |
|
137 | 116 | application. |
|
138 | 117 | |
|
139 | 118 | Specials: |
|
140 | 119 | |
|
141 | 120 | * We migrate from Pylons to Pyramid. While doing this, we keep both |
|
142 | 121 | frameworks functional. This involves moving some WSGI middlewares around |
|
143 | 122 | and providing access to some data internals, so that the old code is |
|
144 | 123 | still functional. |
|
145 | 124 | |
|
146 | 125 | * The application can also be integrated like a plugin via the call to |
|
147 | 126 | `includeme`. This is accompanied by the other utility functions which
|
148 | 127 | are called. Changing this should be done with great care to not break |
|
149 | 128 | cases when these fragments are assembled from another place. |
|
150 | 129 | |
|
151 | 130 | """ |
|
152 | 131 | # The edition string should be available in pylons too, so we add it here |
|
153 | 132 | # before copying the settings. |
|
154 | 133 | settings.setdefault('rhodecode.edition', 'Community Edition') |
|
155 | 134 | |
|
156 | 135 | # As long as our Pylons application does expect "unprepared" settings, make |
|
157 | 136 | # sure that we keep an unmodified copy. This avoids unintentional change of |
|
158 | 137 | # behavior in the old application. |
|
159 | 138 | settings_pylons = settings.copy() |
|
160 | 139 | |
|
161 | 140 | sanitize_settings_and_apply_defaults(settings) |
|
162 | 141 | config = Configurator(settings=settings) |
|
163 | 142 | add_pylons_compat_data(config.registry, global_config, settings_pylons) |
|
164 | 143 | |
|
165 | 144 | load_pyramid_environment(global_config, settings) |
|
166 | 145 | |
|
167 | 146 | includeme(config) |
|
168 | 147 | includeme_last(config) |
|
169 | 148 | pyramid_app = config.make_wsgi_app() |
|
170 | 149 | pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config) |
|
171 | 150 | return pyramid_app |
|
172 | 151 | |
|
173 | 152 | |
|
174 | 153 | def add_pylons_compat_data(registry, global_config, settings): |
|
175 | 154 | """ |
|
176 | 155 | Attach data to the registry to support the Pylons integration. |
|
177 | 156 | """ |
|
178 | 157 | registry._pylons_compat_global_config = global_config |
|
179 | 158 | registry._pylons_compat_settings = settings |
|
180 | 159 | |
|
181 | 160 | |
|
161 | def error_handler(exc, request): | |
|
162 | # TODO: dan: replace the old pylons error controller with this | |
|
163 | from rhodecode.model.settings import SettingsModel | |
|
164 | from rhodecode.lib.utils2 import AttributeDict | |
|
165 | ||
|
166 | try: | |
|
167 | rc_config = SettingsModel().get_all_settings() | |
|
168 | except Exception: | |
|
169 | log.exception('failed to fetch settings') | |
|
170 | rc_config = {} | |
|
171 | ||
|
172 | c = AttributeDict() | |
|
173 | c.error_message = exc.status | |
|
174 | c.error_explanation = exc.explanation or str(exc) | |
|
175 | c.visual = AttributeDict() | |
|
176 | ||
|
177 | c.visual.rhodecode_support_url = ( | |
|
178 | request.registry.settings.get('rhodecode_support_url') or | |
|
179 | request.route_url('rhodecode_support') | |
|
180 | ) | |
|
181 | c.redirect_time = 0 | |
|
182 | c.rhodecode_name = rc_config.get('rhodecode_title') | |
|
183 | if not c.rhodecode_name: | |
|
184 | c.rhodecode_name = 'Rhodecode' | |
|
185 | ||
|
186 | response = render_to_response( | |
|
187 | '/errors/error_document.html', {'c': c}, request=request) | |
|
188 | return response | |
|
189 | ||
|
190 | ||
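
The new error_handler above renders an error page from a pyramid exception object; later in this changeset, pylons_app_with_error_handler builds such objects from numeric statuses via pyramid.httpexceptions.status_map. A small sketch of that lookup (404 is just an example value):

import pyramid.httpexceptions as httpexceptions

# status_map maps an integer status code to the matching exception class.
exc_class = httpexceptions.status_map[404]      # HTTPNotFound
exc = exc_class('404 Not Found')                # what error_handler receives
# error_handler then uses exc.status and exc.explanation to fill the template.
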
|
182 | 191 | def includeme(config): |
|
183 | 192 | settings = config.registry.settings |
|
184 | 193 | |
|
185 | 194 | # Includes which are required. The application would fail without them. |
|
186 | 195 | config.include('pyramid_mako') |
|
187 | 196 | config.include('pyramid_beaker') |
|
188 | 197 | config.include('rhodecode.authentication') |
|
189 | 198 | config.include('rhodecode.login') |
|
190 | 199 | config.include('rhodecode.tweens') |
|
191 | 200 | config.include('rhodecode.api') |
|
201 | config.add_route( | |
|
202 | 'rhodecode_support', 'https://rhodecode.com/help/', static=True) | |
|
192 | 203 | |
|
193 | 204 | # Set the authorization policy. |
|
194 | 205 | authz_policy = ACLAuthorizationPolicy() |
|
195 | 206 | config.set_authorization_policy(authz_policy) |
|
196 | 207 | |
|
197 | 208 | # Set the default renderer for HTML templates to mako. |
|
198 | 209 | config.add_mako_renderer('.html') |
|
199 | 210 | |
|
200 | 211 | # plugin information |
|
201 | 212 | config.registry.rhodecode_plugins = {} |
|
202 | 213 | |
|
203 | 214 | config.add_directive( |
|
204 | 215 | 'register_rhodecode_plugin', register_rhodecode_plugin) |
|
205 | 216 | # include RhodeCode plugins |
|
206 | 217 | includes = aslist(settings.get('rhodecode.includes', [])) |
|
207 | 218 | for inc in includes: |
|
208 | 219 | config.include(inc) |
|
209 | 220 | |
|
221 | pylons_app = make_app( | |
|
222 | config.registry._pylons_compat_global_config, | |
|
223 | **config.registry._pylons_compat_settings) | |
|
224 | config.registry._pylons_compat_config = pylons_app.config | |
|
225 | ||
|
226 | pylons_app_as_view = wsgiapp(pylons_app) | |
|
227 | ||
|
228 | # Protect from VCS Server error related pages when server is not available | |
|
229 | vcs_server_enabled = asbool(settings.get('vcs.server.enable', 'true')) | |
|
230 | if not vcs_server_enabled: | |
|
231 | pylons_app_as_view = DisableVCSPagesWrapper(pylons_app_as_view) | |
|
232 | ||
|
233 | ||
|
234 | def pylons_app_with_error_handler(context, request): | |
|
235 | """ | |
|
236 | Handle exceptions from rc pylons app: | |
|
237 | ||
|
238 | - old webob type exceptions get converted to pyramid exceptions | |
|
239 | - pyramid exceptions are passed to the error handler view | |
|
240 | """ | |
|
241 | try: | |
|
242 | response = pylons_app_as_view(context, request) | |
|
243 | if 400 <= response.status_int <= 599: # webob type error responses | |
|
244 | ExcClass = httpexceptions.status_map[response.status_int] | |
|
245 | return error_handler(ExcClass(response.status), request) | |
|
246 | except HTTPError as e: # pyramid type exceptions | |
|
247 | return error_handler(e, request) | |
|
248 | ||
|
249 | return response | |
|
250 | ||
|
210 | 251 | # This is the glue which allows us to migrate in chunks. By registering the |
|
211 | 252 | # pylons based application as the "Not Found" view in Pyramid, we will |
|
212 | 253 | # fallback to the old application each time the new one does not yet know |
|
213 | 254 | # how to handle a request. |
|
214 | pylons_app = make_app( | |
|
215 | config.registry._pylons_compat_global_config, | |
|
216 | **config.registry._pylons_compat_settings) | |
|
217 | config.registry._pylons_compat_config = pylons_app.config | |
|
218 | pylons_app_as_view = wsgiapp(pylons_app) | |
|
219 | config.add_notfound_view(pylons_app_as_view) | |
|
255 | config.add_notfound_view(pylons_app_with_error_handler) | |
|
220 | 256 | |
|
257 | config.add_view(error_handler, context=HTTPError) # exceptions in rc pyramid | |
|
221 | 258 | |
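
The "glue" comment above is the heart of the migration strategy: any request Pyramid cannot route falls through to the wrapped Pylons application via the Not Found view. A stripped-down illustration of the same pattern, with a dummy WSGI app standing in for the Pylons application (all names here are illustrative):

from pyramid.config import Configurator
from pyramid.wsgi import wsgiapp

def legacy_wsgi_app(environ, start_response):
    # stand-in for the wrapped Pylons application
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'handled by the legacy application\n']

config = Configurator()
# Requests the Pyramid app does not know about are delegated to the old app.
config.add_notfound_view(wsgiapp(legacy_wsgi_app))
app = config.make_wsgi_app()
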
|
222 | 259 | def includeme_last(config): |
|
223 | 260 | """ |
|
224 | 261 | The static file catchall needs to be last in the view configuration. |
|
225 | 262 | """ |
|
226 | 263 | settings = config.registry.settings |
|
227 | 264 | |
|
228 | 265 | # Note: johbo: I would prefer to register a prefix for static files at some |
|
229 | 266 | # point, e.g. move them under '_static/'. This would fully avoid name

230 | 267 | # clashes with repository names. Imagine someone calling his repo

231 | 268 | # "css" ;-) Also having an external web server to serve out the static
|
232 | 269 | # files seems to be easier to set up if they have a common prefix. |
|
233 | 270 | # |
|
234 | 271 | # Example: config.add_static_view('_static', path='rhodecode:public') |
|
235 | 272 | # |
|
236 | 273 | # It might be an option to register both paths for a while and then migrate |
|
237 | 274 | # over to the new location. |
|
238 | 275 | |
|
239 | 276 | # Serving static files with a catchall. |
|
240 | 277 | if settings['static_files']: |
|
241 | 278 | config.add_route('catchall_static', '/*subpath') |
|
242 | 279 | config.add_view( |
|
243 | 280 | static_view('rhodecode:public'), route_name='catchall_static') |
|
244 | 281 | |
|
245 | 282 | |
|
246 | 283 | def wrap_app_in_wsgi_middlewares(pyramid_app, config): |
|
247 | 284 | """ |
|
248 | 285 | Apply outer WSGI middlewares around the application. |
|
249 | 286 | |
|
250 | 287 | Part of this has been moved up from the Pylons layer, so that the |
|
251 | 288 | data is also available if old Pylons code is hit through an already ported |
|
252 | 289 | view. |
|
253 | 290 | """ |
|
254 | 291 | settings = config.registry.settings |
|
255 | 292 | |
|
256 | # Add RoutesMiddleware. Currently we have two instances in the stack. This | |
|
257 | # is the upper one to support the pylons compatibility tween during | |
|
293 | # Add RoutesMiddleware to support the pylons compatibility tween during | |
|
258 | 294 | # migration to pyramid. |
|
259 | 295 | pyramid_app = RoutesMiddleware( |
|
260 | 296 | pyramid_app, config.registry._pylons_compat_config['routes.map']) |
|
261 | 297 | |
|
262 | 298 | # TODO: johbo: Don't really see why we enable the gzip middleware when |
|
263 | 299 | # serving static files, might be something that should have its own setting |
|
264 | 300 | # as well? |
|
265 | 301 | if settings['static_files']: |
|
266 | 302 | pyramid_app = make_gzip_middleware( |
|
267 | 303 | pyramid_app, settings, compress_level=1) |
|
268 | 304 | |
|
269 | 305 | return pyramid_app |
|
270 | 306 | |
|
271 | 307 | |
|
272 | 308 | def sanitize_settings_and_apply_defaults(settings): |
|
273 | 309 | """ |
|
274 | 310 | Applies settings defaults and does all type conversion. |
|
275 | 311 | |
|
276 | 312 | We would move all settings parsing and preparation into this place, so that |
|
277 | 313 | we have only one place left which deals with this part. The remaining parts |
|
278 | 314 | of the application would start to rely fully on well prepared settings. |
|
279 | 315 | |
|
280 | 316 | This piece would later be split up per topic to avoid a big fat monster |
|
281 | 317 | function. |
|
282 | 318 | """ |
|
283 | 319 | |
|
284 | 320 | # Pyramid's mako renderer has to search in the templates folder so that the |
|
285 | 321 | # old templates still work. Ported and new templates are expected to use |
|
286 | 322 | # real asset specifications for the includes. |
|
287 | 323 | mako_directories = settings.setdefault('mako.directories', [ |
|
288 | 324 | # Base templates of the original Pylons application |
|
289 | 325 | 'rhodecode:templates', |
|
290 | 326 | ]) |
|
291 | 327 | log.debug( |
|
292 | 328 | "Using the following Mako template directories: %s", |
|
293 | 329 | mako_directories) |
|
294 | 330 | |
|
295 | 331 | # Default includes, possible to change as a user |
|
296 | 332 | pyramid_includes = settings.setdefault('pyramid.includes', [ |
|
297 | 333 | 'rhodecode.lib.middleware.request_wrapper', |
|
298 | 334 | ]) |
|
299 | 335 | log.debug( |
|
300 | 336 | "Using the following pyramid.includes: %s", |
|
301 | 337 | pyramid_includes) |
|
302 | 338 | |
|
303 | 339 | # TODO: johbo: Re-think this, usually the call to config.include |
|
304 | 340 | # should allow passing in a prefix.
|
305 | 341 | settings.setdefault('rhodecode.api.url', '/_admin/api') |
|
306 | 342 | |
|
307 | 343 | _bool_setting(settings, 'vcs.server.enable', 'true') |
|
308 | 344 | _bool_setting(settings, 'static_files', 'true') |
|
309 | 345 | _bool_setting(settings, 'is_test', 'false') |
|
310 | 346 | |
|
311 | 347 | return settings |
|
312 | 348 | |
|
313 | 349 | |
|
314 | 350 | def _bool_setting(settings, name, default): |
|
315 | 351 | settings[name] = asbool(settings.get(name, default)) |
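
_bool_setting above leans on Pyramid's asbool, which turns the usual ini truthy/falsy strings into real booleans in place. A quick sketch of that conversion (values invented for illustration):

from pyramid.settings import asbool

settings = {'vcs.server.enable': 'false'}   # raw string as read from the ini
settings['vcs.server.enable'] = asbool(settings.get('vcs.server.enable', 'true'))
assert settings['vcs.server.enable'] is False
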
@@ -1,1149 +1,1145 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Routes configuration |
|
23 | 23 | |
|
24 | 24 | The more specific and detailed routes should be defined first so they |
|
25 | 25 | may take precedence over the more generic routes. For more information
|
26 | 26 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
27 | 27 | |
|
28 | 28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py |
|
29 | 29 | and the _route_name variable, which uses some of the route names stored here to do redirects.
|
30 | 30 | """ |
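
Because Routes matches in connection order, the advice above about defining specific routes first matters in practice. A toy, hedged sketch (names invented for illustration):

from routes import Mapper

toy_map = Mapper()
toy_map.connect('special', '/repos/new', controller='repos', action='new')
toy_map.connect('generic', '/repos/{repo_name}', controller='repos', action='show')
toy_map.create_regs(['repos'])

# '/repos/new' hits the 'special' route because it was connected first,
# even though '/repos/{repo_name}' would also match that path.
result = toy_map.match('/repos/new')
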
|
31 | 31 | import os |
|
32 | 32 | import re |
|
33 | 33 | from routes import Mapper |
|
34 | 34 | |
|
35 | 35 | from rhodecode.config import routing_links |
|
36 | 36 | |
|
37 | 37 | # prefix for non-repository related links; it needs to be prefixed with `/`
|
38 | 38 | ADMIN_PREFIX = '/_admin' |
|
39 | 39 | |
|
40 | 40 | # Default requirements for URL parts |
|
41 | 41 | URL_NAME_REQUIREMENTS = { |
|
42 | 42 | # group name can have a slash in them, but they must not end with a slash |
|
43 | 43 | 'group_name': r'.*?[^/]', |
|
44 | 44 | # repo names can have a slash in them, but they must not end with a slash |
|
45 | 45 | 'repo_name': r'.*?[^/]', |
|
46 | 46 | # file path eats up everything at the end |
|
47 | 47 | 'f_path': r'.*', |
|
48 | 48 | # reference types |
|
49 | 49 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', |
|
50 | 50 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', |
|
51 | 51 | } |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | class JSRoutesMapper(Mapper): |
|
55 | 55 | """ |
|
56 | 56 | Wrapper for routes.Mapper to make pyroutes-compatible URL definitions
|
57 | 57 | """ |
|
58 | 58 | _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') |
|
59 | 59 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') |
|
60 | 60 | def __init__(self, *args, **kw): |
|
61 | 61 | super(JSRoutesMapper, self).__init__(*args, **kw) |
|
62 | 62 | self._jsroutes = [] |
|
63 | 63 | |
|
64 | 64 | def connect(self, *args, **kw): |
|
65 | 65 | """ |
|
66 | 66 | Wrapper for connect to take an extra argument jsroute=True |
|
67 | 67 | |
|
68 | 68 | :param jsroute: boolean, if True will add the route to the pyroutes list |
|
69 | 69 | """ |
|
70 | 70 | if kw.pop('jsroute', False): |
|
71 | 71 | if not self._named_route_regex.match(args[0]): |
|
72 | 72 | raise Exception('only named routes can be added to pyroutes') |
|
73 | 73 | self._jsroutes.append(args[0]) |
|
74 | 74 | |
|
75 | 75 | super(JSRoutesMapper, self).connect(*args, **kw) |
|
76 | 76 | |
|
77 | 77 | def _extract_route_information(self, route): |
|
78 | 78 | """ |
|
79 | 79 | Convert a route into tuple(name, path, args), eg: |
|
80 | 80 | ('user_profile', '/profile/%(username)s', ['username']) |
|
81 | 81 | """ |
|
82 | 82 | routepath = route.routepath |
|
83 | 83 | def replace(matchobj): |
|
84 | 84 | if matchobj.group(1): |
|
85 | 85 | return "%%(%s)s" % matchobj.group(1).split(':')[0] |
|
86 | 86 | else: |
|
87 | 87 | return "%%(%s)s" % matchobj.group(2) |
|
88 | 88 | |
|
89 | 89 | routepath = self._argument_prog.sub(replace, routepath) |
|
90 | 90 | return ( |
|
91 | 91 | route.name, |
|
92 | 92 | routepath, |
|
93 | 93 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) |
|
94 | 94 | for arg in self._argument_prog.findall(route.routepath)] |
|
95 | 95 | ) |
|
96 | 96 | |
|
97 | 97 | def jsroutes(self): |
|
98 | 98 | """ |
|
99 | 99 | Return a list of pyroutes.js compatible routes |
|
100 | 100 | """ |
|
101 | 101 | for route_name in self._jsroutes: |
|
102 | 102 | yield self._extract_route_information(self._routenames[route_name]) |
|
103 | 103 | |
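
To make the pyroutes export above concrete, a hedged sketch of how a route flagged with jsroute=True comes back out of jsroutes() (this toy mapper omits the controller-scanning arguments used by make_map below):

# Uses the JSRoutesMapper class defined above.
demo_map = JSRoutesMapper()
demo_map.connect('user_profile', '/_profiles/{username}',
                 controller='users', action='user_profile', jsroute=True)

# jsroutes() yields pyroutes.js compatible tuples, e.g.
# ('user_profile', '/_profiles/%(username)s', ['username'])
routes_for_js = list(demo_map.jsroutes())
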
|
104 | 104 | |
|
105 | 105 | def make_map(config): |
|
106 | 106 | """Create, configure and return the routes Mapper""" |
|
107 | 107 | rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], |
|
108 | 108 | always_scan=config['debug']) |
|
109 | 109 | rmap.minimization = False |
|
110 | 110 | rmap.explicit = False |
|
111 | 111 | |
|
112 | 112 | from rhodecode.lib.utils2 import str2bool |
|
113 | 113 | from rhodecode.model import repo, repo_group |
|
114 | 114 | |
|
115 | 115 | def check_repo(environ, match_dict): |
|
116 | 116 | """ |
|
117 | 117 | check for valid repository for proper 404 handling |
|
118 | 118 | |
|
119 | 119 | :param environ: |
|
120 | 120 | :param match_dict: |
|
121 | 121 | """ |
|
122 | 122 | repo_name = match_dict.get('repo_name') |
|
123 | 123 | |
|
124 | 124 | if match_dict.get('f_path'): |
|
125 | 125 | # fix for multiple initial slashes that causes errors |
|
126 | 126 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
127 | 127 | repo_model = repo.RepoModel() |
|
128 | 128 | by_name_match = repo_model.get_by_repo_name(repo_name) |
|
129 | 129 | # if we match quickly from database, short circuit the operation, |
|
130 | 130 | # and validate repo based on the type. |
|
131 | 131 | if by_name_match: |
|
132 | 132 | return True |
|
133 | 133 | |
|
134 | 134 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
135 | 135 | if by_id_match: |
|
136 | 136 | repo_name = by_id_match.repo_name |
|
137 | 137 | match_dict['repo_name'] = repo_name |
|
138 | 138 | return True |
|
139 | 139 | |
|
140 | 140 | return False |
|
141 | 141 | |
|
142 | 142 | def check_group(environ, match_dict): |
|
143 | 143 | """ |
|
144 | 144 | check for valid repository group path for proper 404 handling |
|
145 | 145 | |
|
146 | 146 | :param environ: |
|
147 | 147 | :param match_dict: |
|
148 | 148 | """ |
|
149 | 149 | repo_group_name = match_dict.get('group_name') |
|
150 | 150 | repo_group_model = repo_group.RepoGroupModel() |
|
151 | 151 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) |
|
152 | 152 | if by_name_match: |
|
153 | 153 | return True |
|
154 | 154 | |
|
155 | 155 | return False |
|
156 | 156 | |
|
157 | 157 | def check_user_group(environ, match_dict): |
|
158 | 158 | """ |
|
159 | 159 | check for valid user group for proper 404 handling |
|
160 | 160 | |
|
161 | 161 | :param environ: |
|
162 | 162 | :param match_dict: |
|
163 | 163 | """ |
|
164 | 164 | return True |
|
165 | 165 | |
|
166 | 166 | def check_int(environ, match_dict): |
|
167 | 167 | return match_dict.get('id').isdigit() |
|
168 | 168 | |
|
169 | # The ErrorController route (handles 404/500 error pages); it should | |
|
170 | # likely stay at the top, ensuring it can always be resolved | |
|
171 | rmap.connect('/error/{action}', controller='error') | |
|
172 | rmap.connect('/error/{action}/{id}', controller='error') | |
|
173 | 169 | |
|
174 | 170 | #========================================================================== |
|
175 | 171 | # CUSTOM ROUTES HERE |
|
176 | 172 | #========================================================================== |
|
177 | 173 | |
|
178 | 174 | # MAIN PAGE |
|
179 | 175 | rmap.connect('home', '/', controller='home', action='index', jsroute=True) |
|
180 | 176 | rmap.connect('goto_switcher_data', '/_goto_data', controller='home', |
|
181 | 177 | action='goto_switcher_data') |
|
182 | 178 | rmap.connect('repo_list_data', '/_repos', controller='home', |
|
183 | 179 | action='repo_list_data') |
|
184 | 180 | |
|
185 | 181 | rmap.connect('user_autocomplete_data', '/_users', controller='home', |
|
186 | 182 | action='user_autocomplete_data', jsroute=True) |
|
187 | 183 | rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home', |
|
188 | 184 | action='user_group_autocomplete_data') |
|
189 | 185 | |
|
190 | 186 | rmap.connect( |
|
191 | 187 | 'user_profile', '/_profiles/{username}', controller='users', |
|
192 | 188 | action='user_profile') |
|
193 | 189 | |
|
194 | 190 | # TODO: johbo: Static links, to be replaced by our redirection mechanism |
|
195 | 191 | rmap.connect('rst_help', |
|
196 | 192 | 'http://docutils.sourceforge.net/docs/user/rst/quickref.html', |
|
197 | 193 | _static=True) |
|
198 | 194 | rmap.connect('markdown_help', |
|
199 | 195 | 'http://daringfireball.net/projects/markdown/syntax', |
|
200 | 196 | _static=True) |
|
201 | 197 | rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True) |
|
202 | 198 | rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True) |
|
203 | 199 | rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True) |
|
204 | 200 | # TODO: anderson - making this a static link since redirect won't play |
|
205 | 201 | # nice with POST requests |
|
206 | 202 | rmap.connect('enterprise_license_convert_from_old', |
|
207 | 203 | 'https://rhodecode.com/u/license-upgrade', |
|
208 | 204 | _static=True) |
|
209 | 205 | |
|
210 | 206 | routing_links.connect_redirection_links(rmap) |
|
211 | 207 | |
|
212 | 208 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') |
|
213 | 209 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') |
|
214 | 210 | |
|
215 | 211 | # ADMIN REPOSITORY ROUTES |
|
216 | 212 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
217 | 213 | controller='admin/repos') as m: |
|
218 | 214 | m.connect('repos', '/repos', |
|
219 | 215 | action='create', conditions={'method': ['POST']}) |
|
220 | 216 | m.connect('repos', '/repos', |
|
221 | 217 | action='index', conditions={'method': ['GET']}) |
|
222 | 218 | m.connect('new_repo', '/create_repository', jsroute=True, |
|
223 | 219 | action='create_repository', conditions={'method': ['GET']}) |
|
224 | 220 | m.connect('/repos/{repo_name}', |
|
225 | 221 | action='update', conditions={'method': ['PUT'], |
|
226 | 222 | 'function': check_repo}, |
|
227 | 223 | requirements=URL_NAME_REQUIREMENTS) |
|
228 | 224 | m.connect('delete_repo', '/repos/{repo_name}', |
|
229 | 225 | action='delete', conditions={'method': ['DELETE']}, |
|
230 | 226 | requirements=URL_NAME_REQUIREMENTS) |
|
231 | 227 | m.connect('repo', '/repos/{repo_name}', |
|
232 | 228 | action='show', conditions={'method': ['GET'], |
|
233 | 229 | 'function': check_repo}, |
|
234 | 230 | requirements=URL_NAME_REQUIREMENTS) |
|
235 | 231 | |
|
236 | 232 | # ADMIN REPOSITORY GROUPS ROUTES |
|
237 | 233 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
238 | 234 | controller='admin/repo_groups') as m: |
|
239 | 235 | m.connect('repo_groups', '/repo_groups', |
|
240 | 236 | action='create', conditions={'method': ['POST']}) |
|
241 | 237 | m.connect('repo_groups', '/repo_groups', |
|
242 | 238 | action='index', conditions={'method': ['GET']}) |
|
243 | 239 | m.connect('new_repo_group', '/repo_groups/new', |
|
244 | 240 | action='new', conditions={'method': ['GET']}) |
|
245 | 241 | m.connect('update_repo_group', '/repo_groups/{group_name}', |
|
246 | 242 | action='update', conditions={'method': ['PUT'], |
|
247 | 243 | 'function': check_group}, |
|
248 | 244 | requirements=URL_NAME_REQUIREMENTS) |
|
249 | 245 | |
|
250 | 246 | # EXTRAS REPO GROUP ROUTES |
|
251 | 247 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
252 | 248 | action='edit', |
|
253 | 249 | conditions={'method': ['GET'], 'function': check_group}, |
|
254 | 250 | requirements=URL_NAME_REQUIREMENTS) |
|
255 | 251 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
256 | 252 | action='edit', |
|
257 | 253 | conditions={'method': ['PUT'], 'function': check_group}, |
|
258 | 254 | requirements=URL_NAME_REQUIREMENTS) |
|
259 | 255 | |
|
260 | 256 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
261 | 257 | action='edit_repo_group_advanced', |
|
262 | 258 | conditions={'method': ['GET'], 'function': check_group}, |
|
263 | 259 | requirements=URL_NAME_REQUIREMENTS) |
|
264 | 260 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
265 | 261 | action='edit_repo_group_advanced', |
|
266 | 262 | conditions={'method': ['PUT'], 'function': check_group}, |
|
267 | 263 | requirements=URL_NAME_REQUIREMENTS) |
|
268 | 264 | |
|
269 | 265 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
270 | 266 | action='edit_repo_group_perms', |
|
271 | 267 | conditions={'method': ['GET'], 'function': check_group}, |
|
272 | 268 | requirements=URL_NAME_REQUIREMENTS) |
|
273 | 269 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
274 | 270 | action='update_perms', |
|
275 | 271 | conditions={'method': ['PUT'], 'function': check_group}, |
|
276 | 272 | requirements=URL_NAME_REQUIREMENTS) |
|
277 | 273 | |
|
278 | 274 | m.connect('delete_repo_group', '/repo_groups/{group_name}', |
|
279 | 275 | action='delete', conditions={'method': ['DELETE'], |
|
280 | 276 | 'function': check_group}, |
|
281 | 277 | requirements=URL_NAME_REQUIREMENTS) |
|
282 | 278 | |
|
283 | 279 | # ADMIN USER ROUTES |
|
284 | 280 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
285 | 281 | controller='admin/users') as m: |
|
286 | 282 | m.connect('users', '/users', |
|
287 | 283 | action='create', conditions={'method': ['POST']}) |
|
288 | 284 | m.connect('users', '/users', |
|
289 | 285 | action='index', conditions={'method': ['GET']}) |
|
290 | 286 | m.connect('new_user', '/users/new', |
|
291 | 287 | action='new', conditions={'method': ['GET']}) |
|
292 | 288 | m.connect('update_user', '/users/{user_id}', |
|
293 | 289 | action='update', conditions={'method': ['PUT']}) |
|
294 | 290 | m.connect('delete_user', '/users/{user_id}', |
|
295 | 291 | action='delete', conditions={'method': ['DELETE']}) |
|
296 | 292 | m.connect('edit_user', '/users/{user_id}/edit', |
|
297 | 293 | action='edit', conditions={'method': ['GET']}) |
|
298 | 294 | m.connect('user', '/users/{user_id}', |
|
299 | 295 | action='show', conditions={'method': ['GET']}) |
|
300 | 296 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', |
|
301 | 297 | action='reset_password', conditions={'method': ['POST']}) |
|
302 | 298 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', |
|
303 | 299 | action='create_personal_repo_group', conditions={'method': ['POST']}) |
|
304 | 300 | |
|
305 | 301 | # EXTRAS USER ROUTES |
|
306 | 302 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
307 | 303 | action='edit_advanced', conditions={'method': ['GET']}) |
|
308 | 304 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
309 | 305 | action='update_advanced', conditions={'method': ['PUT']}) |
|
310 | 306 | |
|
311 | 307 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', |
|
312 | 308 | action='edit_auth_tokens', conditions={'method': ['GET']}) |
|
313 | 309 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', |
|
314 | 310 | action='add_auth_token', conditions={'method': ['PUT']}) |
|
315 | 311 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', |
|
316 | 312 | action='delete_auth_token', conditions={'method': ['DELETE']}) |
|
317 | 313 | |
|
318 | 314 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
319 | 315 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
320 | 316 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
321 | 317 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
322 | 318 | |
|
323 | 319 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', |
|
324 | 320 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
325 | 321 | |
|
326 | 322 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
327 | 323 | action='edit_emails', conditions={'method': ['GET']}) |
|
328 | 324 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
329 | 325 | action='add_email', conditions={'method': ['PUT']}) |
|
330 | 326 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
331 | 327 | action='delete_email', conditions={'method': ['DELETE']}) |
|
332 | 328 | |
|
333 | 329 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
334 | 330 | action='edit_ips', conditions={'method': ['GET']}) |
|
335 | 331 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
336 | 332 | action='add_ip', conditions={'method': ['PUT']}) |
|
337 | 333 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
338 | 334 | action='delete_ip', conditions={'method': ['DELETE']}) |
|
339 | 335 | |
|
340 | 336 | # ADMIN USER GROUPS REST ROUTES |
|
341 | 337 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
342 | 338 | controller='admin/user_groups') as m: |
|
343 | 339 | m.connect('users_groups', '/user_groups', |
|
344 | 340 | action='create', conditions={'method': ['POST']}) |
|
345 | 341 | m.connect('users_groups', '/user_groups', |
|
346 | 342 | action='index', conditions={'method': ['GET']}) |
|
347 | 343 | m.connect('new_users_group', '/user_groups/new', |
|
348 | 344 | action='new', conditions={'method': ['GET']}) |
|
349 | 345 | m.connect('update_users_group', '/user_groups/{user_group_id}', |
|
350 | 346 | action='update', conditions={'method': ['PUT']}) |
|
351 | 347 | m.connect('delete_users_group', '/user_groups/{user_group_id}', |
|
352 | 348 | action='delete', conditions={'method': ['DELETE']}) |
|
353 | 349 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', |
|
354 | 350 | action='edit', conditions={'method': ['GET']}, |
|
355 | 351 | function=check_user_group) |
|
356 | 352 | |
|
357 | 353 | # EXTRAS USER GROUP ROUTES |
|
358 | 354 | m.connect('edit_user_group_global_perms', |
|
359 | 355 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
360 | 356 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
361 | 357 | m.connect('edit_user_group_global_perms', |
|
362 | 358 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
363 | 359 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
364 | 360 | m.connect('edit_user_group_perms_summary', |
|
365 | 361 | '/user_groups/{user_group_id}/edit/permissions_summary', |
|
366 | 362 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
367 | 363 | |
|
368 | 364 | m.connect('edit_user_group_perms', |
|
369 | 365 | '/user_groups/{user_group_id}/edit/permissions', |
|
370 | 366 | action='edit_perms', conditions={'method': ['GET']}) |
|
371 | 367 | m.connect('edit_user_group_perms', |
|
372 | 368 | '/user_groups/{user_group_id}/edit/permissions', |
|
373 | 369 | action='update_perms', conditions={'method': ['PUT']}) |
|
374 | 370 | |
|
375 | 371 | m.connect('edit_user_group_advanced', |
|
376 | 372 | '/user_groups/{user_group_id}/edit/advanced', |
|
377 | 373 | action='edit_advanced', conditions={'method': ['GET']}) |
|
378 | 374 | |
|
379 | 375 | m.connect('edit_user_group_members', |
|
380 | 376 | '/user_groups/{user_group_id}/edit/members', jsroute=True, |
|
381 | 377 | action='edit_members', conditions={'method': ['GET']}) |
|
382 | 378 | |
|
383 | 379 | # ADMIN PERMISSIONS ROUTES |
|
384 | 380 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
385 | 381 | controller='admin/permissions') as m: |
|
386 | 382 | m.connect('admin_permissions_application', '/permissions/application', |
|
387 | 383 | action='permission_application_update', conditions={'method': ['POST']}) |
|
388 | 384 | m.connect('admin_permissions_application', '/permissions/application', |
|
389 | 385 | action='permission_application', conditions={'method': ['GET']}) |
|
390 | 386 | |
|
391 | 387 | m.connect('admin_permissions_global', '/permissions/global', |
|
392 | 388 | action='permission_global_update', conditions={'method': ['POST']}) |
|
393 | 389 | m.connect('admin_permissions_global', '/permissions/global', |
|
394 | 390 | action='permission_global', conditions={'method': ['GET']}) |
|
395 | 391 | |
|
396 | 392 | m.connect('admin_permissions_object', '/permissions/object', |
|
397 | 393 | action='permission_objects_update', conditions={'method': ['POST']}) |
|
398 | 394 | m.connect('admin_permissions_object', '/permissions/object', |
|
399 | 395 | action='permission_objects', conditions={'method': ['GET']}) |
|
400 | 396 | |
|
401 | 397 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
402 | 398 | action='permission_ips', conditions={'method': ['POST']}) |
|
403 | 399 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
404 | 400 | action='permission_ips', conditions={'method': ['GET']}) |
|
405 | 401 | |
|
406 | 402 | m.connect('admin_permissions_overview', '/permissions/overview', |
|
407 | 403 | action='permission_perms', conditions={'method': ['GET']}) |
|
408 | 404 | |
|
409 | 405 | # ADMIN DEFAULTS REST ROUTES |
|
410 | 406 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
411 | 407 | controller='admin/defaults') as m: |
|
412 | 408 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
413 | 409 | action='update_repository_defaults', conditions={'method': ['POST']}) |
|
414 | 410 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
415 | 411 | action='index', conditions={'method': ['GET']}) |
|
416 | 412 | |
|
417 | 413 | # ADMIN DEBUG STYLE ROUTES |
|
418 | 414 | if str2bool(config.get('debug_style')): |
|
419 | 415 | with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style', |
|
420 | 416 | controller='debug_style') as m: |
|
421 | 417 | m.connect('debug_style_home', '', |
|
422 | 418 | action='index', conditions={'method': ['GET']}) |
|
423 | 419 | m.connect('debug_style_template', '/t/{t_path}', |
|
424 | 420 | action='template', conditions={'method': ['GET']}) |
|
425 | 421 | |
|
426 | 422 | # ADMIN SETTINGS ROUTES |
|
427 | 423 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
428 | 424 | controller='admin/settings') as m: |
|
429 | 425 | |
|
430 | 426 | # default |
|
431 | 427 | m.connect('admin_settings', '/settings', |
|
432 | 428 | action='settings_global_update', |
|
433 | 429 | conditions={'method': ['POST']}) |
|
434 | 430 | m.connect('admin_settings', '/settings', |
|
435 | 431 | action='settings_global', conditions={'method': ['GET']}) |
|
436 | 432 | |
|
437 | 433 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
438 | 434 | action='settings_vcs_update', |
|
439 | 435 | conditions={'method': ['POST']}) |
|
440 | 436 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
441 | 437 | action='settings_vcs', |
|
442 | 438 | conditions={'method': ['GET']}) |
|
443 | 439 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
444 | 440 | action='delete_svn_pattern', |
|
445 | 441 | conditions={'method': ['DELETE']}) |
|
446 | 442 | |
|
447 | 443 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
448 | 444 | action='settings_mapping_update', |
|
449 | 445 | conditions={'method': ['POST']}) |
|
450 | 446 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
451 | 447 | action='settings_mapping', conditions={'method': ['GET']}) |
|
452 | 448 | |
|
453 | 449 | m.connect('admin_settings_global', '/settings/global', |
|
454 | 450 | action='settings_global_update', |
|
455 | 451 | conditions={'method': ['POST']}) |
|
456 | 452 | m.connect('admin_settings_global', '/settings/global', |
|
457 | 453 | action='settings_global', conditions={'method': ['GET']}) |
|
458 | 454 | |
|
459 | 455 | m.connect('admin_settings_visual', '/settings/visual', |
|
460 | 456 | action='settings_visual_update', |
|
461 | 457 | conditions={'method': ['POST']}) |
|
462 | 458 | m.connect('admin_settings_visual', '/settings/visual', |
|
463 | 459 | action='settings_visual', conditions={'method': ['GET']}) |
|
464 | 460 | |
|
465 | 461 | m.connect('admin_settings_issuetracker', |
|
466 | 462 | '/settings/issue-tracker', action='settings_issuetracker', |
|
467 | 463 | conditions={'method': ['GET']}) |
|
468 | 464 | m.connect('admin_settings_issuetracker_save', |
|
469 | 465 | '/settings/issue-tracker/save', |
|
470 | 466 | action='settings_issuetracker_save', |
|
471 | 467 | conditions={'method': ['POST']}) |
|
472 | 468 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', |
|
473 | 469 | action='settings_issuetracker_test', |
|
474 | 470 | conditions={'method': ['POST']}) |
|
475 | 471 | m.connect('admin_issuetracker_delete', |
|
476 | 472 | '/settings/issue-tracker/delete', |
|
477 | 473 | action='settings_issuetracker_delete', |
|
478 | 474 | conditions={'method': ['DELETE']}) |
|
479 | 475 | |
|
480 | 476 | m.connect('admin_settings_email', '/settings/email', |
|
481 | 477 | action='settings_email_update', |
|
482 | 478 | conditions={'method': ['POST']}) |
|
483 | 479 | m.connect('admin_settings_email', '/settings/email', |
|
484 | 480 | action='settings_email', conditions={'method': ['GET']}) |
|
485 | 481 | |
|
486 | 482 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
487 | 483 | action='settings_hooks_update', |
|
488 | 484 | conditions={'method': ['POST', 'DELETE']}) |
|
489 | 485 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
490 | 486 | action='settings_hooks', conditions={'method': ['GET']}) |
|
491 | 487 | |
|
492 | 488 | m.connect('admin_settings_search', '/settings/search', |
|
493 | 489 | action='settings_search', conditions={'method': ['GET']}) |
|
494 | 490 | |
|
495 | 491 | m.connect('admin_settings_system', '/settings/system', |
|
496 | 492 | action='settings_system', conditions={'method': ['GET']}) |
|
497 | 493 | |
|
498 | 494 | m.connect('admin_settings_system_update', '/settings/system/updates', |
|
499 | 495 | action='settings_system_update', conditions={'method': ['GET']}) |
|
500 | 496 | |
|
501 | 497 | m.connect('admin_settings_supervisor', '/settings/supervisor', |
|
502 | 498 | action='settings_supervisor', conditions={'method': ['GET']}) |
|
503 | 499 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', |
|
504 | 500 | action='settings_supervisor_log', conditions={'method': ['GET']}) |
|
505 | 501 | |
|
506 | 502 | m.connect('admin_settings_labs', '/settings/labs', |
|
507 | 503 | action='settings_labs_update', |
|
508 | 504 | conditions={'method': ['POST']}) |
|
509 | 505 | m.connect('admin_settings_labs', '/settings/labs', |
|
510 | 506 | action='settings_labs', conditions={'method': ['GET']}) |
|
511 | 507 | |
|
512 | 508 | m.connect('admin_settings_open_source', '/settings/open_source', |
|
513 | 509 | action='settings_open_source', |
|
514 | 510 | conditions={'method': ['GET']}) |
|
515 | 511 | |
|
516 | 512 | # ADMIN MY ACCOUNT |
|
517 | 513 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
518 | 514 | controller='admin/my_account') as m: |
|
519 | 515 | |
|
520 | 516 | m.connect('my_account', '/my_account', |
|
521 | 517 | action='my_account', conditions={'method': ['GET']}) |
|
522 | 518 | m.connect('my_account_edit', '/my_account/edit', |
|
523 | 519 | action='my_account_edit', conditions={'method': ['GET']}) |
|
524 | 520 | m.connect('my_account', '/my_account', |
|
525 | 521 | action='my_account_update', conditions={'method': ['POST']}) |
|
526 | 522 | |
|
527 | 523 | m.connect('my_account_password', '/my_account/password', |
|
528 | 524 | action='my_account_password', conditions={'method': ['GET']}) |
|
529 | 525 | m.connect('my_account_password', '/my_account/password', |
|
530 | 526 | action='my_account_password_update', conditions={'method': ['POST']}) |
|
531 | 527 | |
|
532 | 528 | m.connect('my_account_repos', '/my_account/repos', |
|
533 | 529 | action='my_account_repos', conditions={'method': ['GET']}) |
|
534 | 530 | |
|
535 | 531 | m.connect('my_account_watched', '/my_account/watched', |
|
536 | 532 | action='my_account_watched', conditions={'method': ['GET']}) |
|
537 | 533 | |
|
538 | 534 | m.connect('my_account_pullrequests', '/my_account/pull_requests', |
|
539 | 535 | action='my_account_pullrequests', conditions={'method': ['GET']}) |
|
540 | 536 | |
|
541 | 537 | m.connect('my_account_perms', '/my_account/perms', |
|
542 | 538 | action='my_account_perms', conditions={'method': ['GET']}) |
|
543 | 539 | |
|
544 | 540 | m.connect('my_account_emails', '/my_account/emails', |
|
545 | 541 | action='my_account_emails', conditions={'method': ['GET']}) |
|
546 | 542 | m.connect('my_account_emails', '/my_account/emails', |
|
547 | 543 | action='my_account_emails_add', conditions={'method': ['POST']}) |
|
548 | 544 | m.connect('my_account_emails', '/my_account/emails', |
|
549 | 545 | action='my_account_emails_delete', conditions={'method': ['DELETE']}) |
|
550 | 546 | |
|
551 | 547 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', |
|
552 | 548 | action='my_account_auth_tokens', conditions={'method': ['GET']}) |
|
553 | 549 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', |
|
554 | 550 | action='my_account_auth_tokens_add', conditions={'method': ['POST']}) |
|
555 | 551 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', |
|
556 | 552 | action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']}) |
|
557 | 553 | |
|
558 | 554 | # NOTIFICATION REST ROUTES |
|
559 | 555 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
560 | 556 | controller='admin/notifications') as m: |
|
561 | 557 | m.connect('notifications', '/notifications', |
|
562 | 558 | action='index', conditions={'method': ['GET']}) |
|
563 | 559 | m.connect('notifications_mark_all_read', '/notifications/mark_all_read', |
|
564 | 560 | action='mark_all_read', conditions={'method': ['POST']}) |
|
565 | 561 | |
|
566 | 562 | m.connect('/notifications/{notification_id}', |
|
567 | 563 | action='update', conditions={'method': ['PUT']}) |
|
568 | 564 | m.connect('/notifications/{notification_id}', |
|
569 | 565 | action='delete', conditions={'method': ['DELETE']}) |
|
570 | 566 | m.connect('notification', '/notifications/{notification_id}', |
|
571 | 567 | action='show', conditions={'method': ['GET']}) |
|
572 | 568 | |
|
573 | 569 | # ADMIN GIST |
|
574 | 570 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
575 | 571 | controller='admin/gists') as m: |
|
576 | 572 | m.connect('gists', '/gists', |
|
577 | 573 | action='create', conditions={'method': ['POST']}) |
|
578 | 574 | m.connect('gists', '/gists', jsroute=True, |
|
579 | 575 | action='index', conditions={'method': ['GET']}) |
|
580 | 576 | m.connect('new_gist', '/gists/new', jsroute=True, |
|
581 | 577 | action='new', conditions={'method': ['GET']}) |
|
582 | 578 | |
|
583 | 579 | m.connect('/gists/{gist_id}', |
|
584 | 580 | action='delete', conditions={'method': ['DELETE']}) |
|
585 | 581 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
586 | 582 | action='edit_form', conditions={'method': ['GET']}) |
|
587 | 583 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
588 | 584 | action='edit', conditions={'method': ['POST']}) |
|
589 | 585 | m.connect( |
|
590 | 586 | 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision', |
|
591 | 587 | action='check_revision', conditions={'method': ['GET']}) |
|
592 | 588 | |
|
593 | 589 | m.connect('gist', '/gists/{gist_id}', |
|
594 | 590 | action='show', conditions={'method': ['GET']}) |
|
595 | 591 | m.connect('gist_rev', '/gists/{gist_id}/{revision}', |
|
596 | 592 | revision='tip', |
|
597 | 593 | action='show', conditions={'method': ['GET']}) |
|
598 | 594 | m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}', |
|
599 | 595 | revision='tip', |
|
600 | 596 | action='show', conditions={'method': ['GET']}) |
|
601 | 597 | m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}', |
|
602 | 598 | revision='tip', |
|
603 | 599 | action='show', conditions={'method': ['GET']}, |
|
604 | 600 | requirements=URL_NAME_REQUIREMENTS) |
|
605 | 601 | |
|
606 | 602 | # ADMIN MAIN PAGES |
|
607 | 603 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
608 | 604 | controller='admin/admin') as m: |
|
609 | 605 | m.connect('admin_home', '', action='index') |
|
610 | 606 | m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}', |
|
611 | 607 | action='add_repo') |
|
612 | 608 | m.connect( |
|
613 | 609 | 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}', |
|
614 | 610 | action='pull_requests') |
|
615 | 611 | m.connect( |
|
616 | 612 | 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}', |
|
617 | 613 | action='pull_requests') |
|
618 | 614 | |
|
619 | 615 | |
|
620 | 616 | # USER JOURNAL |
|
621 | 617 | rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,), |
|
622 | 618 | controller='journal', action='index') |
|
623 | 619 | rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,), |
|
624 | 620 | controller='journal', action='journal_rss') |
|
625 | 621 | rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,), |
|
626 | 622 | controller='journal', action='journal_atom') |
|
627 | 623 | |
|
628 | 624 | rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,), |
|
629 | 625 | controller='journal', action='public_journal') |
|
630 | 626 | |
|
631 | 627 | rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,), |
|
632 | 628 | controller='journal', action='public_journal_rss') |
|
633 | 629 | |
|
634 | 630 | rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,), |
|
635 | 631 | controller='journal', action='public_journal_rss') |
|
636 | 632 | |
|
637 | 633 | rmap.connect('public_journal_atom', |
|
638 | 634 | '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal', |
|
639 | 635 | action='public_journal_atom') |
|
640 | 636 | |
|
641 | 637 | rmap.connect('public_journal_atom_old', |
|
642 | 638 | '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal', |
|
643 | 639 | action='public_journal_atom') |
|
644 | 640 | |
|
645 | 641 | rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,), |
|
646 | 642 | controller='journal', action='toggle_following', jsroute=True, |
|
647 | 643 | conditions={'method': ['POST']}) |
|
648 | 644 | |
|
649 | 645 | # FULL TEXT SEARCH |
|
650 | 646 | rmap.connect('search', '%s/search' % (ADMIN_PREFIX,), |
|
651 | 647 | controller='search') |
|
652 | 648 | rmap.connect('search_repo_home', '/{repo_name}/search', |
|
653 | 649 | controller='search', |
|
654 | 650 | action='index', |
|
655 | 651 | conditions={'function': check_repo}, |
|
656 | 652 | requirements=URL_NAME_REQUIREMENTS) |
|
657 | 653 | |
|
658 | 654 | # FEEDS |
|
659 | 655 | rmap.connect('rss_feed_home', '/{repo_name}/feed/rss', |
|
660 | 656 | controller='feed', action='rss', |
|
661 | 657 | conditions={'function': check_repo}, |
|
662 | 658 | requirements=URL_NAME_REQUIREMENTS) |
|
663 | 659 | |
|
664 | 660 | rmap.connect('atom_feed_home', '/{repo_name}/feed/atom', |
|
665 | 661 | controller='feed', action='atom', |
|
666 | 662 | conditions={'function': check_repo}, |
|
667 | 663 | requirements=URL_NAME_REQUIREMENTS) |
|
668 | 664 | |
|
669 | 665 | #========================================================================== |
|
670 | 666 | # REPOSITORY ROUTES |
|
671 | 667 | #========================================================================== |
|
672 | 668 | |
|
673 | 669 | rmap.connect('repo_creating_home', '/{repo_name}/repo_creating', |
|
674 | 670 | controller='admin/repos', action='repo_creating', |
|
675 | 671 | requirements=URL_NAME_REQUIREMENTS) |
|
676 | 672 | rmap.connect('repo_check_home', '/{repo_name}/crepo_check', |
|
677 | 673 | controller='admin/repos', action='repo_check', |
|
678 | 674 | requirements=URL_NAME_REQUIREMENTS) |
|
679 | 675 | |
|
680 | 676 | rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}', |
|
681 | 677 | controller='summary', action='repo_stats', |
|
682 | 678 | conditions={'function': check_repo}, |
|
683 | 679 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
684 | 680 | |
|
685 | 681 | rmap.connect('repo_refs_data', '/{repo_name}/refs-data', |
|
686 | 682 | controller='summary', action='repo_refs_data', jsroute=True, |
|
687 | 683 | requirements=URL_NAME_REQUIREMENTS) |
|
688 | 684 | rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog', |
|
689 | 685 | controller='summary', action='repo_refs_changelog_data', |
|
690 | 686 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
691 | 687 | |
|
692 | 688 | rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}', |
|
693 | 689 | controller='changeset', revision='tip', jsroute=True, |
|
694 | 690 | conditions={'function': check_repo}, |
|
695 | 691 | requirements=URL_NAME_REQUIREMENTS) |
|
696 | 692 | rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}', |
|
697 | 693 | controller='changeset', revision='tip', action='changeset_children', |
|
698 | 694 | conditions={'function': check_repo}, |
|
699 | 695 | requirements=URL_NAME_REQUIREMENTS) |
|
700 | 696 | rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}', |
|
701 | 697 | controller='changeset', revision='tip', action='changeset_parents', |
|
702 | 698 | conditions={'function': check_repo}, |
|
703 | 699 | requirements=URL_NAME_REQUIREMENTS) |
|
704 | 700 | |
|
705 | 701 | # repo edit options |
|
706 | 702 | rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True, |
|
707 | 703 | controller='admin/repos', action='edit', |
|
708 | 704 | conditions={'method': ['GET'], 'function': check_repo}, |
|
709 | 705 | requirements=URL_NAME_REQUIREMENTS) |
|
710 | 706 | |
|
711 | 707 | rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions', |
|
712 | 708 | jsroute=True, |
|
713 | 709 | controller='admin/repos', action='edit_permissions', |
|
714 | 710 | conditions={'method': ['GET'], 'function': check_repo}, |
|
715 | 711 | requirements=URL_NAME_REQUIREMENTS) |
|
716 | 712 | rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions', |
|
717 | 713 | controller='admin/repos', action='edit_permissions_update', |
|
718 | 714 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
719 | 715 | requirements=URL_NAME_REQUIREMENTS) |
|
720 | 716 | |
|
721 | 717 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', |
|
722 | 718 | controller='admin/repos', action='edit_fields', |
|
723 | 719 | conditions={'method': ['GET'], 'function': check_repo}, |
|
724 | 720 | requirements=URL_NAME_REQUIREMENTS) |
|
725 | 721 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', |
|
726 | 722 | controller='admin/repos', action='create_repo_field', |
|
727 | 723 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
728 | 724 | requirements=URL_NAME_REQUIREMENTS) |
|
729 | 725 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', |
|
730 | 726 | controller='admin/repos', action='delete_repo_field', |
|
731 | 727 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
732 | 728 | requirements=URL_NAME_REQUIREMENTS) |
|
733 | 729 | |
|
734 | 730 | rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced', |
|
735 | 731 | controller='admin/repos', action='edit_advanced', |
|
736 | 732 | conditions={'method': ['GET'], 'function': check_repo}, |
|
737 | 733 | requirements=URL_NAME_REQUIREMENTS) |
|
738 | 734 | |
|
739 | 735 | rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking', |
|
740 | 736 | controller='admin/repos', action='edit_advanced_locking', |
|
741 | 737 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
742 | 738 | requirements=URL_NAME_REQUIREMENTS) |
|
743 | 739 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', |
|
744 | 740 | controller='admin/repos', action='toggle_locking', |
|
745 | 741 | conditions={'method': ['GET'], 'function': check_repo}, |
|
746 | 742 | requirements=URL_NAME_REQUIREMENTS) |
|
747 | 743 | |
|
748 | 744 | rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal', |
|
749 | 745 | controller='admin/repos', action='edit_advanced_journal', |
|
750 | 746 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
751 | 747 | requirements=URL_NAME_REQUIREMENTS) |
|
752 | 748 | |
|
753 | 749 | rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork', |
|
754 | 750 | controller='admin/repos', action='edit_advanced_fork', |
|
755 | 751 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
756 | 752 | requirements=URL_NAME_REQUIREMENTS) |
|
757 | 753 | |
|
758 | 754 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', |
|
759 | 755 | controller='admin/repos', action='edit_caches_form', |
|
760 | 756 | conditions={'method': ['GET'], 'function': check_repo}, |
|
761 | 757 | requirements=URL_NAME_REQUIREMENTS) |
|
762 | 758 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', |
|
763 | 759 | controller='admin/repos', action='edit_caches', |
|
764 | 760 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
765 | 761 | requirements=URL_NAME_REQUIREMENTS) |
|
766 | 762 | |
|
767 | 763 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
768 | 764 | controller='admin/repos', action='edit_remote_form', |
|
769 | 765 | conditions={'method': ['GET'], 'function': check_repo}, |
|
770 | 766 | requirements=URL_NAME_REQUIREMENTS) |
|
771 | 767 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
772 | 768 | controller='admin/repos', action='edit_remote', |
|
773 | 769 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
774 | 770 | requirements=URL_NAME_REQUIREMENTS) |
|
775 | 771 | |
|
776 | 772 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
777 | 773 | controller='admin/repos', action='edit_statistics_form', |
|
778 | 774 | conditions={'method': ['GET'], 'function': check_repo}, |
|
779 | 775 | requirements=URL_NAME_REQUIREMENTS) |
|
780 | 776 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
781 | 777 | controller='admin/repos', action='edit_statistics', |
|
782 | 778 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
783 | 779 | requirements=URL_NAME_REQUIREMENTS) |
|
784 | 780 | rmap.connect('repo_settings_issuetracker', |
|
785 | 781 | '/{repo_name}/settings/issue-tracker', |
|
786 | 782 | controller='admin/repos', action='repo_issuetracker', |
|
787 | 783 | conditions={'method': ['GET'], 'function': check_repo}, |
|
788 | 784 | requirements=URL_NAME_REQUIREMENTS) |
|
789 | 785 | rmap.connect('repo_issuetracker_test', |
|
790 | 786 | '/{repo_name}/settings/issue-tracker/test', |
|
791 | 787 | controller='admin/repos', action='repo_issuetracker_test', |
|
792 | 788 | conditions={'method': ['POST'], 'function': check_repo}, |
|
793 | 789 | requirements=URL_NAME_REQUIREMENTS) |
|
794 | 790 | rmap.connect('repo_issuetracker_delete', |
|
795 | 791 | '/{repo_name}/settings/issue-tracker/delete', |
|
796 | 792 | controller='admin/repos', action='repo_issuetracker_delete', |
|
797 | 793 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
798 | 794 | requirements=URL_NAME_REQUIREMENTS) |
|
799 | 795 | rmap.connect('repo_issuetracker_save', |
|
800 | 796 | '/{repo_name}/settings/issue-tracker/save', |
|
801 | 797 | controller='admin/repos', action='repo_issuetracker_save', |
|
802 | 798 | conditions={'method': ['POST'], 'function': check_repo}, |
|
803 | 799 | requirements=URL_NAME_REQUIREMENTS) |
|
804 | 800 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
805 | 801 | controller='admin/repos', action='repo_settings_vcs_update', |
|
806 | 802 | conditions={'method': ['POST'], 'function': check_repo}, |
|
807 | 803 | requirements=URL_NAME_REQUIREMENTS) |
|
808 | 804 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
809 | 805 | controller='admin/repos', action='repo_settings_vcs', |
|
810 | 806 | conditions={'method': ['GET'], 'function': check_repo}, |
|
811 | 807 | requirements=URL_NAME_REQUIREMENTS) |
|
812 | 808 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
813 | 809 | controller='admin/repos', action='repo_delete_svn_pattern', |
|
814 | 810 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
815 | 811 | requirements=URL_NAME_REQUIREMENTS) |
|
816 | 812 | |
|
817 | 813 | # still working url for backward compat. |
|
818 | 814 | rmap.connect('raw_changeset_home_depraced', |
|
819 | 815 | '/{repo_name}/raw-changeset/{revision}', |
|
820 | 816 | controller='changeset', action='changeset_raw', |
|
821 | 817 | revision='tip', conditions={'function': check_repo}, |
|
822 | 818 | requirements=URL_NAME_REQUIREMENTS) |
|
823 | 819 | |
|
824 | 820 | # new URLs |
|
825 | 821 | rmap.connect('changeset_raw_home', |
|
826 | 822 | '/{repo_name}/changeset-diff/{revision}', |
|
827 | 823 | controller='changeset', action='changeset_raw', |
|
828 | 824 | revision='tip', conditions={'function': check_repo}, |
|
829 | 825 | requirements=URL_NAME_REQUIREMENTS) |
|
830 | 826 | |
|
831 | 827 | rmap.connect('changeset_patch_home', |
|
832 | 828 | '/{repo_name}/changeset-patch/{revision}', |
|
833 | 829 | controller='changeset', action='changeset_patch', |
|
834 | 830 | revision='tip', conditions={'function': check_repo}, |
|
835 | 831 | requirements=URL_NAME_REQUIREMENTS) |
|
836 | 832 | |
|
837 | 833 | rmap.connect('changeset_download_home', |
|
838 | 834 | '/{repo_name}/changeset-download/{revision}', |
|
839 | 835 | controller='changeset', action='changeset_download', |
|
840 | 836 | revision='tip', conditions={'function': check_repo}, |
|
841 | 837 | requirements=URL_NAME_REQUIREMENTS) |
|
842 | 838 | |
|
843 | 839 | rmap.connect('changeset_comment', |
|
844 | 840 | '/{repo_name}/changeset/{revision}/comment', jsroute=True, |
|
845 | 841 | controller='changeset', revision='tip', action='comment', |
|
846 | 842 | conditions={'function': check_repo}, |
|
847 | 843 | requirements=URL_NAME_REQUIREMENTS) |
|
848 | 844 | |
|
849 | 845 | rmap.connect('changeset_comment_preview', |
|
850 | 846 | '/{repo_name}/changeset/comment/preview', jsroute=True, |
|
851 | 847 | controller='changeset', action='preview_comment', |
|
852 | 848 | conditions={'function': check_repo, 'method': ['POST']}, |
|
853 | 849 | requirements=URL_NAME_REQUIREMENTS) |
|
854 | 850 | |
|
855 | 851 | rmap.connect('changeset_comment_delete', |
|
856 | 852 | '/{repo_name}/changeset/comment/{comment_id}/delete', |
|
857 | 853 | controller='changeset', action='delete_comment', |
|
858 | 854 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
859 | 855 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
860 | 856 | |
|
861 | 857 | rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}', |
|
862 | 858 | controller='changeset', action='changeset_info', |
|
863 | 859 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
864 | 860 | |
|
865 | 861 | rmap.connect('compare_home', |
|
866 | 862 | '/{repo_name}/compare', |
|
867 | 863 | controller='compare', action='index', |
|
868 | 864 | conditions={'function': check_repo}, |
|
869 | 865 | requirements=URL_NAME_REQUIREMENTS) |
|
870 | 866 | |
|
871 | 867 | rmap.connect('compare_url', |
|
872 | 868 | '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', |
|
873 | 869 | controller='compare', action='compare', |
|
874 | 870 | conditions={'function': check_repo}, |
|
875 | 871 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
876 | 872 | |
|
877 | 873 | rmap.connect('pullrequest_home', |
|
878 | 874 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
879 | 875 | action='index', conditions={'function': check_repo, |
|
880 | 876 | 'method': ['GET']}, |
|
881 | 877 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
882 | 878 | |
|
883 | 879 | rmap.connect('pullrequest', |
|
884 | 880 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
885 | 881 | action='create', conditions={'function': check_repo, |
|
886 | 882 | 'method': ['POST']}, |
|
887 | 883 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
888 | 884 | |
|
889 | 885 | rmap.connect('pullrequest_repo_refs', |
|
890 | 886 | '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
891 | 887 | controller='pullrequests', |
|
892 | 888 | action='get_repo_refs', |
|
893 | 889 | conditions={'function': check_repo, 'method': ['GET']}, |
|
894 | 890 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
895 | 891 | |
|
896 | 892 | rmap.connect('pullrequest_repo_destinations', |
|
897 | 893 | '/{repo_name}/pull-request/repo-destinations', |
|
898 | 894 | controller='pullrequests', |
|
899 | 895 | action='get_repo_destinations', |
|
900 | 896 | conditions={'function': check_repo, 'method': ['GET']}, |
|
901 | 897 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
902 | 898 | |
|
903 | 899 | rmap.connect('pullrequest_show', |
|
904 | 900 | '/{repo_name}/pull-request/{pull_request_id}', |
|
905 | 901 | controller='pullrequests', |
|
906 | 902 | action='show', conditions={'function': check_repo, |
|
907 | 903 | 'method': ['GET']}, |
|
908 | 904 | requirements=URL_NAME_REQUIREMENTS) |
|
909 | 905 | |
|
910 | 906 | rmap.connect('pullrequest_update', |
|
911 | 907 | '/{repo_name}/pull-request/{pull_request_id}', |
|
912 | 908 | controller='pullrequests', |
|
913 | 909 | action='update', conditions={'function': check_repo, |
|
914 | 910 | 'method': ['PUT']}, |
|
915 | 911 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
916 | 912 | |
|
917 | 913 | rmap.connect('pullrequest_merge', |
|
918 | 914 | '/{repo_name}/pull-request/{pull_request_id}', |
|
919 | 915 | controller='pullrequests', |
|
920 | 916 | action='merge', conditions={'function': check_repo, |
|
921 | 917 | 'method': ['POST']}, |
|
922 | 918 | requirements=URL_NAME_REQUIREMENTS) |
|
923 | 919 | |
|
924 | 920 | rmap.connect('pullrequest_delete', |
|
925 | 921 | '/{repo_name}/pull-request/{pull_request_id}', |
|
926 | 922 | controller='pullrequests', |
|
927 | 923 | action='delete', conditions={'function': check_repo, |
|
928 | 924 | 'method': ['DELETE']}, |
|
929 | 925 | requirements=URL_NAME_REQUIREMENTS) |
|
930 | 926 | |
|
931 | 927 | rmap.connect('pullrequest_show_all', |
|
932 | 928 | '/{repo_name}/pull-request', |
|
933 | 929 | controller='pullrequests', |
|
934 | 930 | action='show_all', conditions={'function': check_repo, |
|
935 | 931 | 'method': ['GET']}, |
|
936 | 932 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
937 | 933 | |
|
938 | 934 | rmap.connect('pullrequest_comment', |
|
939 | 935 | '/{repo_name}/pull-request-comment/{pull_request_id}', |
|
940 | 936 | controller='pullrequests', |
|
941 | 937 | action='comment', conditions={'function': check_repo, |
|
942 | 938 | 'method': ['POST']}, |
|
943 | 939 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
944 | 940 | |
|
945 | 941 | rmap.connect('pullrequest_comment_delete', |
|
946 | 942 | '/{repo_name}/pull-request-comment/{comment_id}/delete', |
|
947 | 943 | controller='pullrequests', action='delete_comment', |
|
948 | 944 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
949 | 945 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
950 | 946 | |
|
951 | 947 | rmap.connect('summary_home_explicit', '/{repo_name}/summary', |
|
952 | 948 | controller='summary', conditions={'function': check_repo}, |
|
953 | 949 | requirements=URL_NAME_REQUIREMENTS) |
|
954 | 950 | |
|
955 | 951 | rmap.connect('branches_home', '/{repo_name}/branches', |
|
956 | 952 | controller='branches', conditions={'function': check_repo}, |
|
957 | 953 | requirements=URL_NAME_REQUIREMENTS) |
|
958 | 954 | |
|
959 | 955 | rmap.connect('tags_home', '/{repo_name}/tags', |
|
960 | 956 | controller='tags', conditions={'function': check_repo}, |
|
961 | 957 | requirements=URL_NAME_REQUIREMENTS) |
|
962 | 958 | |
|
963 | 959 | rmap.connect('bookmarks_home', '/{repo_name}/bookmarks', |
|
964 | 960 | controller='bookmarks', conditions={'function': check_repo}, |
|
965 | 961 | requirements=URL_NAME_REQUIREMENTS) |
|
966 | 962 | |
|
967 | 963 | rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True, |
|
968 | 964 | controller='changelog', conditions={'function': check_repo}, |
|
969 | 965 | requirements=URL_NAME_REQUIREMENTS) |
|
970 | 966 | |
|
971 | 967 | rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary', |
|
972 | 968 | controller='changelog', action='changelog_summary', |
|
973 | 969 | conditions={'function': check_repo}, |
|
974 | 970 | requirements=URL_NAME_REQUIREMENTS) |
|
975 | 971 | |
|
976 | 972 | rmap.connect('changelog_file_home', |
|
977 | 973 | '/{repo_name}/changelog/{revision}/{f_path}', |
|
978 | 974 | controller='changelog', f_path=None, |
|
979 | 975 | conditions={'function': check_repo}, |
|
980 | 976 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
981 | 977 | |
|
982 | 978 | rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}', |
|
983 | 979 | controller='changelog', action='changelog_details', |
|
984 | 980 | conditions={'function': check_repo}, |
|
985 | 981 | requirements=URL_NAME_REQUIREMENTS) |
|
986 | 982 | |
|
987 | 983 | rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}', |
|
988 | 984 | controller='files', revision='tip', f_path='', |
|
989 | 985 | conditions={'function': check_repo}, |
|
990 | 986 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
991 | 987 | |
|
992 | 988 | rmap.connect('files_home_simple_catchrev', |
|
993 | 989 | '/{repo_name}/files/{revision}', |
|
994 | 990 | controller='files', revision='tip', f_path='', |
|
995 | 991 | conditions={'function': check_repo}, |
|
996 | 992 | requirements=URL_NAME_REQUIREMENTS) |
|
997 | 993 | |
|
998 | 994 | rmap.connect('files_home_simple_catchall', |
|
999 | 995 | '/{repo_name}/files', |
|
1000 | 996 | controller='files', revision='tip', f_path='', |
|
1001 | 997 | conditions={'function': check_repo}, |
|
1002 | 998 | requirements=URL_NAME_REQUIREMENTS) |
|
1003 | 999 | |
|
1004 | 1000 | rmap.connect('files_history_home', |
|
1005 | 1001 | '/{repo_name}/history/{revision}/{f_path}', |
|
1006 | 1002 | controller='files', action='history', revision='tip', f_path='', |
|
1007 | 1003 | conditions={'function': check_repo}, |
|
1008 | 1004 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1009 | 1005 | |
|
1010 | 1006 | rmap.connect('files_authors_home', |
|
1011 | 1007 | '/{repo_name}/authors/{revision}/{f_path}', |
|
1012 | 1008 | controller='files', action='authors', revision='tip', f_path='', |
|
1013 | 1009 | conditions={'function': check_repo}, |
|
1014 | 1010 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1015 | 1011 | |
|
1016 | 1012 | rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}', |
|
1017 | 1013 | controller='files', action='diff', f_path='', |
|
1018 | 1014 | conditions={'function': check_repo}, |
|
1019 | 1015 | requirements=URL_NAME_REQUIREMENTS) |
|
1020 | 1016 | |
|
1021 | 1017 | rmap.connect('files_diff_2way_home', |
|
1022 | 1018 | '/{repo_name}/diff-2way/{f_path}', |
|
1023 | 1019 | controller='files', action='diff_2way', f_path='', |
|
1024 | 1020 | conditions={'function': check_repo}, |
|
1025 | 1021 | requirements=URL_NAME_REQUIREMENTS) |
|
1026 | 1022 | |
|
1027 | 1023 | rmap.connect('files_rawfile_home', |
|
1028 | 1024 | '/{repo_name}/rawfile/{revision}/{f_path}', |
|
1029 | 1025 | controller='files', action='rawfile', revision='tip', |
|
1030 | 1026 | f_path='', conditions={'function': check_repo}, |
|
1031 | 1027 | requirements=URL_NAME_REQUIREMENTS) |
|
1032 | 1028 | |
|
1033 | 1029 | rmap.connect('files_raw_home', |
|
1034 | 1030 | '/{repo_name}/raw/{revision}/{f_path}', |
|
1035 | 1031 | controller='files', action='raw', revision='tip', f_path='', |
|
1036 | 1032 | conditions={'function': check_repo}, |
|
1037 | 1033 | requirements=URL_NAME_REQUIREMENTS) |
|
1038 | 1034 | |
|
1039 | 1035 | rmap.connect('files_render_home', |
|
1040 | 1036 | '/{repo_name}/render/{revision}/{f_path}', |
|
1041 | 1037 | controller='files', action='index', revision='tip', f_path='', |
|
1042 | 1038 | rendered=True, conditions={'function': check_repo}, |
|
1043 | 1039 | requirements=URL_NAME_REQUIREMENTS) |
|
1044 | 1040 | |
|
1045 | 1041 | rmap.connect('files_annotate_home', |
|
1046 | 1042 | '/{repo_name}/annotate/{revision}/{f_path}', |
|
1047 | 1043 | controller='files', action='index', revision='tip', |
|
1048 | 1044 | f_path='', annotate=True, conditions={'function': check_repo}, |
|
1049 | 1045 | requirements=URL_NAME_REQUIREMENTS) |
|
1050 | 1046 | |
|
1051 | 1047 | rmap.connect('files_edit', |
|
1052 | 1048 | '/{repo_name}/edit/{revision}/{f_path}', |
|
1053 | 1049 | controller='files', action='edit', revision='tip', |
|
1054 | 1050 | f_path='', |
|
1055 | 1051 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1056 | 1052 | requirements=URL_NAME_REQUIREMENTS) |
|
1057 | 1053 | |
|
1058 | 1054 | rmap.connect('files_edit_home', |
|
1059 | 1055 | '/{repo_name}/edit/{revision}/{f_path}', |
|
1060 | 1056 | controller='files', action='edit_home', revision='tip', |
|
1061 | 1057 | f_path='', conditions={'function': check_repo}, |
|
1062 | 1058 | requirements=URL_NAME_REQUIREMENTS) |
|
1063 | 1059 | |
|
1064 | 1060 | rmap.connect('files_add', |
|
1065 | 1061 | '/{repo_name}/add/{revision}/{f_path}', |
|
1066 | 1062 | controller='files', action='add', revision='tip', |
|
1067 | 1063 | f_path='', |
|
1068 | 1064 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1069 | 1065 | requirements=URL_NAME_REQUIREMENTS) |
|
1070 | 1066 | |
|
1071 | 1067 | rmap.connect('files_add_home', |
|
1072 | 1068 | '/{repo_name}/add/{revision}/{f_path}', |
|
1073 | 1069 | controller='files', action='add_home', revision='tip', |
|
1074 | 1070 | f_path='', conditions={'function': check_repo}, |
|
1075 | 1071 | requirements=URL_NAME_REQUIREMENTS) |
|
1076 | 1072 | |
|
1077 | 1073 | rmap.connect('files_delete', |
|
1078 | 1074 | '/{repo_name}/delete/{revision}/{f_path}', |
|
1079 | 1075 | controller='files', action='delete', revision='tip', |
|
1080 | 1076 | f_path='', |
|
1081 | 1077 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1082 | 1078 | requirements=URL_NAME_REQUIREMENTS) |
|
1083 | 1079 | |
|
1084 | 1080 | rmap.connect('files_delete_home', |
|
1085 | 1081 | '/{repo_name}/delete/{revision}/{f_path}', |
|
1086 | 1082 | controller='files', action='delete_home', revision='tip', |
|
1087 | 1083 | f_path='', conditions={'function': check_repo}, |
|
1088 | 1084 | requirements=URL_NAME_REQUIREMENTS) |
|
1089 | 1085 | |
|
1090 | 1086 | rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}', |
|
1091 | 1087 | controller='files', action='archivefile', |
|
1092 | 1088 | conditions={'function': check_repo}, |
|
1093 | 1089 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1094 | 1090 | |
|
1095 | 1091 | rmap.connect('files_nodelist_home', |
|
1096 | 1092 | '/{repo_name}/nodelist/{revision}/{f_path}', |
|
1097 | 1093 | controller='files', action='nodelist', |
|
1098 | 1094 | conditions={'function': check_repo}, |
|
1099 | 1095 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1100 | 1096 | |
|
1101 | 1097 | rmap.connect('files_metadata_list_home', |
|
1102 | 1098 | '/{repo_name}/metadata_list/{revision}/{f_path}', |
|
1103 | 1099 | controller='files', action='metadata_list', |
|
1104 | 1100 | conditions={'function': check_repo}, |
|
1105 | 1101 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1106 | 1102 | |
|
1107 | 1103 | rmap.connect('repo_fork_create_home', '/{repo_name}/fork', |
|
1108 | 1104 | controller='forks', action='fork_create', |
|
1109 | 1105 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1110 | 1106 | requirements=URL_NAME_REQUIREMENTS) |
|
1111 | 1107 | |
|
1112 | 1108 | rmap.connect('repo_fork_home', '/{repo_name}/fork', |
|
1113 | 1109 | controller='forks', action='fork', |
|
1114 | 1110 | conditions={'function': check_repo}, |
|
1115 | 1111 | requirements=URL_NAME_REQUIREMENTS) |
|
1116 | 1112 | |
|
1117 | 1113 | rmap.connect('repo_forks_home', '/{repo_name}/forks', |
|
1118 | 1114 | controller='forks', action='forks', |
|
1119 | 1115 | conditions={'function': check_repo}, |
|
1120 | 1116 | requirements=URL_NAME_REQUIREMENTS) |
|
1121 | 1117 | |
|
1122 | 1118 | rmap.connect('repo_followers_home', '/{repo_name}/followers', |
|
1123 | 1119 | controller='followers', action='followers', |
|
1124 | 1120 | conditions={'function': check_repo}, |
|
1125 | 1121 | requirements=URL_NAME_REQUIREMENTS) |
|
1126 | 1122 | |
|
1127 | 1123 | # must be here for proper group/repo catching pattern |
|
1128 | 1124 | _connect_with_slash( |
|
1129 | 1125 | rmap, 'repo_group_home', '/{group_name}', |
|
1130 | 1126 | controller='home', action='index_repo_group', |
|
1131 | 1127 | conditions={'function': check_group}, |
|
1132 | 1128 | requirements=URL_NAME_REQUIREMENTS) |
|
1133 | 1129 | |
|
1134 | 1130 | # catch all, at the end |
|
1135 | 1131 | _connect_with_slash( |
|
1136 | 1132 | rmap, 'summary_home', '/{repo_name}', jsroute=True, |
|
1137 | 1133 | controller='summary', action='index', |
|
1138 | 1134 | conditions={'function': check_repo}, |
|
1139 | 1135 | requirements=URL_NAME_REQUIREMENTS) |
|
1140 | 1136 | |
|
1141 | 1137 | return rmap |
|
1142 | 1138 | |
|
1143 | 1139 | |
|
1144 | 1140 | def _connect_with_slash(mapper, name, path, *args, **kwargs): |
|
1145 | 1141 | """ |
|
1146 | 1142 | Connect a route with an optional trailing slash in `path`. |
|
1147 | 1143 | """ |
|
1148 | 1144 | mapper.connect(name + '_slash', path + '/', *args, **kwargs) |
|
1149 | 1145 | mapper.connect(name, path, *args, **kwargs) |
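
Note for reviewers unfamiliar with Routes: the `_connect_with_slash` helper above simply registers the same route twice, once with and once without a trailing slash, so both URL forms hit the same controller/action. A minimal standalone sketch of the effect, reusing the helper from this file with a plain `routes.Mapper` (the `conditions`, `requirements`, and `jsroute` arguments of the real routing table are omitted here):

from routes import Mapper

def _connect_with_slash(mapper, name, path, *args, **kwargs):
    # Same helper as in the routing module above: register the route twice,
    # once with a trailing slash and once without.
    mapper.connect(name + '_slash', path + '/', *args, **kwargs)
    mapper.connect(name, path, *args, **kwargs)

rmap = Mapper()
_connect_with_slash(
    rmap, 'summary_home', '/{repo_name}',
    controller='summary', action='index')

# Both URL forms resolve to the same controller/action, e.g.
# {'controller': 'summary', 'action': 'index', 'repo_name': 'some-repo'}
print(rmap.match('/some-repo'))
print(rmap.match('/some-repo/'))
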
@@ -1,70 +1,76 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2015-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Disable VCS pages when VCS Server is not available |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import re |
|
27 | ||
|
27 | from pyramid.httpexceptions import HTTPBadGateway | |
|
28 | 28 | |
|
29 | 29 | log = logging.getLogger(__name__) |
|
30 | 30 | |
|
31 | 31 | |
|
32 | class VCSServerUnavailable(HTTPBadGateway): | |
|
33 | """ HTTP Exception class for when VCS Server is unavailable """ | |
|
34 | code = 502 | |
|
35 | title = 'VCS Server Required' | |
|
36 | explanation = 'A VCS Server is required for this action. There is currently no VCS Server configured.' | |
|
37 | ||
|
32 | 38 | class DisableVCSPagesWrapper(object): |
|
33 | 39 | """ |
|
34 | Wrapper to disable all pages that require VCS Server to be running, |

35 | avoiding that errors explode to the user. |
|
40 | Pyramid view wrapper to disable all pages that require VCS Server to be | |
|
41 | running, avoiding that errors explode to the user. | |
|
36 | 42 | |
|
37 | 43 | This Wrapper should be enabled only in case VCS Server is not available |
|
38 | 44 | for the instance. |
|
39 | 45 | """ |
|
40 | 46 | |
|
41 | 47 | VCS_NOT_REQUIRED = [ |
|
42 | 48 | '^/$', |
|
43 | 49 | ('/_admin(?!/settings/mapping)(?!/my_account/repos)' |
|
44 | 50 | '(?!/create_repository)(?!/gists)(?!/notifications/)' |
|
45 | 51 | ), |
|
46 | 52 | ] |
|
47 | 53 | _REGEX_VCS_NOT_REQUIRED = [re.compile(path) for path in VCS_NOT_REQUIRED] |
|
48 | 54 | |
|
49 | 55 | def _check_vcs_requirement(self, path_info): |
|
50 | 56 | """ |
|
51 | 57 | Tries to match the current path to one of the safe URLs to be rendered. |
|
52 | 58 | Displays an error message in case |
|
53 | 59 | """ |
|
54 | 60 | for regex in self._REGEX_VCS_NOT_REQUIRED: |
|
55 | 61 | safe_url = regex.match(path_info) |
|
56 | 62 | if safe_url: |
|
57 | 63 | return True |
|
58 | 64 | |
|
59 | 65 | # Url is not safe to be rendered without VCS Server |
|
60 | 66 | log.debug('accessing: `%s` with VCS Server disabled', path_info) |
|
61 | 67 | return False |
|
62 | 68 | |
|
63 | def __init__(self, app): |

64 | self.application = app |

69 | def __init__(self, handler): | |

70 | self.handler = handler | |

65 | 71 |

66 | def __call__(self, environ, start_response): |

67 | if not self._check_vcs_requirement(environ['PATH_INFO']): |

68 | environ['PATH_INFO'] = '/error/vcs_unavailable' |

72 | def __call__(self, context, request): | |

73 | if not self._check_vcs_requirement(request.environ['PATH_INFO']): | |

74 | raise VCSServerUnavailable('VCS Server is not available') | |

69 | 75 |

70 | return self.application(environ, start_response) |

76 | return self.handler(context, request) |
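
This hunk replaces the old WSGI-style middleware with a Pyramid view wrapper: instead of rewriting `PATH_INFO` to an error page, the wrapper now raises `VCSServerUnavailable` (a 502) whenever the requested path is not matched by `VCS_NOT_REQUIRED`. A rough sketch of how such a `(context, request)` wrapper could be applied around an ordinary Pyramid view; the route name, view body, and import path are illustrative assumptions, not RhodeCode's actual wiring:

from pyramid.config import Configurator
from pyramid.response import Response

# Import path is a guess for illustration; the class is the one defined in
# the diff above.
from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper

def repo_summary_view(context, request):
    # Placeholder view standing in for a real VCS-backed page.
    return Response('summary page')

def make_app():
    config = Configurator()
    config.add_route('summary', '/{repo_name}')
    # Wrapping the view means every request to it first passes the
    # VCS-availability check; paths outside VCS_NOT_REQUIRED raise
    # VCSServerUnavailable and are rendered as a 502 response.
    config.add_view(DisableVCSPagesWrapper(repo_summary_view),
                    route_name='summary')
    return config.make_wsgi_app()
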
@@ -1,87 +1,88 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | import pylons |
|
24 | 24 | import rhodecode |
|
25 | 25 | |
|
26 | 26 | from pylons.i18n.translation import _get_translator |
|
27 | 27 | from pylons.util import ContextObj |
|
28 | 28 | from routes.util import URLGenerator |
|
29 | from pyramid.httpexceptions import HTTPInternalServerError, HTTPError, HTTPServiceUnavailable | |
|
29 | 30 | |
|
30 | 31 | from rhodecode.lib.base import attach_context_attributes, get_auth_user |
|
31 | 32 | from rhodecode.model import meta |
|
32 | 33 | |
|
33 | 34 | log = logging.getLogger(__name__) |
|
34 | 35 | |
|
35 | 36 | |
|
36 | 37 | def pylons_compatibility_tween_factory(handler, registry): |
|
37 | 38 | def pylons_compatibility_tween(request): |
|
38 | 39 | """ |
|
39 | 40 | While migrating from pylons to pyramid we need to call some pylons code |
|
40 | 41 | from pyramid. For example while rendering an old template that uses the |
|
41 | 42 | 'c' or 'h' objects. This tween sets up the needed pylons globals. |
|
42 | 43 | """ |
|
43 | 44 | try: |
|
44 | 45 | config = rhodecode.CONFIG |
|
45 | 46 | environ = request.environ |
|
46 | 47 | session = request.session |
|
47 | 48 | session_key = (config['pylons.environ_config'] |
|
48 | 49 | .get('session', 'beaker.session')) |
|
49 | 50 | |
|
50 | 51 | # Setup pylons globals. |
|
51 | 52 | pylons.config._push_object(config) |
|
52 | 53 | pylons.request._push_object(request) |
|
53 | 54 | pylons.session._push_object(session) |
|
54 | 55 | environ[session_key] = session |
|
55 | 56 | pylons.url._push_object(URLGenerator(config['routes.map'], |
|
56 | 57 | environ)) |
|
57 | 58 | |
|
58 | 59 | # TODO: Maybe we should use the language from pyramid. |
|
59 | 60 | translator = _get_translator(config.get('lang')) |
|
60 | 61 | pylons.translator._push_object(translator) |
|
61 | 62 | |
|
62 | 63 | # Get the rhodecode auth user object and make it available. |
|
63 | 64 | auth_user = get_auth_user(environ) |
|
64 | 65 | request.user = auth_user |
|
65 | 66 | environ['rc_auth_user'] = auth_user |
|
66 | 67 | |
|
67 | 68 | # Setup the pylons context object ('c') |
|
68 | 69 | context = ContextObj() |
|
69 | 70 | context.rhodecode_user = auth_user |
|
70 | 71 | attach_context_attributes(context) |
|
71 | 72 | pylons.tmpl_context._push_object(context) |
|
72 | ||
|
73 | return handler(request) |

73 | response = handler(request) |
|
74 | return response | |
|
74 | 75 | finally: |
|
75 | 76 | # Dispose current database session and rollback uncommitted |
|
76 | 77 | # transactions. |
|
77 | 78 | meta.Session.remove() |
|
78 | 79 | |
|
79 | 80 | return pylons_compatibility_tween |
|
80 | 81 | |
|
81 | 82 | |
|
82 | 83 | def includeme(config): |
|
83 | 84 | config.add_subscriber('rhodecode.subscribers.add_renderer_globals', |
|
84 | 85 | 'pyramid.events.BeforeRender') |
|
85 | 86 | config.add_subscriber('rhodecode.subscribers.add_localizer', |
|
86 | 87 | 'pyramid.events.NewRequest') |
|
87 | 88 | config.add_tween('rhodecode.tweens.pylons_compatibility_tween_factory') |
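
`pylons_compatibility_tween_factory` follows Pyramid's standard tween protocol: the factory receives the downstream handler and the registry, and returns a callable that takes the request, does its setup work, delegates to the handler, and cleans up afterwards. A generic sketch of the same pattern, here as a simple request-timing tween with a made-up module path, purely for illustration:

import logging
import time

log = logging.getLogger(__name__)


def timing_tween_factory(handler, registry):
    def timing_tween(request):
        # Work before calling the handler corresponds to the pylons setup
        # above; the finally block plays the role of the session cleanup.
        start = time.time()
        try:
            return handler(request)
        finally:
            log.debug('request to %s took %.3fs',
                      request.path, time.time() - start)
    return timing_tween


def includeme(config):
    # Registered by dotted name, just like the compatibility tween above.
    # 'myapp.tweens.timing_tween_factory' is a placeholder path.
    config.add_tween('myapp.tweens.timing_tween_factory')
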