@@ -1,481 +1,487 b''
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # RhodeCode - Pylons environment configuration # |
|
4 | 4 | # # |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | pdebug = false |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ## Uncomment and replace with the address which should receive ## |
|
13 | 13 | ## any error reports after application crash ## |
|
14 | 14 | ## Additionally those settings will be used by RhodeCode mailing system ## |
|
15 | 15 | ################################################################################ |
|
16 | 16 | #email_to = admin@localhost |
|
17 | 17 | #error_email_from = paste_error@localhost |
|
18 | 18 | #app_email_from = rhodecode-noreply@localhost |
|
19 | 19 | #error_message = |
|
20 | 20 | #email_prefix = [RhodeCode] |
|
21 | 21 | |
|
22 | 22 | #smtp_server = mail.server.com |
|
23 | 23 | #smtp_username = |
|
24 | 24 | #smtp_password = |
|
25 | 25 | #smtp_port = |
|
26 | 26 | #smtp_use_tls = false |
|
27 | 27 | #smtp_use_ssl = true |
|
28 | 28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
29 | 29 | #smtp_auth = |
|
30 | 30 | |
|
31 | 31 | [server:main] |
|
32 | 32 | ## PASTE |
|
33 | 33 | ## nr of threads to spawn |
|
34 | 34 | #threadpool_workers = 5 |
|
35 | 35 | |
|
36 | 36 | ## max request before thread respawn |
|
37 | 37 | #threadpool_max_requests = 10 |
|
38 | 38 | |
|
39 | 39 | ## option to use threads of process |
|
40 | 40 | #use_threadpool = true |
|
41 | 41 | |
|
42 | 42 | #use = egg:Paste#http |
|
43 | 43 | |
|
44 | 44 | ## WAITRESS |
|
45 | 45 | threads = 5 |
|
46 | 46 | ## 100GB |
|
47 | 47 | max_request_body_size = 107374182400 |
|
48 | 48 | use = egg:waitress#main |
|
49 | 49 | |
|
50 | 50 | host = 0.0.0.0 |
|
51 | 51 | port = 5000 |
|
52 | 52 | |
|
53 | 53 | ## prefix middleware for rc |
|
54 | 54 | #[filter:proxy-prefix] |
|
55 | 55 | #use = egg:PasteDeploy#prefix |
|
56 | 56 | #prefix = /<your-prefix> |
|
57 | 57 | |
|
58 | 58 | [app:main] |
|
59 | 59 | use = egg:rhodecode |
|
60 | 60 | ## enable proxy prefix middleware |
|
61 | 61 | #filter-with = proxy-prefix |
|
62 | 62 | |
|
63 | 63 | full_stack = true |
|
64 | 64 | static_files = true |
|
65 | 65 | ## Optional Languages |
|
66 | 66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl |
|
67 | 67 | lang = en |
|
68 | 68 | cache_dir = %(here)s/data |
|
69 | 69 | index_dir = %(here)s/data/index |
|
70 | 70 | |
|
71 | 71 | ## uncomment and set this path to use archive download cache |
|
72 | 72 | #archive_cache_dir = /tmp/tarballcache |
|
73 | 73 | |
|
74 | 74 | ## change this to unique ID for security |
|
75 | 75 | app_instance_uuid = rc-production |
|
76 | 76 | |
|
77 | 77 | ## cut off limit for large diffs (size in bytes) |
|
78 | 78 | cut_off_limit = 256000 |
|
79 | 79 | |
|
80 | 80 | ## use cache version of scm repo everywhere |
|
81 | 81 | vcs_full_cache = true |
|
82 | 82 | |
|
83 | 83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
84 | 84 | force_https = false |
|
85 | 85 | |
|
86 | 86 | ## use Strict-Transport-Security headers |
|
87 | 87 | use_htsts = false |
|
88 | 88 | |
|
89 | 89 | ## number of commits stats will parse on each iteration |
|
90 | 90 | commit_parse_limit = 25 |
|
91 | 91 | |
|
92 | 92 | ## number of items displayed in lightweight dashboard before paginating is shown |
|
93 | 93 | dashboard_items = 100 |
|
94 | 94 | |
|
95 | 95 | ## use gravatar service to display avatars |
|
96 | 96 | use_gravatar = true |
|
97 | 97 | |
|
98 | 98 | ## path to git executable |
|
99 | 99 | git_path = git |
|
100 | 100 | |
|
101 | 101 | ## git rev filter option, --all is the default filter, if you need to |
|
102 | 102 | ## hide all refs in changelog switch this to --branches --tags |
|
103 | 103 | git_rev_filter=--all |
|
104 | 104 | |
|
105 | 105 | ## RSS feed options |
|
106 | 106 | rss_cut_off_limit = 256000 |
|
107 | 107 | rss_items_per_page = 10 |
|
108 | 108 | rss_include_diff = false |
|
109 | 109 | |
|
110 | 110 | ## options for showing and identifying changesets |
|
111 | 111 | show_sha_length = 12 |
|
112 | 112 | show_revision_number = true |
|
113 | 113 | |
|
114 | ## white list of API enabled controllers. This allows to add list of | |
|
115 | ## controllers to which access will be enabled by api_key. eg: to enable | |
|
116 | ## api access to raw_files put `FilesController:raw`, to enable access to patches | |
|
117 | ## add `ChangesetController:changeset_patch`. This list should be "," separated | |
|
118 | ## Syntax is <ControllerClass>:<function>. Check debug logs for generated names | |
|
119 | api_access_controllers_whitelist = | |
|
114 | 120 | |
|
115 | 121 | ## alternative_gravatar_url allows you to use your own avatar server application |
|
116 | 122 | ## the following parts of the URL will be replaced |
|
117 | 123 | ## {email} user email |
|
118 | 124 | ## {md5email} md5 hash of the user email (like at gravatar.com) |
|
119 | 125 | ## {size} size of the image that is expected from the server application |
|
120 | 126 | ## {scheme} http/https from RhodeCode server |
|
121 | 127 | ## {netloc} network location from RhodeCode server |
|
122 | 128 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} |
|
123 | 129 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} |
|
124 | 130 | |
|
125 | 131 | |
|
126 | 132 | ## container auth options |
|
127 | 133 | container_auth_enabled = false |
|
128 | 134 | proxypass_auth_enabled = false |
|
129 | 135 | |
|
130 | 136 | ## default encoding used to convert from and to unicode |
|
131 | 137 | ## can be also a comma seperated list of encoding in case of mixed encodings |
|
132 | 138 | default_encoding = utf8 |
|
133 | 139 | |
|
134 | 140 | ## overwrite schema of clone url |
|
135 | 141 | ## available vars: |
|
136 | 142 | ## scheme - http/https |
|
137 | 143 | ## user - current user |
|
138 | 144 | ## pass - password |
|
139 | 145 | ## netloc - network location |
|
140 | 146 | ## path - usually repo_name |
|
141 | 147 | |
|
142 | 148 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} |
|
143 | 149 | |
|
144 | 150 | ## issue tracking mapping for commits messages |
|
145 | 151 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
146 | 152 | |
|
147 | 153 | ## pattern to get the issues from commit messages |
|
148 | 154 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
149 | 155 | ## {id} will be all groups matched from this pattern |
|
150 | 156 | |
|
151 | 157 | issue_pat = (?:\s*#)(\d+) |
|
152 | 158 | |
|
153 | 159 | ## server url to the issue, each {id} will be replaced with match |
|
154 | 160 | ## fetched from the regex and {repo} is replaced with full repository name |
|
155 | 161 | ## including groups {repo_name} is replaced with just name of repo |
|
156 | 162 | |
|
157 | 163 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} |
|
158 | 164 | |
|
159 | 165 | ## prefix to add to link to indicate it's an url |
|
160 | 166 | ## #314 will be replaced by <issue_prefix><id> |
|
161 | 167 | |
|
162 | 168 | issue_prefix = # |
|
163 | 169 | |
|
164 | 170 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
165 | 171 | ## multiple patterns, to other issues server, wiki or others |
|
166 | 172 | ## below an example how to create a wiki pattern |
|
167 | 173 | # #wiki-some-id -> https://mywiki.com/some-id |
|
168 | 174 | |
|
169 | 175 | #issue_pat_wiki = (?:wiki-)(.+) |
|
170 | 176 | #issue_server_link_wiki = https://mywiki.com/{id} |
|
171 | 177 | #issue_prefix_wiki = WIKI- |
|
172 | 178 | |
|
173 | 179 | |
|
174 | 180 | ## instance-id prefix |
|
175 | 181 | ## a prefix key for this instance used for cache invalidation when running |
|
176 | 182 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
177 | 183 | ## all running rhodecode instances. Leave empty if you don't use it |
|
178 | 184 | instance_id = |
|
179 | 185 | |
|
180 | 186 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
181 | 187 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
182 | 188 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
183 | 189 | auth_ret_code = |
|
184 | 190 | |
|
185 | 191 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
186 | 192 | ## codes don't break the transactions while 4XX codes do |
|
187 | 193 | lock_ret_code = 423 |
|
188 | 194 | |
|
189 | 195 | |
|
190 | 196 | #################################### |
|
191 | 197 | ### CELERY CONFIG #### |
|
192 | 198 | #################################### |
|
193 | 199 | use_celery = false |
|
194 | 200 | broker.host = localhost |
|
195 | 201 | broker.vhost = rabbitmqhost |
|
196 | 202 | broker.port = 5672 |
|
197 | 203 | broker.user = rabbitmq |
|
198 | 204 | broker.password = qweqwe |
|
199 | 205 | |
|
200 | 206 | celery.imports = rhodecode.lib.celerylib.tasks |
|
201 | 207 | |
|
202 | 208 | celery.result.backend = amqp |
|
203 | 209 | celery.result.dburi = amqp:// |
|
204 | 210 | celery.result.serialier = json |
|
205 | 211 | |
|
206 | 212 | #celery.send.task.error.emails = true |
|
207 | 213 | #celery.amqp.task.result.expires = 18000 |
|
208 | 214 | |
|
209 | 215 | celeryd.concurrency = 2 |
|
210 | 216 | #celeryd.log.file = celeryd.log |
|
211 | 217 | celeryd.log.level = debug |
|
212 | 218 | celeryd.max.tasks.per.child = 1 |
|
213 | 219 | |
|
214 | 220 | ## tasks will never be sent to the queue, but executed locally instead. |
|
215 | 221 | celery.always.eager = false |
|
216 | 222 | |
|
217 | 223 | #################################### |
|
218 | 224 | ### BEAKER CACHE #### |
|
219 | 225 | #################################### |
|
220 | 226 | beaker.cache.data_dir=%(here)s/data/cache/data |
|
221 | 227 | beaker.cache.lock_dir=%(here)s/data/cache/lock |
|
222 | 228 | |
|
223 | 229 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long |
|
224 | 230 | |
|
225 | 231 | beaker.cache.super_short_term.type=memory |
|
226 | 232 | beaker.cache.super_short_term.expire=10 |
|
227 | 233 | beaker.cache.super_short_term.key_length = 256 |
|
228 | 234 | |
|
229 | 235 | beaker.cache.short_term.type=memory |
|
230 | 236 | beaker.cache.short_term.expire=60 |
|
231 | 237 | beaker.cache.short_term.key_length = 256 |
|
232 | 238 | |
|
233 | 239 | beaker.cache.long_term.type=memory |
|
234 | 240 | beaker.cache.long_term.expire=36000 |
|
235 | 241 | beaker.cache.long_term.key_length = 256 |
|
236 | 242 | |
|
237 | 243 | beaker.cache.sql_cache_short.type=memory |
|
238 | 244 | beaker.cache.sql_cache_short.expire=10 |
|
239 | 245 | beaker.cache.sql_cache_short.key_length = 256 |
|
240 | 246 | |
|
241 | 247 | beaker.cache.sql_cache_med.type=memory |
|
242 | 248 | beaker.cache.sql_cache_med.expire=360 |
|
243 | 249 | beaker.cache.sql_cache_med.key_length = 256 |
|
244 | 250 | |
|
245 | 251 | beaker.cache.sql_cache_long.type=file |
|
246 | 252 | beaker.cache.sql_cache_long.expire=3600 |
|
247 | 253 | beaker.cache.sql_cache_long.key_length = 256 |
|
248 | 254 | |
|
249 | 255 | #################################### |
|
250 | 256 | ### BEAKER SESSION #### |
|
251 | 257 | #################################### |
|
252 | 258 | ## Type of storage used for the session, current types are |
|
253 | 259 | ## dbm, file, memcached, database, and memory. |
|
254 | 260 | ## The storage uses the Container API |
|
255 | 261 | ## that is also used by the cache system. |
|
256 | 262 | |
|
257 | 263 | ## db session ## |
|
258 | 264 | #beaker.session.type = ext:database |
|
259 | 265 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode |
|
260 | 266 | #beaker.session.table_name = db_session |
|
261 | 267 | |
|
262 | 268 | ## encrypted cookie client side session, good for many instances ## |
|
263 | 269 | #beaker.session.type = cookie |
|
264 | 270 | |
|
265 | 271 | ## file based cookies (default) ## |
|
266 | 272 | #beaker.session.type = file |
|
267 | 273 | |
|
268 | 274 | |
|
269 | 275 | beaker.session.key = rhodecode |
|
270 | 276 | ## secure cookie requires AES python libraries |
|
271 | 277 | #beaker.session.encrypt_key = <key_for_encryption> |
|
272 | 278 | #beaker.session.validate_key = <validation_key> |
|
273 | 279 | |
|
274 | 280 | ## sets session as invalid if it haven't been accessed for given amount of time |
|
275 | 281 | beaker.session.timeout = 2592000 |
|
276 | 282 | beaker.session.httponly = true |
|
277 | 283 | #beaker.session.cookie_path = /<your-prefix> |
|
278 | 284 | |
|
279 | 285 | ## uncomment for https secure cookie |
|
280 | 286 | beaker.session.secure = false |
|
281 | 287 | |
|
282 | 288 | ## auto save the session to not to use .save() |
|
283 | 289 | beaker.session.auto = False |
|
284 | 290 | |
|
285 | 291 | ## default cookie expiration time in seconds `true` expire at browser close ## |
|
286 | 292 | #beaker.session.cookie_expires = 3600 |
|
287 | 293 | |
|
288 | 294 | |
|
289 | 295 | ############################ |
|
290 | 296 | ## ERROR HANDLING SYSTEMS ## |
|
291 | 297 | ############################ |
|
292 | 298 | |
|
293 | 299 | #################### |
|
294 | 300 | ### [errormator] ### |
|
295 | 301 | #################### |
|
296 | 302 | |
|
297 | 303 | ## Errormator is tailored to work with RhodeCode, see |
|
298 | 304 | ## http://errormator.com for details how to obtain an account |
|
299 | 305 | ## you must install python package `errormator_client` to make it work |
|
300 | 306 | |
|
301 | 307 | ## errormator enabled |
|
302 | 308 | errormator = false |
|
303 | 309 | |
|
304 | 310 | errormator.server_url = https://api.errormator.com |
|
305 | 311 | errormator.api_key = YOUR_API_KEY |
|
306 | 312 | |
|
307 | 313 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
308 | 314 | |
|
309 | 315 | ## enables 404 error logging (default False) |
|
310 | 316 | errormator.report_404 = false |
|
311 | 317 | |
|
312 | 318 | ## time in seconds after request is considered being slow (default 1) |
|
313 | 319 | errormator.slow_request_time = 1 |
|
314 | 320 | |
|
315 | 321 | ## record slow requests in application |
|
316 | 322 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
317 | 323 | errormator.slow_requests = true |
|
318 | 324 | |
|
319 | 325 | ## enable hooking to application loggers |
|
320 | 326 | # errormator.logging = true |
|
321 | 327 | |
|
322 | 328 | ## minimum log level for log capture |
|
323 | 329 | # errormator.logging.level = WARNING |
|
324 | 330 | |
|
325 | 331 | ## send logs only from erroneous/slow requests |
|
326 | 332 | ## (saves API quota for intensive logging) |
|
327 | 333 | errormator.logging_on_error = false |
|
328 | 334 | |
|
329 | 335 | ## list of additonal keywords that should be grabbed from environ object |
|
330 | 336 | ## can be string with comma separated list of words in lowercase |
|
331 | 337 | ## (by default client will always send following info: |
|
332 | 338 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
333 | 339 | ## start with HTTP* this list be extended with additional keywords here |
|
334 | 340 | errormator.environ_keys_whitelist = |
|
335 | 341 | |
|
336 | 342 | |
|
337 | 343 | ## list of keywords that should be blanked from request object |
|
338 | 344 | ## can be string with comma separated list of words in lowercase |
|
339 | 345 | ## (by default client will always blank keys that contain following words |
|
340 | 346 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
341 | 347 | ## this list be extended with additional keywords set here |
|
342 | 348 | errormator.request_keys_blacklist = |
|
343 | 349 | |
|
344 | 350 | |
|
345 | 351 | ## list of namespaces that should be ignores when gathering log entries |
|
346 | 352 | ## can be string with comma separated list of namespaces |
|
347 | 353 | ## (by default the client ignores own entries: errormator_client.client) |
|
348 | 354 | errormator.log_namespace_blacklist = |
|
349 | 355 | |
|
350 | 356 | |
|
351 | 357 | ################ |
|
352 | 358 | ### [sentry] ### |
|
353 | 359 | ################ |
|
354 | 360 | |
|
355 | 361 | ## sentry is a alternative open source error aggregator |
|
356 | 362 | ## you must install python packages `sentry` and `raven` to enable |
|
357 | 363 | |
|
358 | 364 | sentry.dsn = YOUR_DNS |
|
359 | 365 | sentry.servers = |
|
360 | 366 | sentry.name = |
|
361 | 367 | sentry.key = |
|
362 | 368 | sentry.public_key = |
|
363 | 369 | sentry.secret_key = |
|
364 | 370 | sentry.project = |
|
365 | 371 | sentry.site = |
|
366 | 372 | sentry.include_paths = |
|
367 | 373 | sentry.exclude_paths = |
|
368 | 374 | |
|
369 | 375 | |
|
370 | 376 | ################################################################################ |
|
371 | 377 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
372 | 378 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
373 | 379 | ## execute malicious code after an exception is raised. ## |
|
374 | 380 | ################################################################################ |
|
375 | 381 | #set debug = false |
|
376 | 382 | |
|
377 | 383 | ################################## |
|
378 | 384 | ### LOGVIEW CONFIG ### |
|
379 | 385 | ################################## |
|
380 | 386 | logview.sqlalchemy = #faa |
|
381 | 387 | logview.pylons.templating = #bfb |
|
382 | 388 | logview.pylons.util = #eee |
|
383 | 389 | |
|
384 | 390 | ######################################################### |
|
385 | 391 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
386 | 392 | ######################################################### |
|
387 | 393 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db |
|
388 | 394 | sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode |
|
389 | 395 | sqlalchemy.db1.echo = false |
|
390 | 396 | sqlalchemy.db1.pool_recycle = 3600 |
|
391 | 397 | sqlalchemy.db1.convert_unicode = true |
|
392 | 398 | |
|
393 | 399 | ################################ |
|
394 | 400 | ### LOGGING CONFIGURATION #### |
|
395 | 401 | ################################ |
|
396 | 402 | [loggers] |
|
397 | 403 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer |
|
398 | 404 | |
|
399 | 405 | [handlers] |
|
400 | 406 | keys = console, console_sql |
|
401 | 407 | |
|
402 | 408 | [formatters] |
|
403 | 409 | keys = generic, color_formatter, color_formatter_sql |
|
404 | 410 | |
|
405 | 411 | ############# |
|
406 | 412 | ## LOGGERS ## |
|
407 | 413 | ############# |
|
408 | 414 | [logger_root] |
|
409 | 415 | level = NOTSET |
|
410 | 416 | handlers = console |
|
411 | 417 | |
|
412 | 418 | [logger_routes] |
|
413 | 419 | level = DEBUG |
|
414 | 420 | handlers = |
|
415 | 421 | qualname = routes.middleware |
|
416 | 422 | ## "level = DEBUG" logs the route matched and routing variables. |
|
417 | 423 | propagate = 1 |
|
418 | 424 | |
|
419 | 425 | [logger_beaker] |
|
420 | 426 | level = DEBUG |
|
421 | 427 | handlers = |
|
422 | 428 | qualname = beaker.container |
|
423 | 429 | propagate = 1 |
|
424 | 430 | |
|
425 | 431 | [logger_templates] |
|
426 | 432 | level = INFO |
|
427 | 433 | handlers = |
|
428 | 434 | qualname = pylons.templating |
|
429 | 435 | propagate = 1 |
|
430 | 436 | |
|
431 | 437 | [logger_rhodecode] |
|
432 | 438 | level = DEBUG |
|
433 | 439 | handlers = |
|
434 | 440 | qualname = rhodecode |
|
435 | 441 | propagate = 1 |
|
436 | 442 | |
|
437 | 443 | [logger_sqlalchemy] |
|
438 | 444 | level = INFO |
|
439 | 445 | handlers = console_sql |
|
440 | 446 | qualname = sqlalchemy.engine |
|
441 | 447 | propagate = 0 |
|
442 | 448 | |
|
443 | 449 | [logger_whoosh_indexer] |
|
444 | 450 | level = DEBUG |
|
445 | 451 | handlers = |
|
446 | 452 | qualname = whoosh_indexer |
|
447 | 453 | propagate = 1 |
|
448 | 454 | |
|
449 | 455 | ############## |
|
450 | 456 | ## HANDLERS ## |
|
451 | 457 | ############## |
|
452 | 458 | |
|
453 | 459 | [handler_console] |
|
454 | 460 | class = StreamHandler |
|
455 | 461 | args = (sys.stderr,) |
|
456 | 462 | level = DEBUG |
|
457 | 463 | formatter = color_formatter |
|
458 | 464 | |
|
459 | 465 | [handler_console_sql] |
|
460 | 466 | class = StreamHandler |
|
461 | 467 | args = (sys.stderr,) |
|
462 | 468 | level = DEBUG |
|
463 | 469 | formatter = color_formatter_sql |
|
464 | 470 | |
|
465 | 471 | ################ |
|
466 | 472 | ## FORMATTERS ## |
|
467 | 473 | ################ |
|
468 | 474 | |
|
469 | 475 | [formatter_generic] |
|
470 | 476 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
471 | 477 | datefmt = %Y-%m-%d %H:%M:%S |
|
472 | 478 | |
|
473 | 479 | [formatter_color_formatter] |
|
474 | 480 | class=rhodecode.lib.colored_formatter.ColorFormatter |
|
475 | 481 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
476 | 482 | datefmt = %Y-%m-%d %H:%M:%S |
|
477 | 483 | |
|
478 | 484 | [formatter_color_formatter_sql] |
|
479 | 485 | class=rhodecode.lib.colored_formatter.ColorFormatterSql |
|
480 | 486 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
481 | 487 | datefmt = %Y-%m-%d %H:%M:%S |
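
The issue-tracker settings shown in the hunk above work together: `issue_pat` extracts the numeric id from commit messages, `issue_server_link` is the URL template that receives it as {id}, and `issue_prefix` is the visible label. The sketch below is only an illustration of that flow; the `link_issues` helper is hypothetical and is not RhodeCode's own code.

import re

issue_pat = r'(?:\s*#)(\d+)'
issue_server_link = 'https://myissueserver.com/{repo}/issue/{id}'
issue_prefix = '#'

def link_issues(message, repo):
    # replace every "#<number>" reference with a link built from the template
    def _replace(match):
        issue_id = match.group(1)
        url = issue_server_link.format(repo=repo, id=issue_id)
        link = '<a href="%s">%s%s</a>' % (url, issue_prefix, issue_id)
        # keep whatever leading whitespace the pattern swallowed
        return match.group(0).replace('#' + issue_id, link)
    return re.sub(issue_pat, _replace, message)

print(link_issues('fixes #314', 'group/myrepo'))
# -> fixes <a href="https://myissueserver.com/group/myrepo/issue/314">#314</a>
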
@@ -1,481 +1,487 b''
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # RhodeCode - Pylons environment configuration # |
|
4 | 4 | # # |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | pdebug = false |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ## Uncomment and replace with the address which should receive ## |
|
13 | 13 | ## any error reports after application crash ## |
|
14 | 14 | ## Additionally those settings will be used by RhodeCode mailing system ## |
|
15 | 15 | ################################################################################ |
|
16 | 16 | #email_to = admin@localhost |
|
17 | 17 | #error_email_from = paste_error@localhost |
|
18 | 18 | #app_email_from = rhodecode-noreply@localhost |
|
19 | 19 | #error_message = |
|
20 | 20 | #email_prefix = [RhodeCode] |
|
21 | 21 | |
|
22 | 22 | #smtp_server = mail.server.com |
|
23 | 23 | #smtp_username = |
|
24 | 24 | #smtp_password = |
|
25 | 25 | #smtp_port = |
|
26 | 26 | #smtp_use_tls = false |
|
27 | 27 | #smtp_use_ssl = true |
|
28 | 28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
29 | 29 | #smtp_auth = |
|
30 | 30 | |
|
31 | 31 | [server:main] |
|
32 | 32 | ## PASTE |
|
33 | 33 | ## nr of threads to spawn |
|
34 | 34 | #threadpool_workers = 5 |
|
35 | 35 | |
|
36 | 36 | ## max request before thread respawn |
|
37 | 37 | #threadpool_max_requests = 10 |
|
38 | 38 | |
|
39 | 39 | ## option to use threads of process |
|
40 | 40 | #use_threadpool = true |
|
41 | 41 | |
|
42 | 42 | #use = egg:Paste#http |
|
43 | 43 | |
|
44 | 44 | ## WAITRESS |
|
45 | 45 | threads = 5 |
|
46 | 46 | ## 100GB |
|
47 | 47 | max_request_body_size = 107374182400 |
|
48 | 48 | use = egg:waitress#main |
|
49 | 49 | |
|
50 | 50 | host = 127.0.0.1 |
|
51 | 51 | port = 8001 |
|
52 | 52 | |
|
53 | 53 | ## prefix middleware for rc |
|
54 | 54 | #[filter:proxy-prefix] |
|
55 | 55 | #use = egg:PasteDeploy#prefix |
|
56 | 56 | #prefix = /<your-prefix> |
|
57 | 57 | |
|
58 | 58 | [app:main] |
|
59 | 59 | use = egg:rhodecode |
|
60 | 60 | ## enable proxy prefix middleware |
|
61 | 61 | #filter-with = proxy-prefix |
|
62 | 62 | |
|
63 | 63 | full_stack = true |
|
64 | 64 | static_files = true |
|
65 | 65 | ## Optional Languages |
|
66 | 66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl |
|
67 | 67 | lang = en |
|
68 | 68 | cache_dir = %(here)s/data |
|
69 | 69 | index_dir = %(here)s/data/index |
|
70 | 70 | |
|
71 | 71 | ## uncomment and set this path to use archive download cache |
|
72 | 72 | #archive_cache_dir = /tmp/tarballcache |
|
73 | 73 | |
|
74 | 74 | ## change this to unique ID for security |
|
75 | 75 | app_instance_uuid = rc-production |
|
76 | 76 | |
|
77 | 77 | ## cut off limit for large diffs (size in bytes) |
|
78 | 78 | cut_off_limit = 256000 |
|
79 | 79 | |
|
80 | 80 | ## use cache version of scm repo everywhere |
|
81 | 81 | vcs_full_cache = true |
|
82 | 82 | |
|
83 | 83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
84 | 84 | force_https = false |
|
85 | 85 | |
|
86 | 86 | ## use Strict-Transport-Security headers |
|
87 | 87 | use_htsts = false |
|
88 | 88 | |
|
89 | 89 | ## number of commits stats will parse on each iteration |
|
90 | 90 | commit_parse_limit = 25 |
|
91 | 91 | |
|
92 | 92 | ## number of items displayed in lightweight dashboard before paginating is shown |
|
93 | 93 | dashboard_items = 100 |
|
94 | 94 | |
|
95 | 95 | ## use gravatar service to display avatars |
|
96 | 96 | use_gravatar = true |
|
97 | 97 | |
|
98 | 98 | ## path to git executable |
|
99 | 99 | git_path = git |
|
100 | 100 | |
|
101 | 101 | ## git rev filter option, --all is the default filter, if you need to |
|
102 | 102 | ## hide all refs in changelog switch this to --branches --tags |
|
103 | 103 | git_rev_filter=--all |
|
104 | 104 | |
|
105 | 105 | ## RSS feed options |
|
106 | 106 | rss_cut_off_limit = 256000 |
|
107 | 107 | rss_items_per_page = 10 |
|
108 | 108 | rss_include_diff = false |
|
109 | 109 | |
|
110 | 110 | ## options for showing and identifying changesets |
|
111 | 111 | show_sha_length = 12 |
|
112 | 112 | show_revision_number = true |
|
113 | 113 | |
|
114 | ## white list of API enabled controllers. This allows to add list of | |
|
115 | ## controllers to which access will be enabled by api_key. eg: to enable | |
|
116 | ## api access to raw_files put `FilesController:raw`, to enable access to patches | |
|
117 | ## add `ChangesetController:changeset_patch`. This list should be "," separated | |
|
118 | ## Syntax is <ControllerClass>:<function>. Check debug logs for generated names | |
|
119 | api_access_controllers_whitelist = | |
|
114 | 120 | |
|
115 | 121 | ## alternative_gravatar_url allows you to use your own avatar server application |
|
116 | 122 | ## the following parts of the URL will be replaced |
|
117 | 123 | ## {email} user email |
|
118 | 124 | ## {md5email} md5 hash of the user email (like at gravatar.com) |
|
119 | 125 | ## {size} size of the image that is expected from the server application |
|
120 | 126 | ## {scheme} http/https from RhodeCode server |
|
121 | 127 | ## {netloc} network location from RhodeCode server |
|
122 | 128 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} |
|
123 | 129 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} |
|
124 | 130 | |
|
125 | 131 | |
|
126 | 132 | ## container auth options |
|
127 | 133 | container_auth_enabled = false |
|
128 | 134 | proxypass_auth_enabled = false |
|
129 | 135 | |
|
130 | 136 | ## default encoding used to convert from and to unicode |
|
131 | 137 | ## can be also a comma seperated list of encoding in case of mixed encodings |
|
132 | 138 | default_encoding = utf8 |
|
133 | 139 | |
|
134 | 140 | ## overwrite schema of clone url |
|
135 | 141 | ## available vars: |
|
136 | 142 | ## scheme - http/https |
|
137 | 143 | ## user - current user |
|
138 | 144 | ## pass - password |
|
139 | 145 | ## netloc - network location |
|
140 | 146 | ## path - usually repo_name |
|
141 | 147 | |
|
142 | 148 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} |
|
143 | 149 | |
|
144 | 150 | ## issue tracking mapping for commits messages |
|
145 | 151 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
146 | 152 | |
|
147 | 153 | ## pattern to get the issues from commit messages |
|
148 | 154 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
149 | 155 | ## {id} will be all groups matched from this pattern |
|
150 | 156 | |
|
151 | 157 | issue_pat = (?:\s*#)(\d+) |
|
152 | 158 | |
|
153 | 159 | ## server url to the issue, each {id} will be replaced with match |
|
154 | 160 | ## fetched from the regex and {repo} is replaced with full repository name |
|
155 | 161 | ## including groups {repo_name} is replaced with just name of repo |
|
156 | 162 | |
|
157 | 163 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} |
|
158 | 164 | |
|
159 | 165 | ## prefix to add to link to indicate it's an url |
|
160 | 166 | ## #314 will be replaced by <issue_prefix><id> |
|
161 | 167 | |
|
162 | 168 | issue_prefix = # |
|
163 | 169 | |
|
164 | 170 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
165 | 171 | ## multiple patterns, to other issues server, wiki or others |
|
166 | 172 | ## below an example how to create a wiki pattern |
|
167 | 173 | # #wiki-some-id -> https://mywiki.com/some-id |
|
168 | 174 | |
|
169 | 175 | #issue_pat_wiki = (?:wiki-)(.+) |
|
170 | 176 | #issue_server_link_wiki = https://mywiki.com/{id} |
|
171 | 177 | #issue_prefix_wiki = WIKI- |
|
172 | 178 | |
|
173 | 179 | |
|
174 | 180 | ## instance-id prefix |
|
175 | 181 | ## a prefix key for this instance used for cache invalidation when running |
|
176 | 182 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
177 | 183 | ## all running rhodecode instances. Leave empty if you don't use it |
|
178 | 184 | instance_id = |
|
179 | 185 | |
|
180 | 186 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
181 | 187 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
182 | 188 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
183 | 189 | auth_ret_code = |
|
184 | 190 | |
|
185 | 191 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
186 | 192 | ## codes don't break the transactions while 4XX codes do |
|
187 | 193 | lock_ret_code = 423 |
|
188 | 194 | |
|
189 | 195 | |
|
190 | 196 | #################################### |
|
191 | 197 | ### CELERY CONFIG #### |
|
192 | 198 | #################################### |
|
193 | 199 | use_celery = false |
|
194 | 200 | broker.host = localhost |
|
195 | 201 | broker.vhost = rabbitmqhost |
|
196 | 202 | broker.port = 5672 |
|
197 | 203 | broker.user = rabbitmq |
|
198 | 204 | broker.password = qweqwe |
|
199 | 205 | |
|
200 | 206 | celery.imports = rhodecode.lib.celerylib.tasks |
|
201 | 207 | |
|
202 | 208 | celery.result.backend = amqp |
|
203 | 209 | celery.result.dburi = amqp:// |
|
204 | 210 | celery.result.serialier = json |
|
205 | 211 | |
|
206 | 212 | #celery.send.task.error.emails = true |
|
207 | 213 | #celery.amqp.task.result.expires = 18000 |
|
208 | 214 | |
|
209 | 215 | celeryd.concurrency = 2 |
|
210 | 216 | #celeryd.log.file = celeryd.log |
|
211 | 217 | celeryd.log.level = debug |
|
212 | 218 | celeryd.max.tasks.per.child = 1 |
|
213 | 219 | |
|
214 | 220 | ## tasks will never be sent to the queue, but executed locally instead. |
|
215 | 221 | celery.always.eager = false |
|
216 | 222 | |
|
217 | 223 | #################################### |
|
218 | 224 | ### BEAKER CACHE #### |
|
219 | 225 | #################################### |
|
220 | 226 | beaker.cache.data_dir=%(here)s/data/cache/data |
|
221 | 227 | beaker.cache.lock_dir=%(here)s/data/cache/lock |
|
222 | 228 | |
|
223 | 229 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long |
|
224 | 230 | |
|
225 | 231 | beaker.cache.super_short_term.type=memory |
|
226 | 232 | beaker.cache.super_short_term.expire=10 |
|
227 | 233 | beaker.cache.super_short_term.key_length = 256 |
|
228 | 234 | |
|
229 | 235 | beaker.cache.short_term.type=memory |
|
230 | 236 | beaker.cache.short_term.expire=60 |
|
231 | 237 | beaker.cache.short_term.key_length = 256 |
|
232 | 238 | |
|
233 | 239 | beaker.cache.long_term.type=memory |
|
234 | 240 | beaker.cache.long_term.expire=36000 |
|
235 | 241 | beaker.cache.long_term.key_length = 256 |
|
236 | 242 | |
|
237 | 243 | beaker.cache.sql_cache_short.type=memory |
|
238 | 244 | beaker.cache.sql_cache_short.expire=10 |
|
239 | 245 | beaker.cache.sql_cache_short.key_length = 256 |
|
240 | 246 | |
|
241 | 247 | beaker.cache.sql_cache_med.type=memory |
|
242 | 248 | beaker.cache.sql_cache_med.expire=360 |
|
243 | 249 | beaker.cache.sql_cache_med.key_length = 256 |
|
244 | 250 | |
|
245 | 251 | beaker.cache.sql_cache_long.type=file |
|
246 | 252 | beaker.cache.sql_cache_long.expire=3600 |
|
247 | 253 | beaker.cache.sql_cache_long.key_length = 256 |
|
248 | 254 | |
|
249 | 255 | #################################### |
|
250 | 256 | ### BEAKER SESSION #### |
|
251 | 257 | #################################### |
|
252 | 258 | ## Type of storage used for the session, current types are |
|
253 | 259 | ## dbm, file, memcached, database, and memory. |
|
254 | 260 | ## The storage uses the Container API |
|
255 | 261 | ## that is also used by the cache system. |
|
256 | 262 | |
|
257 | 263 | ## db session ## |
|
258 | 264 | #beaker.session.type = ext:database |
|
259 | 265 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode |
|
260 | 266 | #beaker.session.table_name = db_session |
|
261 | 267 | |
|
262 | 268 | ## encrypted cookie client side session, good for many instances ## |
|
263 | 269 | #beaker.session.type = cookie |
|
264 | 270 | |
|
265 | 271 | ## file based cookies (default) ## |
|
266 | 272 | #beaker.session.type = file |
|
267 | 273 | |
|
268 | 274 | |
|
269 | 275 | beaker.session.key = rhodecode |
|
270 | 276 | ## secure cookie requires AES python libraries |
|
271 | 277 | #beaker.session.encrypt_key = <key_for_encryption> |
|
272 | 278 | #beaker.session.validate_key = <validation_key> |
|
273 | 279 | |
|
274 | 280 | ## sets session as invalid if it haven't been accessed for given amount of time |
|
275 | 281 | beaker.session.timeout = 2592000 |
|
276 | 282 | beaker.session.httponly = true |
|
277 | 283 | #beaker.session.cookie_path = /<your-prefix> |
|
278 | 284 | |
|
279 | 285 | ## uncomment for https secure cookie |
|
280 | 286 | beaker.session.secure = false |
|
281 | 287 | |
|
282 | 288 | ## auto save the session to not to use .save() |
|
283 | 289 | beaker.session.auto = False |
|
284 | 290 | |
|
285 | 291 | ## default cookie expiration time in seconds `true` expire at browser close ## |
|
286 | 292 | #beaker.session.cookie_expires = 3600 |
|
287 | 293 | |
|
288 | 294 | |
|
289 | 295 | ############################ |
|
290 | 296 | ## ERROR HANDLING SYSTEMS ## |
|
291 | 297 | ############################ |
|
292 | 298 | |
|
293 | 299 | #################### |
|
294 | 300 | ### [errormator] ### |
|
295 | 301 | #################### |
|
296 | 302 | |
|
297 | 303 | ## Errormator is tailored to work with RhodeCode, see |
|
298 | 304 | ## http://errormator.com for details how to obtain an account |
|
299 | 305 | ## you must install python package `errormator_client` to make it work |
|
300 | 306 | |
|
301 | 307 | ## errormator enabled |
|
302 | 308 | errormator = false |
|
303 | 309 | |
|
304 | 310 | errormator.server_url = https://api.errormator.com |
|
305 | 311 | errormator.api_key = YOUR_API_KEY |
|
306 | 312 | |
|
307 | 313 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
308 | 314 | |
|
309 | 315 | ## enables 404 error logging (default False) |
|
310 | 316 | errormator.report_404 = false |
|
311 | 317 | |
|
312 | 318 | ## time in seconds after request is considered being slow (default 1) |
|
313 | 319 | errormator.slow_request_time = 1 |
|
314 | 320 | |
|
315 | 321 | ## record slow requests in application |
|
316 | 322 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
317 | 323 | errormator.slow_requests = true |
|
318 | 324 | |
|
319 | 325 | ## enable hooking to application loggers |
|
320 | 326 | # errormator.logging = true |
|
321 | 327 | |
|
322 | 328 | ## minimum log level for log capture |
|
323 | 329 | # errormator.logging.level = WARNING |
|
324 | 330 | |
|
325 | 331 | ## send logs only from erroneous/slow requests |
|
326 | 332 | ## (saves API quota for intensive logging) |
|
327 | 333 | errormator.logging_on_error = false |
|
328 | 334 | |
|
329 | 335 | ## list of additonal keywords that should be grabbed from environ object |
|
330 | 336 | ## can be string with comma separated list of words in lowercase |
|
331 | 337 | ## (by default client will always send following info: |
|
332 | 338 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
333 | 339 | ## start with HTTP* this list be extended with additional keywords here |
|
334 | 340 | errormator.environ_keys_whitelist = |
|
335 | 341 | |
|
336 | 342 | |
|
337 | 343 | ## list of keywords that should be blanked from request object |
|
338 | 344 | ## can be string with comma separated list of words in lowercase |
|
339 | 345 | ## (by default client will always blank keys that contain following words |
|
340 | 346 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
341 | 347 | ## this list be extended with additional keywords set here |
|
342 | 348 | errormator.request_keys_blacklist = |
|
343 | 349 | |
|
344 | 350 | |
|
345 | 351 | ## list of namespaces that should be ignores when gathering log entries |
|
346 | 352 | ## can be string with comma separated list of namespaces |
|
347 | 353 | ## (by default the client ignores own entries: errormator_client.client) |
|
348 | 354 | errormator.log_namespace_blacklist = |
|
349 | 355 | |
|
350 | 356 | |
|
351 | 357 | ################ |
|
352 | 358 | ### [sentry] ### |
|
353 | 359 | ################ |
|
354 | 360 | |
|
355 | 361 | ## sentry is a alternative open source error aggregator |
|
356 | 362 | ## you must install python packages `sentry` and `raven` to enable |
|
357 | 363 | |
|
358 | 364 | sentry.dsn = YOUR_DNS |
|
359 | 365 | sentry.servers = |
|
360 | 366 | sentry.name = |
|
361 | 367 | sentry.key = |
|
362 | 368 | sentry.public_key = |
|
363 | 369 | sentry.secret_key = |
|
364 | 370 | sentry.project = |
|
365 | 371 | sentry.site = |
|
366 | 372 | sentry.include_paths = |
|
367 | 373 | sentry.exclude_paths = |
|
368 | 374 | |
|
369 | 375 | |
|
370 | 376 | ################################################################################ |
|
371 | 377 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
372 | 378 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
373 | 379 | ## execute malicious code after an exception is raised. ## |
|
374 | 380 | ################################################################################ |
|
375 | 381 | set debug = false |
|
376 | 382 | |
|
377 | 383 | ################################## |
|
378 | 384 | ### LOGVIEW CONFIG ### |
|
379 | 385 | ################################## |
|
380 | 386 | logview.sqlalchemy = #faa |
|
381 | 387 | logview.pylons.templating = #bfb |
|
382 | 388 | logview.pylons.util = #eee |
|
383 | 389 | |
|
384 | 390 | ######################################################### |
|
385 | 391 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
386 | 392 | ######################################################### |
|
387 | 393 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db |
|
388 | 394 | sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode |
|
389 | 395 | sqlalchemy.db1.echo = false |
|
390 | 396 | sqlalchemy.db1.pool_recycle = 3600 |
|
391 | 397 | sqlalchemy.db1.convert_unicode = true |
|
392 | 398 | |
|
393 | 399 | ################################ |
|
394 | 400 | ### LOGGING CONFIGURATION #### |
|
395 | 401 | ################################ |
|
396 | 402 | [loggers] |
|
397 | 403 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer |
|
398 | 404 | |
|
399 | 405 | [handlers] |
|
400 | 406 | keys = console, console_sql |
|
401 | 407 | |
|
402 | 408 | [formatters] |
|
403 | 409 | keys = generic, color_formatter, color_formatter_sql |
|
404 | 410 | |
|
405 | 411 | ############# |
|
406 | 412 | ## LOGGERS ## |
|
407 | 413 | ############# |
|
408 | 414 | [logger_root] |
|
409 | 415 | level = NOTSET |
|
410 | 416 | handlers = console |
|
411 | 417 | |
|
412 | 418 | [logger_routes] |
|
413 | 419 | level = DEBUG |
|
414 | 420 | handlers = |
|
415 | 421 | qualname = routes.middleware |
|
416 | 422 | ## "level = DEBUG" logs the route matched and routing variables. |
|
417 | 423 | propagate = 1 |
|
418 | 424 | |
|
419 | 425 | [logger_beaker] |
|
420 | 426 | level = DEBUG |
|
421 | 427 | handlers = |
|
422 | 428 | qualname = beaker.container |
|
423 | 429 | propagate = 1 |
|
424 | 430 | |
|
425 | 431 | [logger_templates] |
|
426 | 432 | level = INFO |
|
427 | 433 | handlers = |
|
428 | 434 | qualname = pylons.templating |
|
429 | 435 | propagate = 1 |
|
430 | 436 | |
|
431 | 437 | [logger_rhodecode] |
|
432 | 438 | level = DEBUG |
|
433 | 439 | handlers = |
|
434 | 440 | qualname = rhodecode |
|
435 | 441 | propagate = 1 |
|
436 | 442 | |
|
437 | 443 | [logger_sqlalchemy] |
|
438 | 444 | level = INFO |
|
439 | 445 | handlers = console_sql |
|
440 | 446 | qualname = sqlalchemy.engine |
|
441 | 447 | propagate = 0 |
|
442 | 448 | |
|
443 | 449 | [logger_whoosh_indexer] |
|
444 | 450 | level = DEBUG |
|
445 | 451 | handlers = |
|
446 | 452 | qualname = whoosh_indexer |
|
447 | 453 | propagate = 1 |
|
448 | 454 | |
|
449 | 455 | ############## |
|
450 | 456 | ## HANDLERS ## |
|
451 | 457 | ############## |
|
452 | 458 | |
|
453 | 459 | [handler_console] |
|
454 | 460 | class = StreamHandler |
|
455 | 461 | args = (sys.stderr,) |
|
456 | 462 | level = INFO |
|
457 | 463 | formatter = generic |
|
458 | 464 | |
|
459 | 465 | [handler_console_sql] |
|
460 | 466 | class = StreamHandler |
|
461 | 467 | args = (sys.stderr,) |
|
462 | 468 | level = WARN |
|
463 | 469 | formatter = generic |
|
464 | 470 | |
|
465 | 471 | ################ |
|
466 | 472 | ## FORMATTERS ## |
|
467 | 473 | ################ |
|
468 | 474 | |
|
469 | 475 | [formatter_generic] |
|
470 | 476 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
471 | 477 | datefmt = %Y-%m-%d %H:%M:%S |
|
472 | 478 | |
|
473 | 479 | [formatter_color_formatter] |
|
474 | 480 | class=rhodecode.lib.colored_formatter.ColorFormatter |
|
475 | 481 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
476 | 482 | datefmt = %Y-%m-%d %H:%M:%S |
|
477 | 483 | |
|
478 | 484 | [formatter_color_formatter_sql] |
|
479 | 485 | class=rhodecode.lib.colored_formatter.ColorFormatterSql |
|
480 | 486 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
481 | 487 | datefmt = %Y-%m-%d %H:%M:%S |
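
Each of these files relies on the `%(here)s` interpolation described in its header: when the ini is loaded, `here` is set to the directory containing the file, so values such as `cache_dir = %(here)s/data` expand to absolute paths. A minimal sketch of that substitution using Python's stdlib ConfigParser follows; the file path and the loader are assumptions for illustration only (RhodeCode itself is loaded through PasteDeploy, which supplies `here` the same way).

import os
from configparser import ConfigParser

ini_path = '/srv/rhodecode/production.ini'   # assumed location of a file like the one above
# 'here' goes into [DEFAULT], so %(here)s in any option expands to the ini's directory
parser = ConfigParser(defaults={'here': os.path.dirname(ini_path)})
parser.read(ini_path)

print(parser.get('app:main', 'cache_dir'))   # -> /srv/rhodecode/data
print(parser.get('app:main', 'index_dir'))   # -> /srv/rhodecode/data/index
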
@@ -1,491 +1,497 b''
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # RhodeCode - Pylons environment configuration # |
|
4 | 4 | # # |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | pdebug = false |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ## Uncomment and replace with the address which should receive ## |
|
13 | 13 | ## any error reports after application crash ## |
|
14 | 14 | ## Additionally those settings will be used by RhodeCode mailing system ## |
|
15 | 15 | ################################################################################ |
|
16 | 16 | #email_to = admin@localhost |
|
17 | 17 | #error_email_from = paste_error@localhost |
|
18 | 18 | #app_email_from = rhodecode-noreply@localhost |
|
19 | 19 | #error_message = |
|
20 | 20 | #email_prefix = [RhodeCode] |
|
21 | 21 | |
|
22 | 22 | #smtp_server = mail.server.com |
|
23 | 23 | #smtp_username = |
|
24 | 24 | #smtp_password = |
|
25 | 25 | #smtp_port = |
|
26 | 26 | #smtp_use_tls = false |
|
27 | 27 | #smtp_use_ssl = true |
|
28 | 28 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
29 | 29 | #smtp_auth = |
|
30 | 30 | |
|
31 | 31 | [server:main] |
|
32 | 32 | ## PASTE |
|
33 | 33 | ## nr of threads to spawn |
|
34 | 34 | #threadpool_workers = 5 |
|
35 | 35 | |
|
36 | 36 | ## max request before thread respawn |
|
37 | 37 | #threadpool_max_requests = 10 |
|
38 | 38 | |
|
39 | 39 | ## option to use threads of process |
|
40 | 40 | #use_threadpool = true |
|
41 | 41 | |
|
42 | 42 | #use = egg:Paste#http |
|
43 | 43 | |
|
44 | 44 | ## WAITRESS |
|
45 | 45 | threads = 5 |
|
46 | 46 | ## 100GB |
|
47 | 47 | max_request_body_size = 107374182400 |
|
48 | 48 | use = egg:waitress#main |
|
49 | 49 | |
|
50 | 50 | host = 127.0.0.1 |
|
51 | 51 | port = 5000 |
|
52 | 52 | |
|
53 | 53 | ## prefix middleware for rc |
|
54 | 54 | #[filter:proxy-prefix] |
|
55 | 55 | #use = egg:PasteDeploy#prefix |
|
56 | 56 | #prefix = /<your-prefix> |
|
57 | 57 | |
|
58 | 58 | [app:main] |
|
59 | 59 | use = egg:rhodecode |
|
60 | 60 | ## enable proxy prefix middleware |
|
61 | 61 | #filter-with = proxy-prefix |
|
62 | 62 | |
|
63 | 63 | full_stack = true |
|
64 | 64 | static_files = true |
|
65 | 65 | ## Optional Languages |
|
66 | 66 | ## en, fr, ja, pt_BR, zh_CN, zh_TW, pl |
|
67 | 67 | lang = en |
|
68 | 68 | cache_dir = %(here)s/data |
|
69 | 69 | index_dir = %(here)s/data/index |
|
70 | 70 | |
|
71 | 71 | ## uncomment and set this path to use archive download cache |
|
72 | 72 | #archive_cache_dir = /tmp/tarballcache |
|
73 | 73 | |
|
74 | 74 | ## change this to unique ID for security |
|
75 | 75 | app_instance_uuid = ${app_instance_uuid} |
|
76 | 76 | |
|
77 | 77 | ## cut off limit for large diffs (size in bytes) |
|
78 | 78 | cut_off_limit = 256000 |
|
79 | 79 | |
|
80 | 80 | ## use cache version of scm repo everywhere |
|
81 | 81 | vcs_full_cache = true |
|
82 | 82 | |
|
83 | 83 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
84 | 84 | force_https = false |
|
85 | 85 | |
|
86 | 86 | ## use Strict-Transport-Security headers |
|
87 | 87 | use_htsts = false |
|
88 | 88 | |
|
89 | 89 | ## number of commits stats will parse on each iteration |
|
90 | 90 | commit_parse_limit = 25 |
|
91 | 91 | |
|
92 | 92 | ## number of items displayed in lightweight dashboard before paginating is shown |
|
93 | 93 | dashboard_items = 100 |
|
94 | 94 | |
|
95 | 95 | ## use gravatar service to display avatars |
|
96 | 96 | use_gravatar = true |
|
97 | 97 | |
|
98 | 98 | ## path to git executable |
|
99 | 99 | git_path = git |
|
100 | 100 | |
|
101 | 101 | ## git rev filter option, --all is the default filter, if you need to |
|
102 | 102 | ## hide all refs in changelog switch this to --branches --tags |
|
103 | 103 | git_rev_filter=--all |
|
104 | 104 | |
|
105 | 105 | ## RSS feed options |
|
106 | 106 | rss_cut_off_limit = 256000 |
|
107 | 107 | rss_items_per_page = 10 |
|
108 | 108 | rss_include_diff = false |
|
109 | 109 | |
|
110 | 110 | ## options for showing and identifying changesets |
|
111 | 111 | show_sha_length = 12 |
|
112 | 112 | show_revision_number = true |
|
113 | 113 | |
|
114 | ## white list of API enabled controllers. This allows to add list of | |
|
115 | ## controllers to which access will be enabled by api_key. eg: to enable | |
|
116 | ## api access to raw_files put `FilesController:raw`, to enable access to patches | |
|
117 | ## add `ChangesetController:changeset_patch`. This list should be "," separated | |
|
118 | ## Syntax is <ControllerClass>:<function>. Check debug logs for generated names | |
|
119 | api_access_controllers_whitelist = | |
|
114 | 120 | |
|
115 | 121 | ## alternative_gravatar_url allows you to use your own avatar server application |
|
116 | 122 | ## the following parts of the URL will be replaced |
|
117 | 123 | ## {email} user email |
|
118 | 124 | ## {md5email} md5 hash of the user email (like at gravatar.com) |
|
119 | 125 | ## {size} size of the image that is expected from the server application |
|
120 | 126 | ## {scheme} http/https from RhodeCode server |
|
121 | 127 | ## {netloc} network location from RhodeCode server |
|
122 | 128 | #alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size} |
|
123 | 129 | #alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size} |
|
124 | 130 | |
|
125 | 131 | |
|
126 | 132 | ## container auth options |
|
127 | 133 | container_auth_enabled = false |
|
128 | 134 | proxypass_auth_enabled = false |
|
129 | 135 | |
|
130 | 136 | ## default encoding used to convert from and to unicode |
|
131 | 137 | ## can be also a comma seperated list of encoding in case of mixed encodings |
|
132 | 138 | default_encoding = utf8 |
|
133 | 139 | |
|
134 | 140 | ## overwrite schema of clone url |
|
135 | 141 | ## available vars: |
|
136 | 142 | ## scheme - http/https |
|
137 | 143 | ## user - current user |
|
138 | 144 | ## pass - password |
|
139 | 145 | ## netloc - network location |
|
140 | 146 | ## path - usually repo_name |
|
141 | 147 | |
|
142 | 148 | #clone_uri = {scheme}://{user}{pass}{netloc}{path} |
|
143 | 149 | |
|
144 | 150 | ## issue tracking mapping for commits messages |
|
145 | 151 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
146 | 152 | |
|
147 | 153 | ## pattern to get the issues from commit messages |
|
148 | 154 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
149 | 155 | ## {id} will be all groups matched from this pattern |
|
150 | 156 | |
|
151 | 157 | issue_pat = (?:\s*#)(\d+) |
|
152 | 158 | |
|
153 | 159 | ## server url to the issue, each {id} will be replaced with match |
|
154 | 160 | ## fetched from the regex and {repo} is replaced with full repository name |
|
155 | 161 | ## including groups {repo_name} is replaced with just name of repo |
|
156 | 162 | |
|
157 | 163 | issue_server_link = https://myissueserver.com/{repo}/issue/{id} |
|
158 | 164 | |
|
159 | 165 | ## prefix to add to link to indicate it's an url |
|
160 | 166 | ## #314 will be replaced by <issue_prefix><id> |
|
161 | 167 | |
|
162 | 168 | issue_prefix = # |
|
163 | 169 | |
|
164 | 170 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
165 | 171 | ## multiple patterns, to other issues server, wiki or others |
|
166 | 172 | ## below an example how to create a wiki pattern |
|
167 | 173 | # #wiki-some-id -> https://mywiki.com/some-id |
|
168 | 174 | |
|
169 | 175 | #issue_pat_wiki = (?:wiki-)(.+) |
|
170 | 176 | #issue_server_link_wiki = https://mywiki.com/{id} |
|
171 | 177 | #issue_prefix_wiki = WIKI- |
|
172 | 178 | |
|
173 | 179 | |
|
174 | 180 | ## instance-id prefix |
|
175 | 181 | ## a prefix key for this instance used for cache invalidation when running |
|
176 | 182 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
177 | 183 | ## all running rhodecode instances. Leave empty if you don't use it |
|
178 | 184 | instance_id = |
|
179 | 185 | |
|
180 | 186 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
181 | 187 | ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with |
|
182 | 188 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
183 | 189 | auth_ret_code = |
|
184 | 190 | |
|
185 | 191 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
186 | 192 | ## codes don't break the transactions while 4XX codes do |
|
187 | 193 | lock_ret_code = 423 |
|
188 | 194 | |
|
189 | 195 | |
|
190 | 196 | #################################### |
|
191 | 197 | ### CELERY CONFIG #### |
|
192 | 198 | #################################### |
|
193 | 199 | use_celery = false |
|
194 | 200 | broker.host = localhost |
|
195 | 201 | broker.vhost = rabbitmqhost |
|
196 | 202 | broker.port = 5672 |
|
197 | 203 | broker.user = rabbitmq |
|
198 | 204 | broker.password = qweqwe |
|
199 | 205 | |
|
200 | 206 | celery.imports = rhodecode.lib.celerylib.tasks |
|
201 | 207 | |
|
202 | 208 | celery.result.backend = amqp |
|
203 | 209 | celery.result.dburi = amqp:// |
|
204 | 210 | celery.result.serialier = json |
|
205 | 211 | |
|
206 | 212 | #celery.send.task.error.emails = true |
|
207 | 213 | #celery.amqp.task.result.expires = 18000 |
|
208 | 214 | |
|
209 | 215 | celeryd.concurrency = 2 |
|
210 | 216 | #celeryd.log.file = celeryd.log |
|
211 | 217 | celeryd.log.level = debug |
|
212 | 218 | celeryd.max.tasks.per.child = 1 |
|
213 | 219 | |
|
214 | 220 | ## tasks will never be sent to the queue, but executed locally instead. |
|
215 | 221 | celery.always.eager = false |
|
216 | 222 | |
|
217 | 223 | #################################### |
|
218 | 224 | ### BEAKER CACHE #### |
|
219 | 225 | #################################### |
|
220 | 226 | beaker.cache.data_dir=%(here)s/data/cache/data |
|
221 | 227 | beaker.cache.lock_dir=%(here)s/data/cache/lock |
|
222 | 228 | |
|
223 | 229 | beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long |
|
224 | 230 | |
|
225 | 231 | beaker.cache.super_short_term.type=memory |
|
226 | 232 | beaker.cache.super_short_term.expire=10 |
|
227 | 233 | beaker.cache.super_short_term.key_length = 256 |
|
228 | 234 | |
|
229 | 235 | beaker.cache.short_term.type=memory |
|
230 | 236 | beaker.cache.short_term.expire=60 |
|
231 | 237 | beaker.cache.short_term.key_length = 256 |
|
232 | 238 | |
|
233 | 239 | beaker.cache.long_term.type=memory |
|
234 | 240 | beaker.cache.long_term.expire=36000 |
|
235 | 241 | beaker.cache.long_term.key_length = 256 |
|
236 | 242 | |
|
237 | 243 | beaker.cache.sql_cache_short.type=memory |
|
238 | 244 | beaker.cache.sql_cache_short.expire=10 |
|
239 | 245 | beaker.cache.sql_cache_short.key_length = 256 |
|
240 | 246 | |
|
241 | 247 | beaker.cache.sql_cache_med.type=memory |
|
242 | 248 | beaker.cache.sql_cache_med.expire=360 |
|
243 | 249 | beaker.cache.sql_cache_med.key_length = 256 |
|
244 | 250 | |
|
245 | 251 | beaker.cache.sql_cache_long.type=file |
|
246 | 252 | beaker.cache.sql_cache_long.expire=3600 |
|
247 | 253 | beaker.cache.sql_cache_long.key_length = 256 |
|
248 | 254 | |
|
249 | 255 | #################################### |
|
250 | 256 | ### BEAKER SESSION #### |
|
251 | 257 | #################################### |
|
252 | 258 | ## Type of storage used for the session, current types are |
|
253 | 259 | ## dbm, file, memcached, database, and memory. |
|
254 | 260 | ## The storage uses the Container API |
|
255 | 261 | ## that is also used by the cache system. |
|
256 | 262 | |
|
257 | 263 | ## db session ## |
|
258 | 264 | #beaker.session.type = ext:database |
|
259 | 265 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode |
|
260 | 266 | #beaker.session.table_name = db_session |
|
261 | 267 | |
|
262 | 268 | ## encrypted cookie client side session, good for many instances ## |
|
263 | 269 | #beaker.session.type = cookie |
|
264 | 270 | |
|
265 | 271 | ## file based cookies (default) ## |
|
266 | 272 | #beaker.session.type = file |
|
267 | 273 | |
|
268 | 274 | |
|
269 | 275 | beaker.session.key = rhodecode |
|
270 | 276 | ## secure cookie requires AES python libraries |
|
271 | 277 | #beaker.session.encrypt_key = <key_for_encryption> |
|
272 | 278 | #beaker.session.validate_key = <validation_key> |
|
273 | 279 | |
|
274 | 280 | ## sets session as invalid if it haven't been accessed for given amount of time |
|
275 | 281 | beaker.session.timeout = 2592000 |
|
276 | 282 | beaker.session.httponly = true |
|
277 | 283 | #beaker.session.cookie_path = /<your-prefix> |
|
278 | 284 | |
|
279 | 285 | ## uncomment for https secure cookie |
|
280 | 286 | beaker.session.secure = false |
|
281 | 287 | |
|
282 | 288 | ## auto save the session to not to use .save() |
|
283 | 289 | beaker.session.auto = False |
|
284 | 290 | |
|
285 | 291 | ## default cookie expiration time in seconds `true` expire at browser close ## |
|
286 | 292 | #beaker.session.cookie_expires = 3600 |
|
287 | 293 | |
|
288 | 294 | |
|
289 | 295 | ############################ |
|
290 | 296 | ## ERROR HANDLING SYSTEMS ## |
|
291 | 297 | ############################ |
|
292 | 298 | |
|
293 | 299 | #################### |
|
294 | 300 | ### [errormator] ### |
|
295 | 301 | #################### |
|
296 | 302 | |
|
297 | 303 | ## Errormator is tailored to work with RhodeCode, see |
|
298 | 304 | ## http://errormator.com for details how to obtain an account |
|
299 | 305 | ## you must install python package `errormator_client` to make it work |
|
300 | 306 | |
|
301 | 307 | ## errormator enabled |
|
302 | 308 | errormator = false |
|
303 | 309 | |
|
304 | 310 | errormator.server_url = https://api.errormator.com |
|
305 | 311 | errormator.api_key = YOUR_API_KEY |
|
306 | 312 | |
|
307 | 313 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
308 | 314 | |
|
309 | 315 | ## enables 404 error logging (default False) |
|
310 | 316 | errormator.report_404 = false |
|
311 | 317 | |
|
312 | 318 | ## time in seconds after request is considered being slow (default 1) |
|
313 | 319 | errormator.slow_request_time = 1 |
|
314 | 320 | |
|
315 | 321 | ## record slow requests in application |
|
316 | 322 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
317 | 323 | errormator.slow_requests = true |
|
318 | 324 | |
|
319 | 325 | ## enable hooking to application loggers |
|
320 | 326 | # errormator.logging = true |
|
321 | 327 | |
|
322 | 328 | ## minimum log level for log capture |
|
323 | 329 | # errormator.logging.level = WARNING |
|
324 | 330 | |
|
325 | 331 | ## send logs only from erroneous/slow requests |
|
326 | 332 | ## (saves API quota for intensive logging) |
|
327 | 333 | errormator.logging_on_error = false |
|
328 | 334 | |
|
329 | 335 | ## list of additional keywords that should be grabbed from the environ object |
|
330 | 336 | ## can be string with comma separated list of words in lowercase |
|
331 | 337 | ## (by default client will always send following info: |
|
332 | 338 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
333 | 339 | ## start with HTTP*); this list can be extended with additional keywords here |
|
334 | 340 | errormator.environ_keys_whitelist = |
|
335 | 341 | |
|
336 | 342 | |
|
337 | 343 | ## list of keywords that should be blanked from request object |
|
338 | 344 | ## can be string with comma separated list of words in lowercase |
|
339 | 345 | ## (by default client will always blank keys that contain following words |
|
340 | 346 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
341 | 347 | ## this list can be extended with additional keywords set here |
|
342 | 348 | errormator.request_keys_blacklist = |
|
343 | 349 | |
|
344 | 350 | |
|
345 | 351 | ## list of namespaces that should be ignored when gathering log entries |
|
346 | 352 | ## can be string with comma separated list of namespaces |
|
347 | 353 | ## (by default the client ignores own entries: errormator_client.client) |
|
348 | 354 | errormator.log_namespace_blacklist = |
|
349 | 355 | |
|
350 | 356 | |
|
351 | 357 | ################ |
|
352 | 358 | ### [sentry] ### |
|
353 | 359 | ################ |
|
354 | 360 | |
|
355 | 361 | ## sentry is an alternative open source error aggregator |
|
356 | 362 | ## you must install python packages `sentry` and `raven` to enable |
|
357 | 363 | |
|
358 | 364 | sentry.dsn = YOUR_DSN |
|
359 | 365 | sentry.servers = |
|
360 | 366 | sentry.name = |
|
361 | 367 | sentry.key = |
|
362 | 368 | sentry.public_key = |
|
363 | 369 | sentry.secret_key = |
|
364 | 370 | sentry.project = |
|
365 | 371 | sentry.site = |
|
366 | 372 | sentry.include_paths = |
|
367 | 373 | sentry.exclude_paths = |
|
368 | 374 | |
|
369 | 375 | |
|
370 | 376 | ################################################################################ |
|
371 | 377 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
372 | 378 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
373 | 379 | ## execute malicious code after an exception is raised. ## |
|
374 | 380 | ################################################################################ |
|
375 | 381 | set debug = false |
|
376 | 382 | |
|
377 | 383 | ################################## |
|
378 | 384 | ### LOGVIEW CONFIG ### |
|
379 | 385 | ################################## |
|
380 | 386 | logview.sqlalchemy = #faa |
|
381 | 387 | logview.pylons.templating = #bfb |
|
382 | 388 | logview.pylons.util = #eee |
|
383 | 389 | |
|
384 | 390 | ######################################################### |
|
385 | 391 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
386 | 392 | ######################################################### |
|
387 | 393 | |
|
388 | 394 | # SQLITE [default] |
|
389 | 395 | sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db |
|
390 | 396 | |
|
391 | 397 | # POSTGRESQL |
|
392 | 398 | # sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode |
|
393 | 399 | |
|
394 | 400 | # MySQL |
|
395 | 401 | # sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode |
|
396 | 402 | |
|
397 | 403 | # see sqlalchemy docs for others |
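# example with an explicit driver and port (a sketch; assumes psycopg2 is installed,
# adjust credentials and host to your environment)
# sqlalchemy.db1.url = postgresql+psycopg2://user:pass@localhost:5432/rhodecode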
|
398 | 404 | |
|
399 | 405 | sqlalchemy.db1.echo = false |
|
400 | 406 | sqlalchemy.db1.pool_recycle = 3600 |
|
401 | 407 | sqlalchemy.db1.convert_unicode = true |
|
402 | 408 | |
|
403 | 409 | ################################ |
|
404 | 410 | ### LOGGING CONFIGURATION #### |
|
405 | 411 | ################################ |
|
406 | 412 | [loggers] |
|
407 | 413 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer |
|
408 | 414 | |
|
409 | 415 | [handlers] |
|
410 | 416 | keys = console, console_sql |
|
411 | 417 | |
|
412 | 418 | [formatters] |
|
413 | 419 | keys = generic, color_formatter, color_formatter_sql |
|
414 | 420 | |
|
415 | 421 | ############# |
|
416 | 422 | ## LOGGERS ## |
|
417 | 423 | ############# |
|
418 | 424 | [logger_root] |
|
419 | 425 | level = NOTSET |
|
420 | 426 | handlers = console |
|
421 | 427 | |
|
422 | 428 | [logger_routes] |
|
423 | 429 | level = DEBUG |
|
424 | 430 | handlers = |
|
425 | 431 | qualname = routes.middleware |
|
426 | 432 | ## "level = DEBUG" logs the route matched and routing variables. |
|
427 | 433 | propagate = 1 |
|
428 | 434 | |
|
429 | 435 | [logger_beaker] |
|
430 | 436 | level = DEBUG |
|
431 | 437 | handlers = |
|
432 | 438 | qualname = beaker.container |
|
433 | 439 | propagate = 1 |
|
434 | 440 | |
|
435 | 441 | [logger_templates] |
|
436 | 442 | level = INFO |
|
437 | 443 | handlers = |
|
438 | 444 | qualname = pylons.templating |
|
439 | 445 | propagate = 1 |
|
440 | 446 | |
|
441 | 447 | [logger_rhodecode] |
|
442 | 448 | level = DEBUG |
|
443 | 449 | handlers = |
|
444 | 450 | qualname = rhodecode |
|
445 | 451 | propagate = 1 |
|
446 | 452 | |
|
447 | 453 | [logger_sqlalchemy] |
|
448 | 454 | level = INFO |
|
449 | 455 | handlers = console_sql |
|
450 | 456 | qualname = sqlalchemy.engine |
|
451 | 457 | propagate = 0 |
|
452 | 458 | |
|
453 | 459 | [logger_whoosh_indexer] |
|
454 | 460 | level = DEBUG |
|
455 | 461 | handlers = |
|
456 | 462 | qualname = whoosh_indexer |
|
457 | 463 | propagate = 1 |
|
458 | 464 | |
|
459 | 465 | ############## |
|
460 | 466 | ## HANDLERS ## |
|
461 | 467 | ############## |
|
462 | 468 | |
|
463 | 469 | [handler_console] |
|
464 | 470 | class = StreamHandler |
|
465 | 471 | args = (sys.stderr,) |
|
466 | 472 | level = INFO |
|
467 | 473 | formatter = generic |
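## for colored console output the color formatter defined in the FORMATTERS
## section below can be used instead (a sketch, both formatters ship with this file)
#formatter = color_formatter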
|
468 | 474 | |
|
469 | 475 | [handler_console_sql] |
|
470 | 476 | class = StreamHandler |
|
471 | 477 | args = (sys.stderr,) |
|
472 | 478 | level = WARN |
|
473 | 479 | formatter = generic |
|
474 | 480 | |
|
475 | 481 | ################ |
|
476 | 482 | ## FORMATTERS ## |
|
477 | 483 | ################ |
|
478 | 484 | |
|
479 | 485 | [formatter_generic] |
|
480 | 486 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
481 | 487 | datefmt = %Y-%m-%d %H:%M:%S |
|
482 | 488 | |
|
483 | 489 | [formatter_color_formatter] |
|
484 | 490 | class=rhodecode.lib.colored_formatter.ColorFormatter |
|
485 | 491 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
486 | 492 | datefmt = %Y-%m-%d %H:%M:%S |
|
487 | 493 | |
|
488 | 494 | [formatter_color_formatter_sql] |
|
489 | 495 | class=rhodecode.lib.colored_formatter.ColorFormatterSql |
|
490 | 496 | format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
491 | 497 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,1108 +1,1113 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.auth |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | authentication and permission libraries |
|
7 | 7 | |
|
8 | 8 | :created_on: Apr 4, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import random |
|
27 | 27 | import logging |
|
28 | 28 | import traceback |
|
29 | 29 | import hashlib |
|
30 | 30 | |
|
31 | 31 | from tempfile import _RandomNameSequence |
|
32 | 32 | from decorator import decorator |
|
33 | 33 | |
|
34 | 34 | from pylons import config, url, request |
|
35 | 35 | from pylons.controllers.util import abort, redirect |
|
36 | 36 | from pylons.i18n.translation import _ |
|
37 | 37 | from sqlalchemy.orm.exc import ObjectDeletedError |
|
38 | 38 | |
|
39 | 39 | from rhodecode import __platform__, is_windows, is_unix |
|
40 | 40 | from rhodecode.model.meta import Session |
|
41 | 41 | |
|
42 | from rhodecode.lib.utils2 import str2bool, safe_unicode | |
|
42 | from rhodecode.lib.utils2 import str2bool, safe_unicode, aslist | |
|
43 | 43 | from rhodecode.lib.exceptions import LdapPasswordError, LdapUsernameError,\ |
|
44 | 44 | LdapImportError |
|
45 | 45 | from rhodecode.lib.utils import get_repo_slug, get_repos_group_slug,\ |
|
46 | 46 | get_user_group_slug |
|
47 | 47 | from rhodecode.lib.auth_ldap import AuthLdap |
|
48 | 48 | |
|
49 | 49 | from rhodecode.model import meta |
|
50 | 50 | from rhodecode.model.user import UserModel |
|
51 | 51 | from rhodecode.model.db import Permission, RhodeCodeSetting, User, UserIpMap |
|
52 | 52 | from rhodecode.lib.caching_query import FromCache |
|
53 | 53 | |
|
54 | 54 | log = logging.getLogger(__name__) |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | class PasswordGenerator(object): |
|
58 | 58 | """ |
|
59 | 59 | This is a simple class for generating passwords from different sets of |
|
60 | 60 | characters |
|
61 | 61 | usage:: |
|
62 | 62 | |
|
63 | 63 | passwd_gen = PasswordGenerator() |
|
64 | 64 | # print an 8-letter password containing only big and small letters |
|
65 | 65 | # of the alphabet |
|
66 | 66 | passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL) |
|
67 | 67 | """ |
|
68 | 68 | ALPHABETS_NUM = r'''1234567890''' |
|
69 | 69 | ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm''' |
|
70 | 70 | ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM''' |
|
71 | 71 | ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?''' |
|
72 | 72 | ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \ |
|
73 | 73 | + ALPHABETS_NUM + ALPHABETS_SPECIAL |
|
74 | 74 | ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM |
|
75 | 75 | ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL |
|
76 | 76 | ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM |
|
77 | 77 | ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM |
|
78 | 78 | |
|
79 | 79 | def __init__(self, passwd=''): |
|
80 | 80 | self.passwd = passwd |
|
81 | 81 | |
|
82 | 82 | def gen_password(self, length, type_=None): |
|
83 | 83 | if type_ is None: |
|
84 | 84 | type_ = self.ALPHABETS_FULL |
|
85 | 85 | self.passwd = ''.join([random.choice(type_) for _ in xrange(length)]) |
|
86 | 86 | return self.passwd |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | class RhodeCodeCrypto(object): |
|
90 | 90 | |
|
91 | 91 | @classmethod |
|
92 | 92 | def hash_string(cls, str_): |
|
93 | 93 | """ |
|
94 | 94 | Cryptographic function used for password hashing, based on bcrypt |
|
95 | 95 | on unix or sha256 (hashlib) on windows |
|
96 | 96 | |
|
97 | 97 | :param str_: password to hash |
|
98 | 98 | """ |
|
99 | 99 | if is_windows: |
|
100 | 100 | from hashlib import sha256 |
|
101 | 101 | return sha256(str_).hexdigest() |
|
102 | 102 | elif is_unix: |
|
103 | 103 | import bcrypt |
|
104 | 104 | return bcrypt.hashpw(str_, bcrypt.gensalt(10)) |
|
105 | 105 | else: |
|
106 | 106 | raise Exception('Unknown or unsupported platform %s' \ |
|
107 | 107 | % __platform__) |
|
108 | 108 | |
|
109 | 109 | @classmethod |
|
110 | 110 | def hash_check(cls, password, hashed): |
|
111 | 111 | """ |
|
112 | 112 | Checks a password against its hashed value; runs a different |
|
113 | 113 | implementation based on the platform it runs on |
|
114 | 114 | |
|
115 | 115 | :param password: password |
|
116 | 116 | :param hashed: password in hashed form |
|
117 | 117 | """ |
|
118 | 118 | |
|
119 | 119 | if is_windows: |
|
120 | 120 | from hashlib import sha256 |
|
121 | 121 | return sha256(password).hexdigest() == hashed |
|
122 | 122 | elif is_unix: |
|
123 | 123 | import bcrypt |
|
124 | 124 | return bcrypt.hashpw(password, hashed) == hashed |
|
125 | 125 | else: |
|
126 | 126 | raise Exception('Unknown or unsupported platform %s' \ |
|
127 | 127 | % __platform__) |
|
128 | 128 | |
|
129 | 129 | |
|
130 | 130 | def get_crypt_password(password): |
|
131 | 131 | return RhodeCodeCrypto.hash_string(password) |
|
132 | 132 | |
|
133 | 133 | |
|
134 | 134 | def check_password(password, hashed): |
|
135 | 135 | return RhodeCodeCrypto.hash_check(password, hashed) |
|
136 | 136 | |
|
137 | 137 | |
|
138 | 138 | def generate_api_key(str_, salt=None): |
|
139 | 139 | """ |
|
140 | 140 | Generates API KEY from given string |
|
141 | 141 | |
|
142 | 142 | :param str_: |
|
143 | 143 | :param salt: |
|
144 | 144 | """ |
|
145 | 145 | |
|
146 | 146 | if salt is None: |
|
147 | 147 | salt = _RandomNameSequence().next() |
|
148 | 148 | |
|
149 | 149 | return hashlib.sha1(str_ + salt).hexdigest() |
|
150 | 150 | |
|
151 | 151 | |
|
152 | 152 | def authfunc(environ, username, password): |
|
153 | 153 | """ |
|
154 | 154 | Dummy authentication wrapper function used in Mercurial and Git for |
|
155 | 155 | access control. |
|
156 | 156 | |
|
157 | 157 | :param environ: needed only for using in Basic auth |
|
158 | 158 | """ |
|
159 | 159 | return authenticate(username, password) |
|
160 | 160 | |
|
161 | 161 | |
|
162 | 162 | def authenticate(username, password): |
|
163 | 163 | """ |
|
164 | 164 | Authentication function used for access control, |
|
165 | 165 | first checks db authentication, then falls back to ldap |
|
166 | 166 | authentication if enabled; also creates the ldap user if not in the database |
|
167 | 167 | |
|
168 | 168 | :param username: username |
|
169 | 169 | :param password: password |
|
170 | 170 | """ |
|
171 | 171 | |
|
172 | 172 | user_model = UserModel() |
|
173 | 173 | user = User.get_by_username(username) |
|
174 | 174 | |
|
175 | 175 | log.debug('Authenticating user using RhodeCode account') |
|
176 | 176 | if user is not None and not user.ldap_dn: |
|
177 | 177 | if user.active: |
|
178 | 178 | if user.username == 'default' and user.active: |
|
179 | 179 | log.info('user %s authenticated correctly as anonymous user' % |
|
180 | 180 | username) |
|
181 | 181 | return True |
|
182 | 182 | |
|
183 | 183 | elif user.username == username and check_password(password, |
|
184 | 184 | user.password): |
|
185 | 185 | log.info('user %s authenticated correctly' % username) |
|
186 | 186 | return True |
|
187 | 187 | else: |
|
188 | 188 | log.warning('user %s tried auth but is disabled' % username) |
|
189 | 189 | |
|
190 | 190 | else: |
|
191 | 191 | log.debug('Regular authentication failed') |
|
192 | 192 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
193 | 193 | |
|
194 | 194 | if user_obj is not None and not user_obj.ldap_dn: |
|
195 | 195 | log.debug('this user already exists as non ldap') |
|
196 | 196 | return False |
|
197 | 197 | |
|
198 | 198 | ldap_settings = RhodeCodeSetting.get_ldap_settings() |
|
199 | 199 | #====================================================================== |
|
200 | 200 | # FALLBACK TO LDAP AUTH IF ENABLED |
|
201 | 201 | #====================================================================== |
|
202 | 202 | if str2bool(ldap_settings.get('ldap_active')): |
|
203 | 203 | log.debug("Authenticating user using ldap") |
|
204 | 204 | kwargs = { |
|
205 | 205 | 'server': ldap_settings.get('ldap_host', ''), |
|
206 | 206 | 'base_dn': ldap_settings.get('ldap_base_dn', ''), |
|
207 | 207 | 'port': ldap_settings.get('ldap_port'), |
|
208 | 208 | 'bind_dn': ldap_settings.get('ldap_dn_user'), |
|
209 | 209 | 'bind_pass': ldap_settings.get('ldap_dn_pass'), |
|
210 | 210 | 'tls_kind': ldap_settings.get('ldap_tls_kind'), |
|
211 | 211 | 'tls_reqcert': ldap_settings.get('ldap_tls_reqcert'), |
|
212 | 212 | 'ldap_filter': ldap_settings.get('ldap_filter'), |
|
213 | 213 | 'search_scope': ldap_settings.get('ldap_search_scope'), |
|
214 | 214 | 'attr_login': ldap_settings.get('ldap_attr_login'), |
|
215 | 215 | 'ldap_version': 3, |
|
216 | 216 | } |
|
217 | 217 | log.debug('Checking for ldap authentication') |
|
218 | 218 | try: |
|
219 | 219 | aldap = AuthLdap(**kwargs) |
|
220 | 220 | (user_dn, ldap_attrs) = aldap.authenticate_ldap(username, |
|
221 | 221 | password) |
|
222 | 222 | log.debug('Got ldap DN response %s' % user_dn) |
|
223 | 223 | |
|
224 | 224 | get_ldap_attr = lambda k: ldap_attrs.get(ldap_settings\ |
|
225 | 225 | .get(k), [''])[0] |
|
226 | 226 | |
|
227 | 227 | user_attrs = { |
|
228 | 228 | 'name': safe_unicode(get_ldap_attr('ldap_attr_firstname')), |
|
229 | 229 | 'lastname': safe_unicode(get_ldap_attr('ldap_attr_lastname')), |
|
230 | 230 | 'email': get_ldap_attr('ldap_attr_email'), |
|
231 | 231 | 'active': 'hg.register.auto_activate' in User\ |
|
232 | 232 | .get_default_user().AuthUser.permissions['global'] |
|
233 | 233 | } |
|
234 | 234 | |
|
235 | 235 | # don't store LDAP password since we don't need it. Override |
|
236 | 236 | # with some random generated password |
|
237 | 237 | _password = PasswordGenerator().gen_password(length=8) |
|
238 | 238 | # create this user on the fly if it doesn't exist in rhodecode |
|
239 | 239 | # database |
|
240 | 240 | if user_model.create_ldap(username, _password, user_dn, |
|
241 | 241 | user_attrs): |
|
242 | 242 | log.info('created new ldap user %s' % username) |
|
243 | 243 | |
|
244 | 244 | Session().commit() |
|
245 | 245 | return True |
|
246 | 246 | except (LdapUsernameError, LdapPasswordError, LdapImportError): |
|
247 | 247 | pass |
|
248 | 248 | except (Exception,): |
|
249 | 249 | log.error(traceback.format_exc()) |
|
250 | 250 | pass |
|
251 | 251 | return False |
|
252 | 252 | |
|
253 | 253 | |
|
254 | 254 | def login_container_auth(username): |
|
255 | 255 | user = User.get_by_username(username) |
|
256 | 256 | if user is None: |
|
257 | 257 | user_attrs = { |
|
258 | 258 | 'name': username, |
|
259 | 259 | 'lastname': None, |
|
260 | 260 | 'email': None, |
|
261 | 261 | 'active': 'hg.register.auto_activate' in User\ |
|
262 | 262 | .get_default_user().AuthUser.permissions['global'] |
|
263 | 263 | } |
|
264 | 264 | user = UserModel().create_for_container_auth(username, user_attrs) |
|
265 | 265 | if not user: |
|
266 | 266 | return None |
|
267 | 267 | log.info('User %s was created by container authentication' % username) |
|
268 | 268 | |
|
269 | 269 | if not user.active: |
|
270 | 270 | return None |
|
271 | 271 | |
|
272 | 272 | user.update_lastlogin() |
|
273 | 273 | Session().commit() |
|
274 | 274 | |
|
275 | 275 | log.debug('User %s is now logged in by container authentication', |
|
276 | 276 | user.username) |
|
277 | 277 | return user |
|
278 | 278 | |
|
279 | 279 | |
|
280 | 280 | def get_container_username(environ, config, clean_username=False): |
|
281 | 281 | """ |
|
282 | 282 | Gets the container_auth username (or email). It tries to get the username |
|
283 | 283 | from REMOTE_USER if container_auth_enabled is enabled; if that fails |
|
284 | 284 | it tries to get username from HTTP_X_FORWARDED_USER if proxypass_auth_enabled |
|
285 | 285 | is enabled. clean_username extracts the username from this data if it |
|
286 | 286 | contains an '@' in it. |
|
287 | 287 | |
|
288 | 288 | :param environ: |
|
289 | 289 | :param config: |
|
290 | 290 | :param clean_username: |
|
291 | 291 | """ |
|
292 | 292 | username = None |
|
293 | 293 | |
|
294 | 294 | if str2bool(config.get('container_auth_enabled', False)): |
|
295 | 295 | from paste.httpheaders import REMOTE_USER |
|
296 | 296 | username = REMOTE_USER(environ) |
|
297 | 297 | log.debug('extracted REMOTE_USER:%s' % (username)) |
|
298 | 298 | |
|
299 | 299 | if not username and str2bool(config.get('proxypass_auth_enabled', False)): |
|
300 | 300 | username = environ.get('HTTP_X_FORWARDED_USER') |
|
301 | 301 | log.debug('extracted HTTP_X_FORWARDED_USER:%s' % (username)) |
|
302 | 302 | |
|
303 | 303 | if username and clean_username: |
|
304 | 304 | # Removing realm and domain from username |
|
305 | 305 | username = username.partition('@')[0] |
|
306 | 306 | username = username.rpartition('\\')[2] |
|
307 | 307 | log.debug('Received username %s from container' % username) |
|
308 | 308 | |
|
309 | 309 | return username |
|
310 | 310 | |
|
311 | 311 | |
|
312 | 312 | class CookieStoreWrapper(object): |
|
313 | 313 | |
|
314 | 314 | def __init__(self, cookie_store): |
|
315 | 315 | self.cookie_store = cookie_store |
|
316 | 316 | |
|
317 | 317 | def __repr__(self): |
|
318 | 318 | return 'CookieStore<%s>' % (self.cookie_store) |
|
319 | 319 | |
|
320 | 320 | def get(self, key, other=None): |
|
321 | 321 | if isinstance(self.cookie_store, dict): |
|
322 | 322 | return self.cookie_store.get(key, other) |
|
323 | 323 | elif isinstance(self.cookie_store, AuthUser): |
|
324 | 324 | return self.cookie_store.__dict__.get(key, other) |
|
325 | 325 | |
|
326 | 326 | |
|
327 | 327 | class AuthUser(object): |
|
328 | 328 | """ |
|
329 | 329 | A simple object that handles all attributes of user in RhodeCode |
|
330 | 330 | |
|
331 | 331 | It does lookup based on API key, given user, or user present in session |
|
332 | 332 | Then it fills all required information for such user. It also checks if |
|
333 | 333 | anonymous access is enabled and if so, it returns default user as logged |
|
334 | 334 | in |
|
335 | 335 | """ |
|
336 | 336 | |
|
337 | 337 | def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None): |
|
338 | 338 | |
|
339 | 339 | self.user_id = user_id |
|
340 | 340 | self.api_key = None |
|
341 | 341 | self.username = username |
|
342 | 342 | self.ip_addr = ip_addr |
|
343 | 343 | |
|
344 | 344 | self.name = '' |
|
345 | 345 | self.lastname = '' |
|
346 | 346 | self.email = '' |
|
347 | 347 | self.is_authenticated = False |
|
348 | 348 | self.admin = False |
|
349 | 349 | self.inherit_default_permissions = False |
|
350 | 350 | self.permissions = {} |
|
351 | 351 | self._api_key = api_key |
|
352 | 352 | self.propagate_data() |
|
353 | 353 | self._instance = None |
|
354 | 354 | |
|
355 | 355 | def propagate_data(self): |
|
356 | 356 | user_model = UserModel() |
|
357 | 357 | self.anonymous_user = User.get_by_username('default', cache=True) |
|
358 | 358 | is_user_loaded = False |
|
359 | 359 | |
|
360 | 360 | # try to get user by api key |
|
361 | 361 | if self._api_key and self._api_key != self.anonymous_user.api_key: |
|
362 | 362 | log.debug('Auth User lookup by API KEY %s' % self._api_key) |
|
363 | 363 | is_user_loaded = user_model.fill_data(self, api_key=self._api_key) |
|
364 | 364 | # lookup by userid |
|
365 | 365 | elif (self.user_id is not None and |
|
366 | 366 | self.user_id != self.anonymous_user.user_id): |
|
367 | 367 | log.debug('Auth User lookup by USER ID %s' % self.user_id) |
|
368 | 368 | is_user_loaded = user_model.fill_data(self, user_id=self.user_id) |
|
369 | 369 | # lookup by username |
|
370 | 370 | elif self.username and \ |
|
371 | 371 | str2bool(config.get('container_auth_enabled', False)): |
|
372 | 372 | |
|
373 | 373 | log.debug('Auth User lookup by USER NAME %s' % self.username) |
|
374 | 374 | dbuser = login_container_auth(self.username) |
|
375 | 375 | if dbuser is not None: |
|
376 | 376 | log.debug('filling all attributes to object') |
|
377 | 377 | for k, v in dbuser.get_dict().items(): |
|
378 | 378 | setattr(self, k, v) |
|
379 | 379 | self.set_authenticated() |
|
380 | 380 | is_user_loaded = True |
|
381 | 381 | else: |
|
382 | 382 | log.debug('No data in %s that could be used to log in' % self) |
|
383 | 383 | |
|
384 | 384 | if not is_user_loaded: |
|
385 | 385 | # if we cannot authenticate user try anonymous |
|
386 | 386 | if self.anonymous_user.active: |
|
387 | 387 | user_model.fill_data(self, user_id=self.anonymous_user.user_id) |
|
388 | 388 | # then we set this user is logged in |
|
389 | 389 | self.is_authenticated = True |
|
390 | 390 | else: |
|
391 | 391 | self.user_id = None |
|
392 | 392 | self.username = None |
|
393 | 393 | self.is_authenticated = False |
|
394 | 394 | |
|
395 | 395 | if not self.username: |
|
396 | 396 | self.username = 'None' |
|
397 | 397 | |
|
398 | 398 | log.debug('Auth User is now %s' % self) |
|
399 | 399 | user_model.fill_perms(self) |
|
400 | 400 | |
|
401 | 401 | @property |
|
402 | 402 | def is_admin(self): |
|
403 | 403 | return self.admin |
|
404 | 404 | |
|
405 | 405 | @property |
|
406 | 406 | def repos_admin(self): |
|
407 | 407 | """ |
|
408 | 408 | Returns list of repositories you're an admin of |
|
409 | 409 | """ |
|
410 | 410 | return [x[0] for x in self.permissions['repositories'].iteritems() |
|
411 | 411 | if x[1] == 'repository.admin'] |
|
412 | 412 | |
|
413 | 413 | @property |
|
414 | 414 | def repository_groups_admin(self): |
|
415 | 415 | """ |
|
416 | 416 | Returns list of repository groups you're an admin of |
|
417 | 417 | """ |
|
418 | 418 | return [x[0] for x in self.permissions['repositories_groups'].iteritems() |
|
419 | 419 | if x[1] == 'group.admin'] |
|
420 | 420 | |
|
421 | 421 | @property |
|
422 | 422 | def user_groups_admin(self): |
|
423 | 423 | """ |
|
424 | 424 | Returns list of user groups you're an admin of |
|
425 | 425 | """ |
|
426 | 426 | return [x[0] for x in self.permissions['user_groups'].iteritems() |
|
427 | 427 | if x[1] == 'usergroup.admin'] |
|
428 | 428 | |
|
429 | 429 | @property |
|
430 | 430 | def ip_allowed(self): |
|
431 | 431 | """ |
|
432 | 432 | Checks if ip_addr used in constructor is allowed from defined list of |
|
433 | 433 | allowed ip_addresses for user |
|
434 | 434 | |
|
435 | 435 | :returns: boolean, True if ip is in allowed ip range |
|
436 | 436 | """ |
|
437 | 437 | #check IP |
|
438 | 438 | allowed_ips = AuthUser.get_allowed_ips(self.user_id, cache=True) |
|
439 | 439 | if check_ip_access(source_ip=self.ip_addr, allowed_ips=allowed_ips): |
|
440 | 440 | log.debug('IP:%s is in range of %s' % (self.ip_addr, allowed_ips)) |
|
441 | 441 | return True |
|
442 | 442 | else: |
|
443 | 443 | log.info('Access for IP:%s forbidden, ' |
|
444 | 444 | 'not in %s' % (self.ip_addr, allowed_ips)) |
|
445 | 445 | return False |
|
446 | 446 | |
|
447 | 447 | def __repr__(self): |
|
448 | 448 | return "<AuthUser('id:%s:%s|%s')>" % (self.user_id, self.username, |
|
449 | 449 | self.is_authenticated) |
|
450 | 450 | |
|
451 | 451 | def set_authenticated(self, authenticated=True): |
|
452 | 452 | if self.user_id != self.anonymous_user.user_id: |
|
453 | 453 | self.is_authenticated = authenticated |
|
454 | 454 | |
|
455 | 455 | def get_cookie_store(self): |
|
456 | 456 | return {'username': self.username, |
|
457 | 457 | 'user_id': self.user_id, |
|
458 | 458 | 'is_authenticated': self.is_authenticated} |
|
459 | 459 | |
|
460 | 460 | @classmethod |
|
461 | 461 | def from_cookie_store(cls, cookie_store): |
|
462 | 462 | """ |
|
463 | 463 | Creates AuthUser from a cookie store |
|
464 | 464 | |
|
465 | 465 | :param cls: |
|
466 | 466 | :param cookie_store: |
|
467 | 467 | """ |
|
468 | 468 | user_id = cookie_store.get('user_id') |
|
469 | 469 | username = cookie_store.get('username') |
|
470 | 470 | api_key = cookie_store.get('api_key') |
|
471 | 471 | return AuthUser(user_id, api_key, username) |
|
472 | 472 | |
|
473 | 473 | @classmethod |
|
474 | 474 | def get_allowed_ips(cls, user_id, cache=False): |
|
475 | 475 | _set = set() |
|
476 | 476 | user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id) |
|
477 | 477 | if cache: |
|
478 | 478 | user_ips = user_ips.options(FromCache("sql_cache_short", |
|
479 | 479 | "get_user_ips_%s" % user_id)) |
|
480 | 480 | for ip in user_ips: |
|
481 | 481 | try: |
|
482 | 482 | _set.add(ip.ip_addr) |
|
483 | 483 | except ObjectDeletedError: |
|
484 | 484 | # since we use heavy caching sometimes it happens that we get |
|
485 | 485 | # deleted objects here, we just skip them |
|
486 | 486 | pass |
|
487 | 487 | return _set or set(['0.0.0.0/0', '::/0']) |
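# Usage sketch (illustrative, not part of this module): build an AuthUser for
# a known user id and the request IP, then gate access on its checks.
#
#     auth_user = AuthUser(user_id=1, ip_addr='10.0.0.5')
#     if auth_user.is_authenticated and auth_user.ip_allowed:
#         pass  # grant access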
|
488 | 488 | |
|
489 | 489 | |
|
490 | 490 | def set_available_permissions(config): |
|
491 | 491 | """ |
|
492 | 492 | This function will propagate pylons globals with all available defined |
|
493 | 493 | permissions given in db. We don't want to check each time from db for new |
|
494 | 494 | permissions since adding a new permission also requires application restart |
|
495 | 495 | i.e. to decorate new views with the newly created permission |
|
496 | 496 | |
|
497 | 497 | :param config: current pylons config instance |
|
498 | 498 | |
|
499 | 499 | """ |
|
500 | 500 | log.info('getting information about all available permissions') |
|
501 | 501 | try: |
|
502 | 502 | sa = meta.Session |
|
503 | 503 | all_perms = sa.query(Permission).all() |
|
504 | 504 | except Exception: |
|
505 | 505 | pass |
|
506 | 506 | finally: |
|
507 | 507 | meta.Session.remove() |
|
508 | 508 | |
|
509 | 509 | config['available_permissions'] = [x.permission_name for x in all_perms] |
|
510 | 510 | |
|
511 | 511 | |
|
512 | 512 | #============================================================================== |
|
513 | 513 | # CHECK DECORATORS |
|
514 | 514 | #============================================================================== |
|
515 | 515 | class LoginRequired(object): |
|
516 | 516 | """ |
|
517 | 517 | Must be logged in to execute this function else |
|
518 | 518 | redirect to login page |
|
519 | 519 | |
|
520 | 520 | :param api_access: if enabled this checks only for valid auth token |
|
521 | 521 | and grants access based on valid token |
|
522 | 522 | """ |
|
523 | 523 | |
|
524 | 524 | def __init__(self, api_access=False): |
|
525 | 525 | self.api_access = api_access |
|
526 | 526 | |
|
527 | 527 | def __call__(self, func): |
|
528 | 528 | return decorator(self.__wrapper, func) |
|
529 | 529 | |
|
530 | 530 | def __wrapper(self, func, *fargs, **fkwargs): |
|
531 | 531 | cls = fargs[0] |
|
532 | 532 | user = cls.rhodecode_user |
|
533 | 533 | loc = "%s:%s" % (cls.__class__.__name__, func.__name__) |
|
534 | ||
|
534 | # defined whitelist of controllers for which API access will be enabled | |
|
535 | whitelist = aslist(config.get('api_access_controllers_whitelist'), | |
|
536 | sep=',') | |
|
537 | api_access_whitelist = loc in whitelist | |
|
538 | log.debug('loc:%s is in API whitelist:%s:%s' % (loc, whitelist, | |
|
539 | api_access_whitelist)) | |
|
535 | 540 | #check IP |
|
536 | 541 | ip_access_ok = True |
|
537 | 542 | if not user.ip_allowed: |
|
538 | 543 | from rhodecode.lib import helpers as h |
|
539 | 544 | h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr))), |
|
540 | 545 | category='warning') |
|
541 | 546 | ip_access_ok = False |
|
542 | 547 | |
|
543 | 548 | api_access_ok = False |
|
544 | if self.api_access: | |
|
549 | if self.api_access or api_access_whitelist: | |
|
545 | 550 | log.debug('Checking API KEY access for %s' % cls) |
|
546 | 551 | if user.api_key == request.GET.get('api_key'): |
|
547 | 552 | api_access_ok = True |
|
548 | 553 | else: |
|
549 | 554 | log.debug("API KEY token not valid") |
|
550 | 555 | |
|
551 | 556 | log.debug('Checking if %s is authenticated @ %s' % (user.username, loc)) |
|
552 | 557 | if (user.is_authenticated or api_access_ok) and ip_access_ok: |
|
553 | 558 | reason = 'RegularAuth' if user.is_authenticated else 'APIAuth' |
|
554 | 559 | log.info('user %s is authenticated and granted access to %s ' |
|
555 | 560 | 'using %s' % (user.username, loc, reason) |
|
556 | 561 | ) |
|
557 | 562 | return func(*fargs, **fkwargs) |
|
558 | 563 | else: |
|
559 | 564 | log.warn('user %s NOT authenticated on func: %s' % ( |
|
560 | 565 | user, loc) |
|
561 | 566 | ) |
|
562 | 567 | p = url.current() |
|
563 | 568 | |
|
564 | 569 | log.debug('redirecting to login page with %s' % p) |
|
565 | 570 | return redirect(url('login_home', came_from=p)) |
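# Usage sketch for the whitelist feature added above (names are illustrative,
# not part of this changeset): decorate a controller method as usual and
# enable token access for it from the .ini file instead of in code,
#
#     @LoginRequired()
#     def changeset_raw(self, repo_name, revision):
#         ...
#
# with an ini entry matching the "Controller:method" format used for `loc`:
#
#     api_access_controllers_whitelist = ChangesetController:changeset_raw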
|
566 | 571 | |
|
567 | 572 | |
|
568 | 573 | class NotAnonymous(object): |
|
569 | 574 | """ |
|
570 | 575 | Must be logged in to execute this function else |
|
571 | 576 | redirect to login page""" |
|
572 | 577 | |
|
573 | 578 | def __call__(self, func): |
|
574 | 579 | return decorator(self.__wrapper, func) |
|
575 | 580 | |
|
576 | 581 | def __wrapper(self, func, *fargs, **fkwargs): |
|
577 | 582 | cls = fargs[0] |
|
578 | 583 | self.user = cls.rhodecode_user |
|
579 | 584 | |
|
580 | 585 | log.debug('Checking if user is not anonymous @%s' % cls) |
|
581 | 586 | |
|
582 | 587 | anonymous = self.user.username == 'default' |
|
583 | 588 | |
|
584 | 589 | if anonymous: |
|
585 | 590 | p = url.current() |
|
586 | 591 | |
|
587 | 592 | import rhodecode.lib.helpers as h |
|
588 | 593 | h.flash(_('You need to be a registered user to ' |
|
589 | 594 | 'perform this action'), |
|
590 | 595 | category='warning') |
|
591 | 596 | return redirect(url('login_home', came_from=p)) |
|
592 | 597 | else: |
|
593 | 598 | return func(*fargs, **fkwargs) |
|
594 | 599 | |
|
595 | 600 | |
|
596 | 601 | class PermsDecorator(object): |
|
597 | 602 | """Base class for controller decorators""" |
|
598 | 603 | |
|
599 | 604 | def __init__(self, *required_perms): |
|
600 | 605 | available_perms = config['available_permissions'] |
|
601 | 606 | for perm in required_perms: |
|
602 | 607 | if perm not in available_perms: |
|
603 | 608 | raise Exception("'%s' permission is not defined" % perm) |
|
604 | 609 | self.required_perms = set(required_perms) |
|
605 | 610 | self.user_perms = None |
|
606 | 611 | |
|
607 | 612 | def __call__(self, func): |
|
608 | 613 | return decorator(self.__wrapper, func) |
|
609 | 614 | |
|
610 | 615 | def __wrapper(self, func, *fargs, **fkwargs): |
|
611 | 616 | cls = fargs[0] |
|
612 | 617 | self.user = cls.rhodecode_user |
|
613 | 618 | self.user_perms = self.user.permissions |
|
614 | 619 | log.debug('checking %s permissions %s for %s %s', |
|
615 | 620 | self.__class__.__name__, self.required_perms, cls, self.user) |
|
616 | 621 | |
|
617 | 622 | if self.check_permissions(): |
|
618 | 623 | log.debug('Permission granted for %s %s' % (cls, self.user)) |
|
619 | 624 | return func(*fargs, **fkwargs) |
|
620 | 625 | |
|
621 | 626 | else: |
|
622 | 627 | log.debug('Permission denied for %s %s' % (cls, self.user)) |
|
623 | 628 | anonymous = self.user.username == 'default' |
|
624 | 629 | |
|
625 | 630 | if anonymous: |
|
626 | 631 | p = url.current() |
|
627 | 632 | |
|
628 | 633 | import rhodecode.lib.helpers as h |
|
629 | 634 | h.flash(_('You need to be signed in to ' |
|
630 | 635 | 'view this page'), |
|
631 | 636 | category='warning') |
|
632 | 637 | return redirect(url('login_home', came_from=p)) |
|
633 | 638 | |
|
634 | 639 | else: |
|
635 | 640 | # redirect with forbidden ret code |
|
636 | 641 | return abort(403) |
|
637 | 642 | |
|
638 | 643 | def check_permissions(self): |
|
639 | 644 | """Dummy function for overriding""" |
|
640 | 645 | raise Exception('You have to write this function in child class') |
|
641 | 646 | |
|
642 | 647 | |
|
643 | 648 | class HasPermissionAllDecorator(PermsDecorator): |
|
644 | 649 | """ |
|
645 | 650 | Checks for access permission for all given predicates. All of them |
|
646 | 651 | have to be met in order to fulfill the request |
|
647 | 652 | """ |
|
648 | 653 | |
|
649 | 654 | def check_permissions(self): |
|
650 | 655 | if self.required_perms.issubset(self.user_perms.get('global')): |
|
651 | 656 | return True |
|
652 | 657 | return False |
|
653 | 658 | |
|
654 | 659 | |
|
655 | 660 | class HasPermissionAnyDecorator(PermsDecorator): |
|
656 | 661 | """ |
|
657 | 662 | Checks for access permission for any of given predicates. In order to |
|
658 | 663 | fulfill the request any of the predicates must be met |
|
659 | 664 | """ |
|
660 | 665 | |
|
661 | 666 | def check_permissions(self): |
|
662 | 667 | if self.required_perms.intersection(self.user_perms.get('global')): |
|
663 | 668 | return True |
|
664 | 669 | return False |
|
665 | 670 | |
|
666 | 671 | |
|
667 | 672 | class HasRepoPermissionAllDecorator(PermsDecorator): |
|
668 | 673 | """ |
|
669 | 674 | Checks for access permission for all given predicates for specific |
|
670 | 675 | repository. All of them have to be met in order to fulfill the request |
|
671 | 676 | """ |
|
672 | 677 | |
|
673 | 678 | def check_permissions(self): |
|
674 | 679 | repo_name = get_repo_slug(request) |
|
675 | 680 | try: |
|
676 | 681 | user_perms = set([self.user_perms['repositories'][repo_name]]) |
|
677 | 682 | except KeyError: |
|
678 | 683 | return False |
|
679 | 684 | if self.required_perms.issubset(user_perms): |
|
680 | 685 | return True |
|
681 | 686 | return False |
|
682 | 687 | |
|
683 | 688 | |
|
684 | 689 | class HasRepoPermissionAnyDecorator(PermsDecorator): |
|
685 | 690 | """ |
|
686 | 691 | Checks for access permission for any of given predicates for specific |
|
687 | 692 | repository. In order to fulfill the request any of the predicates must be met |
|
688 | 693 | """ |
|
689 | 694 | |
|
690 | 695 | def check_permissions(self): |
|
691 | 696 | repo_name = get_repo_slug(request) |
|
692 | 697 | try: |
|
693 | 698 | user_perms = set([self.user_perms['repositories'][repo_name]]) |
|
694 | 699 | except KeyError: |
|
695 | 700 | return False |
|
696 | 701 | |
|
697 | 702 | if self.required_perms.intersection(user_perms): |
|
698 | 703 | return True |
|
699 | 704 | return False |
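# Usage sketch for the decorators above (illustrative; 'repository.admin'
# appears elsewhere in this module, 'repository.write' is assumed to be a
# defined permission in your setup):
#
#     @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
#     def edit(self, repo_name):
#         ...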
|
700 | 705 | |
|
701 | 706 | |
|
702 | 707 | class HasReposGroupPermissionAllDecorator(PermsDecorator): |
|
703 | 708 | """ |
|
704 | 709 | Checks for access permission for all given predicates for specific |
|
705 | 710 | repository group. All of them have to be met in order to fulfill the request |
|
706 | 711 | """ |
|
707 | 712 | |
|
708 | 713 | def check_permissions(self): |
|
709 | 714 | group_name = get_repos_group_slug(request) |
|
710 | 715 | try: |
|
711 | 716 | user_perms = set([self.user_perms['repositories_groups'][group_name]]) |
|
712 | 717 | except KeyError: |
|
713 | 718 | return False |
|
714 | 719 | |
|
715 | 720 | if self.required_perms.issubset(user_perms): |
|
716 | 721 | return True |
|
717 | 722 | return False |
|
718 | 723 | |
|
719 | 724 | |
|
720 | 725 | class HasReposGroupPermissionAnyDecorator(PermsDecorator): |
|
721 | 726 | """ |
|
722 | 727 | Checks for access permission for any of given predicates for specific |
|
723 | 728 | repository group. In order to fulfill the request any of the predicates must be met |
|
724 | 729 | """ |
|
725 | 730 | |
|
726 | 731 | def check_permissions(self): |
|
727 | 732 | group_name = get_repos_group_slug(request) |
|
728 | 733 | try: |
|
729 | 734 | user_perms = set([self.user_perms['repositories_groups'][group_name]]) |
|
730 | 735 | except KeyError: |
|
731 | 736 | return False |
|
732 | 737 | |
|
733 | 738 | if self.required_perms.intersection(user_perms): |
|
734 | 739 | return True |
|
735 | 740 | return False |
|
736 | 741 | |
|
737 | 742 | |
|
738 | 743 | class HasUserGroupPermissionAllDecorator(PermsDecorator): |
|
739 | 744 | """ |
|
740 | 745 | Checks for access permission for all given predicates for specific |
|
741 | 746 | user group. All of them have to be met in order to fulfill the request |
|
742 | 747 | """ |
|
743 | 748 | |
|
744 | 749 | def check_permissions(self): |
|
745 | 750 | group_name = get_user_group_slug(request) |
|
746 | 751 | try: |
|
747 | 752 | user_perms = set([self.user_perms['user_groups'][group_name]]) |
|
748 | 753 | except KeyError: |
|
749 | 754 | return False |
|
750 | 755 | |
|
751 | 756 | if self.required_perms.issubset(user_perms): |
|
752 | 757 | return True |
|
753 | 758 | return False |
|
754 | 759 | |
|
755 | 760 | |
|
756 | 761 | class HasUserGroupPermissionAnyDecorator(PermsDecorator): |
|
757 | 762 | """ |
|
758 | 763 | Checks for access permission for any of given predicates for specific |
|
759 | 764 | user group. In order to fulfill the request any of the predicates must be met |
|
760 | 765 | """ |
|
761 | 766 | |
|
762 | 767 | def check_permissions(self): |
|
763 | 768 | group_name = get_user_group_slug(request) |
|
764 | 769 | try: |
|
765 | 770 | user_perms = set([self.user_perms['user_groups'][group_name]]) |
|
766 | 771 | except KeyError: |
|
767 | 772 | return False |
|
768 | 773 | |
|
769 | 774 | if self.required_perms.intersection(user_perms): |
|
770 | 775 | return True |
|
771 | 776 | return False |
|
772 | 777 | |
|
773 | 778 | |
|
774 | 779 | #============================================================================== |
|
775 | 780 | # CHECK FUNCTIONS |
|
776 | 781 | #============================================================================== |
|
777 | 782 | class PermsFunction(object): |
|
778 | 783 | """Base function for other check functions""" |
|
779 | 784 | |
|
780 | 785 | def __init__(self, *perms): |
|
781 | 786 | available_perms = config['available_permissions'] |
|
782 | 787 | |
|
783 | 788 | for perm in perms: |
|
784 | 789 | if perm not in available_perms: |
|
785 | 790 | raise Exception("'%s' permission is not defined" % perm) |
|
786 | 791 | self.required_perms = set(perms) |
|
787 | 792 | self.user_perms = None |
|
788 | 793 | self.repo_name = None |
|
789 | 794 | self.group_name = None |
|
790 | 795 | |
|
791 | 796 | def __call__(self, check_location=''): |
|
792 | 797 | #TODO: put user as attribute here |
|
793 | 798 | user = request.user |
|
794 | 799 | cls_name = self.__class__.__name__ |
|
795 | 800 | check_scope = { |
|
796 | 801 | 'HasPermissionAll': '', |
|
797 | 802 | 'HasPermissionAny': '', |
|
798 | 803 | 'HasRepoPermissionAll': 'repo:%s' % self.repo_name, |
|
799 | 804 | 'HasRepoPermissionAny': 'repo:%s' % self.repo_name, |
|
800 | 805 | 'HasReposGroupPermissionAll': 'group:%s' % self.group_name, |
|
801 | 806 | 'HasReposGroupPermissionAny': 'group:%s' % self.group_name, |
|
802 | 807 | }.get(cls_name, '?') |
|
803 | 808 | log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name, |
|
804 | 809 | self.required_perms, user, check_scope, |
|
805 | 810 | check_location or 'unspecified location') |
|
806 | 811 | if not user: |
|
807 | 812 | log.debug('Empty request user') |
|
808 | 813 | return False |
|
809 | 814 | self.user_perms = user.permissions |
|
810 | 815 | if self.check_permissions(): |
|
811 | 816 | log.debug('Permission to %s granted for user: %s @ %s', self.repo_name, user, |
|
812 | 817 | check_location or 'unspecified location') |
|
813 | 818 | return True |
|
814 | 819 | |
|
815 | 820 | else: |
|
816 | 821 | log.debug('Permission to %s denied for user: %s @ %s', self.repo_name, user, |
|
817 | 822 | check_location or 'unspecified location') |
|
818 | 823 | return False |
|
819 | 824 | |
|
820 | 825 | def check_permissions(self): |
|
821 | 826 | """Dummy function for overriding""" |
|
822 | 827 | raise Exception('You have to write this function in child class') |
|
823 | 828 | |
|
824 | 829 | |
|
825 | 830 | class HasPermissionAll(PermsFunction): |
|
826 | 831 | def check_permissions(self): |
|
827 | 832 | if self.required_perms.issubset(self.user_perms.get('global')): |
|
828 | 833 | return True |
|
829 | 834 | return False |
|
830 | 835 | |
|
831 | 836 | |
|
832 | 837 | class HasPermissionAny(PermsFunction): |
|
833 | 838 | def check_permissions(self): |
|
834 | 839 | if self.required_perms.intersection(self.user_perms.get('global')): |
|
835 | 840 | return True |
|
836 | 841 | return False |
|
837 | 842 | |
|
838 | 843 | |
|
839 | 844 | class HasRepoPermissionAll(PermsFunction): |
|
840 | 845 | def __call__(self, repo_name=None, check_location=''): |
|
841 | 846 | self.repo_name = repo_name |
|
842 | 847 | return super(HasRepoPermissionAll, self).__call__(check_location) |
|
843 | 848 | |
|
844 | 849 | def check_permissions(self): |
|
845 | 850 | if not self.repo_name: |
|
846 | 851 | self.repo_name = get_repo_slug(request) |
|
847 | 852 | |
|
848 | 853 | try: |
|
849 | 854 | self._user_perms = set( |
|
850 | 855 | [self.user_perms['repositories'][self.repo_name]] |
|
851 | 856 | ) |
|
852 | 857 | except KeyError: |
|
853 | 858 | return False |
|
854 | 859 | if self.required_perms.issubset(self._user_perms): |
|
855 | 860 | return True |
|
856 | 861 | return False |
|
857 | 862 | |
|
858 | 863 | |
|
859 | 864 | class HasRepoPermissionAny(PermsFunction): |
|
860 | 865 | def __call__(self, repo_name=None, check_location=''): |
|
861 | 866 | self.repo_name = repo_name |
|
862 | 867 | return super(HasRepoPermissionAny, self).__call__(check_location) |
|
863 | 868 | |
|
864 | 869 | def check_permissions(self): |
|
865 | 870 | if not self.repo_name: |
|
866 | 871 | self.repo_name = get_repo_slug(request) |
|
867 | 872 | |
|
868 | 873 | try: |
|
869 | 874 | self._user_perms = set( |
|
870 | 875 | [self.user_perms['repositories'][self.repo_name]] |
|
871 | 876 | ) |
|
872 | 877 | except KeyError: |
|
873 | 878 | return False |
|
874 | 879 | if self.required_perms.intersection(self._user_perms): |
|
875 | 880 | return True |
|
876 | 881 | return False |
|
877 | 882 | |
|
878 | 883 | |
|
879 | 884 | class HasReposGroupPermissionAny(PermsFunction): |
|
880 | 885 | def __call__(self, group_name=None, check_location=''): |
|
881 | 886 | self.group_name = group_name |
|
882 | 887 | return super(HasReposGroupPermissionAny, self).__call__(check_location) |
|
883 | 888 | |
|
884 | 889 | def check_permissions(self): |
|
885 | 890 | try: |
|
886 | 891 | self._user_perms = set( |
|
887 | 892 | [self.user_perms['repositories_groups'][self.group_name]] |
|
888 | 893 | ) |
|
889 | 894 | except KeyError: |
|
890 | 895 | return False |
|
891 | 896 | if self.required_perms.intersection(self._user_perms): |
|
892 | 897 | return True |
|
893 | 898 | return False |
|
894 | 899 | |
|
895 | 900 | |
|
896 | 901 | class HasReposGroupPermissionAll(PermsFunction): |
|
897 | 902 | def __call__(self, group_name=None, check_location=''): |
|
898 | 903 | self.group_name = group_name |
|
899 | 904 | return super(HasReposGroupPermissionAll, self).__call__(check_location) |
|
900 | 905 | |
|
901 | 906 | def check_permissions(self): |
|
902 | 907 | try: |
|
903 | 908 | self._user_perms = set( |
|
904 | 909 | [self.user_perms['repositories_groups'][self.group_name]] |
|
905 | 910 | ) |
|
906 | 911 | except KeyError: |
|
907 | 912 | return False |
|
908 | 913 | if self.required_perms.issubset(self._user_perms): |
|
909 | 914 | return True |
|
910 | 915 | return False |
|
911 | 916 | |
|
912 | 917 | |
|
913 | 918 | class HasUserGroupPermissionAny(PermsFunction): |
|
914 | 919 | def __call__(self, user_group_name=None, check_location=''): |
|
915 | 920 | self.user_group_name = user_group_name |
|
916 | 921 | return super(HasUserGroupPermissionAny, self).__call__(check_location) |
|
917 | 922 | |
|
918 | 923 | def check_permissions(self): |
|
919 | 924 | try: |
|
920 | 925 | self._user_perms = set( |
|
921 | 926 | [self.user_perms['user_groups'][self.user_group_name]] |
|
922 | 927 | ) |
|
923 | 928 | except KeyError: |
|
924 | 929 | return False |
|
925 | 930 | if self.required_perms.intersection(self._user_perms): |
|
926 | 931 | return True |
|
927 | 932 | return False |
|
928 | 933 | |
|
929 | 934 | |
|
930 | 935 | class HasUserGroupPermissionAll(PermsFunction): |
|
931 | 936 | def __call__(self, user_group_name=None, check_location=''): |
|
932 | 937 | self.user_group_name = user_group_name |
|
933 | 938 | return super(HasUserGroupPermissionAll, self).__call__(check_location) |
|
934 | 939 | |
|
935 | 940 | def check_permissions(self): |
|
936 | 941 | try: |
|
937 | 942 | self._user_perms = set( |
|
938 | 943 | [self.user_perms['user_groups'][self.user_group_name]] |
|
939 | 944 | ) |
|
940 | 945 | except KeyError: |
|
941 | 946 | return False |
|
942 | 947 | if self.required_perms.issubset(self._user_perms): |
|
943 | 948 | return True |
|
944 | 949 | return False |
|
945 | 950 | |
|
946 | 951 | #============================================================================== |
|
947 | 952 | # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH |
|
948 | 953 | #============================================================================== |
|
949 | 954 | class HasPermissionAnyMiddleware(object): |
|
950 | 955 | def __init__(self, *perms): |
|
951 | 956 | self.required_perms = set(perms) |
|
952 | 957 | |
|
953 | 958 | def __call__(self, user, repo_name): |
|
954 | 959 | # repo_name MUST be unicode, since we handle keys in permission |
|
955 | 960 | # dict by unicode |
|
956 | 961 | repo_name = safe_unicode(repo_name) |
|
957 | 962 | usr = AuthUser(user.user_id) |
|
958 | 963 | try: |
|
959 | 964 | self.user_perms = set([usr.permissions['repositories'][repo_name]]) |
|
960 | 965 | except Exception: |
|
961 | 966 | log.error('Exception while accessing permissions %s' % |
|
962 | 967 | traceback.format_exc()) |
|
963 | 968 | self.user_perms = set() |
|
964 | 969 | self.username = user.username |
|
965 | 970 | self.repo_name = repo_name |
|
966 | 971 | return self.check_permissions() |
|
967 | 972 | |
|
968 | 973 | def check_permissions(self): |
|
969 | 974 | log.debug('checking VCS protocol ' |
|
970 | 975 | 'permissions %s for user:%s repository:%s', self.user_perms, |
|
971 | 976 | self.username, self.repo_name) |
|
972 | 977 | if self.required_perms.intersection(self.user_perms): |
|
973 | 978 | log.debug('permission granted for user:%s on repo:%s' % ( |
|
974 | 979 | self.username, self.repo_name |
|
975 | 980 | ) |
|
976 | 981 | ) |
|
977 | 982 | return True |
|
978 | 983 | log.debug('permission denied for user:%s on repo:%s' % ( |
|
979 | 984 | self.username, self.repo_name |
|
980 | 985 | ) |
|
981 | 986 | ) |
|
982 | 987 | return False |
|
983 | 988 | |
|
984 | 989 | |
|
985 | 990 | #============================================================================== |
|
986 | 991 | # SPECIAL VERSION TO HANDLE API AUTH |
|
987 | 992 | #============================================================================== |
|
988 | 993 | class _BaseApiPerm(object): |
|
989 | 994 | def __init__(self, *perms): |
|
990 | 995 | self.required_perms = set(perms) |
|
991 | 996 | |
|
992 | 997 | def __call__(self, check_location='unspecified', user=None, repo_name=None): |
|
993 | 998 | cls_name = self.__class__.__name__ |
|
994 | 999 | check_scope = 'user:%s, repo:%s' % (user, repo_name) |
|
995 | 1000 | log.debug('checking cls:%s %s %s @ %s', cls_name, |
|
996 | 1001 | self.required_perms, check_scope, check_location) |
|
997 | 1002 | if not user: |
|
998 | 1003 | log.debug('Empty User passed into arguments') |
|
999 | 1004 | return False |
|
1000 | 1005 | |
|
1001 | 1006 | ## process user |
|
1002 | 1007 | if not isinstance(user, AuthUser): |
|
1003 | 1008 | user = AuthUser(user.user_id) |
|
1004 | 1009 | |
|
1005 | 1010 | if self.check_permissions(user.permissions, repo_name): |
|
1006 | 1011 | log.debug('Permission to %s granted for user: %s @ %s', repo_name, |
|
1007 | 1012 | user, check_location) |
|
1008 | 1013 | return True |
|
1009 | 1014 | |
|
1010 | 1015 | else: |
|
1011 | 1016 | log.debug('Permission to %s denied for user: %s @ %s', repo_name, |
|
1012 | 1017 | user, check_location) |
|
1013 | 1018 | return False |
|
1014 | 1019 | |
|
1015 | 1020 | def check_permissions(self, perm_defs, repo_name): |
|
1016 | 1021 | """ |
|
1017 | 1022 | implement in child class; should return True if permissions are ok, |
|
1018 | 1023 | False otherwise |
|
1019 | 1024 | |
|
1020 | 1025 | :param perm_defs: dict with permission definitions |
|
1021 | 1026 | :param repo_name: repo name |
|
1022 | 1027 | """ |
|
1023 | 1028 | raise NotImplementedError() |
|
1024 | 1029 | |
|
1025 | 1030 | |
|
1026 | 1031 | class HasPermissionAllApi(_BaseApiPerm): |
|
1027 | 1032 | def __call__(self, user, check_location=''): |
|
1028 | 1033 | return super(HasPermissionAllApi, self)\ |
|
1029 | 1034 | .__call__(check_location=check_location, user=user) |
|
1030 | 1035 | |
|
1031 | 1036 | def check_permissions(self, perm_defs, repo): |
|
1032 | 1037 | if self.required_perms.issubset(perm_defs.get('global')): |
|
1033 | 1038 | return True |
|
1034 | 1039 | return False |
|
1035 | 1040 | |
|
1036 | 1041 | |
|
1037 | 1042 | class HasPermissionAnyApi(_BaseApiPerm): |
|
1038 | 1043 | def __call__(self, user, check_location=''): |
|
1039 | 1044 | return super(HasPermissionAnyApi, self)\ |
|
1040 | 1045 | .__call__(check_location=check_location, user=user) |
|
1041 | 1046 | |
|
1042 | 1047 | def check_permissions(self, perm_defs, repo): |
|
1043 | 1048 | if self.required_perms.intersection(perm_defs.get('global')): |
|
1044 | 1049 | return True |
|
1045 | 1050 | return False |
|
1046 | 1051 | |
|
1047 | 1052 | |
|
1048 | 1053 | class HasRepoPermissionAllApi(_BaseApiPerm): |
|
1049 | 1054 | def __call__(self, user, repo_name, check_location=''): |
|
1050 | 1055 | return super(HasRepoPermissionAllApi, self)\ |
|
1051 | 1056 | .__call__(check_location=check_location, user=user, |
|
1052 | 1057 | repo_name=repo_name) |
|
1053 | 1058 | |
|
1054 | 1059 | def check_permissions(self, perm_defs, repo_name): |
|
1055 | 1060 | |
|
1056 | 1061 | try: |
|
1057 | 1062 | self._user_perms = set( |
|
1058 | 1063 | [perm_defs['repositories'][repo_name]] |
|
1059 | 1064 | ) |
|
1060 | 1065 | except KeyError: |
|
1061 | 1066 | log.warning(traceback.format_exc()) |
|
1062 | 1067 | return False |
|
1063 | 1068 | if self.required_perms.issubset(self._user_perms): |
|
1064 | 1069 | return True |
|
1065 | 1070 | return False |
|
1066 | 1071 | |
|
1067 | 1072 | |
|
1068 | 1073 | class HasRepoPermissionAnyApi(_BaseApiPerm): |
|
1069 | 1074 | def __call__(self, user, repo_name, check_location=''): |
|
1070 | 1075 | return super(HasRepoPermissionAnyApi, self)\ |
|
1071 | 1076 | .__call__(check_location=check_location, user=user, |
|
1072 | 1077 | repo_name=repo_name) |
|
1073 | 1078 | |
|
1074 | 1079 | def check_permissions(self, perm_defs, repo_name): |
|
1075 | 1080 | |
|
1076 | 1081 | try: |
|
1077 | 1082 | _user_perms = set( |
|
1078 | 1083 | [perm_defs['repositories'][repo_name]] |
|
1079 | 1084 | ) |
|
1080 | 1085 | except KeyError: |
|
1081 | 1086 | log.warning(traceback.format_exc()) |
|
1082 | 1087 | return False |
|
1083 | 1088 | if self.required_perms.intersection(_user_perms): |
|
1084 | 1089 | return True |
|
1085 | 1090 | return False |
|
1086 | 1091 | |
|
1087 | 1092 | |
|
1088 | 1093 | def check_ip_access(source_ip, allowed_ips=None): |
|
1089 | 1094 | """ |
|
1090 | 1095 | Checks if source_ip falls within any of the allowed_ips networks. |
|
1091 | 1096 | |
|
1092 | 1097 | :param source_ip: |
|
1093 | 1098 | :param allowed_ips: list of allowed ips together with mask |
|
1094 | 1099 | """ |
|
1095 | 1100 | from rhodecode.lib import ipaddr |
|
1096 | 1101 | log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips)) |
|
1097 | 1102 | if isinstance(allowed_ips, (tuple, list, set)): |
|
1098 | 1103 | for ip in allowed_ips: |
|
1099 | 1104 | try: |
|
1100 | 1105 | if ipaddr.IPAddress(source_ip) in ipaddr.IPNetwork(ip): |
|
1101 | 1106 | return True |
|
1102 | 1107 | # in any case where we cannot determine the IP, don't crash, just |
|
1103 | 1108 | # skip it and log it as an error; we still want to say forbidden when |
|
1104 | 1109 | # sending bad IP |
|
1105 | 1110 | except Exception: |
|
1106 | 1111 | log.error(traceback.format_exc()) |
|
1107 | 1112 | continue |
|
1108 | 1113 | return False |
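# Usage sketch (addresses and networks are illustrative):
#
#     check_ip_access('192.168.1.7', ['192.168.1.0/24'])  # True
#     check_ip_access('10.0.0.7', ['192.168.1.0/24'])     # False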