site: added a dummy robots.txt to handle requests from crawling robots.
marcink -
r1316:035ac27f default
@@ -0,0 +1,8 b''
+# See http://www.robotstxt.org/robotstxt.html for documentation on
+# how to use the robots.txt file
+#
+# To forbid all spiders from indexing, uncomment the next two lines:
+# User-Agent: *
+# Disallow: /
+
+User-Agent: *
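The file as added declares a single `User-Agent: *` record and leaves the `Disallow` rule commented out, so it permits crawling everywhere; it exists mainly so that crawler requests for /robots.txt get a real answer. A small sketch of how a crawler would interpret it, using Python's standard urllib.robotparser; the crawler name and the path checked below are placeholders, not anything from this commit:

```python
from urllib.robotparser import RobotFileParser

# The effective content of the dummy file added above, with the
# commented-out lines stripped: one User-Agent record, no Disallow rules.
DUMMY_ROBOTS = """\
User-Agent: *
"""

parser = RobotFileParser()
parser.parse(DUMMY_ROBOTS.splitlines())

# With no Disallow directives, any path should be reported as fetchable
# for any user agent.
print(parser.can_fetch('SomeCrawler', '/some/hypothetical/path'))  # expected: True
```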
@@ -331,6 +331,13 b' def includeme_first(config):'
     config.add_view(favicon_redirect, route_name='favicon')
     config.add_route('favicon', '/favicon.ico')
 
+    def robots_redirect(context, request):
+        return HTTPFound(
+            request.static_path('rhodecode:public/robots.txt'))
+
+    config.add_view(robots_redirect, route_name='robots')
+    config.add_route('robots', '/robots.txt')
+
     config.add_static_view(
         '_static/deform', 'deform:static')
     config.add_static_view(
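This hunk follows the same pattern already used for favicon.ico: a route for /robots.txt plus a view that redirects to the static file shipped with the package. A minimal, standalone sketch of that pattern; the module layout, port, and the hard-coded redirect target below are illustrative assumptions, not part of the RhodeCode code, which redirects to request.static_path('rhodecode:public/robots.txt') instead:

```python
from wsgiref.simple_server import make_server

from pyramid.config import Configurator
from pyramid.httpexceptions import HTTPFound


def robots_redirect(context, request):
    # The commit redirects to the packaged static asset; a fixed URL keeps
    # this sketch independent of any package layout. The target is assumed
    # to be served elsewhere (e.g. by a static view).
    return HTTPFound('/static/robots.txt')


if __name__ == '__main__':
    config = Configurator()
    config.add_route('robots', '/robots.txt')
    config.add_view(robots_redirect, route_name='robots')
    app = config.make_wsgi_app()
    # GET /robots.txt now answers with a 302 to /static/robots.txt.
    make_server('0.0.0.0', 6543, app).serve_forever()
```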
@@ -362,7 +369,6 b' def wrap_app_in_wsgi_middlewares(pyramid'
     pyramid_app = make_gzip_middleware(
         pyramid_app, settings, compress_level=1)
 
-
     # this should be the outer most middleware in the wsgi stack since
     # middleware like Routes make database calls
     def pyramid_app_with_cleanup(environ, start_response):
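This last hunk only removes a stray blank line, but the comment it touches documents a real ordering constraint: the cleanup wrapper has to be the outermost middleware in the WSGI stack, because inner middleware (such as Routes) may still make database calls while a request is being handled. A hedged sketch of that shape, where make_cleanup_middleware and session_registry are hypothetical names standing in for whatever per-request resources the real application releases:

```python
def make_cleanup_middleware(wsgi_app, session_registry):
    """Wrap a WSGI app so per-request resources are released after it runs."""
    def app_with_cleanup(environ, start_response):
        try:
            return wsgi_app(environ, start_response)
        finally:
            # Runs after the wrapped app and every inner middleware have
            # finished with the request, which is why this wrapper must sit
            # at the outside of the WSGI stack.
            session_registry.remove()
    return app_with_cleanup


# Usage sketch: wrap the fully assembled Pyramid app as the final step.
# pyramid_app = make_cleanup_middleware(pyramid_app, some_scoped_session)
```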