# HG changeset patch
# User Marcin Kuzminski
# Date 2017-01-13 10:32:41
# Node ID 035ac27f3d0d18515bbc09b964bc4fb95f08304b
# Parent  72279f5c871438558806eb90f68a499004712238
site: added dummy robots.txt to handle requests from crawling robots.

diff --git a/rhodecode/config/middleware.py b/rhodecode/config/middleware.py
--- a/rhodecode/config/middleware.py
+++ b/rhodecode/config/middleware.py
@@ -331,6 +331,13 @@ def includeme_first(config):
     config.add_view(favicon_redirect, route_name='favicon')
     config.add_route('favicon', '/favicon.ico')
 
+    def robots_redirect(context, request):
+        return HTTPFound(
+            request.static_path('rhodecode:public/robots.txt'))
+
+    config.add_view(robots_redirect, route_name='robots')
+    config.add_route('robots', '/robots.txt')
+
     config.add_static_view(
         '_static/deform', 'deform:static')
     config.add_static_view(
@@ -362,7 +369,6 @@ def wrap_app_in_wsgi_middlewares(pyramid
         pyramid_app = make_gzip_middleware(
             pyramid_app, settings, compress_level=1)
 
-
     # this should be the outer most middleware in the wsgi stack since
     # middleware like Routes make database calls
     def pyramid_app_with_cleanup(environ, start_response):

diff --git a/rhodecode/public/robots.txt b/rhodecode/public/robots.txt
new file mode 100644
--- /dev/null
+++ b/rhodecode/public/robots.txt
@@ -0,0 +1,8 @@
+# See http://www.robotstxt.org/robotstxt.html for documentation on
+# how to use the robots.txt file
+#
+# To forbid all spiders from indexing, uncomment the next two lines:
+# User-Agent: *
+# Disallow: /
+
+User-Agent: *
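
The following is a minimal standalone sketch, not part of the patch, illustrating the same Pyramid pattern the change applies: register a '/robots.txt' route whose view redirects to a file served by a static view, mirroring the existing favicon redirect. The temporary 'public' directory, the '_static' URL prefix, and the server port are illustrative stand-ins for RhodeCode's 'rhodecode:public' asset spec and its existing static view wiring.

# Sketch only: a throwaway Pyramid app demonstrating the robots.txt
# redirect pattern from the patch, with an on-disk static directory
# standing in for the 'rhodecode:public' package asset spec.
import os
import tempfile
from wsgiref.simple_server import make_server

from pyramid.config import Configurator
from pyramid.httpexceptions import HTTPFound


def main():
    # Create a temporary 'public' directory with a robots.txt so the
    # example runs without any installed package.
    public_dir = tempfile.mkdtemp()
    with open(os.path.join(public_dir, 'robots.txt'), 'w') as f:
        f.write('User-Agent: *\n')

    config = Configurator()
    # Serve files from public_dir under the '_static' URL prefix.
    config.add_static_view(name='_static', path=public_dir)

    def robots_redirect(context, request):
        # Redirect /robots.txt to the copy served by the static view,
        # mirroring request.static_path('rhodecode:public/robots.txt')
        # in the patch.
        return HTTPFound(
            request.static_path(os.path.join(public_dir, 'robots.txt')))

    config.add_route('robots', '/robots.txt')
    config.add_view(robots_redirect, route_name='robots')
    return config.make_wsgi_app()


if __name__ == '__main__':
    make_server('127.0.0.1', 8080, main()).serve_forever()

With this sketch running, a request to /robots.txt should answer with a 302 redirect to /_static/robots.txt, which is the behaviour the patch adds so crawlers fetching robots.txt no longer hit a 404.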