add a robots.txt to exclude certain routes from search engines

Jakob Ketterl
2021-03-24 16:08:13 +01:00
parent deeaccba12
commit d50d08ad2c
2 changed files with 18 additions and 0 deletions

@@ -0,0 +1,16 @@
from owrx.controllers import Controller


class RobotsController(Controller):
    def indexAction(self):
        # search engines should not be crawling internal / API routes
        self.send_response(
            """User-agent: *
Disallow: /login
Disallow: /logout
Disallow: /pwchange
Disallow: /settings
Disallow: /imageupload
""",
            content_type="text/plain",
        )
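
The hunk above only adds the new controller; the second changed file mentioned in the summary (not shown here) presumably maps it to the /robots.txt path, which is where crawlers look for these rules. As a minimal sketch of how the result can be verified against a running instance, assuming OpenWebRX is reachable on localhost at its default web port 8073 (host and port are assumptions, not part of this diff), the standard library's robot parser can be pointed at the server:

from urllib.robotparser import RobotFileParser

# Hypothetical check against a locally running instance; the URL is an
# assumption and should match the actual host/port of the deployment.
rp = RobotFileParser("http://localhost:8073/robots.txt")
rp.read()

# The internal / API routes listed above should now be excluded for any
# user agent ...
print(rp.can_fetch("*", "/settings"))   # expected: False
# ... while the receiver's main page stays crawlable.
print(rp.can_fetch("*", "/"))           # expected: True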