add a robots.txt to exclude certain routes from search engine crawling

Jakob Ketterl 2021-03-24 16:08:13 +01:00
parent deeaccba12
commit d50d08ad2c
2 changed files with 18 additions and 0 deletions


@@ -0,0 +1,16 @@
+from owrx.controllers import Controller
+
+
+class RobotsController(Controller):
+    def indexAction(self):
+        # search engines should not be crawling internal / API routes
+        self.send_response(
+            """User-agent: *
+Disallow: /login
+Disallow: /logout
+Disallow: /pwchange
+Disallow: /settings
+Disallow: /imageupload
+""",
+            content_type="text/plain",
+        )

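The rules above can be sanity-checked offline with Python's standard-library robots.txt parser. The following sketch is not part of the commit; it feeds the exact rule text served by RobotsController into urllib.robotparser and confirms that the internal routes are blocked while the index page stays crawlable:

from urllib.robotparser import RobotFileParser

# rule text as served by RobotsController, copied from the diff above
rules = """User-agent: *
Disallow: /login
Disallow: /logout
Disallow: /pwchange
Disallow: /settings
Disallow: /imageupload
"""

parser = RobotFileParser()
parser.parse(rules.splitlines())

# internal / API routes are off limits for any crawler
assert not parser.can_fetch("*", "/settings")
assert not parser.can_fetch("*", "/login")
# the receiver index page remains crawlable
assert parser.can_fetch("*", "/")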

@@ -20,6 +20,7 @@ from owrx.controllers.settings.bookmarks import BookmarksController
 from owrx.controllers.session import SessionController
 from owrx.controllers.profile import ProfileController
 from owrx.controllers.imageupload import ImageUploadController
+from owrx.controllers.robots import RobotsController
 from http.server import BaseHTTPRequestHandler
 from urllib.parse import urlparse, parse_qs
 import re
@@ -105,6 +106,7 @@ class Router(object):
     def __init__(self):
         self.routes = [
             StaticRoute("/", IndexController),
+            StaticRoute("/robots.txt", RobotsController),
             StaticRoute("/status.json", StatusController),
             RegexRoute("^/static/(.+)$", OwrxAssetsController),
             RegexRoute("^/compiled/(.+)$", CompiledAssetsController),