add a robots.txt to exclude certain routes for search engines
parent deeaccba12
commit d50d08ad2c
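The rule set added here can be sanity-checked with Python's standard urllib.robotparser, which evaluates a robots.txt body the same way a well-behaved crawler would. An illustrative sketch, not part of this commit:

    from urllib.robotparser import RobotFileParser

    # the exact rule set introduced by this commit
    rules = """User-agent: *
    Disallow: /login
    Disallow: /logout
    Disallow: /pwchange
    Disallow: /settings
    Disallow: /imageupload
    """

    rp = RobotFileParser()
    rp.parse(rules.splitlines())

    # excluded routes are rejected for every user agent...
    assert not rp.can_fetch("*", "/settings")
    assert not rp.can_fetch("*", "/login")
    # ...while the receiver index stays crawlable
    assert rp.can_fetch("*", "/")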
owrx/controllers/robots.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+from owrx.controllers import Controller
+
+
+class RobotsController(Controller):
+    def indexAction(self):
+        # search engines should not be crawling internal / API routes
+        self.send_response(
+            """User-agent: *
+Disallow: /login
+Disallow: /logout
+Disallow: /pwchange
+Disallow: /settings
+Disallow: /imageupload
+""",
+            content_type="text/plain",
+        )
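Serving the reply with content_type="text/plain" is deliberate: under the robots exclusion convention, crawlers expect robots.txt as a plain-text document, and they only ever request it from the site root, which is why the routing change below registers it at /robots.txt.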
@@ -20,6 +20,7 @@ from owrx.controllers.settings.bookmarks import BookmarksController
 from owrx.controllers.session import SessionController
 from owrx.controllers.profile import ProfileController
 from owrx.controllers.imageupload import ImageUploadController
+from owrx.controllers.robots import RobotsController
 from http.server import BaseHTTPRequestHandler
 from urllib.parse import urlparse, parse_qs
 import re
@@ -105,6 +106,7 @@ class Router(object):
     def __init__(self):
         self.routes = [
             StaticRoute("/", IndexController),
+            StaticRoute("/robots.txt", RobotsController),
             StaticRoute("/status.json", StatusController),
             RegexRoute("^/static/(.+)$", OwrxAssetsController),
             RegexRoute("^/compiled/(.+)$", CompiledAssetsController),