Add a robots.txt to exclude certain routes from search engine crawling
This commit is contained in:
16
owrx/controllers/robots.py
Normal file
16
owrx/controllers/robots.py
Normal file
@ -0,0 +1,16 @@
|
||||
from owrx.controllers import Controller
|
||||
|
||||
|
||||
class RobotsController(Controller):
    """Serves /robots.txt so well-behaved crawlers skip internal routes."""

    def indexAction(self):
        # Internal / API routes that search engines should not index.
        blocked_routes = [
            "/login",
            "/logout",
            "/pwchange",
            "/settings",
            "/imageupload",
        ]
        # Assemble the directives line by line; output is byte-identical
        # to a hand-written robots.txt with a trailing newline.
        directives = "".join("Disallow: {}\n".format(route) for route in blocked_routes)
        self.send_response(
            "User-agent: *\n" + directives,
            content_type="text/plain",
        )
|
Reference in New Issue
Block a user