Disallow robots from indexing the site (robots.txt)
@@ -5,10 +5,11 @@ from fastapi import FastAPI, Depends
 from fastapi import Request
 from starlette.staticfiles import StaticFiles
 from fastapi.templating import Jinja2Templates
+from fastapi.responses import PlainTextResponse
 
 
 from config import CONFIGS # Loads the configuration from .env
-from authentication.auth import AuthMiddleware # Defines authentication middleware
+from middleware import AuthMiddleware # Defines authentication middleware
 from dependency import get_templates, get_session_manager # Defines dependencies across routers
 from exception_handler import setup_exception_handler # Defines exception handlers
 
@@ -62,6 +63,10 @@ async def home(request: Request):
     return await index(request)
 
 
+@app.get('/robots.txt')
+async def robots_txt(request: Request):
+    return PlainTextResponse('User-agent: *\nDisallow: /')
+
 if __name__ == '__main__':
     import uvicorn
     uvicorn.run(app, host='127.0.0.1', port=8080)
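
Note: a quick way to verify the new endpoint is FastAPI's TestClient (backed by httpx). The sketch below is illustrative and not part of the commit; the module name `main` and the test file name are assumptions, and the AuthMiddleware registered on the app may need to be configured to let an unauthenticated request through.

# test_robots.py (hypothetical file name)
from fastapi.testclient import TestClient

from main import app  # assumption: the FastAPI app shown in this diff lives in main.py

client = TestClient(app)

def test_robots_txt_disallows_all_crawlers():
    response = client.get('/robots.txt')
    assert response.status_code == 200
    # PlainTextResponse defaults to a text/plain media type
    assert response.headers['content-type'].startswith('text/plain')
    # 'User-agent: *' addresses every crawler; 'Disallow: /' blocks every path
    assert response.text == 'User-agent: *\nDisallow: /'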