Disallow robots from indexing the site (robots.txt)

Iam54r1n4
2025-02-03 19:29:41 +00:00
parent d1caf5c389
commit 2f7e1c218c

@@ -5,10 +5,11 @@ from fastapi import FastAPI, Depends
 from fastapi import Request
 from starlette.staticfiles import StaticFiles
 from fastapi.templating import Jinja2Templates
+from fastapi.responses import PlainTextResponse
 from config import CONFIGS  # Loads the configuration from .env
-from authentication.auth import AuthMiddleware  # Defines authentication middleware
+from middleware import AuthMiddleware  # Defines authentication middleware
 from dependency import get_templates, get_session_manager  # Defines dependencies across routers
 from exception_handler import setup_exception_handler  # Defines exception handlers
@@ -62,6 +63,10 @@ async def home(request: Request):
     return await index(request)
 
+@app.get('/robots.txt')
+async def robots_txt(request: Request):
+    return PlainTextResponse('User-agent: *\nDisallow: /')
+
 if __name__ == '__main__':
     import uvicorn
     uvicorn.run(app, host='127.0.0.1', port=8080)