Added a robots.txt that disallows indexing of the website

There is no interesting information to index on this website.
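
For reference, the body served at /robots.txt is the standard blanket disallow (shown here with the escaped newline from the diff below expanded):

User-agent: *
Disallow: /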
yorffuoj 2024-05-03 23:22:50 +02:00 committed by GitHub
parent dcb199362a
commit ad048806d3

@@ -7,7 +7,7 @@ import socket
 from sanic import Sanic
 from sanic.log import logger
-from sanic.response import html, json as json_response
+from sanic.response import html, raw, json as json_response
 from sanic.exceptions import InvalidUsage
 
 app = Sanic(__name__)
@@ -389,6 +389,11 @@ async def check_smtp(request):
     return json_response({'status': 'ok', 'helo': helo_domain})
 
 
+@app.route("/robots.txt")
+async def robots(request):
+    return raw("User-agent: *\nDisallow: /")
+
+
 @app.route("/")
 async def main(request):
     return html("You aren't really supposed to use this website using your browser.<br><br>It's a small server with an API to check if services running on a YunoHost instance can be reached from 'the global internet'.")