Merge pull request #22 from YunoHost/upgrade-to-sanic-21

Upgrade to Sanic 21
Alexandre Aubin, 2021-09-22 15:33:45 +02:00 (committed via GitHub)
commit 10552c35b2
6 changed files with 70 additions and 52 deletions


@@ -1,5 +1,5 @@
-aiofiles==0.4.0
-aiohttp==3.7.4
+aiofiles==0.7.0
+aiohttp==3.7.4.post0
argh==0.26.2
async-timeout==3.0.1
attrs==18.2.0
@@ -8,15 +8,18 @@ chardet==3.0.4
httptools==0.1.0
idna==2.8
idna-ssl==1.1.0
-Jinja2==2.11.3
-MarkupSafe==1.1.0
-multidict==4.5.2
-peewee==3.7.1
+Jinja2==3.0.1
+MarkupSafe==2.0.1
+multidict==5.1.0
+peewee==3.14.4
pkg-resources==0.0.0
requests==2.25.1
-sanic==19.3.1
-sanic-jinja2==0.7.2
-ujson==1.35
+sanic==21.3.4
+sanic-jinja2==0.10.0
+sanic-routing==0.6.2
+typing-extensions==3.10.0.0
+ujson==4.0.2
urllib3==1.26.5
uvloop==0.14.0
-websockets==6.0
+websockets==8.1
yarl==1.3.0


@@ -3,7 +3,7 @@ aiohttp
aiofiles
peewee
sanic-jinja2
-websockets<10.0
+websockets
# cli
argh

run.py

@@ -20,7 +20,7 @@ from functools import wraps
from concurrent.futures._base import CancelledError
from asyncio import Task
-import ujson
+import json
import aiohttp
import aiofiles
@@ -30,8 +30,8 @@ from websockets import WebSocketCommonProtocol
from sanic import Sanic, response
from sanic.exceptions import NotFound, abort
from sanic.log import LOGGING_CONFIG_DEFAULTS
-from sanic.response import json
from jinja2 import FileSystemLoader
from sanic_jinja2 import SanicJinja2
from peewee import fn
@@ -40,6 +40,11 @@ from playhouse.shortcuts import model_to_dict
from models import Repo, Job, db, Worker
from schedule import always_relaunch, once_per_day
+try:
+    asyncio_all_tasks = asyncio.all_tasks
+except AttributeError as e:
+    asyncio_all_tasks = asyncio.Task.all_tasks
LOGGING_CONFIG_DEFAULTS["loggers"] = {
"task": {
"level": "INFO",
@@ -80,10 +85,11 @@ LOGGING_CONFIG_DEFAULTS["formatters"] = {
task_logger = logging.getLogger("task")
api_logger = logging.getLogger("api")
-app = Sanic()
+app = Sanic(__name__)
app.static('/static', './static/')
-jinja = SanicJinja2(app)
+loader = FileSystemLoader(os.path.abspath(os.path.dirname(__file__)) + '/templates', encoding='utf8')
+jinja = SanicJinja2(app, loader=loader)
# to avoid conflict with vue.js
jinja.env.block_start_string = '<%'
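Note on the two changes above: Sanic 21 expects every application to be created with a name (a bare Sanic() is no longer accepted), and the bumped sanic-jinja2 apparently no longer finds the template folder on its own, hence the explicit FileSystemLoader. A minimal standalone sketch of the same setup, with an illustrative template path:

import os

from jinja2 import FileSystemLoader
from sanic import Sanic
from sanic_jinja2 import SanicJinja2

# Sanic 21 refuses to build an unnamed application
app = Sanic(__name__)

# Point sanic-jinja2 explicitly at the templates directory next to this file
loader = FileSystemLoader(
    os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates"),
    encoding="utf8",
)
jinja = SanicJinja2(app, loader=loader)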
@@ -104,8 +110,12 @@ subscriptions = defaultdict(list)
jobs_in_memory_state = {}
-@asyncio.coroutine
-def wait_closed(self):
+def datetime_to_epoch_json_converter(o):
+    if isinstance(o, datetime):
+        return o.strftime('%s')
+async def wait_closed(self):
"""
Wait until the connection is closed.
@@ -115,7 +125,7 @@ def wait_closed(self):
of its cause, in tasks that interact with the WebSocket connection.
"""
-yield from asyncio.shield(self.connection_lost_waiter)
+await asyncio.shield(self.connection_lost_waiter)
# this is a backport of websockets 7.0 which sanic doesn't support yet
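The serialization change that runs through the rest of this file: ujson.dumps() is swapped for the stdlib json.dumps() plus the datetime_to_epoch_json_converter() defined above, presumably because the old pinned ujson serialized datetime objects on its own while json (and newer ujson releases) will not. A small self-contained illustration of the converter exactly as the commit defines it (the payload keys are made up):

import json
from datetime import datetime

def datetime_to_epoch_json_converter(o):
    # json.dumps() calls this hook for any object it cannot serialize natively
    if isinstance(o, datetime):
        return o.strftime('%s')  # epoch seconds; '%s' is a platform-specific strftime directive

payload = {"job": "test", "started_time": datetime(2021, 9, 22, 15, 33, 45)}
print(json.dumps(payload, default=datetime_to_epoch_json_converter))
# the datetime comes out as a string of epoch seconds (exact value depends on the local timezone)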
@@ -473,14 +483,13 @@ async def broadcast(message, channels):
for ws in ws_list:
try:
-await ws.send(ujson.dumps(message))
+await ws.send(json.dumps(message, default=datetime_to_epoch_json_converter))
except ConnectionClosed:
dead_ws.append(ws)
for to_remove in dead_ws:
ws_list.remove(to_remove)
def subscribe(ws, channel):
subscriptions[channel].append(ws)
@@ -523,6 +532,7 @@ def chunks(l, n):
yield chunk
@app.websocket('/index-ws')
@clean_websocket
async def ws_index(request, websocket):
@@ -565,21 +575,21 @@ async def ws_index(request, websocket):
first_chunck = next(data)
-await websocket.send(ujson.dumps({
+await websocket.send(json.dumps({
"action": "init_jobs",
"data": first_chunck, # send first chunk
-}))
+}, default=datetime_to_epoch_json_converter))
for chunk in data:
-await websocket.send(ujson.dumps({
+await websocket.send(json.dumps({
"action": "init_jobs_stream",
"data": chunk,
-}))
+}, default=datetime_to_epoch_json_converter))
await websocket.wait_closed()
-@app.websocket('/job-<job_id>-ws')
+@app.websocket('/job-ws/<job_id:int>')
@clean_websocket
async def ws_job(request, websocket, job_id):
job = Job.select().where(Job.id == job_id)
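The route rename just above (and the matching /app-ws/ rename further down, plus the template updates at the bottom of this diff) follows from the new router shipped with Sanic 21: a path parameter now apparently has to occupy a whole segment instead of sitting in the middle of one, and the :int suffix converts it before the handler runs. A hedged sketch of the new style with an illustrative app and handler:

from sanic import Sanic, response

app = Sanic("routing_sketch")  # illustrative name

# the parameter takes up a full path segment; ":int" makes job_id arrive as an int
@app.route("/job-ws/<job_id:int>")
async def job_page(request, job_id):
    return response.text(f"job {job_id}")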
@@ -591,10 +601,10 @@ async def ws_job(request, websocket, job_id):
subscribe(websocket, f"job-{job.id}")
-await websocket.send(ujson.dumps({
+await websocket.send(json.dumps({
"action": "init_job",
"data": model_to_dict(job),
-}))
+}, default=datetime_to_epoch_json_converter))
await websocket.wait_closed()
@@ -691,15 +701,15 @@ async def ws_apps(request, websocket):
repos = sorted(repos, key=lambda x: x["name"])
-await websocket.send(ujson.dumps({
+await websocket.send(json.dumps({
"action": "init_apps",
"data": repos,
-}))
+}, default=datetime_to_epoch_json_converter))
await websocket.wait_closed()
-@app.websocket('/app-<app_name>-ws')
+@app.websocket('/app-ws/<app_name>')
@clean_websocket
async def ws_app(request, websocket, app_name):
# XXX I don't check if the app exists because this websocket is supposed to
@@ -708,11 +718,11 @@ async def ws_app(request, websocket, app_name):
subscribe(websocket, f"app-jobs-{app.url}")
-await websocket.send(ujson.dumps({
+job = list(Job.select().where(Job.url_or_path == app.url).order_by(-Job.id).dicts())
+await websocket.send(json.dumps({
"action": "init_jobs",
-"data": Job.select().where(Job.url_or_path ==
-app.url).order_by(-Job.id),
-}))
+"data": job,
+}, default=datetime_to_epoch_json_converter))
await websocket.wait_closed()
@@ -812,9 +822,7 @@ async def api_delete_job(request, job_id):
return response.text("ok")
@app.route("/api/job/<job_id:int>/stop", methods=['POST'])
async def api_stop_job(request, job_id):
# TODO auth or some kind
async def stop_job(job_id):
job = Job.select().where(Job.id == job_id)
if job.count() == 0:
@@ -867,10 +875,17 @@
f"{job.state}")
@app.route("/api/job/<job_id:int>/stop", methods=['POST'])
async def api_stop_job(request, job_id):
# TODO auth or some kind
await stop_job(job_id)
@app.route("/api/job/<job_id:int>/restart", methods=['POST'])
async def api_restart_job(request, job_id):
api_logger.info(f"Request to restart job {job_id}")
-await api_stop_job(request, job_id)
+# Calling a route (eg api_stop_job) doesn't work anymore
+await stop_job(job_id)
# no need to check if job existss, api_stop_job will do it for us
job = Job.select().where(Job.id == job_id)[0]
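The refactor above pulls the job-stopping logic out of the route handler into a plain stop_job() coroutine because, per the comment, calling another route handler such as api_stop_job() directly no longer works under Sanic 21. The resulting shape, sketched standalone with placeholder logic:

from sanic import Sanic, response

app = Sanic("stop_job_sketch")  # illustrative name

async def stop_job(job_id):
    print(f"stopping job {job_id}")  # placeholder for the real lookup/stop logic

@app.route("/api/job/<job_id:int>/stop", methods=["POST"])
async def api_stop_job(request, job_id):
    await stop_job(job_id)
    return response.text("ok")

@app.route("/api/job/<job_id:int>/restart", methods=["POST"])
async def api_restart_job(request, job_id):
    await stop_job(job_id)  # reuse the helper rather than calling the other handler
    return response.text("ok")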
@@ -942,7 +957,7 @@ async def html_job(request, job_id):
}
-@app.route('/apps/')
+@app.route('/apps/', strict_slashes=True) # To avoid reaching the route "/apps/<app_name>/" with <app_name> an empty string
@jinja.template('apps.html')
async def html_apps(request):
return {'relative_path_to_root': '../', 'path': request.path}
@@ -967,7 +982,7 @@ async def html_index(request):
@always_relaunch(sleep=2)
async def number_of_tasks():
print("Number of tasks: %s" % len(Task.all_tasks()))
print("Number of tasks: %s" % len(asyncio_all_tasks()))
@app.route('/monitor')
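Task.all_tasks() was removed from asyncio in Python 3.9; asyncio.all_tasks(), available since 3.7, replaces it, which is what the asyncio_all_tasks shim added near the imports covers. Standalone, the pattern looks like this:

import asyncio

# prefer asyncio.all_tasks() and fall back to the old spelling on older interpreters
try:
    asyncio_all_tasks = asyncio.all_tasks
except AttributeError:
    asyncio_all_tasks = asyncio.Task.all_tasks

async def report():
    # all_tasks() must be called while the event loop is running
    print("Number of tasks: %s" % len(asyncio_all_tasks()))

asyncio.run(report())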
@@ -975,9 +990,9 @@ async def monitor(request):
snapshot = tracemalloc.take_snapshot()
top_stats = snapshot.statistics('lineno')
-tasks = Task.all_tasks()
+tasks = asyncio_all_tasks()
-return json({
+return response.json({
"top_20_trace": [str(x) for x in top_stats[:20]],
"tasks": {
"number": len(tasks),
@@ -1087,7 +1102,7 @@ async def github(request):
token = open("./github_bot_token").read().strip()
async with aiohttp.ClientSession(headers={"Authorization": f"token {token}"}) as session:
-async with session.post(comments_url, data=ujson.dumps({"body": body})) as resp:
+async with session.post(comments_url, data=json.dumps({"body": body}, default=datetime_to_epoch_json_converter)) as resp:
api_logger.info("Added comment %s" % resp.json()["html_url"])
catchphrases = ["Alrighty!", "Fingers crossed!", "May the CI gods be with you!", ":carousel_horse:", ":rocket:", ":sunflower:", "Meow :cat2:", ":v:", ":stuck_out_tongue_winking_eye:" ]
@@ -1142,11 +1157,11 @@ def main(config="./config.py"):
"MONTHLY_JOBS": False,
}
-app.config.from_object(default_config)
-app.config.from_pyfile(config)
+app.config.update_config(default_config)
+app.config.update_config(config)
if not os.path.exists(app.config.PATH_TO_ANALYZER):
print(f"Error: analyzer script doesn't exist at '{PATH_TO_ANALYZER}'. Please fix the configuration in {config}")
print(f"Error: analyzer script doesn't exist at '{app.config.PATH_TO_ANALYZER}'. Please fix the configuration in {config}")
sys.exit(1)
reset_pending_jobs()
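Configuration loading also changed with Sanic 21: the from_object()/from_pyfile() calls are replaced by update_config(), which accepts a dict or object as well as a path to a Python file, and the error message now reads the path from app.config instead of referencing an undefined PATH_TO_ANALYZER name. A hedged sketch of the new config API with made-up values:

from sanic import Sanic

app = Sanic("config_sketch")  # illustrative name

# update_config() replaces from_object() and from_pyfile()
app.config.update_config({"MONTHLY_JOBS": False})  # a dict (or object) works
# app.config.update_config("./config.py")          # so does a path to a Python file

print(app.config.MONTHLY_JOBS)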


@@ -339,7 +339,7 @@ var AnsiUp = (function () {
};
return AnsiUp;
}());
-//# sourceMappingURL=ansi_up.js.map
+// sourceMappingURL=ansi_up.js.map
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = AnsiUp;
}));


@@ -44,7 +44,7 @@
}
})
-ws = new ReconnectingWebSocket(websocketPrefix() + '://' + document.domain + ':' + location.port + websocketRelativePath('<{ path }>') + '/app-<{ app.name }>-ws');
+ws = new ReconnectingWebSocket(websocketPrefix() + '://' + document.domain + ':' + location.port + websocketRelativePath('<{ path }>') + '/app-ws/<{ app.name }>');
ws.onmessage = function (event) {
var message = JSON.parse(event.data);


@@ -68,7 +68,7 @@
}
})
-ws = new ReconnectingWebSocket(websocketPrefix() + '://' + document.domain + ':' + location.port + websocketRelativePath('<{ path }>') + '/job-<{ job.id }>-ws');
+ws = new ReconnectingWebSocket(websocketPrefix() + '://' + document.domain + ':' + location.port + websocketRelativePath('<{ path }>') + '/job-ws/<{ job.id }>');
ws.onmessage = function (event) {
var message = JSON.parse(event.data);