mirror of
https://github.com/YunoHost/yunorunner.git
synced 2024-09-03 20:05:52 +02:00
[mod] avoid data bufferisation to reduce memory usage
This commit is contained in:
parent
2523a1f140
commit
15c9fc0eda
1 changed file with 14 additions and 5 deletions
19
run.py
19
run.py
|
@@ -527,8 +527,17 @@ def clean_websocket(function):
|
|||
|
||||
def chunks(l, n):
    """Yield successive n-sized chunks from the iterable *l*.

    Accepts any iterable (not just sequences), so callers can pass a lazy
    ``itertools.chain`` without materializing it first — the point of this
    change is that only one chunk of at most *n* items is buffered in
    memory at a time, instead of the whole dataset.

    The last chunk may be shorter than *n* (matching the behaviour of the
    previous ``l[i:i + n]`` slicing implementation).
    """
    chunk = []
    for item in l:
        chunk.append(item)
        if len(chunk) == n:
            yield chunk
            chunk = []
    # Bug fix vs. the streaming rewrite in this diff: the old loop tested
    # the counter BEFORE appending, so when the counter hit n it yielded
    # and reset without appending the current item — dropping every
    # (n+1)-th element — and it never yielded the trailing partial chunk.
    if chunk:
        yield chunk
|
||||
|
||||
|
||||
@app.websocket('/index-ws')
|
||||
|
@@ -556,9 +565,9 @@ async def ws_index(request, websocket):
|
|||
.order_by(-Job.id)
|
||||
|
||||
# chunks initial data by batch of 30 to avoid killing firefox
|
||||
data = chunks(list(itertools.chain(map(model_to_dict, next_scheduled_jobs),
|
||||
map(model_to_dict, Job.select().where(Job.state == "running")),
|
||||
map(model_to_dict, latest_done_jobs))), 30)
|
||||
data = chunks(itertools.chain(map(model_to_dict, next_scheduled_jobs),
|
||||
map(model_to_dict, Job.select().where(Job.state == "running")),
|
||||
map(model_to_dict, latest_done_jobs)), 30)
|
||||
|
||||
await websocket.send(ujson.dumps({
|
||||
"action": "init_jobs",
|
||||
|
|
Loading…
Add table
Reference in a new issue