Mirror of https://github.com/YunoHost/yunorunner.git (synced 2024-09-03 20:05:52 +02:00)
Commit 04777b259a: 3 changed files with 149 additions and 64 deletions
requirements.txt

@@ -5,6 +5,7 @@ async-timeout==3.0.1
 attrs==18.2.0
 certifi==2018.11.29
 chardet==3.0.4
+charset-normalizer==2.0.6
 httptools==0.1.0
 idna==2.8
 idna-ssl==1.1.0
@@ -12,14 +13,14 @@ Jinja2==3.0.1
 MarkupSafe==2.0.1
 multidict==5.1.0
 peewee==3.14.4
-pkg-resources==0.0.0
+pkg_resources==0.0.0
-requests==2.25.1
+requests==2.26.0
-sanic==21.3.4
+sanic==21.6.2
 sanic-jinja2==0.10.0
-sanic-routing==0.6.2
+sanic-routing==0.7.1
 typing-extensions==3.10.0.0
 ujson==4.0.2
 urllib3==1.26.5
 uvloop==0.14.0
-websockets==8.1
+websockets==10.0
 yarl==1.3.0
run.py (148 changes)
@@ -82,10 +82,20 @@ LOGGING_CONFIG_DEFAULTS["formatters"] = {
     },
 }
 
 
+def datetime_to_epoch_json_converter(o):
+    if isinstance(o, datetime):
+        return o.strftime('%s')
+
+
+# define a custom json dumps to convert datetime
+def my_json_dumps(o):
+    return json.dumps(o, default=datetime_to_epoch_json_converter)
+
+
 task_logger = logging.getLogger("task")
 api_logger = logging.getLogger("api")
 
-app = Sanic(__name__)
+app = Sanic(__name__, dumps=my_json_dumps)
 app.static('/static', './static/')
 
 loader = FileSystemLoader(os.path.abspath(os.path.dirname(__file__)) + '/templates', encoding='utf8')
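A minimal standalone sketch of what this change does: Sanic 21.6 accepts a dumps callable that it then uses for every JSON response, and json.dumps invokes the default hook only for objects it cannot serialize natively, so datetimes come out as epoch strings.

import json
from datetime import datetime

def datetime_to_epoch_json_converter(o):
    # json.dumps calls this only for objects it cannot serialize itself
    if isinstance(o, datetime):
        return o.strftime('%s')  # '%s' (epoch seconds) is a glibc extension, not portable

def my_json_dumps(o):
    return json.dumps(o, default=datetime_to_epoch_json_converter)

print(my_json_dumps({"started": datetime(2021, 9, 28, 12, 0)}))
# -> {"started": "1632823200"} (exact value depends on the local timezone)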
@@ -110,11 +120,6 @@ subscriptions = defaultdict(list)
 jobs_in_memory_state = {}
 
 
-def datetime_to_epoch_json_converter(o):
-    if isinstance(o, datetime):
-        return o.strftime('%s')
-
-
 async def wait_closed(self):
     """
     Wait until the connection is closed.
@@ -351,11 +356,41 @@ async def launch_monthly_job():
         await create_job(repo.name, repo.url)
 
 
+async def ensure_workers_count():
+    if Worker.select().count() < app.config.WORKER_COUNT:
+        for _ in range(app.config.WORKER_COUNT - Worker.select().count()):
+            Worker.create(state="available")
+    elif Worker.select().count() > app.config.WORKER_COUNT:
+        workers_to_remove = Worker.select().count() - app.config.WORKER_COUNT
+        workers = Worker.select().where(Worker.state == "available")
+        for worker in workers:
+            if workers_to_remove == 0:
+                break
+            worker.delete_instance()
+            workers_to_remove -= 1
+
+        jobs_to_stop = workers_to_remove
+        for job_id in jobs_in_memory_state:
+            if jobs_to_stop == 0:
+                break
+            await stop_job(job_id)
+            jobs_to_stop -= 1
+            job = Job.select().where(Job.id == job_id)[0]
+            job.state = "scheduled"
+            job.log = ""
+            job.save()
+
+        workers = Worker.select().where(Worker.state == "available")
+        for worker in workers:
+            if workers_to_remove == 0:
+                break
+            worker.delete_instance()
+            workers_to_remove -= 1
+
+
 @always_relaunch(sleep=3)
 async def jobs_dispatcher():
-    if Worker.select().count() == 0:
-        for i in range(1):
-            Worker.create(state="available")
+    await ensure_workers_count()
 
     workers = Worker.select().where(Worker.state == "available")
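The scale-down branch works in phases: delete idle workers first, then stop just enough running jobs to free the remainder (resetting each stopped Job to "scheduled"), then reap the freed workers. A minimal in-memory sketch of that bookkeeping, with plain dicts standing in for the Worker/Job peewee models and hypothetical data:

workers = [{"id": i, "state": "busy" if i < 3 else "available"} for i in range(5)]
target = 2  # plays the role of app.config.WORKER_COUNT
workers_to_remove = len(workers) - target  # 3

# Phase 1: drop idle workers first.
for w in [w for w in workers if w["state"] == "available"]:
    if workers_to_remove == 0:
        break
    workers.remove(w)
    workers_to_remove -= 1

# Phase 2: stop as many running jobs as workers still need removing;
# run.py also resets each Job to "scheduled" and clears its log here.
jobs_to_stop = workers_to_remove
for w in [w for w in workers if w["state"] == "busy"]:
    if jobs_to_stop == 0:
        break
    w["state"] = "available"
    jobs_to_stop -= 1

# Phase 3: reap the workers freed by phase 2.
for w in [w for w in workers if w["state"] == "available"]:
    if workers_to_remove == 0:
        break
    workers.remove(w)
    workers_to_remove -= 1

print(len(workers))  # 2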
@@ -400,7 +435,7 @@ async def run_job(worker, job):
     task_logger.info(f"Starting job '{job.name}' #{job.id}...")
 
     cwd = os.path.split(path_to_analyseCI)[0]
-    arguments = f' {job.url_or_path} "{job.name}" {job.id}'
+    arguments = f' {job.url_or_path} "{job.name}" {job.id} {worker.id}'
     task_logger.info(f"Launch command: /bin/bash " + path_to_analyseCI + arguments)
     try:
         command = await asyncio.create_subprocess_shell("/bin/bash " + path_to_analyseCI + arguments,
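Worth noting about this line: only job.name is quoted in the f-string before the whole thing goes through create_subprocess_shell. A defensive variant, not what the commit does, would escape every field with shlex.quote; a standalone sketch with example values:

import shlex

# Hypothetical variant: escape each interpolated field so app names
# containing spaces or shell metacharacters stay inert.
path_to_analyseCI = "/home/CI/analyseCI.sh"  # example value
url_or_path, name, job_id, worker_id = "https://example.org/app_ynh", "my app", 42, 1

arguments = " ".join(shlex.quote(str(a)) for a in (url_or_path, name, job_id, worker_id))
print("/bin/bash " + shlex.quote(path_to_analyseCI) + " " + arguments)
# -> /bin/bash /home/CI/analyseCI.sh https://example.org/app_ynh 'my app' 42 1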
@@ -483,12 +518,15 @@ async def broadcast(message, channels):
 
     for ws in ws_list:
         try:
-            await ws.send(json.dumps(message, default=datetime_to_epoch_json_converter))
+            await ws.send(my_json_dumps(message))
         except ConnectionClosed:
             dead_ws.append(ws)
 
     for to_remove in dead_ws:
-        ws_list.remove(to_remove)
+        try:
+            ws_list.remove(to_remove)
+        except ValueError:
+            pass
 
 
 def subscribe(ws, channel):
     subscriptions[channel].append(ws)
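Why the new try/except around list.remove: remove raises ValueError once the element is already gone, which can plausibly happen when two interleaved broadcasts collect the same dead websocket. A minimal illustration with hypothetical values:

ws_list = ["ws-a", "ws-b"]
dead_ws = ["ws-a", "ws-a"]  # same socket collected twice

for to_remove in dead_ws:
    try:
        ws_list.remove(to_remove)
    except ValueError:
        pass  # already removed, nothing to do

print(ws_list)  # ['ws-b']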
@@ -575,16 +613,16 @@ async def ws_index(request, websocket):
 
     first_chunck = next(data)
 
-    await websocket.send(json.dumps({
+    await websocket.send(my_json_dumps({
         "action": "init_jobs",
         "data": first_chunck,  # send first chunk
-    }, default=datetime_to_epoch_json_converter))
+    }))
 
     for chunk in data:
-        await websocket.send(json.dumps({
+        await websocket.send(my_json_dumps({
             "action": "init_jobs_stream",
             "data": chunk,
-        }, default=datetime_to_epoch_json_converter))
+        }))
 
     await websocket.wait_closed()
@@ -601,10 +639,10 @@ async def ws_job(request, websocket, job_id):
 
     subscribe(websocket, f"job-{job.id}")
 
-    await websocket.send(json.dumps({
+    await websocket.send(my_json_dumps({
         "action": "init_job",
         "data": model_to_dict(job),
-    }, default=datetime_to_epoch_json_converter))
+    }))
 
     await websocket.wait_closed()
@@ -701,10 +739,10 @@ async def ws_apps(request, websocket):
 
     repos = sorted(repos, key=lambda x: x["name"])
 
-    await websocket.send(json.dumps({
+    await websocket.send(my_json_dumps({
         "action": "init_apps",
         "data": repos,
-    }, default=datetime_to_epoch_json_converter))
+    }))
 
     await websocket.wait_closed()
@@ -719,10 +757,10 @@ async def ws_app(request, websocket, app_name):
     subscribe(websocket, f"app-jobs-{app.url}")
 
     job = list(Job.select().where(Job.url_or_path == app.url).order_by(-Job.id).dicts())
-    await websocket.send(json.dumps({
+    await websocket.send(my_json_dumps({
         "action": "init_jobs",
         "data": job,
-    }, default=datetime_to_epoch_json_converter))
+    }))
 
     await websocket.wait_closed()
@@ -945,6 +983,7 @@ async def html_job(request, job_id):
     job_url = app.config.BASE_URL + app.url_for("html_job", job_id=job.id)
     badge_url = app.config.BASE_URL + app.url_for("api_badge_job", job_id=job.id)
     shield_badge_url = f"https://img.shields.io/endpoint?url={badge_url}"
+    summary_url = app.config.BASE_URL + "/summary/" + str(job.id) + ".png"
 
     return {
         "job": job,
@@ -952,6 +991,7 @@ async def html_job(request, job_id):
         'job_url': job_url,
         'badge_url': badge_url,
         'shield_badge_url': shield_badge_url,
+        'summary_url': summary_url,
         'relative_path_to_root': '../',
         'path': request.path
     }
@@ -1043,8 +1083,8 @@ async def github(request):
     # We expect issue comments (issue = also PR in github stuff...)
     # - *New* comments
     # - On issue/PRs which are still open
-    if hook_type != "issue_comment" \
-       or hook_infos["action"] != "created" \
+    if hook_type == "issue_comment":
+        if hook_infos["action"] != "created" \
            or hook_infos["issue"]["state"] != "open" \
            or "pull_request" not in hook_infos["issue"]:
             # Nothing to do but success anyway (204 = No content)
@@ -1070,9 +1110,25 @@ async def github(request):
     if not await is_user_in_organization(hook_infos["comment"]["user"]["login"]):
         # Unauthorized
         abort(403, "Unauthorized")
 
     # Fetch the PR infos (yeah they ain't in the initial infos we get @_@)
     pr_infos_url = hook_infos["issue"]["pull_request"]["url"]
 
+    elif hook_type == "pull_request":
+        if hook_infos["action"] != "opened":
+            # Nothing to do but success anyway (204 = No content)
+            abort(204, "Nothing to do")
+        # We only accept PRs that are created by github-action bot
+        if hook_infos["pull_request"]["user"]["login"] != "github-actions[bot]" \
+           or not hook_infos["pull_request"]["head"]["ref"].startswith("ci-auto-update-"):
+            # Unauthorized
+            abort(204, "Nothing to do")
+        # Fetch the PR infos (yeah they ain't in the initial infos we get @_@)
+        pr_infos_url = hook_infos["pull_request"]["url"]
+
+    else:
+        # Nothing to do but success anyway (204 = No content)
+        abort(204, "Nothing to do")
+
     async with aiohttp.ClientSession() as session:
         async with session.get(pr_infos_url) as resp:
             pr_infos = await resp.json()
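Restated as a plain function, the triage this hunk implements: issue comments must be newly created on a still-open PR, pull_request events must be freshly opened by the GitHub Actions bot on a ci-auto-update-* branch, and everything else gets a 204. A sketch (the helper name is hypothetical, and the organization-membership check on comment authors is left out):

# Hypothetical helper restating the webhook triage above; returns the
# PR info URL to fetch, or None when the event warrants a plain 204.
def pr_infos_url_for(hook_type, hook_infos):
    if hook_type == "issue_comment":
        if (hook_infos["action"] != "created"
                or hook_infos["issue"]["state"] != "open"
                or "pull_request" not in hook_infos["issue"]):
            return None
        return hook_infos["issue"]["pull_request"]["url"]
    if hook_type == "pull_request":
        pr = hook_infos["pull_request"]
        if (hook_infos["action"] != "opened"
                or pr["user"]["login"] != "github-actions[bot]"
                or not pr["head"]["ref"].startswith("ci-auto-update-")):
            return None
        return pr["url"]
    return None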
@@ -1097,12 +1153,14 @@ async def github(request):
     # Answer with comment with link+badge for the job
 
     async def comment(body):
+        if hook_type == "issue_comment":
             comments_url = hook_infos["issue"]["comments_url"]
+        else:
+            comments_url = hook_infos["pull_request"]["comments_url"]
 
         token = open("./github_bot_token").read().strip()
         async with aiohttp.ClientSession(headers={"Authorization": f"token {token}"}) as session:
-            async with session.post(comments_url, data=json.dumps({"body": body}, default=datetime_to_epoch_json_converter)) as resp:
+            async with session.post(comments_url, data=my_json_dumps({"body": body})) as resp:
                 api_logger.info("Added comment %s" % resp.json()["html_url"])
 
     catchphrases = ["Alrighty!", "Fingers crossed!", "May the CI gods be with you!", ":carousel_horse:", ":rocket:", ":sunflower:", "Meow :cat2:", ":v:", ":stuck_out_tongue_winking_eye:" ]
@@ -1144,6 +1202,37 @@ def format_frame(f):
     return dict([(k, str(getattr(f, k))) for k in keys])
 
 
+@app.listener("before_server_start")
+async def listener_before_server_start(*args, **kwargs):
+    task_logger.info("before_server_start")
+    reset_pending_jobs()
+    reset_busy_workers()
+    merge_jobs_on_startup()
+
+    set_random_day_for_monthy_job()
+
+
+@app.listener("after_server_start")
+async def listener_after_server_start(*args, **kwargs):
+    task_logger.info("after_server_start")
+
+
+@app.listener("before_server_stop")
+async def listener_before_server_stop(*args, **kwargs):
+    task_logger.info("before_server_stop")
+
+
+@app.listener("after_server_stop")
+async def listener_after_server_stop(*args, **kwargs):
+    task_logger.info("after_server_stop")
+    for job_id in jobs_in_memory_state:
+        await stop_job(job_id)
+        job = Job.select().where(Job.id == job_id)[0]
+        job.state = "scheduled"
+        job.log = ""
+        job.save()
+
+
 def main(config="./config.py"):
 
     default_config = {
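The startup and teardown work moves out of main() into Sanic listeners, which run inside the server process at fixed points of its lifecycle. A minimal standalone sketch of the mechanism (the demo app and handler names are made up):

from sanic import Sanic, response

app = Sanic("listener_demo")

@app.listener("before_server_start")
async def setup(app, loop):
    # runs before the server accepts connections: the natural spot for
    # the reset_*/merge_* calls that used to live in main()
    app.ctx.ready = True

@app.listener("after_server_stop")
async def teardown(app, loop):
    # runs after the loop stops serving requests: the natural spot to
    # stop jobs and mark them "scheduled" for the next run
    print("clean shutdown")

@app.route("/")
async def index(request):
    return response.json({"ready": request.app.ctx.ready})

if __name__ == "__main__":
    app.run(host="127.0.0.1", port=8000)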
@@ -1155,6 +1244,7 @@ def main(config="./config.py"):
         "MONITOR_GIT": False,
         "MONITOR_ONLY_GOOD_QUALITY_APPS": False,
         "MONTHLY_JOBS": False,
+        "WORKER_COUNT": 1,
     }
 
     app.config.update_config(default_config)
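WORKER_COUNT joins the defaults that a deployment's config file can override before main() applies it. A plausible config.py; only the keys come from run.py, the values are illustrative:

# Example config.py, keys mirror default_config in run.py.
BASE_URL = "https://ci.example.org/ci"
PATH_TO_ANALYZER = "/home/CI/analyseCI.sh"
MONITOR_APPS_LIST = True
MONITOR_GIT = True
MONITOR_ONLY_GOOD_QUALITY_APPS = False
MONTHLY_JOBS = True
WORKER_COUNT = 4  # ensure_workers_count() grows or shrinks the Worker table to match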
@@ -1164,12 +1254,6 @@ def main(config="./config.py"):
         print(f"Error: analyzer script doesn't exist at '{app.config.PATH_TO_ANALYZER}'. Please fix the configuration in {config}")
         sys.exit(1)
 
-    reset_pending_jobs()
-    reset_busy_workers()
-    merge_jobs_on_startup()
-
-    set_random_day_for_monthy_job()
-
     if app.config.MONITOR_APPS_LIST:
         app.add_task(monitor_apps_lists(monitor_git=app.config.MONITOR_GIT,
                                         monitor_only_good_quality_apps=app.config.MONITOR_ONLY_GOOD_QUALITY_APPS))
@@ -103,7 +103,7 @@
         // Clipboard API not available
         return
     }
-    const text = "[](https://ci.pijean.ovh/ci/job/<{ job.id }>)"
+    const text = "[](<{ job_url }>)\n[](<{ job_url }>)"
    try {
        await navigator.clipboard.writeText(text)
    } catch (err) {