mirror of https://github.com/YunoHost-Apps/mediawiki_ynh.git synced 2024-09-03 19:46:05 +02:00

Rework update scripts

Salamandar 2023-01-21 16:03:46 +01:00
parent 0d5047c7c8
commit 48937aa04b
2 changed files with 61 additions and 64 deletions


@@ -1,96 +1,94 @@
 #!/usr/bin/env python3
+"""
+Download extensions for the current mediawiki version, and update the conf files.
+"""
+from pathlib import Path
 from typing import List, Optional
 import hashlib
-import json
-import textwrap
 import urllib
 from html.parser import HTMLParser
+import tomlkit
 from packaging import version
 import requests
 
 EXTENSIONS_HOST_URL = "https://extdist.wmflabs.org/dist/extensions/"
-EXTENSIONS = {
-    "ldap_authentication2": "LDAPAuthentication2",
-    "ldap_authorization": "LDAPAuthorization",
-    # "ldap_auth_remoteuser": "Auth_remoteuser",
-    "ldap_groups": "LDAPGroups",
-    "ldap_provider": "LDAPProvider",
-    "ldap_userinfo": "LDAPUserInfo",
-    "pluggable_auth": "PluggableAuth",
-}
+GITHUB_API_URL = "https://api.github.com/repos"
 
 
 def sha256sum_of_url(url: str) -> str:
     """Compute checksum without saving the file"""
     checksum = hashlib.sha256()
-    for chunk in requests.get(url, stream=True).iter_content():
+    for chunk in requests.get(url, stream=True, timeout=10).iter_content():
         checksum.update(chunk)
     return checksum.hexdigest()
 
 
-def generate_ext_source(asset_url: str, src_filename: str) -> None:
-    with open(f"conf/{src_filename}", "w", encoding="utf-8") as output:
-        output.write(textwrap.dedent(f"""\
-            SOURCE_URL={asset_url}
-            SOURCE_SUM={sha256sum_of_url(asset_url)}
-            SOURCE_SUM_PRG=sha256sum
-            SOURCE_FORMAT=tar.gz
-            SOURCE_IN_SUBDIR=false
-            SOURCE_FILENAME=
-            SOURCE_EXTRACT=true
-            """))
-
-
-def get_all_extensions() -> List[str]:
-    """Get all available extensions."""
-    with urllib.request.urlopen(EXTENSIONS_HOST_URL) as page:
-        webpage = page.read().decode("utf-8")
-
-    class MyHTMLParser(HTMLParser):
-        links = []
-
-        def handle_starttag(self, tag, attrs):
-            if tag == "a":
-                for name, value in attrs:
-                    if name == "href":
-                        self.links.append(value)
-
-    parser = MyHTMLParser()
-    parser.feed(webpage)
-    return parser.links
-
-
-def find_valid_ext(all_exts: List[str], name: str, max_version: version.Version) -> Optional[str]:
+def find_valid_version(all_versions: List[str], max_version: version.Version) -> Optional[str]:
+    """Find the valid extensions for the current mediawiki version"""
     def version_of(ext):
         try:
-            return version.parse(ext.split("-")[1].replace("_", ".").replace("REL", ""))
+            return version.parse(ext.replace("_", ".").replace("REL", ""))
         except version.InvalidVersion:
-            print(f"Invalid version (this might be normal): {ext}")
+            # print(f"Invalid version (this might be normal): {ext}")
             return version.parse("0.0")
 
-    found_exts = [
-        ext for ext in all_exts
-        if ext.startswith(name) and version_of(ext) <= max_version
-    ]
-    return max(found_exts, key=version_of) if found_exts else None
+    def compatible(ext_version: str) -> bool:
+        return version_of(ext_version) <= max_version
+
+    compatible_versions = filter(compatible, all_versions)
+    if compatible_versions:
+        return max(compatible_versions, key=version_of)
+    if "master" in all_versions:
+        return "master"
+    return None
+
+
+def get_repo(url: str) -> str:
+    return "/".join(url.split("://")[1].split("/")[1:3])
+
+
+def get_branches(repo: str) -> List[str]:
+    branches = requests.get(f"{GITHUB_API_URL}/{repo}/branches", timeout=10).json()
+    names = [branch["name"] for branch in branches]
+    return names
+
+
+def get_last_commit_of(repo: str, branch: str) -> str:
+    commit = requests.get(f"{GITHUB_API_URL}/{repo}/commits/{branch}", timeout=10).json()
+    return commit["sha"]
 
 
 def main():
     print('Updating extensions source files...')
-    with open("manifest.json", "r", encoding="utf-8") as file:
-        manifest = json.load(file)
-    mediawiki_version = version.Version(manifest["version"].split("~")[0])
+    with open("manifest.toml", "r", encoding="utf-8") as file:
+        manifest = tomlkit.loads(file.read())
+    mediawiki_version = version.Version(manifest["version"].value.split("~")[0])
 
-    all_extensions = get_all_extensions()
-
-    for file, name in EXTENSIONS.items():
+    for name, descr in manifest["resources"]["sources"].items():
+        if "extension" not in descr["url"]:
+            # not an extension
+            continue
         print(f'Updating source file for {name}')
-        ext = find_valid_ext(all_extensions, name, mediawiki_version)
-        if ext is None:
-            print(f'ERROR: Could not find an upstream link for extension {name}')
-        else:
-            new_url = EXTENSIONS_HOST_URL + ext
-            generate_ext_source(new_url, file + ".src")
+        repo = get_repo(descr["url"])
+        branches = get_branches(repo)
+        branch = find_valid_version(branches, mediawiki_version)
+        if not branch:
+            print("Could not find any valid branch")
+            continue
+        commit = get_last_commit_of(repo, branch)
+        url = f"https://github.com/{repo}/archive/{commit}.tar.gz"
+        manifest["resources"]["sources"][name]["url"] = url
+        manifest["resources"]["sources"][name]["sha256"] = sha256sum_of_url(url)
+
+    with open("manifest.toml", "w", encoding="utf-8") as manifest_file:
+        manifest_file.write(tomlkit.dumps(manifest))
 
 
 if __name__ == "__main__":
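
For context, the selection logic introduced above boils down to: pick the highest RELx_y branch that is not newer than the packaged MediaWiki version, and fall back to master otherwise. Below is a minimal standalone sketch of that idea, illustrative only and not the committed code; the branch names and version in the example are made up.

#!/usr/bin/env python3
"""Standalone sketch of the branch-selection idea (not the committed code)."""
from typing import List, Optional

from packaging import version


def pick_branch(branches: List[str], max_version: version.Version) -> Optional[str]:
    """Return the highest RELx_y branch not newer than max_version, else 'master', else None."""
    def version_of(name: str) -> version.Version:
        try:
            # "REL1_39" -> "1.39"
            return version.parse(name.replace("REL", "").replace("_", "."))
        except version.InvalidVersion:
            return version.parse("0.0")

    rel_branches = [b for b in branches if b.startswith("REL")]
    compatible = [b for b in rel_branches if version_of(b) <= max_version]
    if compatible:
        return max(compatible, key=version_of)
    return "master" if "master" in branches else None


if __name__ == "__main__":
    branches = ["REL1_35", "REL1_38", "REL1_39", "master"]   # hypothetical branch list
    print(pick_branch(branches, version.Version("1.38.5")))  # -> REL1_38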


@@ -10,7 +10,6 @@ You need to enable the action by removing `if ${{ false }}` in updater.yml!
 """
 import hashlib
-import json
 import logging
 import os
 from subprocess import run, PIPE
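
Both scripts drop the JSON manifest in favour of manifest.toml. A minimal sketch of the tomlkit round-trip the reworked updater relies on, illustrative only; the field names follow the packaging-v2 manifest read and written by the script above, and the ~ynh suffix handling mirrors it.

import tomlkit

# Load the manifest; tomlkit preserves the file's formatting and comments.
with open("manifest.toml", "r", encoding="utf-8") as f:
    manifest = tomlkit.loads(f.read())

# "1.39.1~ynh1" -> "1.39.1": keep only the upstream part of the version.
upstream_version = str(manifest["version"]).split("~")[0]

# Walk the declared sources; each entry carries a url and a sha256.
for name, source in manifest["resources"]["sources"].items():
    print(name, source["url"], source["sha256"])

# Writing back with tomlkit.dumps() leaves untouched sections formatted as before.
with open("manifest.toml", "w", encoding="utf-8") as f:
    f.write(tomlkit.dumps(manifest))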