
Add github workflows, rework update_extensions.py

Salamandar 2022-05-27 19:45:30 +02:00
parent 5781981ab3
commit b265592ea9
4 changed files with 293 additions and 108 deletions

.github/workflows/update_extensions.py (vendored executable file, 92 additions)

@@ -0,0 +1,92 @@
#!/usr/bin/env python3
from pathlib import Path
from typing import List, Optional
import hashlib
import json
import textwrap
import urllib.request
from html.parser import HTMLParser
from packaging import version
import requests
EXTENSIONS_HOST_URL = "https://extdist.wmflabs.org/dist/extensions/"
EXTENSIONS = {
"ldap_authentication2": "LDAPAuthentication2",
"ldap_authorization": "LDAPAuthorization",
# "ldap_auth_remoteuser": "Auth_remoteuser",
"ldap_groups": "LDAPGroups",
"ldap_provider": "LDAPProvider",
"ldap_userinfo": "LDAPUserInfo",
"pluggable_auth": "PluggableAuth",
}
def sha256sum_of_url(url: str) -> str:
"""Compute checksum without saving the file"""
checksum = hashlib.sha256()
    for chunk in requests.get(url, stream=True).iter_content(chunk_size=8192):
checksum.update(chunk)
return checksum.hexdigest()
def generate_ext_source(asset_url: str, src_filename: str) -> None:
with open(f"conf/{src_filename}", "w", encoding="utf-8") as output:
output.write(textwrap.dedent(f"""\
SOURCE_URL={asset_url}
SOURCE_SUM={sha256sum_of_url(asset_url)}
SOURCE_SUM_PRG=sha256sum
SOURCE_FORMAT=.tar.gz
SOURCE_IN_SUBDIR=true
SOURCE_EXTRACT=true
"""))
def get_all_extensions() -> List[str]:
"""Get all available extensions."""
with urllib.request.urlopen(EXTENSIONS_HOST_URL) as page:
webpage = page.read().decode("utf-8")
class MyHTMLParser(HTMLParser):
links = []
def handle_starttag(self, tag, attrs):
if tag == "a":
for name, value in attrs:
if name == "href":
self.links.append(value)
parser = MyHTMLParser()
parser.feed(webpage)
return parser.links
def find_valid_ext(all_exts: List[str], name: str, max_version: version.Version) -> Optional[str]:
def version_of(ext):
return version.parse(ext.split("-")[1].replace("_", ".").replace("REL", ""))
found_exts = [
ext for ext in all_exts
if ext.startswith(name) and version_of(ext) <= max_version
]
return max(found_exts, key=version_of) if found_exts else None
def main():
print('Updating extensions source files...')
with open("manifest.json", "r", encoding="utf-8") as file:
manifest = json.load(file)
mediawiki_version = version.Version(manifest["version"].split("~")[0])
all_extensions = get_all_extensions()
for file, name in EXTENSIONS.items():
print(f'Updating source file for {name}')
ext = find_valid_ext(all_extensions, name, mediawiki_version)
if ext is None:
print(f'ERROR: Could not find an upstream link for extension {name}')
else:
new_url = EXTENSIONS_HOST_URL + ext
generate_ext_source(new_url, file + ".src")
if __name__ == "__main__":
main()
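
As an illustration of how find_valid_ext() is meant to behave, here is a minimal standalone sketch (not part of the commit; the tarball names and hashes below are made up): version_of() turns an extdist file name into a comparable version, and the newest extension not newer than the target MediaWiki release is picked.

from packaging import version

def version_of(ext):
    # "PluggableAuth-REL1_37-bbbbbbb.tar.gz" -> "REL1_37" -> "1.37"
    return version.parse(ext.split("-")[1].replace("_", ".").replace("REL", ""))

sample_exts = [
    "PluggableAuth-REL1_35-aaaaaaa.tar.gz",
    "PluggableAuth-REL1_37-bbbbbbb.tar.gz",
    "PluggableAuth-REL1_38-ccccccc.tar.gz",
]
candidates = [ext for ext in sample_exts if version_of(ext) <= version.Version("1.37")]
print(max(candidates, key=version_of))  # PluggableAuth-REL1_37-bbbbbbb.tar.gz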

.github/workflows/updater.py (vendored executable file, 123 additions)

@@ -0,0 +1,123 @@
#!/usr/bin/env python3
import hashlib
import json
import logging
import os
import subprocess
import textwrap
from pathlib import Path
from typing import List, Tuple, Any
import requests
from packaging import version
logging.getLogger().setLevel(logging.INFO)
# This script is meant to be run by GitHub Actions.
# The YunoHost-Apps organisation offers a template Action to run this script periodically.
# Since each app is different, maintainers can adapt its contents to perform
# automatic actions when a new upstream release is detected.
# Remove this exit command when you are ready to run this Action
# exit(1)
#=================================================
# Fetching information
def get_latest_version(repo: str) -> Tuple[version.Version, Any]:
api_url = repo.replace("https://github.com/", "https://api.github.com/repos/")
releases = requests.get(f"{api_url}/tags").json()
release_info = [
release for release in releases
if "-rc" not in release["name"] and "REL" not in release["name"]
][0]
return version.Version(release_info["name"]), release_info
def get_assets_of_release(repo: str, rel_info: Any) -> List[str]:
"""May be customized by maintainers for custom urls"""
rel = rel_info['name']
short_rel = '.'.join(rel.split('.')[:2])
assets = [
f"https://releases.wikimedia.org/mediawiki/{short_rel}/mediawiki-{rel}.tar.gz"
]
return assets
#=================================================
# Download assets and compute filename / sha256sum
def sha256sum_of_url(url: str) -> str:
"""Compute checksum without saving the file"""
checksum = hashlib.sha256()
    for chunk in requests.get(url, stream=True).iter_content(chunk_size=8192):
checksum.update(chunk)
return checksum.hexdigest()
# This has to be adapted to match what the upstream releases look like.
def handle_asset(asset_url: str):
"""This should be customized by the maintainer"""
logging.info("Handling asset at %s", asset_url)
if asset_url.endswith(".tar.gz"):
src = "app.src"
extract = "true"
else:
logging.info("Asset ignored")
return
logging.info("Asset is for %s", src)
# Rewrite source file
extension = "tar.gz" if asset_url.endswith(".tar.gz") else Path(asset_url).suffix[1:]
with open(f"conf/{src}", "w", encoding="utf-8") as conf_file:
conf_file.write(textwrap.dedent(f"""\
SOURCE_URL={asset_url}
SOURCE_SUM={sha256sum_of_url(asset_url)}
SOURCE_SUM_PRG=sha256sum
SOURCE_FORMAT={extension}
SOURCE_IN_SUBDIR=true
SOURCE_EXTRACT={extract}
"""))
def main():
with open(os.environ["GITHUB_ENV"], "w", encoding="utf-8") as github_env:
github_env.write("PROCEED=false\n")
with open("manifest.json", "r", encoding="utf-8") as file:
manifest = json.load(file)
repo = manifest["upstream"]["code"]
current_version = version.Version(manifest["version"].split("~")[0])
logging.info("Current version: %s", current_version)
latest_version, release_info = get_latest_version(repo)
logging.info("Latest upstream version: %s", latest_version)
# Proceed only if the retrieved version is greater than the current one
if latest_version <= current_version:
logging.warning("No new version available")
return
# Proceed only if a PR for this new version does not already exist
command = ["git", "ls-remote", "--exit-code", "-h", repo, f"ci-auto-update-v${latest_version}"]
if subprocess.run(command, stderr=subprocess.DEVNULL, check=False).returncode == 0:
logging.warning("A branch already exists for this update")
return
assets = get_assets_of_release(repo, release_info)
logging.info("%d available asset(s)", len(assets))
for asset in assets:
handle_asset(asset)
manifest["version"] = f"{latest_version}~ynh1"
with open("manifest.json", "w", encoding="utf-8") as manifest_file:
json.dump(manifest, manifest_file, indent=4, ensure_ascii=False)
manifest_file.write("\n")
with open(os.environ["GITHUB_ENV"], "w", encoding="utf-8") as github_env:
github_env.write(textwrap.dedent(f"""\
VERSION={latest_version}
REPO={repo}
PROCEED=true
"""))
if __name__ == "__main__":
main()
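
For reference, a minimal sketch (with made-up tag data, not part of the commit) of the filtering performed by get_latest_version(): release-candidate tags ("-rc") and REL branch tags are skipped, and the first remaining tag is treated as the latest stable version.

from packaging import version

tags = [{"name": "1.38.0-rc.1"}, {"name": "REL1_37"}, {"name": "1.38.0"}, {"name": "1.37.2"}]
stable = [tag for tag in tags if "-rc" not in tag["name"] and "REL" not in tag["name"]][0]
print(version.Version(stable["name"]))  # 1.38.0

The VERSION and PROCEED values written to GITHUB_ENV at the end of main() are what the PROCEED == 'true' conditions and the ci-auto-update-v${{ env.VERSION }} branch name in updater.yml below rely on.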

.github/workflows/updater.yml (vendored file, 78 additions)

@@ -0,0 +1,78 @@
# This workflow allows GitHub Actions to automagically update your app whenever a new upstream release is detected.
# You need to enable Actions in your repository settings, and fetch this Action from the YunoHost-Apps organization.
# This file should be enough by itself, but feel free to tune it to your needs.
# It calls updater.py, which is where you should put the app-specific update steps.
name: Check for new upstream releases
on:
  # Allow the workflow to be triggered manually
workflow_dispatch:
# Run it every day at 6:00 UTC
schedule:
- cron: '0 6 * * *'
jobs:
updater:
runs-on: ubuntu-latest
steps:
- name: Fetch the source code
uses: actions/checkout@v2
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Run the updater script
id: run_updater
run: |
# Setting up Git user
git config --global user.name 'yunohost-bot'
git config --global user.email 'yunohost-bot@users.noreply.github.com'
# Run the updater script
.github/workflows/updater.py
- name: Commit changes
id: commit
if: ${{ env.PROCEED == 'true' }}
run: |
git commit -am "Upgrade to v$VERSION"
- name: Create Pull Request
id: cpr
if: ${{ env.PROCEED == 'true' }}
uses: peter-evans/create-pull-request@v3
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Update to version ${{ env.VERSION }}
committer: 'yunohost-bot <yunohost-bot@users.noreply.github.com>'
author: 'yunohost-bot <yunohost-bot@users.noreply.github.com>'
signoff: false
base: testing
branch: ci-auto-update-v${{ env.VERSION }}
delete-branch: true
title: 'Upgrade to version ${{ env.VERSION }}'
body: |
Upgrade to v${{ env.VERSION }}
draft: false
extensions_updater:
runs-on: ubuntu-latest
steps:
- name: Fetch the source code
uses: actions/checkout@v2
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Run the updater script
id: run_updater
run: .github/workflows/update_extensions.py
- name: Create Pull Request
id: cpr
uses: peter-evans/create-pull-request@v3
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Update extension urls
committer: 'yunohost-bot <yunohost-bot@users.noreply.github.com>'
author: 'yunohost-bot <yunohost-bot@users.noreply.github.com>'
signoff: false
base: master
branch: ci-auto-update-extensions
delete-branch: true
title: Update extension urls
body: Update extension urls
draft: false
add-paths: conf

(deleted file, 108 deletions)

@@ -1,108 +0,0 @@
#!/usr/bin/env python3
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import List, Optional
import hashlib
import json
import urllib
from html.parser import HTMLParser
from packaging import version
import requests
# GENERIC CODE
# Don't edit this file manually, but {program} instead.
SOURCE_TEMPLATE = """SOURCE_URL={url}
SOURCE_SUM={sha256sum}
SOURCE_SUM_PRG=sha256sum
SOURCE_FORMAT=tar.gz
SOURCE_IN_SUBDIR={source_in_subdir}
SOURCE_EXTRACT={source_extract}
"""
def generate_source(url: str, output_name: str, source_extract=True, source_in_subdir=True) -> None:
with NamedTemporaryFile() as tempfile:
response = requests.get(url)
response.raise_for_status()
with open(tempfile.name, "wb") as datafile:
for chunk in response.iter_content(chunk_size=1024):
datafile.write(chunk)
sha256_hash = hashlib.sha256()
with open(tempfile.name, "rb") as datafile:
# Read and update hash string value in blocks of 4K
for byte_block in iter(lambda: datafile.read(4096),b""):
sha256_hash.update(byte_block)
sha256sum = sha256_hash.hexdigest()
with open(Path(__file__).parent / output_name, "w", encoding="utf-8") as output:
output.write(SOURCE_TEMPLATE.format(
program=Path(__file__).name, url=url, sha256sum=sha256sum,
source_in_subdir=("true" if source_in_subdir else "false"),
source_extract=("true" if source_extract else "false")
))
# SPECIFIC TO MEDIAWIKI
VERSION = "1.37.2"
EXTENSION_VERSION = "_".join(VERSION.split(".")[0:2])
EXTENSIONS_HOST_URL = "https://extdist.wmflabs.org/dist/extensions/"
EXTENSIONS = {
"ldap_authentication2": "LDAPAuthentication2",
"ldap_authorization": "LDAPAuthorization",
# "ldap_auth_remoteuser": "Auth_remoteuser",
"ldap_groups": "LDAPGroups",
"ldap_provider": "LDAPProvider",
"ldap_userinfo": "LDAPUserInfo",
"pluggable_auth": "PluggableAuth",
}
def get_all_extensions() -> List[str]:
"""Get all available extensions."""
with urllib.request.urlopen(EXTENSIONS_HOST_URL) as page:
webpage = page.read().decode("utf-8")
class MyHTMLParser(HTMLParser):
links = []
def handle_starttag(self, tag, attrs):
if tag == "a":
for name, value in attrs:
if name == "href":
self.links.append(value)
parser = MyHTMLParser()
parser.feed(webpage)
return parser.links
def find_valid_ext(all_exts: List[str], name: str, max_ver: str) -> Optional[str]:
def version_of(ext):
return version.parse(ext.split("-")[1].replace("_", ".").replace("REL", ""))
found_exts = [ext for ext in all_exts if ext.startswith(name)]
return max(found_exts, key=version_of) if found_exts else None
def main():
print(f'Updating source file for Mediawiki...')
version_dir = ".".join(VERSION.split(".")[0:2])
generate_source(
f"https://releases.wikimedia.org/mediawiki/{version_dir}/mediawiki-{VERSION}.tar.gz",
"app.src"
)
all_extensions = get_all_extensions()
for file, name in EXTENSIONS.items():
print(f'Updating source file for {name}')
ext = find_valid_ext(all_extensions, name, VERSION)
if ext is None:
print(f'ERROR: Could not find an upstream link for extension {name}')
else:
new_url = EXTENSIONS_HOST_URL + ext
generate_source(new_url, file + ".src", source_in_subdir=False)
if __name__ == "__main__":
main()