[yolo] Add level graph for individual apps

Alexandre Aubin 2020-11-05 22:23:31 +01:00
parent 2e24e858ee
commit dfbd7250d2
3 changed files with 103 additions and 119 deletions

View file

@@ -6,6 +6,7 @@ from .models.appci import AppCI, AppCIBranch
 from .models.unlistedapps import UnlistedApp
 from .settings import SITE_ROOT
 import json
+import os
 
 main = Blueprint('main', __name__, url_prefix=SITE_ROOT)
@@ -75,9 +76,17 @@ def appci_app(app):
     tests = AppCI.tests.copy()
     if "Malformed path" in tests:
         tests.remove("Malformed path")
+    history_file = "./app/scripts/appListsHistory/per_app/history_%s.json" % app
+    if os.path.exists(history_file):
+        history = json.loads(open(history_file).read())
+    else:
+        history = []
     return render_template("appci_app.html", tests=tests,
                            app=app,
-                           branch_results=branch_results)
+                           branch_results=branch_results,
+                           history=history)
 
 @main.route('/appci/compare/<ref>...<target>')
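
Note: the `history` list loaded here and handed to appci_app.html is expected to be the per-app JSON written by make_count_summary() in the script below, i.e. one {date, level} point per time point, with -1 standing for "not working / not packaged yet at that date". A minimal sketch of that structure (the values below are illustrative only):

    # Hypothetical content of per_app/history_<app>.json, as consumed by the route above
    history = [
        {"date": "20-10-29", "level": 6},
        {"date": "20-11-05", "level": 7},   # level is -1 for dates where the app was not working
    ]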

View file

@@ -2,13 +2,7 @@
 import json
 import os
 from datetime import datetime
-from feedgen.feed import FeedGenerator
-import jinja2
-
-state_to_value = { "working":1,
-                   "inprogress": 0,
-                   "notworking": -1,
-                 }
 
 def _time_points_until_today():
@@ -32,8 +26,10 @@ def _time_points_until_today():
         date = datetime(year, month, day)
 
 time_points_until_today = list(_time_points_until_today())
 
 def get_lists_history():
     os.system("rm -rf ./.work")
@@ -61,113 +57,18 @@ def get_lists_history():
         # Save it
         json.dump(merged, open('./.work/merged_lists.json.%s' % t.strftime("%y-%m-%d"), 'w'))
-def diffs():
-    # Iterate over pairs of date : (t0,t1), (t1,t2), ...
-    dates = time_points_until_today
-    for d1, d2 in zip(dates[:-1], dates[1:]):
-        print("Analyzing %s ... %s" % (d1.strftime("%y-%m-%d"), d2.strftime("%y-%m-%d")))
-        # Load corresponding json
-        f1 = json.loads(open("./.work/merged_lists.json.%s" % d1.strftime("%y-%m-%d")).read())
-        f2 = json.loads(open("./.work/merged_lists.json.%s" % d2.strftime("%y-%m-%d")).read())
-        for key in f1:
-            f1[key]["name"] = key
-        for key in f2:
-            f2[key]["name"] = key
-        keys_f1 = set(f1.keys())
-        keys_f2 = set(f2.keys())
-        removed = [ f1[k] for k in keys_f1 - keys_f2 ]
-        added = [ f2[k] for k in keys_f2 - keys_f1 ]
-        keys_inboth = keys_f1 & keys_f2
-        state_changes = []
-        level_changes = []
-        updates = []
-        for key in keys_inboth:
-            changes = []
-            # FIXME : this mechanism aint relevant anymore since
-            # the norm is to use HEAD now...
-            commit1 = f1[key].get("revision", None)
-            commit2 = f2[key].get("revision", None)
-            if commit1 != commit2:
-                changes.append("updated")
-            state1 = f1[key].get("state", None)
-            state2 = f2[key].get("state", None)
-            if state1 != state2:
-                changes.append(("state", state1, state2))
-            level1 = f1[key].get("level", None)
-            level2 = f2[key].get("level", None)
-            if level1 != level2:
-                changes.append(("level", level1, level2))
-            if level1 != level2 or state1 != state2:
-                level1_value = level1 if level1 else 0
-                level2_value = level2 if level2 else 0
-                if level1_value < level2_value:
-                    changes.append("improvement")
-                elif level1_value > level2_value:
-                    changes.append("regression")
-                else:
-                    state1_value = state_to_value.get(state1, -1)
-                    state2_value = state_to_value.get(state2, -1)
-                    if state1_value < state2_value:
-                        changes.append("improvement")
-                    elif state1_value > state2_value:
-                        changes.append("regression")
-                    else:
-                        changes.append("same")
-            if changes:
-                updates.append((f2[key], changes))
-        yield { "begin": d1,
-                "end": d2,
-                "new": sorted(added, key=lambda a: a["name"]),
-                "improvements": sorted([a for a in updates if "improvement" in a[1]], key=lambda a: a[0]["name"]),
-                "updates": sorted([a for a in updates if "same" in a[1]], key=lambda a: a[0]["name"]),
-                "regressions": sorted([a for a in updates if "regression" in a[1]], key=lambda a: a[0]["name"]),
-                "removed": sorted(removed, key=lambda a: a["name"]),
-              }
-def make_rss_feed():
-    fg = FeedGenerator()
-    fg.id('https://github.com/YunoHost/Apps/')
-    fg.title('App Lists news')
-    fg.author( {'name':'YunoHost'} )
-    fg.language('en')
-    for diff in diffs():
-        fe = fg.add_entry()
-        fe.id('https://github.com/YunoHost/Apps/#'+diff["end"].strftime("%y-%m-%d"))
-        fe.title('Changes between %s and %s' % (diff["begin"].strftime("%b %d"), diff["end"].strftime("%b %d")))
-        fe.link(href='https://github.com/YunoHost/apps/commits/master/community.json')
-        fe.content(jinja2.Template(open("rss_template.html").read()).render(data=diff), type="html")
-        fe._FeedEntry__atom_updated = diff["end"]
-        print('Changes between %s and %s' % (diff["begin"].strftime("%b %d"), diff["end"].strftime("%b %d")))
-        open("tmp.html", "w").write(jinja2.Template(open("rss_template.html").read()).render(data=diff))
-    fg.atom_file('atom.xml')
 def make_count_summary():
     states = ["official", "working", "inprogress", "notworking"]
     history = []
-    per_state = { state: [] for state in states }
-    per_level = { "level-%s"%i: [] for i in range(0,8) }
+    last_time_point = time_points_until_today[-1]
+    json_at_last_time_point = json.loads(open("./.work/merged_lists.json.%s" % last_time_point.strftime("%y-%m-%d")).read())
+    relevant_apps_to_track = [app
+                              for app, infos in json_at_last_time_point.items()
+                              if infos.get("state") in ["working", "official"]]
+    history_per_app = {app: [] for app in relevant_apps_to_track}
 
     for d in time_points_until_today:
@@ -183,14 +84,36 @@ def make_count_summary():
             summary[state] = len([k for k, infos in j.items() if infos["state"] == state])
         for level in range(0, 9):
-            summary["level-%s"%level] = len([ k for k, infos in j.items() \
-                                              if infos["state"] in ["working", "official"] \
-                                              and infos.get("level", None) == level])
+            summary["level-%s" % level] = len([k for k, infos in j.items()
+                                               if infos["state"] in ["working", "official"]
+                                               and infos.get("level", None) == level])
         history.append(summary)
+        for app in relevant_apps_to_track:
+            infos = j.get(app, {})
+            if not infos or infos.get("state") not in ["working", "official"]:
+                level = -1
+            else:
+                level = infos.get("level", -1)
+            try:
+                level = int(level)
+            except (ValueError, TypeError):
+                level = -1
+            history_per_app[app].append({
+                "date": d_label,
+                "level": level,
+            })
     json.dump(history, open('count_history.json', 'w'))
+    os.system("mkdir -p per_app/")
+    for app in relevant_apps_to_track:
+        json.dump(history_per_app[app], open('per_app/history_%s.json' % app, 'w'))
 
 get_lists_history()
-make_rss_feed()
 make_count_summary()
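
Note: the per-app loop added above normalizes whatever the list contains into an integer level, falling back to -1 when the app is missing at that time point, is not in a working/official state, or has an unparseable level. A standalone sketch of that fallback rule (the sample `infos` dicts are made up):

    def normalize_level(infos):
        # mirrors the fallback logic in make_count_summary():
        # missing app, non-working state, or unparseable level => -1
        if not infos or infos.get("state") not in ["working", "official"]:
            return -1
        try:
            return int(infos.get("level", -1))
        except (ValueError, TypeError):
            return -1

    print(normalize_level({}))                                   # -1
    print(normalize_level({"state": "notworking", "level": 3}))  # -1
    print(normalize_level({"state": "working", "level": "7"}))   # 7
    print(normalize_level({"state": "working", "level": None}))  # -1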

View file

@@ -125,4 +125,56 @@
     </div>
 </div>
+
+<div class="row">
+    <div class="mx-auto">
+        <div id="levelHistory" style="height: 270px;"></div>
+    </div>
+</div>
+
+{% endblock %}
+
+<script src="{{ url_for('static', filename='js/canvasjs.min.js') }}"></script>
+<script>
+window.onload = function () {
+    var colors_per_level = [
+        "#d9534f",
+        "#E26D4F",
+        "#E98D4E",
+        "#f0ad4e",
+        "#CBB052",
+        "#A6B255",
+        "#7AB659",
+        "#5cb85c",
+        "#4695d5"
+    ];
+    var chart = new CanvasJS.Chart("levelHistory", {
+        animationEnabled: false,
+        title:{
+            text: "App level (in apps.json)"
+        },
+        toolTip: {
+            reversed: true,
+            shared: true
+        },
+        data: [
+            {
+                type: "line",
+                dataPoints: [
+                    {% for d in history %}
+                    { label: "{{ d.date }}", y: {{ d["level"] }},
+                      markerColor: colors_per_level[{{ d["level"] }}] },
+                    {% endfor %}
+                ]
+            },
+        ]
+    });
+    chart.render();
+}
+</script>
 
 {% endblock %}
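
Note: the dataPoints loop in the template consumes the same `history` list; each point becomes one CanvasJS data point, and colors_per_level is indexed by the level (0 through 8 map onto the nine colors, while a level of -1 falls outside the array and presumably leaves markerColor undefined, so CanvasJS would fall back to its default marker color). A minimal sketch of how the loop expands, rendered with jinja2 outside of Flask (the sample values are made up):

    from jinja2 import Template

    # Same expression as in the dataPoints loop of appci_app.html
    datapoints = Template(
        '{% for d in history %}'
        '{ label: "{{ d.date }}", y: {{ d["level"] }}, '
        'markerColor: colors_per_level[{{ d["level"] }}] },\n'
        '{% endfor %}'
    )

    sample_history = [{"date": "20-10-29", "level": 6}, {"date": "20-11-05", "level": 7}]
    print(datapoints.render(history=sample_history))
    # { label: "20-10-29", y: 6, markerColor: colors_per_level[6] },
    # { label: "20-11-05", y: 7, markerColor: colors_per_level[7] },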