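# Rebuild the history of the YunoHost app catalog: walk the apps repository
# every two weeks since 2017, count apps per state and quality level, and
# update long-term quality flags in the web app's database.
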
import json
import os
import sys
import inspect
from datetime import datetime
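
# Make the app package importable: this script is assumed to live four
# directory levels below the application root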
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
appdir = os.path.abspath(os.path.join(currentdir, "..", "..", "..", ".."))
sys.path.insert(0, appdir)
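
# Imports from the web application itself (these need appdir on sys.path)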
from app import db, create_app
from app.models.appcatalog import App

app_ = create_app()


def _time_points_until_today():
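    # Yield a time point every two weeks (the 1st and the 15th of each month)
    # from January 1st, 2017 up to today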
    year = 2017
    month = 1
    day = 1
    today = datetime.today()
    date = datetime(year, month, day)

    while date < today:
        yield date

        day += 14
        if day > 15:
            day = 1
            month += 1

            if month > 12:
                month = 1
                year += 1

        date = datetime(year, month, day)


time_points_until_today = list(_time_points_until_today())


def get_lists_history():
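    # Start from a fresh clone of the apps repository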
    os.system("rm -rf ./.work")
    os.system("git clone https://github.com/YunoHost/apps ./.work/apps")

    for t in time_points_until_today:
        print(t.strftime("%b %d %Y"))

        # Fetch repo at this date
        cmd = 'cd ./.work/apps; git checkout `git rev-list -1 --before="%s" master`'
        os.system(cmd % t.strftime("%b %d %Y"))

        if t < datetime(2019, 4, 4):
            # Before April 2019 the catalog was split in two lists:
            # merge community and official, flagging official apps explicitly
            community = json.loads(open("./.work/apps/community.json").read())
            official = json.loads(open("./.work/apps/official.json").read())
            for key in official:
                official[key]["state"] = "official"
            merged = {}
            merged.update(community)
            merged.update(official)
        else:
            merged = json.loads(open("./.work/apps/apps.json").read())

        # Save it
        json.dump(merged, open('./.work/merged_lists.json.%s' % t.strftime("%y-%m-%d"), 'w'))


def make_count_summary():
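    # For each time point, count apps per state and per quality level,
    # and build a per-app history of levels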
    states = ["official", "working", "inprogress", "notworking"]
    history = []
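
    # Only track apps listed as working or official in the most recent list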
    last_time_point = time_points_until_today[-1]
    json_at_last_time_point = json.loads(open("./.work/merged_lists.json.%s" % last_time_point.strftime("%y-%m-%d")).read())
    relevant_apps_to_track = [app
                              for app, infos in json_at_last_time_point.items()
                              if infos.get("state") in ["working", "official"]]
    history_per_app = {app: [] for app in relevant_apps_to_track}

    for d in time_points_until_today:
        print("Analyzing %s ..." % d.strftime("%y-%m-%d"))

        # Load corresponding json
        j = json.loads(open("./.work/merged_lists.json.%s" % d.strftime("%y-%m-%d")).read())
        d_label = d.strftime("%b %d %Y")

        # Count apps in each state at this date
        summary = {}
        summary["date"] = d_label
        for state in states:
            summary[state] = len([k for k, infos in j.items() if infos["state"] == state])

        # Count working/official apps at each quality level
        for level in range(0, 10):
            summary["level-%s" % level] = len([k for k, infos in j.items()
                                               if infos["state"] in ["working", "official"]
                                               and infos.get("level", None) == level])

        history.append(summary)

        # Record each tracked app's level at this date
        for app in relevant_apps_to_track:
            infos = j.get(app, {})

            # Default to level -1 when the app is absent, not working/official,
            # or its level is missing or unparseable
            if not infos or infos.get("state") not in ["working", "official"]:
                level = -1
            else:
                level = infos.get("level", -1)
                try:
                    level = int(level)
                except (TypeError, ValueError):
                    level = -1

            history_per_app[app].append({
                "date": d_label,
                "level": level
            })

    json.dump(history, open('count_history.json', 'w'))

    os.system("mkdir -p per_app/")
    for app in relevant_apps_to_track:
        json.dump(history_per_app[app], open('per_app/history_%s.json' % app, 'w'))

    # Push long-term quality flags to the web app's database
    with app_.app_context():
        for app in relevant_apps_to_track:
            update_catalog_stats(app, history_per_app[app])

        db.session.commit()


def update_catalog_stats(app, history):
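    # Compute long-term flags over the last 24 time points
    # (two-week granularity, so roughly one year)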
    print(app)
    try:
        app_in_db = App.query.filter_by(name=app).first_or_404()
    except Exception:
        # App not in the database: nothing to update
        return

    # "Long-term good quality": level above 5 for more than 90% of the period;
    # "long-term broken": level 0 now and for more than half of the period
    app_in_db.long_term_good_quality = len([d for d in history[-24:] if d["level"] > 5]) > 0.90 * 24
    app_in_db.long_term_broken = history[-1]["level"] == 0 and len([d for d in history[-24:] if d["level"] == 0]) > 12

    db.session.add(app_in_db)


get_lists_history()
make_count_summary()