2018-05-22 22:20:17 +02:00
|
|
|
|
|
|
|
import json
|
|
|
|
import os
|
|
|
|
from datetime import datetime
|
|
|
|
|
2019-02-15 23:02:31 +01:00
|
|
|
|
2018-05-22 22:20:17 +02:00
|
|
|
def _time_points_until_today():
|
|
|
|
|
|
|
|
year = 2017
|
|
|
|
month = 1
|
|
|
|
day = 1
|
|
|
|
today = datetime.today()
|
|
|
|
date = datetime(year, month, day)
|
|
|
|
|
|
|
|
while date < today:
|
|
|
|
yield date
|
|
|
|
|
|
|
|
day += 14
|
|
|
|
if day > 15:
|
|
|
|
day = 1
|
|
|
|
month += 1
|
|
|
|
|
|
|
|
if month > 12:
|
|
|
|
month = 1
|
|
|
|
year += 1
|
|
|
|
|
|
|
|
date = datetime(year, month, day)
|
|
|
|
|
2020-11-05 22:23:31 +01:00
|
|
|
|
2018-05-22 22:20:17 +02:00
|
|
|
# Materialize the semi-monthly date series once; both passes below iterate it.
time_points_until_today = list(_time_points_until_today())
|
|
|
|
|
2020-11-05 22:23:31 +01:00
|
|
|
|
2018-05-22 22:20:17 +02:00
|
|
|
def get_lists_history():
    """Clone the YunoHost apps repo and snapshot the app catalog at each date.

    For every date in ``time_points_until_today``, checks out the last commit
    on ``master`` before that date and writes the (merged) catalog to
    ``./.work/merged_lists.json.<yy-mm-dd>``.
    """
    # Start from a clean working tree on every run.
    os.system("rm -rf ./.work")
    os.system("git clone https://github.com/YunoHost/apps ./.work/apps")

    for t in time_points_until_today:
        print(t.strftime("%b %d %Y"))

        # Fetch repo at this date
        cmd = 'cd ./.work/apps; git checkout `git rev-list -1 --before="%s" master`'
        os.system(cmd % t.strftime("%b %d %Y"))

        if t < datetime(2019, 4, 4):
            # Before 2019-04-04 the catalog was split across two files:
            # merge community and official, tagging official apps explicitly.
            # (with-blocks fix the original's leaked file handles.)
            with open("./.work/apps/community.json") as f:
                community = json.load(f)
            with open("./.work/apps/official.json") as f:
                official = json.load(f)
            for key in official:
                official[key]["state"] = "official"
            merged = {}
            merged.update(community)
            # official entries win over community duplicates.
            merged.update(official)
        else:
            # From 2019-04-04 on, a single apps.json holds the whole catalog.
            with open("./.work/apps/apps.json") as f:
                merged = json.load(f)

        # Save it
        with open('./.work/merged_lists.json.%s' % t.strftime("%y-%m-%d"), 'w') as f:
            json.dump(merged, f)
|
2018-05-22 22:20:17 +02:00
|
|
|
|
|
|
|
|
2018-05-22 23:51:23 +02:00
|
|
|
def make_count_summary():
    """Summarize the snapshots produced by ``get_lists_history``.

    Writes ``count_history.json`` (per time point: app counts per state and
    per level) and ``per_app/history_<app>.json`` (level history for every
    app that is "working"/"official" in the most recent snapshot).
    """
    states = ["official", "working", "inprogress", "notworking"]
    history = []

    # Only track apps that are still relevant (working/official) today.
    last_time_point = time_points_until_today[-1]
    with open("./.work/merged_lists.json.%s" % last_time_point.strftime("%y-%m-%d")) as f:
        json_at_last_time_point = json.load(f)

    relevant_apps_to_track = [app
                              for app, infos in json_at_last_time_point.items()
                              if infos.get("state") in ["working", "official"]]

    history_per_app = {app: [] for app in relevant_apps_to_track}

    for d in time_points_until_today:

        print("Analyzing %s ..." % d.strftime("%y-%m-%d"))

        # Load corresponding json
        with open("./.work/merged_lists.json.%s" % d.strftime("%y-%m-%d")) as f:
            j = json.load(f)
        d_label = d.strftime("%b %d %Y")

        summary = {}
        summary["date"] = d_label
        for state in states:
            summary[state] = len([k for k, infos in j.items() if infos["state"] == state])

        for level in range(0, 9):
            summary["level-%s" % level] = len([k for k, infos in j.items()
                                               if infos["state"] in ["working", "official"]
                                               and infos.get("level", None) == level])

        history.append(summary)

        for app in relevant_apps_to_track:

            infos = j.get(app, {})

            # Apps absent or not working/official at this date count as -1.
            if not infos or infos.get("state") not in ["working", "official"]:
                level = -1
            else:
                level = infos.get("level", -1)
                try:
                    level = int(level)
                except (TypeError, ValueError):
                    # Non-numeric level strings (e.g. "?") normalize to -1.
                    level = -1

            # BUGFIX: record the sanitized integer computed above; the
            # original re-read the raw value via infos.get("level", -1),
            # silently discarding the state check and int() normalization.
            history_per_app[app].append({
                "date": d_label,
                "level": level
            })

    with open('count_history.json', 'w') as f:
        json.dump(history, f)

    os.system("mkdir -p per_app/")
    for app in relevant_apps_to_track:
        with open('per_app/history_%s.json' % app, 'w') as f:
            json.dump(history_per_app[app], f)
|
|
|
|
|
|
|
|
|
2018-11-08 14:16:07 +01:00
|
|
|
# Run the full pipeline: snapshot the catalog history, then summarize it.
# Guarded so importing this module for its helpers has no side effects.
if __name__ == "__main__":
    get_lists_history()
    make_count_summary()
|