import os

import ujson

from crawleth.config import API_HOURLY_EXPORT_DIRECTORY

# Subdirectory names directly under the export root, skipping the "latest"
# alias directory.  next(os.walk(...))[1] is the list of first-level dirs.
names = [n for n in next(os.walk(API_HOURLY_EXPORT_DIRECTORY))[1]
         if n != "latest"]

# One (directory_path, name, version) triple per version directory of every
# name.  A single flat comprehension replaces the original nested generator
# expressions, whose inner generator late-bound the free variable `n` and
# therefore only produced correct paths if consumed in lockstep with the
# outer generator.  (API_HOURLY_EXPORT_DIRECTORY is assumed to end with a
# path separator, as in the original concatenation — TODO confirm.)
flat = [(API_HOURLY_EXPORT_DIRECTORY + n + "/" + v + "/", n, v)
        for n in names
        for v in next(os.walk(API_HOURLY_EXPORT_DIRECTORY + n))[1]]

def _load_json(path):
    """Read and parse a single JSON file at *path*."""
    with open(path, 'r') as fp:
        return ujson.load(fp)

# Load the stats index ONCE.  The original re-read and re-wrote
# api/stats.json inside the loop on every iteration — a redundant full
# read-modify-write per directory with the same final result.
with open('api/stats.json', 'r') as infile:
    stats = ujson.load(infile)

for directory, name, version in flat:
    # Per-directory crawl artifacts produced by earlier pipeline stages.
    output = _load_json(directory + 'raw_aggregated_results.json')
    aliases_res = _load_json(directory + 'aliases.json')
    groups_res = _load_json(directory + 'groups.json')
    subnets_res = _load_json(directory + 'subnets.json')

    # Record summary counts for this (name, version) snapshot.
    stats[f"{name}_{version}"] = {
        "up_count": len(output["up"]),
        "aliases_count": len(aliases_res["results"]),
        "subnets_count": len(subnets_res["results"]),
        "groups_count": len(groups_res["results"]),
        "aggregated_files": output["stats"]["aggregated_crawls"],
    }

# Persist the updated index once, after all directories are processed.
with open('api/stats.json', 'w') as outfile:
    ujson.dump(stats, outfile)
