import ujson
import time
import logging

from models.entity import *
from models.statement import *
from models import save_item

from conf.config import Environment, filename, encoding
from table import drop_and_create
from asyncio import run, create_task


def readfile(filename):
    """
    Generate one parsed JSON dict per line of a Wikidata dump file.

    wikidata-all.json is a JSON array with one entity per line: the first
    line is "[", each data line is "{...}," (the last data line has no
    trailing comma) and the final line is "]".  The opening bracket is
    skipped, then each line is stripped of its trailing comma and parsed
    individually.

    :param filename: path to the dump file
    :yields: dict parsed from each well-formed JSON line
    """
    with open(filename, encoding=encoding) as f:
        f.readline()  # skip the opening "[" of the JSON array
        for count, line in enumerate(f, start=1):
            # lazy %-formatting: the message is only built if INFO is enabled
            logging.info("already load #%d lines", count)
            # strip whitespace/newline plus the trailing comma so every data
            # line -- including the last one, which has no comma -- parses
            payload = line.strip().rstrip(",")
            if not payload or payload == "]":
                # closing bracket of the array (or a blank line): nothing to parse
                continue
            try:
                yield ujson.loads(payload)
            except ValueError:
                # ujson raises ValueError on malformed JSON; skip the bad line
                logging.warning("error line")


def save_entity(eid: str, etype: str):
    """Persist one entity row (its id and its type)."""
    save_item(Entity(id=eid, type=etype))


async def save_labels(eid: str, labels: dict):
    """Persist every label of an entity, one row per language entry."""
    for entry in labels.values():
        save_item(Label(eid=eid, language=entry["language"], value=entry["value"]))


async def save_descriptions(eid: str, descriptions: dict):
    """Persist every description of an entity, one row per language entry."""
    for entry in descriptions.values():
        save_item(Description(eid=eid, language=entry["language"],
                              value=entry["value"]))


async def save_alias(eid: str, aliases: dict):
    """Persist every alias of an entity; each language maps to a list of them."""
    for entries in aliases.values():
        for entry in entries:
            save_item(Alias(eid=eid, language=entry["language"],
                            value=entry["value"]))


async def save_sitelinks(eid: str, sitelinks: dict):
    """Persist the sitelinks of an item; badge lists are stored comma-joined."""
    for entry in sitelinks.values():
        save_item(SiteLink(eid=eid, title=entry["title"], site=entry["site"],
                           badges=",".join(entry["badges"])))


# save statements
async def save_claims(eid: str, claims: dict):
    """
    Persist every statement (claim) of an entity, together with its main
    snak and any optional references and qualifiers.

    :param eid: id of the owning entity
    :param claims: mapping of property id -> list of statement dicts
    """
    for statements in claims.values():
        for stmt in statements:
            # save the main snak first so the statement row can link to it
            msid = await save_snak(stmt["mainsnak"])
            # avoid shadowing the builtins `id` and `type`
            sid = stmt["id"]
            statement = Statement(id=sid, eid=eid, type=stmt["type"],
                                  msid=msid, rank=stmt["rank"])
            save_item(statement)

            # references and qualifiers are optional parts of a statement
            if "references" in stmt:
                await save_references(sid, stmt["references"])
            if "qualifiers" in stmt:
                await save_qualifiers(sid, stmt["qualifiers"])


async def save_references(sid: str, references: dict):
    """
    Persist the references of a statement and every snak they contain.

    Each reference row records its snak ordering ("snaks-order",
    comma-joined); the reference/snak relation is stored via SR rows.
    """
    for ref in references:
        rid = Reference.get_id()
        save_item(Reference(id=rid, sid=sid, hash=ref["hash"],
                            snakorder=",".join(ref["snaks-order"])))
        for snak_group in ref["snaks"].values():
            for snak in snak_group:
                skid = await save_snak(snak)
                save_item(SR(rid=rid, skid=skid))


async def save_qualifiers(sid: str, qualifiers: dict):
    """
    Persist the qualifiers of a statement.

    Value-type qualifiers additionally carry a datatype and a
    JSON-serialized datavalue; other snak types store only the basics.
    """
    for group in qualifiers.values():
        for q in group:
            fields = {"sid": sid, "pid": q["property"], "hash": q["hash"],
                      "snaktype": q["snaktype"]}
            if q["snaktype"] == "value":
                fields["datatype"] = q["datatype"]
                fields["datavalue"] = ujson.dumps(q["datavalue"])
            save_item(Qualifier(**fields))


async def save_snak(snak: dict):
    """
    Persist one snak and return its generated id.

    Value-type snaks additionally store a datatype and a JSON-serialized
    datavalue; other snak types store only property and snaktype.
    """
    snak_id = Snak.get_id()
    fields = {"id": snak_id, "pid": snak["property"],
              "snaktype": snak["snaktype"]}
    if snak["snaktype"] == "value":
        fields["datatype"] = snak["datatype"]
        fields["datavalue"] = ujson.dumps(snak["datavalue"])
    save_item(Snak(**fields))
    return snak_id


def parseline(dic: dict):
    """
    Persist one parsed dump line (a single entity) into the database.

    Saves the entity row itself, then its labels, descriptions, aliases,
    sitelinks (items only) and claims.  All async save steps run in a
    single event loop per line instead of creating a fresh loop for each
    section.

    :param dic: dict parsed from one line of the dump file
    """
    eid = dic["id"]  # entity id
    etype = dic["type"]  # entity type
    save_entity(eid, etype)
    run(_save_parts(eid, etype, dic))


async def _save_parts(eid: str, etype: str, dic: dict):
    """Await every per-entity save coroutine inside one event loop."""
    # .get(..., {}) keeps the load alive for entities missing an
    # optional section instead of aborting the whole run with a KeyError
    await save_labels(eid, dic.get("labels", {}))
    await save_descriptions(eid, dic.get("descriptions", {}))
    await save_alias(eid, dic.get("aliases", {}))

    # only items carry sitelinks
    if etype == 'item':
        await save_sitelinks(eid, dic.get("sitelinks", {}))

    await save_claims(eid, dic.get("claims", {}))


if __name__ == "__main__":
    # in development: verbose logging and a schema rebuilt from scratch
    if Environment == 'dev':
        logging.basicConfig(level=logging.INFO)
        drop_and_create()

    # stream the dump file and persist it line by line, timing the whole run
    start = time.time()
    for record in readfile(filename):
        parseline(record)
    end = time.time()
    logging.info(f"total cost: {end - start} second")
