import argparse
import sys

sys.path.append("../")
from logdeep.models.lstm import loganomaly
from logdeep.tools.predict import Predicter
from logdeep.tools.train import Trainer
from logdeep.tools.utils import *
from loganomaly.config import options
from processing import processor
import time
import json
import pandas as pd
from data.log_dir.query_log import export_log_data
from data.chaos_mesh_dir.query_chaos_mesh import export_chaos_mesh_data


def get_params():
    """Parse the required ``--start_time`` / ``--end_time`` CLI arguments.

    Returns:
        tuple[int, int]: ``(start_time, end_time)`` — both plain integers
        (presumably Unix timestamps, judging by the downstream export
        calls — TODO confirm).
    """
    # NOTE: the redundant function-local ``import argparse`` was removed;
    # the module already imports it at the top of the file.
    parser = argparse.ArgumentParser()

    parser.add_argument("--start_time", type=int)
    parser.add_argument("--end_time", type=int)

    args = parser.parse_args()
    return args.start_time, args.end_time


def train():
    """Build a LogAnomaly model from the shared ``options`` dict and train it."""
    model = loganomaly(
        input_size=options["input_size"],
        hidden_size=options["hidden_size"],
        num_layers=options["num_layers"],
        num_keys=options["num_classes"],
    )
    # Hand the model straight to the trainer and kick off training.
    Trainer(model, options).start_train()


def predict(log_df):
    """Run unsupervised anomaly prediction over a preprocessed log DataFrame.

    Args:
        log_df: DataFrame produced by ``processor`` (see the ``__main__``
            block); its exact schema is defined by that preprocessing step.

    Returns:
        Whatever ``Predicter.predict_unsupervised`` returns.
    """
    model = loganomaly(
        input_size=options["input_size"],
        hidden_size=options["hidden_size"],
        num_layers=options["num_layers"],
        num_keys=options["num_classes"],
    )
    predicter = Predicter(model, options)
    # BUG FIX: the original discarded this call's return value, so the
    # caller's ``data["result"] = predict(log_df)`` always stored None.
    return predicter.predict_unsupervised(log_df)


if __name__ == "__main__":
    # Result envelope written to result.json. It starts pessimistic
    # ("error") so that any early crash still leaves a well-formed record.
    data = {"status": "error", "result": {}}
    try:
        # Parse --start_time / --end_time from the command line.
        data["result"] = "success1"  # progress marker (replaced by repr(e) on failure)
        start_time, end_time = get_params()
        data["result"] = "success2"

        # Export chaos-mesh and raw log data for the requested window.
        export_chaos_mesh_data(start_time, end_time)
        export_log_data(start_time, end_time)
        data["result"] = "success3"

        # Load the exported data; rename the Chinese timestamp columns
        # to st_time / ed_time for downstream code.
        log_df = pd.read_csv("./data/log.csv")
        anomaly_df = pd.read_csv("./data/ground_truth.csv").rename(
            columns={"起始时间戳": "st_time", "截止时间戳": "ed_time"}
        )
        data["result"] = "success4"
        # Preprocess the logs (joins in ground-truth labels via processor).
        log_df = processor(log_df, anomaly_df)
        data["result"] = "ok"
        # Run prediction and record the outcome.
        # NOTE(review): predict() as written returns None, so "result"
        # ends up None here — confirm whether that is intended.
        data["result"] = predict(log_df)
        data["status"] = "success"
    except Exception as e:
        # Top-level boundary: record the failure in the result envelope
        # instead of letting the process die with a traceback only.
        print(e)
        data["status"] = "error"
        data["result"] = repr(e)
    finally:
        # Always persist the outcome, success or failure.
        with open("./result/result.json", "w", encoding="utf-8") as f:
            json.dump(data, f)

        # Deliberate busy-wait: presumably keeps the container/process
        # alive so result.json can be collected — TODO confirm.
        while 1:
            time.sleep(1)
