import json
import os

import yaml
from google.protobuf.json_format import MessageToDict

from server.service.pb.model_server_pb2 import PredictRequest, Experiment, OptimizerConfig, ModelConfig, \
    TexttoolRequest


class PbHelper:
    """Builds protobuf messages for the model server from plain Python dicts.

    The OptimizerConfig / ModelConfig messages built by the ``set_*`` methods
    are cached on the instance so that ``create_experiment`` and
    ``create_predict_request`` can embed them.
    """

    def __init__(self):
        # Populated lazily by set_optimizer_config / set_model_config /
        # create_experiment.
        self.optimizer = None
        self.model_config = None
        self.experiment = None

    def set_optimizer_config(self, optimizer_config=None):
        """Build and cache an ``OptimizerConfig`` message.

        Args:
            optimizer_config: optional dict with keys ``optimizer_class``
                (str, default ``"AdamW"``) and ``params`` (dict, default
                ``{"lr": 3e-5, "eps": 1e-8}``). Missing keys fall back to
                the defaults.
        """
        if optimizer_config is not None:
            assert isinstance(optimizer_config, dict)
            # Only validate "params" when it is present; a missing key falls
            # back to the default below. (The original indexed the key
            # unconditionally, raising KeyError and making the .get() default
            # unreachable.)
            if "params" in optimizer_config:
                assert isinstance(optimizer_config["params"], dict)
        else:
            optimizer_config = {}
        optimizer_class = optimizer_config.get("optimizer_class", "AdamW")
        params = optimizer_config.get("params", {"lr": 3e-5, "eps": 1e-8})
        # The proto field is a string, so params is JSON-serialized.
        self.optimizer = OptimizerConfig(optimizer_class=optimizer_class, params=json.dumps(params))

    def set_model_config(self, model_config=None):
        """Build and cache a ``ModelConfig`` message.

        Known keys map to their proto fields; any unknown key is folded into
        the JSON-encoded ``other_config`` field.

        Args:
            model_config: optional dict; every key is optional and defaults
                as listed below. ``other_config``, when given, must be a dict.
        """
        known_params = {"train_max_seq_length", "eval_max_seq_length", "batch_size", "other_config",
                        "num_train_epochs", "evaluate_during_training", "save_mode", "load_mode"}

        if model_config is not None:
            assert isinstance(model_config, dict)
            if "other_config" in model_config:
                assert isinstance(model_config["other_config"], dict)
        else:
            model_config = {}

        train_max_seq_length = model_config.get("train_max_seq_length", 128)
        eval_max_seq_length = model_config.get("eval_max_seq_length", 128)
        batch_size = model_config.get("batch_size", 32)
        num_train_epochs = model_config.get("num_train_epochs", 3)
        evaluate_during_training = model_config.get("evaluate_during_training", True)
        save_mode = model_config.get("save_mode", "all")
        load_mode = model_config.get("load_mode", "all")
        # Copy before merging unknown keys so we do not mutate the caller's
        # nested "other_config" dict (the original wrote through the shared
        # reference).
        other_config = dict(model_config.get("other_config", {}))

        for key, value in model_config.items():
            if key not in known_params:
                other_config[key] = value

        self.model_config = ModelConfig(train_max_seq_length=train_max_seq_length,
                                        eval_max_seq_length=eval_max_seq_length,
                                        batch_size=batch_size,
                                        num_train_epochs=num_train_epochs,
                                        evaluate_during_training=evaluate_during_training,
                                        save_mode=save_mode,
                                        load_mode=load_mode,
                                        other_config=json.dumps(other_config))

    def create_experiment(self, task, model, dataset=None, experiment_id=None, project_id=None, version=None,
                          model_config=None, optimizer_config=None, data=None, label_list=None):
        """Build, cache, and return an ``Experiment`` message.

        ``data`` and ``label_list`` are JSON-serialized (the proto fields are
        strings); missing ids default to -1 to mean "unset".

        Raises:
            AssertionError: if ``task`` is not one of "ner", "re", "ner_re",
                or the nested config dicts are malformed.
        """
        self.set_optimizer_config(optimizer_config)
        self.set_model_config(model_config)
        assert task in ["ner", "re", "ner_re"]
        if data is not None:
            data = json.dumps(data)
        if label_list is not None:
            label_list = json.dumps(label_list)
        experiment_id = -1 if experiment_id is None else experiment_id
        project_id = -1 if project_id is None else project_id
        version = -1 if version is None else version
        experiment = Experiment(task=task, model=model, dataset=dataset, experiment_id=experiment_id,
                                project_id=project_id, version=version,
                                optimizer=self.optimizer, model_config=self.model_config,
                                data=data, label_list=label_list)
        self.experiment = experiment
        return experiment

    def create_predict_request(self, text, task, model, dataset=None, version=None, experiment_id=None, project_id=None,
                       model_config=None, label_list=None):
        """Build and return a ``PredictRequest`` message.

        Args:
            text: non-empty list of strings to run prediction on; it is
                JSON-serialized into the request's string field.

        Raises:
            AssertionError: if ``task`` is invalid or ``text`` is not a
                non-empty list of strings.
        """
        assert task in ["ner", "re", "ner_re"]
        assert isinstance(text, list)
        # Validate every element (the original only checked text[0], which
        # also crashed with IndexError on an empty list).
        assert text and all(isinstance(t, str) for t in text)
        self.set_model_config(model_config)
        experiment_id = -1 if experiment_id is None else experiment_id
        project_id = -1 if project_id is None else project_id
        version = -1 if version is None else version
        if label_list is not None:
            label_list = json.dumps(label_list)
        request = PredictRequest(text=json.dumps(text), task=task, model=model, project_id=project_id,
                                 version=version, experiment_id=experiment_id, dataset=dataset,
                                 model_config=self.model_config, label_list=label_list)
        return request


def texttool_to_config(texttool_request: TexttoolRequest):
    """Convert a ``TexttoolRequest`` protobuf message into a plain config dict.

    JSON-encoded string fields (``other_config``, optimizer params, data and
    label lists) are decoded back into Python objects.
    """
    model_config = MessageToDict(texttool_request.model_config, preserving_proto_field_name=True)
    if "other_config" in model_config:
        # other_config travels as a JSON string; flatten its keys back into
        # the model_config dict.
        model_config.update(json.loads(model_config.pop("other_config")))

    optimizer = {
        "optimizer_class": texttool_request.optimizer_config.optimizer_class,
        "params": json.loads(texttool_request.optimizer_config.params),
    }

    return {
        "main": {"model": texttool_request.model, "version": texttool_request.version,
                 "task": texttool_request.task, "project_id": texttool_request.project_id},
        "model_config": model_config,
        "optimizer": optimizer,
        "train_data": json.loads(texttool_request.train_data),
        "test_data": json.loads(texttool_request.test_data),
        "label_list": json.loads(texttool_request.label_list),
        "acquire": texttool_request.acquire,
    }


def get_optimizer_config(optimizer_config=None):
    """Build an ``OptimizerConfig`` message from an optional dict.

    Args:
        optimizer_config: optional dict with keys ``optimizer_class`` (str,
            default ``"AdamW"``) and ``params`` (dict, default
            ``{"lr": 3e-5, "eps": 1e-8}``). Missing keys fall back to the
            defaults.

    Returns:
        An ``OptimizerConfig`` with ``params`` JSON-serialized (the proto
        field is a string).
    """
    if optimizer_config is not None:
        assert isinstance(optimizer_config, dict)
        # Only validate "params" when it is present; a missing key falls back
        # to the default below. (The original indexed the key unconditionally,
        # raising KeyError and making the .get() default unreachable.)
        if "params" in optimizer_config:
            assert isinstance(optimizer_config["params"], dict)
    else:
        optimizer_config = {}
    optimizer_class = optimizer_config.get("optimizer_class", "AdamW")
    params = optimizer_config.get("params", {"lr": 3e-5, "eps": 1e-8})
    return OptimizerConfig(optimizer_class=optimizer_class, params=json.dumps(params))


def get_model_config(model_config=None):
    """Build a ``ModelConfig`` message from an optional dict.

    Known keys map to their proto fields; unknown keys are folded into the
    JSON-encoded ``other_config`` field (consistent with
    ``PbHelper.set_model_config`` — the original silently dropped them).

    Args:
        model_config: optional dict; every key is optional and defaults as
            listed below. ``other_config``, when given, must be a dict.
    """
    known_params = {"train_max_seq_length", "eval_max_seq_length", "batch_size", "other_config",
                    "num_train_epochs", "evaluate_during_training", "save_mode", "load_mode"}

    if model_config is not None:
        assert isinstance(model_config, dict)
        if "other_config" in model_config:
            assert isinstance(model_config["other_config"], dict)
    else:
        model_config = {}

    train_max_seq_length = model_config.get("train_max_seq_length", 128)
    eval_max_seq_length = model_config.get("eval_max_seq_length", 128)
    batch_size = model_config.get("batch_size", 32)
    num_train_epochs = model_config.get("num_train_epochs", 3)
    evaluate_during_training = model_config.get("evaluate_during_training", True)
    save_mode = model_config.get("save_mode", "all")
    load_mode = model_config.get("load_mode", "all")
    # Copy before merging so the caller's nested "other_config" dict is never
    # mutated through the shared reference.
    other_config = dict(model_config.get("other_config", {}))
    for key, value in model_config.items():
        if key not in known_params:
            other_config[key] = value

    return ModelConfig(train_max_seq_length=train_max_seq_length,
                       eval_max_seq_length=eval_max_seq_length,
                       batch_size=batch_size,
                       num_train_epochs=num_train_epochs,
                       evaluate_during_training=evaluate_during_training,
                       save_mode=save_mode,
                       load_mode=load_mode,
                       other_config=json.dumps(other_config))


def create_texttool_request(task, model, label_list, train_data, test_data, project_id, version, model_config=None,
                            optimizer_config=None, acquire=5):
    """Build a ``TexttoolRequest`` message.

    ``label_list``, ``train_data``, and ``test_data`` are JSON-serialized
    (the proto fields are strings).

    Raises:
        AssertionError: if ``task`` is not one of "ner", "re", "ner_re", or
            any of ``label_list`` / ``train_data`` / ``test_data`` is not a
            list.
    """
    # Fail fast: validate inputs BEFORE building the protobuf configs (the
    # original constructed them first and validated afterwards).
    assert task in ["ner", "re", "ner_re"]
    assert isinstance(label_list, list)
    assert isinstance(train_data, list)
    assert isinstance(test_data, list)

    optimizer_config = get_optimizer_config(optimizer_config)
    model_config = get_model_config(model_config)

    return TexttoolRequest(task=task, model=model, label_list=json.dumps(label_list),
                           train_data=json.dumps(train_data), test_data=json.dumps(test_data),
                           version=version, project_id=project_id, optimizer_config=optimizer_config,
                           model_config=model_config, acquire=acquire)