import abc
from torch.nn import Module, LSTM, Linear


class ModelConfig:
    """Configuration container for a model.

    Constructed with neutral defaults; callers populate the fields
    after construction (e.g. from a parsed config file).
    """

    def __init__(self) -> None:
        # --- identification / code loading ---
        self.name = ""
        self.module_file = ""
        self.module_name = ""
        # NOTE(review): attribute name contains a typo ("weigths") — kept
        # as-is for backward compatibility with existing callers.
        self.model_weigths_file = ""
        # --- input descriptions and preprocessing ---
        self.inputs = {}
        self.input_gaps = {}
        self.inputs_need_preprocess = False
        self.mean = []
        self.std = []
        # --- output descriptions and queueing ---
        self.outputs = {}
        self.input_output_queue_size = 1024
        # --- online (re)training settings ---
        self.online_training_interval = 0
        self.online_training_mode = False
        # --- run start/stop condition lists ---
        self.stop_conditions = []
        self.start_conditions = []


class BaseModel(metaclass=abc.ABCMeta):
    """Abstract interface that every concrete model must implement."""

    @abc.abstractmethod
    def __init__(self, config: ModelConfig):
        """Store the shared state; subclasses must call super().__init__()."""
        self._config = config
        self.output_table_num = 0

    @abc.abstractmethod
    def train(self, data):
        """Train (or incrementally update) the model on *data*."""

    @abc.abstractmethod
    def run(self):
        """Execute the model's main inference step."""

    @abc.abstractmethod
    def load_weights(self, weights_path):
        """Load model weights from *weights_path*."""

    @abc.abstractmethod
    def update_input_data(self, data):
        """Feed new input *data* into the model."""

    @abc.abstractmethod
    def get_output_data(self):
        """Return the model's latest output data."""

    @abc.abstractmethod
    def get_output_varnames(self):
        """Return the names of the model's output variables."""

    @abc.abstractmethod
    def get_model_name(self):
        """Return the model's name."""


class LstmNet(Module):
    """Prediction network: a stacked LSTM followed by a per-timestep
    linear regression head. Extend this structure as needed.

    Args:
        input_size: number of input variables per timestep.
        hidden_size: width of the LSTM hidden state.
        lstm_layers: number of stacked LSTM layers.
        dropout_rate: inter-layer dropout (per torch docs, only effective
            when lstm_layers > 1).
        output_size: number of regression outputs per timestep.
    """

    def __init__(self, input_size, hidden_size, lstm_layers, dropout_rate, output_size):
        super().__init__()
        # batch_first=True -> tensors are laid out (batch, seq, features)
        self.lstm = LSTM(
            input_size=input_size,
            hidden_size=hidden_size,
            num_layers=lstm_layers,
            batch_first=True,
            dropout=dropout_rate,
        )
        self.linear = Linear(
            in_features=hidden_size,
            out_features=output_size,
        )

    def forward(self, x, hidden=None):
        """Return (per-timestep predictions, final LSTM hidden state)."""
        seq_features, hidden = self.lstm(x, hidden)
        return self.linear(seq_features), hidden
