import numpy as np
import pandas as pd
import torch
from pmdarima import auto_arima
from statsmodels.tsa.arima.model import ARIMA
from statsmodels.tsa.holtwinters import ExponentialSmoothing
from torch import nn


class MyLSTMModel(nn.Module):
    input_dim = 5
    lstm_hidden_dim = 256
    embed_dim = 8
    output_dim = 1
    num_layers = 1

    def __init__(self, num_cities, num_attrs):
        super().__init__()

        # 嵌入层
        self.city_embedding = nn.Embedding(num_cities, self.embed_dim)
        self.attr_embedding = nn.Embedding(num_attrs, self.embed_dim)

        self.embed_dropout = nn.Dropout(0.2)

        # LSTM
        self.lstm = nn.LSTM(input_size=MyLSTMModel.input_dim + self.embed_dim * 2,
                            hidden_size=MyLSTMModel.lstm_hidden_dim,
                            num_layers=MyLSTMModel.num_layers,
                            batch_first=True)

        # FC 层
        self.fc_stack = nn.Sequential(
            nn.Linear(MyLSTMModel.lstm_hidden_dim, 32),
            nn.ReLU(),
            nn.Linear(32, MyLSTMModel.output_dim)
        )

    def forward(self, x, city_ids, attr_ids):
        # 嵌入向量 shape: (batch_size, embed_dim)
        city_emb = self.embed_dropout(self.city_embedding(city_ids))
        attr_emb = self.embed_dropout(self.attr_embedding(attr_ids))

        # 拼接到输入序列上 (广播 + expand)
        batch_size, seq_len, _ = x.shape
        embed_stack = torch.cat([city_emb, attr_emb], dim=-1)
        embed_stack = embed_stack.unsqueeze(1).expand(-1, seq_len, -1)  # shape: (batch, seq, embed_dim * 3)

        # 合并
        x = torch.cat([x, embed_stack], dim=-1)  # (batch, seq_len, input + embed)

        lstm_out, _ = self.lstm(x)
        last_output = lstm_out[:, -1, :]
        return self.fc_stack(last_output)


class ARIMABaseline:
    """Univariate ARIMA forecasting baseline.

    Wraps either pmdarima's ``auto_arima`` (automatic order selection) or a
    fixed-order statsmodels ``ARIMA`` behind one fit/predict interface.
    """

    def __init__(self, use_auto_arima=True, order=(2, 1, 2)):
        """
        :param use_auto_arima: pick (p, d, q) automatically via pmdarima
        :param order: fixed (p, d, q) used when ``use_auto_arima`` is False
        """
        self.use_auto_arima = use_auto_arima
        self.order = order
        self.model = None  # fitted model; None until fit() succeeds

    def fit(self, X):
        """Fit on a 1-D series (torch.Tensor, list, pd.Series or ndarray)."""
        if isinstance(X, torch.Tensor):
            # detach + cpu so tensors that require grad or live on GPU
            # convert without raising
            X = X.detach().cpu().numpy().astype(np.float64)
        elif isinstance(X, (list, pd.Series)):
            X = np.array(X, dtype=np.float64)

        # Collapse column vectors such as (n, 1) down to 1-D.
        if X.ndim > 1:
            X = X.squeeze()

        if self.use_auto_arima:
            self.model = auto_arima(X, seasonal=False, suppress_warnings=True)
        else:
            self.model = ARIMA(X, order=self.order).fit()
        return self

    def predict(self, n_periods=1):
        """Forecast the next ``n_periods`` values."""
        if self.model is None:
            raise ValueError("Model not fitted yet!")
        if self.use_auto_arima:
            # pmdarima API: predict(n_periods=...) forecasts out of sample.
            return self.model.predict(n_periods=n_periods)
        # statsmodels ARIMAResults.predict() has no ``n_periods`` argument and
        # defaults to *in-sample* predictions; forecast() is the correct
        # out-of-sample call.
        return self.model.forecast(steps=n_periods)


class HoltWintersBaseline:
    """Holt-Winters (triple exponential smoothing) forecasting baseline."""

    def __init__(self,
                 seasonal_periods=7,
                 trend='add',
                 seasonal='add',
                 start_date='2023-07-28',
                 freq='W'):
        """
        Holt-Winters triple exponential smoothing wrapper.

        :param seasonal_periods: season length (e.g. 12 for monthly data,
            7 for weekly data)
        :param trend: trend component type ('add' additive, 'mul' multiplicative)
        :param seasonal: seasonal component type ('add', 'mul')
        :param start_date: first timestamp of the synthetic date index
        :param freq: pandas frequency string for the date index
        """
        self.seasonal_periods = seasonal_periods
        self.trend = trend
        self.seasonal = seasonal
        self.model = None  # fitted results; None until fit() succeeds
        self.start_date = start_date
        self.freq = freq

    def fit(self, X):
        """Fit on a 1-D series (torch.Tensor, list, pd.Series or ndarray)."""
        # Normalise input to a 1-D float64 ndarray, mirroring ARIMABaseline.
        # Converting pd.Series is essential: pd.Series(series, index=dates)
        # would *reindex by label* and yield an all-NaN series.
        if isinstance(X, torch.Tensor):
            X = X.detach().cpu().numpy().astype(np.float64)
        elif isinstance(X, (list, pd.Series)):
            X = np.array(X, dtype=np.float64)
        if isinstance(X, np.ndarray) and X.ndim > 1:
            X = X.squeeze()

        # statsmodels wants a date-indexed series for frequency-aware output.
        dates = pd.date_range(start=self.start_date, periods=len(X), freq=self.freq)
        data = pd.Series(X, index=dates)

        self.model = ExponentialSmoothing(
            data,
            trend=self.trend,
            seasonal=self.seasonal,
            seasonal_periods=self.seasonal_periods
        ).fit(optimized=True)

        return self

    def predict(self, n_periods=1):
        """Forecast the next ``n_periods`` values."""
        if self.model is None:
            raise ValueError("Model not fitted yet!")
        return self.model.forecast(steps=n_periods)
