"""
Transformer时间序列预测（适合复杂非线性关系） 训练速度慢 数据大
优势：
捕捉长期依赖关系
并行计算效率高
适合高波动性市场

混合方法：将Prophet预测结果作为特征输入Transformer模型
"""
import torch
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader


# Transformer model definition
class ElectricityTransformer(nn.Module):
    """Seq2seq Transformer for electricity-load forecasting.

    Inputs are batch-first: ``src`` is (batch, src_len, input_dim) and
    ``tgt`` is (batch, tgt_len, input_dim); output is (batch, tgt_len, 1).

    Args:
        input_dim: number of features per time step.
        d_model: Transformer embedding width.
        nhead: number of attention heads.
        num_layers: encoder/decoder depth.
        max_len: longest sequence supported by the positional embedding.
    """

    def __init__(self, input_dim=1, d_model=64, nhead=4, num_layers=3, max_len=512):
        super().__init__()
        self.encoder = nn.Linear(input_dim, d_model)
        # Learned positional embedding: without it, self-attention is
        # permutation-invariant and cannot exploit temporal order.
        self.pos_embedding = nn.Parameter(torch.zeros(1, max_len, d_model))
        self.transformer = nn.Transformer(
            d_model=d_model,
            nhead=nhead,
            num_encoder_layers=num_layers,
            num_decoder_layers=num_layers,
            batch_first=True,  # match the (batch, seq, feature) DataLoader output
        )
        self.decoder = nn.Linear(d_model, 1)

    def forward(self, src, tgt):
        src = self.encoder(src) + self.pos_embedding[:, : src.size(1)]
        tgt = self.encoder(tgt) + self.pos_embedding[:, : tgt.size(1)]
        # Causal mask so decoder position t cannot attend to future targets
        # during teacher forcing.
        tgt_mask = nn.Transformer.generate_square_subsequent_mask(
            tgt.size(1)
        ).to(tgt.device)
        output = self.transformer(src, tgt, tgt_mask=tgt_mask)
        return self.decoder(output)


# Data preprocessing
def create_sequences(data, seq_length=168, horizon=24):
    """Slice a series into (input window, forecast target) training pairs.

    Args:
        data: indexable series (list, tensor, array) of length T.
        seq_length: length of each input window (default: one week of hours).
        horizon: number of future steps used as the label (default: one day).

    Returns:
        List of ``(seq, label)`` tuples where ``seq`` covers ``seq_length``
        steps and ``label`` covers the following ``horizon`` steps.
    """
    sequences = []
    # "+ 1" keeps the final window that ends exactly at the series end;
    # the previous bound dropped that last valid sample (off-by-one).
    for i in range(len(data) - seq_length - horizon + 1):
        seq = data[i:i + seq_length]
        label = data[i + seq_length:i + seq_length + horizon]
        sequences.append((seq, label))
    return sequences


# Hyperparameter settings
seq_length = 168  # input window: past week of hourly data
batch_size = 32
epochs = 50

# Example training workflow
if __name__ == "__main__":
    # Stand-in for preprocessed data: a (T, 1) tensor of random values.
    dataset = create_sequences(torch.randn(5000, 1), seq_length)
    loader = DataLoader(dataset, batch_size=batch_size, shuffle=True)

    model = ElectricityTransformer()
    criterion = nn.MSELoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

    # Training loop
    model.train()
    for epoch in range(epochs):
        epoch_loss = 0.0
        for src, tgt in loader:
            # Teacher forcing: shift along the TIME axis (dim 1), not the
            # batch axis — DataLoader tensors are batch-first, so the old
            # tgt[:-1] sliced away whole samples instead of time steps.
            output = model(src, tgt[:, :-1])
            loss = criterion(output, tgt[:, 1:])
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            epoch_loss += loss.item()
        # Report the epoch-average loss rather than the last batch only.
        print(f"Epoch {epoch} Loss: {epoch_loss / len(loader):.4f}")