from Nets.autoencoder import Autoencoder
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import TensorDataset, DataLoader
import h5py
import matplotlib.pyplot as plt

# Select GPU when available, otherwise fall back to CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Instantiate the autoencoder and move its parameters to the training device.
autoencoder = Autoencoder()
autoencoder = autoencoder.to(device)

# Load the profiling traces from the ASCAD dataset as float32.
# The context manager guarantees the HDF5 file is closed even if reading raises.
with h5py.File('datasets/ASCAD.h5', 'r') as in_file:
    input_data = torch.tensor(in_file['Profiling_traces/traces'][:], dtype=torch.float32)

# Loss: mean absolute error between the reconstruction and the input,
# optimized with Adam at a fixed learning rate.
criterion = nn.L1Loss().to(device)
optimizer = optim.Adam(autoencoder.parameters(), lr=0.001)

# For reconstruction training the inputs serve as their own targets.
dataset = TensorDataset(input_data, input_data)
dataloader = DataLoader(dataset, batch_size=256, shuffle=True)

# Per-epoch average training loss, used for the loss curve below.
loss_list = []

# Train the autoencoder.
num_epochs = 200
for epoch in range(num_epochs):
    autoencoder.train()  # make train/eval mode explicit (dropout, batchnorm, ...)
    epoch_loss = 0.0
    num_batches = 0
    for inputs, _ in dataloader:
        inputs = inputs.to(device)
        optimizer.zero_grad()
        outputs = autoencoder(inputs)
        loss = criterion(outputs, inputs)
        loss.backward()
        optimizer.step()
        epoch_loss += loss.item()
        num_batches += 1
    # Average over all mini-batches: the previous code reported only the last
    # batch's loss, which is a noisy estimate of the epoch's progress.
    avg_loss = epoch_loss / num_batches
    # Save a checkpoint every epoch (overwrites the previous file).
    torch.save(autoencoder.state_dict(), 'autoencoder.pth')
    # Report progress.
    print('Epoch [{}/{}], Loss: {:.4f}'.format(epoch + 1, num_epochs, avg_loss))
    loss_list.append(avg_loss)

# Visualize how the training loss evolved across epochs.
plt.plot(loss_list)
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.show()


