import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader,TensorDataset
import numpy as np

"""
torch.n.RNN 基本的RNN单元
torch.n.LSTM 长短期记忆单元,能够学习长期依赖
torch.nn.GRU 门控循环单元,是LSTM的简化版本,更容易训练
"""

class SimpleRNN(nn.Module):
    """Single-layer RNN followed by a linear classification head.

    Input:  x of shape (batch_size, seq_len, input_size)
    Output: logits of shape (batch_size, output_size), computed from the
            hidden state at the final time step only.
    """

    def __init__(self, input_size, hidden_size, output_size):
        super(SimpleRNN, self).__init__()
        # batch_first=True so tensors are laid out (batch, seq, feature)
        self.rnn = nn.RNN(input_size, hidden_size, batch_first=True)
        self.fc = nn.Linear(hidden_size, output_size)

    def forward(self, x):
        """Run the RNN over x and classify from the last time step."""
        rnn_out, _ = self.rnn(x)       # (batch_size, seq_len, hidden_size)
        last_step = rnn_out[:, -1, :]  # keep only the final step's hidden state
        logits = self.fc(last_step)    # (batch_size, output_size)
        return logits

# ---- Synthetic data, model, and optimizer setup ---------------------------
num_samples = 1000
seq_len = 10
input_size = 5
output_size = 2  # treat this as a 2-class classification problem

# Random input sequences: (num_samples, seq_len, input_size)
X = torch.randn(num_samples, seq_len, input_size)
# Random integer class labels in [0, output_size)
Y = torch.randint(0, output_size, (num_samples,))

# Wrap the tensors in a DataLoader for shuffled mini-batches.
# (Fix: the original built TensorDataset(X, Y) a second time after the
# DataLoader was created; the duplicate was dead code and is removed.)
dataset = TensorDataset(X, Y)
train_loader = DataLoader(dataset, batch_size=32, shuffle=True)

model = SimpleRNN(input_size=input_size, hidden_size=64, output_size=output_size)
criterion = nn.CrossEntropyLoss()  # multi-class cross-entropy; expects raw logits
optimizer = optim.Adam(model.parameters(), lr=0.01)
num_epoch = 10
# ---- Training loop: track running loss and accuracy per epoch -------------
for epoch in range(num_epoch):
    model.train()
    running_loss = 0.0
    num_correct = 0
    num_seen = 0
    for batch_x, batch_y in train_loader:
        logits = model(batch_x)
        batch_loss = criterion(logits, batch_y)

        # Standard backward pass: clear stale grads, backprop, update.
        optimizer.zero_grad()
        batch_loss.backward()
        optimizer.step()

        running_loss += batch_loss.item()
        preds = logits.argmax(dim=1)  # predicted class per sample
        num_seen += batch_y.size(0)
        num_correct += (preds == batch_y).sum().item()
    accuracy = 100 * num_correct / num_seen
    print(f'Epoch[{epoch+1}/{num_epoch}],loss:{running_loss/len(train_loader):.4f},Accuracy:{accuracy:.2f}%')


# ---- Evaluation -----------------------------------------------------------
# NOTE(review): this evaluates on train_loader, i.e. the training data —
# there is no held-out test split in this script.
model.eval()
with torch.no_grad():  # no gradients needed for inference
    seen = 0
    hit = 0
    for batch_x, batch_y in train_loader:
        logits = model(batch_x)
        preds = logits.argmax(dim=1)
        seen += batch_y.size(0)
        hit += (preds == batch_y).sum().item()
    accuracy = 100 * hit / seen
    print(f'ACCURACY:{accuracy:.3f}%')