import torch
from torch import nn


class SSQNet(nn.Module):
    """LSTM sequence classifier: LSTM -> flatten over time -> MLP -> softmax.

    Args:
        input_size: number of features per time step.
        hidden_size: LSTM hidden state size.
        num_layers: number of stacked LSTM layers.
        num_classes: UNUSED — kept for backward compatibility; the output
            layer is hard-coded to 2 classes. NOTE(review): consider wiring
            this into the final Linear layer in a follow-up.
        sample_nums: expected sequence length (number of time steps); the
            flattened LSTM output has hidden_size * sample_nums features.
    """

    def __init__(self, input_size, hidden_size, num_layers, num_classes, sample_nums):
        super().__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.lstm = nn.LSTM(input_size, hidden_size, num_layers, batch_first=True)
        # MLP head with dropout between layers (p=0.2, i.e. drop 20% of units).
        self.net_droped = nn.Sequential(
            nn.Linear(hidden_size * sample_nums, 256),
            nn.Dropout(0.2),
            nn.ReLU(),
            nn.Linear(256, 64),
            nn.Dropout(0.2),
            nn.ReLU(),
            nn.Linear(64, 16),
            nn.Dropout(0.2),
            nn.ReLU(),
            nn.Linear(16, 2),
        )
        self.m = nn.Softmax(dim=1)

    def forward(self, x):
        """Run a forward pass.

        Args:
            x: tensor of shape (batch, seq_len, input_size); seq_len must
               equal the `sample_nums` the model was built with.

        Returns:
            Tensor of shape (batch, 2): class probabilities (softmax).
        """
        # Initial hidden/cell states on the SAME device/dtype as the input.
        # BUG FIX: previously these were moved to CUDA whenever CUDA was
        # *available*, crashing when the model itself lived on the CPU.
        h0 = torch.zeros(self.num_layers, x.size(0), self.hidden_size,
                         device=x.device, dtype=x.dtype)
        c0 = torch.zeros(self.num_layers, x.size(0), self.hidden_size,
                         device=x.device, dtype=x.dtype)

        # LSTM output: (batch, seq_len, hidden_size); flatten time + feature
        # dims so the MLP head sees hidden_size * sample_nums features.
        out, _ = self.lstm(x, (h0, c0))
        out = out.reshape(out.size(0), -1)

        out = self.net_droped(out)

        # NOTE(review): if training uses nn.CrossEntropyLoss, this softmax
        # should be removed (CrossEntropyLoss expects raw logits) — confirm
        # against the training loop.
        out = self.m(out)
        return out


