from torch.utils.data import Dataset
import torch.nn as nn
import torch.nn.functional as F
import torch


class Mydataset(Dataset):
    """Minimal map-style dataset over two index-aligned tensors.

    ``x`` holds one input sample per row and ``y`` the matching target
    rows. Assumes ``x`` and ``y`` are the same length along dim 0 —
    TODO confirm at the call site; no check is performed here.
    """

    def __init__(self, x, y):
        # Stored as-is; no copies are made.
        self.x = x
        self.y = y

    def __getitem__(self, index):
        # Index only the first dimension: x[index] == x[index, :] for
        # 2-D tensors, and this additionally works for 1-D targets
        # (the original x[index, :] would raise on a 1-D tensor).
        return self.x[index], self.y[index]

    def __len__(self):
        # Number of samples = size of the first dimension; works for
        # tensors of any rank >= 1 (the original unpacking required
        # exactly two dimensions).
        return self.x.shape[0]
    
class attitudePolicy(nn.Module):
    """Two-branch MLP attitude-control policy.

    Two parallel encoders ("x" and "y" branches, 7 -> 50 -> 100) process
    the same 7-feature input; their outputs are concatenated (200) and fed
    through a 400-wide trunk down to 2 outputs. Every hidden layer uses
    LayerNorm followed by tanh, and the final layer is tanh-squashed as
    well, so outputs lie in (-1, 1).

    Attribute names are kept exactly as in the original so existing
    ``state_dict`` checkpoints still load.
    """

    def __init__(self):
        super().__init__()
        # "x" branch encoder: 7 -> 50 -> 100
        self.fc1_x = nn.Linear(7, 50)
        self.bn1_x = nn.LayerNorm(50)
        self.fc2_x = nn.Linear(50, 100)
        self.bn2_x = nn.LayerNorm(100)

        # "y" branch encoder: 7 -> 50 -> 100 (same shape, separate weights)
        self.fc1_y = nn.Linear(7, 50)
        self.bn1_y = nn.LayerNorm(50)
        self.fc2_y = nn.Linear(50, 100)
        self.bn2_y = nn.LayerNorm(100)

        # Shared trunk over the concatenated branches: 200 -> 400 x4 -> 200 -> 100 -> 2
        self.fc3 = nn.Linear(200, 400)
        self.bn3 = nn.LayerNorm(400)
        self.fc4 = nn.Linear(400, 400)
        self.bn4 = nn.LayerNorm(400)
        self.fc5 = nn.Linear(400, 400)
        self.bn5 = nn.LayerNorm(400)
        self.fc6 = nn.Linear(400, 400)
        self.bn6 = nn.LayerNorm(400)
        self.fc7 = nn.Linear(400, 200)
        self.bn7 = nn.LayerNorm(200)
        self.fc8 = nn.Linear(200, 100)
        self.bn8 = nn.LayerNorm(100)
        self.fc9 = nn.Linear(100, 2)

    def forward(self, x):
        """Map a (batch, 7) input — or a single (7,) sample — to (batch, 2).

        A 1-D input is promoted to a batch of one, so the return shape is
        always 2-D: (1, 2) for a single sample.
        """
        if x.dim() == 1:  # single unbatched sample -> (1, n_features)
            x = x.unsqueeze(0)

        # Parallel branch encoders over the same input.
        # torch.tanh replaces the deprecated F.tanh throughout.
        hx = torch.tanh(self.bn1_x(self.fc1_x(x)))
        hx = torch.tanh(self.bn2_x(self.fc2_x(hx)))

        hy = torch.tanh(self.bn1_y(self.fc1_y(x)))
        hy = torch.tanh(self.bn2_y(self.fc2_y(hy)))

        # Merge the branches and run the shared trunk.
        h = torch.cat((hx, hy), dim=1)
        h = torch.tanh(self.bn3(self.fc3(h)))
        h = torch.tanh(self.bn4(self.fc4(h)))
        h = torch.tanh(self.bn5(self.fc5(h)))
        h = torch.tanh(self.bn6(self.fc6(h)))
        h = torch.tanh(self.bn7(self.fc7(h)))
        h = torch.tanh(self.bn8(self.fc8(h)))

        # Final layer is also tanh-squashed -> each output in (-1, 1).
        return torch.tanh(self.fc9(h))