import torch.nn as nn
import torch
import numpy
import torch
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


class CriticNetwork(nn.Module):
    """State-action value (Q) network: maps (state, action) -> a scalar value.

    Args:
        state_dim: dimensionality of the state vector.
        action_dim: dimensionality of the action vector.
        hidden_dim: width of the two hidden layers (default 128).
    """

    def __init__(self, state_dim, action_dim, hidden_dim=128):
        super(CriticNetwork, self).__init__()
        # Bug fix: the input layer must accept state_dim + action_dim features.
        # The original hard-coded "state_dim + 1", which ignored action_dim and
        # broke (cat/Linear size mismatch) for any action_dim > 1.
        self.fc1 = nn.Linear(state_dim + action_dim, hidden_dim)
        self.fc2 = nn.Linear(hidden_dim, hidden_dim)
        self.out = nn.Linear(hidden_dim, 1)
        self.activation = nn.ReLU()

    def forward(self, state, action):
        """Return Q(state, action).

        Args:
            state: tensor of shape (..., state_dim).
            action: tensor of shape (..., action_dim), or a plain Python/NumPy
                scalar (promoted to a 1-D tensor for the action_dim == 1 case).

        Returns:
            Tensor of shape (..., 1) with the estimated Q-value.
        """
        if isinstance(action, (int, float, numpy.integer, numpy.floating)):
            # Promote scalar actions to a tensor on the same device as the
            # network parameters (the original used a module-level global
            # `device`, which can disagree with where the module was moved).
            action = torch.tensor([action], dtype=torch.float,
                                  device=self.fc1.weight.device)
        # Ensure action is at least 1-D so it can be concatenated with state.
        if action.dim() == 0:
            action = action.unsqueeze(0)
        x = torch.cat((state, action), dim=-1)
        x = self.activation(self.fc1(x))
        x = self.activation(self.fc2(x))
        return self.out(x)
