import random
import numpy as np
import torch

class ReplayBuffer:
    """Fixed-capacity FIFO experience replay buffer.

    Stores ``(state, action, reward, next_state)`` tuples and returns
    uniformly sampled mini-batches converted to float32 torch tensors.
    """

    def __init__(self, max_size=10000):
        # Oldest experiences are evicted first once max_size is reached.
        self.buffer = []
        self.max_size = max_size

    def add(self, experience):
        """Append one (state, action, reward, next_state) tuple.

        Evicts the oldest entry when the buffer is full (FIFO).
        """
        if len(self.buffer) >= self.max_size:
            # list.pop(0) is O(n), acceptable for ~10k entries.
            self.buffer.pop(0)
        self.buffer.append(experience)

    def sample(self, batch_size=64):
        """Sample a mini-batch without replacement.

        ``batch_size`` is clamped to the current buffer length, so asking
        for more samples than are stored no longer raises (the previous
        ``np.random.choice(..., replace=False)`` call failed in that case).

        Returns:
            Tuple of float32 tensors: (states, actions, rewards, next_states).

        Raises:
            ValueError: if the buffer is empty.
        """
        if not self.buffer:
            raise ValueError("cannot sample from an empty buffer")
        # random.sample draws without replacement directly; no index gather needed.
        batch = random.sample(self.buffer, min(batch_size, len(self.buffer)))
        states, actions, rewards, next_states = zip(*batch)

        # Some callers store a bound get_state method instead of the state
        # itself; resolve it here (kept for backward compatibility).
        next_states = [ns() if callable(ns) else ns for ns in next_states]

        def _to_numpy(x):
            """Convert a tensor or array-like to a float32 numpy array."""
            if isinstance(x, torch.Tensor):
                # .cpu() so CUDA tensors convert instead of raising.
                return x.detach().cpu().numpy()
            return np.asarray(x, dtype=np.float32)

        # Stack into single ndarrays before tensor construction — building a
        # tensor from a list of ndarrays is slow and emits a warning.
        states_np = np.array([_to_numpy(s) for s in states])
        actions_np = np.array([_to_numpy(a) for a in actions])
        rewards_np = np.array([_to_numpy(r) for r in rewards])
        next_states_np = np.array([_to_numpy(ns) for ns in next_states])

        return (
            torch.tensor(states_np, dtype=torch.float32),
            torch.tensor(actions_np, dtype=torch.float32),
            torch.tensor(rewards_np, dtype=torch.float32),
            torch.tensor(next_states_np, dtype=torch.float32),
        )

    def __len__(self):
        """Number of experiences currently stored."""
        return len(self.buffer)