#!/usr/bin/env python3
"""
直接使用.npy文件的数据加载器
"""
import os
import numpy as np
import torch
from torch.utils.data import Dataset, DataLoader
from src.utils.scaler import StandardScaler

class DirectNpyDataset(Dataset):
    """Dataset that serves samples directly from memory-mapped ``.npy`` files.

    Per-channel standardization statistics are fitted once on the input
    array at construction time; the same scalers are applied to both inputs
    and targets in ``__getitem__`` (inputs and targets are assumed to share
    the same physical channels — TODO confirm against the data pipeline).
    """

    def __init__(self, x_path, y_path, output_dim=4, limit_samples=None):
        """
        Args:
            x_path: Path to the input ``.npy`` file (last axis = channels).
            y_path: Path to the target ``.npy`` file (last axis = channels).
            output_dim: Number of leading channels to standardize.
            limit_samples: Optional cap on the number of samples exposed.

        Raises:
            ValueError: If x and y have different sample counts, or either
                array has fewer channels than ``output_dim``.
        """
        self.x_path = x_path
        self.y_path = y_path
        self.output_dim = output_dim
        self.limit = limit_samples

        # mmap keeps the (potentially huge) arrays on disk; only the
        # slices actually touched are paged into memory.
        self.x = np.load(x_path, mmap_mode='r')
        self.y = np.load(y_path, mmap_mode='r')

        # Fail fast on inconsistent data instead of erroring mid-epoch
        # inside __getitem__ or the scaler loop below.
        if self.x.shape[0] != self.y.shape[0]:
            raise ValueError(
                f"Sample count mismatch: x has {self.x.shape[0]} samples, "
                f"y has {self.y.shape[0]}"
            )
        if self.x.shape[-1] < output_dim or self.y.shape[-1] < output_dim:
            raise ValueError(
                f"output_dim={output_dim} exceeds available channels "
                f"(x: {self.x.shape[-1]}, y: {self.y.shape[-1]})"
            )

        # One StandardScaler per channel, fitted on the inputs.
        self.scalers = []
        for i in range(output_dim):
            mean = np.mean(self.x[..., i])
            std = np.std(self.x[..., i])
            if std == 0:
                std = 1.0  # constant channel: avoid division by zero
            self.scalers.append(StandardScaler(mean, std))

    def __len__(self):
        # Honor the optional sample cap without ever exceeding the file size.
        n = int(self.x.shape[0])
        return n if self.limit is None else min(n, self.limit)

    def __getitem__(self, idx):
        # Copy out of the mmap as contiguous float32 so the in-place
        # normalization below never touches the backing file.
        x = np.ascontiguousarray(self.x[idx], dtype=np.float32)
        y = np.ascontiguousarray(self.y[idx], dtype=np.float32)

        # Standardize each channel with the scaler fitted on the inputs.
        for i in range(self.output_dim):
            x[..., i] = self.scalers[i].transform(x[..., i])
            y[..., i] = self.scalers[i].transform(y[..., i])

        return torch.from_numpy(x), torch.from_numpy(y)

def get_direct_dataloader(datapath, batch_size=8, output_dim=4, num_workers=2, pin_memory=True):
    """Build train/val/test DataLoaders over ``.npy`` files in ``datapath``.

    Expects ``{train,val,test}_{x,y}.npy`` to exist under ``datapath``.

    Args:
        datapath: Directory containing the six ``.npy`` files.
        batch_size: Batch size shared by all three loaders.
        output_dim: Number of channels to standardize (see DirectNpyDataset).
        num_workers: DataLoader worker processes per loader.
        pin_memory: Whether loaders pin host memory for faster GPU transfer.

    Returns:
        Dict with keys ``{split}_loader`` and ``{split}_dataset`` for each
        of the splits ``train``, ``val``, ``test``.

    Raises:
        FileNotFoundError: If any of the six expected files is missing.
    """
    splits = ("train", "val", "test")

    # Resolve the (x, y) file pair for every split.
    paths = {
        split: (os.path.join(datapath, f"{split}_x.npy"),
                os.path.join(datapath, f"{split}_y.npy"))
        for split in splits
    }

    # Fail fast with a clear message before building any dataset.
    for x_path, y_path in paths.values():
        for file_path in (x_path, y_path):
            if not os.path.exists(file_path):
                raise FileNotFoundError(f"Data file not found: {file_path}")

    datasets = {
        split: DirectNpyDataset(x_path, y_path, output_dim)
        for split, (x_path, y_path) in paths.items()
    }

    def _make_loader(dataset, shuffle):
        # Shared loader configuration; only `shuffle` differs per split.
        return DataLoader(
            dataset,
            batch_size=batch_size,
            shuffle=shuffle,
            num_workers=num_workers,
            pin_memory=pin_memory,
            persistent_workers=num_workers > 0,
            prefetch_factor=2 if num_workers > 0 else None,
        )

    result = {}
    for split in splits:
        # Only the training loader shuffles.
        result[f"{split}_loader"] = _make_loader(datasets[split], shuffle=(split == "train"))
    for split in splits:
        result[f"{split}_dataset"] = datasets[split]
    return result

if __name__ == "__main__":
    # Smoke-test the loader pipeline against the default data directory.
    datapath = "/hy-tmp"
    data = get_direct_dataloader(datapath, batch_size=8)

    print("Data loaders created successfully")
    for label, key in (("Train", "train_loader"),
                       ("Val", "val_loader"),
                       ("Test", "test_loader")):
        print(f"{label} batches: {len(data[key])}")

    # Pull a single batch to sanity-check shapes and dtypes.
    X, y = next(iter(data['train_loader']))
    print(f"Batch shapes: X={X.shape}, y={y.shape}")
    print(f"Data types: X={X.dtype}, y={y.dtype}")
