import os
import pytorch_lightning as pl
from torch.utils.data import DataLoader, random_split
import torch

from dpo_dataset import DPODataset


class DPODataModule(pl.LightningDataModule):
    """LightningDataModule that wraps :class:`DPODataset` and splits it
    into train / validation / test subsets.

    Args:
        data_dir: Directory passed straight through to ``DPODataset``.
        batch_size: Batch size used by all three dataloaders.
        num_episodes: Stored for callers/config; not used in this module's
            visible code — TODO(review): confirm it is consumed elsewhere.
        history_step: Number of history steps forwarded to ``DPODataset``.
        future_step: Number of future steps forwarded to ``DPODataset``.
        pos_max_val: Static position maximum (stored only; presumably a
            normalization constant used downstream — confirm).
        vel_max_val: Static velocity maximum (stored only; see above).
        val_split: Fraction of the dataset used for validation.
        test_split: Fraction of the dataset used for testing.
    """

    # Fixed seed so the random train/val/test partition is identical across
    # runs, Lightning stages, and DDP ranks. Without it, random_split()
    # produces a different partition every time setup() is called, which can
    # leak validation/test samples into training between stages.
    SPLIT_SEED = 42

    def __init__(self, data_dir: str, batch_size=32, num_episodes=4, history_step=20, future_step=30, pos_max_val=200,
                 vel_max_val=50,  val_split=0.1, test_split=0.1):
        super().__init__()
        self.data_dir = data_dir
        self.batch_size = batch_size
        self.num_episodes = num_episodes

        self.history_step = history_step
        self.future_step = future_step
        self.pos_max_val = pos_max_val  # static max value passed in (normalization constant?)
        self.vel_max_val = vel_max_val  # static max value passed in (normalization constant?)

        self.val_split = val_split
        self.test_split = test_split

    def setup(self, stage=None):
        """Load the full dataset and split it into train/val/test subsets.

        Called by Lightning once per stage (fit/validate/test); the seeded
        generator guarantees every call produces the same partition.
        """
        # Load the entire dataset.
        full_dataset = DPODataset(self.data_dir,
                                  self.history_step,
                                  self.future_step)

        # Compute split sizes; train gets the remainder so the three sizes
        # always sum exactly to len(full_dataset).
        total_size = len(full_dataset)
        val_size = int(total_size * self.val_split)
        test_size = int(total_size * self.test_split)
        train_size = total_size - val_size - test_size

        # Deterministic split: a fixed-seed generator keeps the partition
        # stable across repeated setup() calls and across processes.
        generator = torch.Generator().manual_seed(self.SPLIT_SEED)
        self.train_dataset, self.val_dataset, self.test_dataset = random_split(
            full_dataset, [train_size, val_size, test_size], generator=generator
        )

    def train_dataloader(self):
        """Return the training dataloader (shuffled each epoch)."""
        return DataLoader(self.train_dataset, batch_size=self.batch_size, shuffle=True)

    def val_dataloader(self):
        """Return the validation dataloader (fixed order)."""
        return DataLoader(self.val_dataset, batch_size=self.batch_size, shuffle=False)

    def test_dataloader(self):
        """Return the test dataloader (fixed order)."""
        return DataLoader(self.test_dataset, batch_size=self.batch_size, shuffle=False)
