"""Utility functions for reading the datasets."""

import sys
import os

from torch.utils.data import IterableDataset, Dataset
from torch_geometric.data import InMemoryDataset
from torch_geometric.data import Data

import torch_geometric.nn as nng
from torch_geometric.loader import NeighborSampler as PyG_NeighborSampler
from torch_geometric.utils import k_hop_subgraph

import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import torch
import numpy as np
from torch_geometric.loader import DataLoader as torch_geometric_DataLoader
import h5py
import re


class DOF_Dataset(Dataset):
    """Map-style dataset of DOF tensors stored in one HDF5 file per split.

    Keys in the HDF5 pool are integer-like strings; they are sorted by their
    integer value ("2" before "10") so index order is stable across runs.
    """

    def __init__(
        self, params=None, is_training=True, split="train", device=None, is_norm=False
    ):
        self.params = params
        self.is_training = is_training
        self.device = device
        self.epoch = 0
        self.pool = h5py.File(self.params.dataset_dir + f"/DOF_{split}.h5", "r")
        self.split = split
        self.is_norm = is_norm

        # Sort the string keys numerically, then convert back to strings.
        self.key_list = list(self.pool.keys())
        self.key_list = np.sort([int(key) for key in self.key_list])
        self.key_list = [str(key) for key in self.key_list]
        # NOTE(review): unlike the .h5 path above, no "/" separator is added
        # here — assumes dataset_dir ends with a slash; confirm with callers.
        self.mean_std = torch.from_numpy(
            np.loadtxt(self.params.dataset_dir + "dof_mean_std.txt")
        ).to(torch.float32)

    def __getitem__(self, index):
        """Return the DOF tensor for *index*, normalized when ``is_norm``."""
        dof = torch.from_numpy(self.pool[self.key_list[index]][:]).to(torch.float32)

        if self.is_norm:
            # Move the leading axis last so mean/std broadcast over it.
            dof = dof.permute(1, 2, 0)
            dof -= self.mean_std[0]
            dof /= self.mean_std[1]
            # BUGFIX: permute(1, 2, 0) is not its own inverse — applying it
            # twice leaves the axes scrambled. The inverse permutation is
            # (2, 0, 1), which restores the original layout.
            dof = dof.permute(2, 0, 1)
        return dof

    def len(self):
        # Kept for backward compatibility with existing callers.
        return len(self.key_list)

    def __len__(self):
        # BUGFIX: torch.utils.data.Dataset subclasses must implement
        # __len__ for len(ds) / DataLoader; the ``len`` method above is
        # never called by the sampler machinery.
        return len(self.key_list)


class CarDataset(Dataset):
    """Map-style dataset over an HDF5 file whose groups are keyed by mesh index."""

    def __init__(self, path, mesh_indices):
        super().__init__()
        self.mesh_indices = mesh_indices
        self.dataset_dir = path
        self.file_handle = h5py.File(self.dataset_dir, "r")

    def __getitem__(self, index):
        """Return ``(sample_dict, key)`` with every array fully loaded."""
        key = str(int(self.mesh_indices[index]))
        group = self.file_handle[key]
        # Materialize each dataset in the group into an in-memory array.
        sample = {name: dataset[:] for name, dataset in dict(group).items()}
        return sample, key

    def __len__(self):
        return len(self.mesh_indices)


class CarDataset4UNet(CarDataset):
    """CarDataset variant that returns float32 tensors for a UNet-style model."""

    def __init__(self, path, mesh_indices, gt_exist=True):
        super().__init__(path, mesh_indices)
        self.current_idx = None  # key of the most recently fetched sample
        self.gt_exist = gt_exist  # whether ground-truth pressure is stored

    def __getitem__(self, index):
        raw, key = super().__getitem__(index)
        self.current_idx = key

        def as_f32(array):
            return torch.from_numpy(array).to(torch.float32)

        sample = {}
        sample["node|pos"] = as_f32(raw["node|pos"])
        if self.gt_exist:
            sample["node|pressure"] = as_f32(raw["node|pressure"])
        # SDF volume reshaped to [C, W, H, D] using the grid's spatial dims.
        sample["voxel|sdf"] = as_f32(raw["voxel|sdf"]).reshape(
            1, *raw["voxel|grid"].shape[:-1]
        )
        sample["node|unit_norm_v"] = as_f32(raw["node|unit_norm_v"])
        return sample

    def get_cur_file_idx(self):
        """Key of the sample most recently returned by ``__getitem__``."""
        return self.current_idx


class CarDatasetGraph(CarDataset4UNet):
    """CarDataset variant that packs each sample into a PyG ``Data`` graph."""

    def __init__(self, path, mesh_indices, gt_exist=True):
        super().__init__(path, mesh_indices, gt_exist)

    def __getitem__(self, index):
        # BUGFIX: the raw (un-tensorized) sample comes from CarDataset,
        # which returns ``(dict, key)``. CarDataset4UNet.__getitem__
        # returns only a dict, so the original
        # ``raw_data, _ = super().__getitem__(index)`` raised at runtime.
        raw_data, _ = CarDataset.__getitem__(self, index)
        data = super().__getitem__(index)
        # NOTE(review): ``node|pressure`` is only present when gt_exist=True;
        # this path assumes ground truth exists — confirm with callers.
        # BUGFIX: the constructed Data object was discarded; return it.
        return Data(
            x=data["node|unit_norm_v"],
            pos=data["node|pos"],
            pressure=data["node|pressure"],
            # Face-node connectivity as a long tensor (PyG edge_index dtype).
            edge_index=torch.from_numpy(raw_data["face|face_node"]).to(torch.long),
        )


def GetCarDatasetInfoList(params, path, split: list):
    """Load car-dataset metadata files and split the mesh index list.

    Args:
        params: unused; kept for interface compatibility.
        path: directory containing the dataset files.
        split: boundary indices; each consecutive pair delimits one subset.

    Returns:
        Tuple of (path to train.h5, pressure min/std array, global bounds
        array, list of mesh-index arrays — one per consecutive split pair).
    """
    dataset_dir = os.path.join(path, "train.h5")
    pressure_min_std = np.loadtxt(os.path.join(path, "train_pressure_min_std.txt"))
    bounds = np.loadtxt(os.path.join(path, "watertight_global_bounds.txt"))
    all_mesh_indices = np.loadtxt(
        os.path.join(path, "watertight_meshes.txt")
    ).reshape(-1)

    splited_mesh_indices = [
        all_mesh_indices[lo:hi] for lo, hi in zip(split[:-1], split[1:])
    ]
    return dataset_dir, pressure_min_std, bounds, splited_mesh_indices


class CFDdatasetmap(Dataset):
    """Map-style dataset over per-trajectory groups of an HDF5 split file."""

    def __init__(
        self, params, path, split="train", dataset_type="h5", is_training=False
    ):
        super().__init__()
        self.path = path
        self.split = split
        self.dataset_dir = path
        self.params = params
        self.is_training = is_training
        # Only the HDF5 backend is supported; reject anything else early.
        if dataset_type != "h5":
            raise ValueError("invalid data format")
        self.file_handle = h5py.File(self.dataset_dir + f"/{split}.h5", "r")

    def __getitem__(self, index):
        """Return trajectory *index* as a dict of tensors."""
        group = self.file_handle[str(index)]
        return {name: torch.from_numpy(group[name][:]) for name in group.keys()}

    def __len__(self):
        return len(self.file_handle)


def sort_key_list(in_list: list):
    """Order HDF5 keys: every "A*" key first, then every "B*" key.

    Within each family, keys sort by the integer following the first
    underscore (so "A_2" precedes "A_10"). Keys starting with neither
    "A" nor "B" are dropped.
    """

    def trailing_number(key):
        return int(re.search(r"_(\d+)", key).group(1))

    a_keys = sorted((k for k in in_list if k.startswith("A")), key=trailing_number)
    b_keys = sorted((k for k in in_list if k.startswith("B")), key=trailing_number)
    return a_keys + b_keys


class Data_Pool:
    """Keeps an HDF5 dataset split open and exposes its sorted sample keys.

    The ``pool`` (open h5py File) and ``key_list`` attributes populated by
    :meth:`load_mesh_to_cpu` are consumed by ``GraphCellDataset`` through
    its ``pool`` / ``key_list`` properties.
    """

    def __init__(self, params=None, is_training=True, split="train", device=None):
        self.params = params
        self.is_training = is_training
        self.device = device
        self.epoch = 0
        # NOTE(review): params must not be None despite the default —
        # params.dataset_dir is read unconditionally below.
        self.load_mesh_to_cpu(split=split, dataset_dir=params.dataset_dir)

    def load_mesh_to_cpu(self, split="train", dataset_dir=None):
        """Open ``<dir>/<split>.h5`` read-only and cache its sorted keys.

        Args:
            split: split name used to build the file name.
            dataset_dir: directory holding the .h5 file; falls back to
                ``self.params.dataset_dir`` when None.

        Returns:
            ``self.params.dataset_dir`` — always the configured directory,
            even when an explicit ``dataset_dir`` was opened instead.
        """

        self.valid_pool = []

        if dataset_dir is not None:
            self.pool = h5py.File(dataset_dir + f"/{split}.h5", "r")
        else:
            self.pool = h5py.File(self.params.dataset_dir + f"/{split}.h5", "r")

        # Keys end up grouped "A*" first, then "B*", each sorted by the
        # integer after the first underscore (see sort_key_list).
        self.key_list = list(self.pool.keys())
        self.key_list = sort_key_list(self.key_list)

        # pressure_mean_std = np.loadtxt(
        #     self.params.dataset_dir + "/train_pressure_mean_std.txt"
        # )
        # bounds = np.loadtxt(self.params.dataset_dir + "/global_bounds.txt")

        # voxel_mean_std = torch.from_numpy(
        #     np.loadtxt(self.params.dataset_dir + "/voxel_mean_std.txt")
        # ).to(torch.float32)

        # PNA_mean_std = np.loadtxt(
        #     self.params.dataset_dir + "/PosNormalArea_mean_std.txt"
        # )

        # pos_scale = np.linalg.norm(bounds[0] - bounds[1]) * 0.5

        # self.physics_info = {
        #     "pressure_mean_std": pressure_mean_std,
        #     "bounds": bounds,
        #     "pos_scale": pos_scale,
        #     "voxel_mean_std": voxel_mean_std,
        #     "PNA_mean_std": PNA_mean_std,
        # }

        return self.params.dataset_dir

    @staticmethod
    def datapreprocessing(graph_cell, is_training=False):
        """Randomize edge directions (training only) and build edge features.

        Mutates ``graph_cell`` in place: sets ``edge_index`` (each edge's
        direction flipped with probability 0.5 when training) and
        ``edge_attr`` (difference of the endpoints' concatenated [x, pos]
        features). Returns the same ``graph_cell`` object.
        """

        # graph_node.x = torch.cat((graph_node.x, graph_node.pos, graph_node.ao), dim=-1)
        def randbool(*size, device="cuda"):
            """Return a bool tensor, each element True with 50% probability."""
            return torch.randint(2, size, device=device) == torch.randint(
                2, size, device=device
            )

        graph_cell.ball_edge_index = None

        # Per-cell feature used for relative edge attributes: [x, pos].
        cell_attr = torch.cat((graph_cell.x, graph_cell.pos), dim=-1)

        # permute edge direction
        senders, receivers = graph_cell.edge_index
        if is_training:
            # Same random decision broadcast to both rows of the edge pair.
            random_mask = randbool(1, senders.shape[0], device=senders.device).repeat(
                2, 1
            )
            random_direction_edge = torch.where(
                random_mask,
                torch.stack((senders, receivers), dim=0),
                torch.stack((receivers, senders), dim=0),
            )

        else:
            random_direction_edge = torch.stack((senders, receivers), dim=0)

        # Edge feature: sender attributes minus receiver attributes.
        releative_node_attr = (
            cell_attr[random_direction_edge[0]] - cell_attr[random_direction_edge[1]]
        )

        # graph_cell.x = torch.cat((graph_cell.x, graph_cell.ao), dim=-1)

        graph_cell.edge_index = random_direction_edge
        graph_cell.edge_attr = releative_node_attr

        return graph_cell


class CustomGraphData(Data):
    """PyG ``Data`` subclass with explicit batching rules.

    ``__inc__`` controls the per-sample index offset applied when graphs
    are collated into a batch; ``__cat_dim__`` controls which dimension
    each attribute is concatenated along.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def __inc__(self, key, value, *args, **kwargs):
        # Keys holding node indices must be shifted by the node count of
        # the preceding graphs in the batch.
        node_index_keys = {
            "edge_index",
            "face",
            "cells_node",
            "face_node",
            "cells_face",
            "neighbour_cell",
            "face_node_x",
        }
        if key in node_index_keys:
            return self.num_nodes
        # Plain per-node / per-graph attributes need no offset.
        no_offset_keys = {
            "pos",
            "A_node_to_node",
            "A_node_to_node_x",
            "B_node_to_node",
            "B_node_to_node_x",
            "cell_area",
            "node_type",
            "graph_index",
            "pde_theta",
            "neural_network_output_mask",
            "uvp_dim",
            "dt_graph",
            "x",
            "y",
            "m_ids",
            "m_gs",
            "case_global_index",
        }
        if key in no_offset_keys:
            return 0
        return super().__inc__(key, value, *args, **kwargs)

    def __cat_dim__(self, key, value, *args, **kwargs):
        # edge_index is laid out [2, E] and stacks along dim 1; the listed
        # feature keys concatenate along dim 0; anything else defers to
        # the PyG default.
        if key == "edge_index":
            return 1
        if key in {"x", "pos", "y", "norm_y", "query", "voxel", "graph_index"}:
            return 0
        return super().__cat_dim__(key, value, *args, **kwargs)


class GraphCellDataset(InMemoryDataset):
    """Graph-sample view over a ``Data_Pool``'s HDF5 pool.

    Samples whose key starts with "A" are loaded at node level
    (:meth:`load_A_data`); keys starting with "B" are loaded at cell level
    (:meth:`load_B_data`), optionally restricted to a random k-hop subgraph
    when ``subsampling`` is enabled.

    Args:
        base_dataset: ``Data_Pool`` providing ``pool`` and ``key_list``.
        len_ds: dataset length when ``indices`` is not given.
        indices: optional secondary index mapping; also fixes the length.
        params: config object; ``params.sample_khop`` and
            ``params.num_samples`` are read here.
        subsampling: enable k-hop subgraph sampling for "B" samples.
        sample_ratio: stored but currently unused.  # NOTE(review)
    """

    def __init__(
        self,
        base_dataset,
        len_ds=None,
        indices=None,
        params=None,
        subsampling=False,
        sample_ratio=0.2,
    ):
        super().__init__()
        self.base_dataset = base_dataset
        self._len = len_ds
        self.idx_indices = None
        if indices is not None:
            # An explicit index list both remaps indices and sets the length.
            self._len = len(indices)
            self.idx_indices = indices
        self.params = params
        self.subsampling = subsampling
        self.sample_ratio = sample_ratio
        self.k_hop = self.params.sample_khop

    @property
    def pool(self):
        # Underlying open HDF5 file, shared with the base Data_Pool.
        return self.base_dataset.pool

    @property
    def key_list(self):
        # Sorted sample keys, shared with the base Data_Pool.
        return self.base_dataset.key_list

    def len(self):
        return self._len

    def load_A_data(self, idx):
        """Build a node-level graph sample ("A" family).

        Node normals are the features, face-node connectivity the edges.
        Attaches the normalized SDF voxel volume and query points (node
        positions mapped into the canonical bounding box).
        """
        minibatch_data = self.pool[self.key_list[idx]]
        mesh_pos = torch.from_numpy(minibatch_data["node|pos"][:]).to(torch.float32)
        unit_norm_v = torch.from_numpy(minibatch_data["node|unit_norm_v"][:]).to(
            torch.float32
        )
        face_node = torch.from_numpy(minibatch_data["face|face_node"][:]).to(torch.long)
        ao = torch.from_numpy(minibatch_data["node|ao"][:]).to(torch.float32)

        voxel = (
            torch.from_numpy(minibatch_data["voxel|sdf"][:])
            .to(torch.float32)
            .reshape(1, 1, *minibatch_data["voxel|grid"][:].shape[:-1])
        )  # B C W H D
        # Normalize the SDF volume with the per-sample statistics.
        voxel = (voxel - minibatch_data["voxel_mean_std"][0]) / minibatch_data[
            "voxel_mean_std"
        ][1]

        # Map node positions into the canonical box centered at the bounds'
        # midpoint with half-extent scale.
        bounds = minibatch_data["bounds"]
        mid = (bounds[0] + bounds[1]) / 2
        scale = (bounds[1] - bounds[0]) / 2
        canonical_query = (mesh_pos - mid) / scale
        canonical_query = canonical_query.to(torch.float32)

        y = torch.from_numpy(minibatch_data["node|pressure"][:]).to(torch.float32)
        norm_y = (y - minibatch_data["pressure_mean_std"][0]) / minibatch_data[
            "pressure_mean_std"
        ][1]

        graph_node = CustomGraphData(
            x=unit_norm_v,
            edge_index=face_node,
            pos=mesh_pos,
            y=y,
            norm_y=norm_y,
            ao=ao,
            voxel=voxel,
            query=canonical_query,
            graph_index=torch.as_tensor([idx], dtype=torch.long),
            # Encode the HDF5 key as character codes so it survives collation.
            origin_id=torch.as_tensor(
                [ord(char) for char in (self.key_list[idx])], dtype=torch.long
            ),
            press_mean=torch.tensor(minibatch_data["pressure_mean_std"][0]),
            press_std=torch.tensor(minibatch_data["pressure_mean_std"][1]),
        )

        return graph_node

    def load_B_data(self, idx):
        """Build a cell-level graph sample ("B" family).

        Cell normals are the features, neighbour-cell connectivity the
        edges. When ``self.subsampling`` is set, restricts the sample to a
        random k-hop subgraph of ``params.num_samples`` seed cells.
        """
        minibatch_data = self.pool[self.key_list[idx]]

        # cell_attr
        mesh_pos = torch.from_numpy(minibatch_data["cell|centroid"][:]).to(
            torch.float32
        )
        normals = torch.from_numpy(minibatch_data["cell|unit_norm_v"][:]).to(
            torch.float32
        )

        edge_index = torch.from_numpy(minibatch_data["face|neighbour_cell"][:]).to(
            torch.long
        )

        y = torch.from_numpy(minibatch_data["cell|pressure"][:]).to(torch.float32)

        norm_y = (y - minibatch_data["pressure_mean_std"][0]) / minibatch_data[
            "pressure_mean_std"
        ][1]

        voxel = (
            torch.from_numpy(minibatch_data["voxel|sdf"][:])
            .to(torch.float32)
            .reshape(1, 1, *minibatch_data["voxel|grid"][:].shape[:-1])
        )  # B C W H D
        voxel = (voxel - minibatch_data["voxel_mean_std"][0]) / minibatch_data[
            "voxel_mean_std"
        ][1]

        bounds = minibatch_data["bounds"]
        mid = (bounds[0] + bounds[1]) / 2
        scale = (bounds[1] - bounds[0]) / 2
        canonical_query = (mesh_pos - mid) / scale

        canonical_query = canonical_query.to(torch.float32)
        # Cell-level samples carry no ambient-occlusion data; use zeros.
        ao = torch.zeros_like(y)

        if self.subsampling:
            # Random seed cells, then their k-hop neighborhood with node
            # indices relabeled to the subgraph.
            sampled_nodes = torch.randint(
                0, normals.shape[0], [self.params.num_samples]
            )
            subgraph_nodes, subgraph_edge_index, _, _ = k_hop_subgraph(
                sampled_nodes, self.k_hop, edge_index, relabel_nodes=True
            )
            normals = normals[subgraph_nodes]
            mesh_pos = mesh_pos[subgraph_nodes]
            y = y[subgraph_nodes]
            norm_y = norm_y[subgraph_nodes]
            ao = ao[subgraph_nodes]
            canonical_query = canonical_query[subgraph_nodes]
            edge_index = subgraph_edge_index

        graph_cell = CustomGraphData(
            x=normals,
            edge_index=edge_index,
            pos=mesh_pos,
            y=y,
            norm_y=norm_y,
            ao=ao,
            query=canonical_query,
            voxel=voxel,
            graph_index=torch.as_tensor([idx], dtype=torch.long),
            origin_id=torch.as_tensor(
                [ord(char) for char in (self.key_list[idx])], dtype=torch.long
            ),
            press_mean=torch.tensor(minibatch_data["pressure_mean_std"][0]),
            press_std=torch.tensor(minibatch_data["pressure_mean_std"][1]),
        )

        return graph_cell

    def get(self, idx):
        """Return the graph for *idx*, dispatching on the key's family."""
        if self.idx_indices is not None:
            idx = self.idx_indices[idx]  # secondary index remapping
        if self.key_list[idx].startswith("A"):
            graph_cell = self.load_A_data(idx)

        elif self.key_list[idx].startswith("B"):
            graph_cell = self.load_B_data(idx)

        else:
            # BUGFIX: the fallback indexed ``self.key_list`` (a list) with a
            # string key and raised TypeError; the sample group lives in
            # ``self.pool``, exactly as in load_A_data / load_B_data.
            minibatch_data = self.pool[self.key_list[idx]]
            # Small meshes go through the node-level loader.
            if minibatch_data["cell|centroid"].shape[0] < 10000:
                graph_cell = self.load_A_data(idx)
            else:
                graph_cell = self.load_B_data(idx)

        return graph_cell


class DatasetFactory:
    """Creates train/test ``Data_Pool`` objects and their PyG dataloaders."""

    def __init__(
        self,
        params=None,
        device=None,
        split="test",
    ):
        self.params = params
        # NOTE(review): both pools read the same ``split`` file; only the
        # is_training flag differs — confirm this is intentional.
        self.train_dataset = Data_Pool(
            params=params, is_training=True, split=split, device=device
        )
        self.test_dataset = Data_Pool(
            params=params, is_training=False, split=split, device=device
        )

    def create_trainset(
        self,
        batch_size=100,
        num_workers=4,
        pin_memory=True,
        persistent_workers=True,
        indices=None,
        subsampling=True,  # use k-hop subgraph sampling during training
        ratio=0.2,  # default sampling ratio
    ):
        """Return ``(train Data_Pool, shuffled training DataLoader)``."""
        dataset = GraphCellDataset(
            base_dataset=self.train_dataset,
            len_ds=len(self.train_dataset.pool),
            indices=indices,
            params=self.params,
            subsampling=subsampling,
            sample_ratio=ratio,
        )
        train_loader = torch_geometric_DataLoader(
            dataset,
            batch_size,
            num_workers=num_workers,
            pin_memory=pin_memory,
            persistent_workers=persistent_workers,
            shuffle=True,
        )
        return self.train_dataset, train_loader

    def create_testset(
        self,
        batch_size=1,
        num_workers=0,
        pin_memory=False,
        persistent_workers=False,
        valid_num=10,
        subsampling=True,
        indices=None
    ):
        """Return ``(test Data_Pool, test DataLoader)``.

        When ``indices`` is given it overrides ``valid_num`` as the length.
        """
        if indices is not None:
            valid_num = len(indices)
        dataset = GraphCellDataset(
            base_dataset=self.test_dataset,
            len_ds=valid_num,
            params=self.params,
            subsampling=subsampling,
            indices=indices
        )
        # NOTE(review): shuffle=True randomizes evaluation order on the
        # test loader — confirm this is intended.
        test_loader = torch_geometric_DataLoader(
            dataset,
            batch_size,
            num_workers=num_workers,
            pin_memory=pin_memory,
            persistent_workers=persistent_workers,
            shuffle=True,
        )
        return self.test_dataset, test_loader
