import torch
import vtk
import os
import numpy as np
import itertools
import random
from torch_geometric import nn as nng
from sklearn.neighbors import NearestNeighbors
from torch_geometric.data import Data, Dataset
from torch_geometric.utils import k_hop_subgraph, subgraph
import meshio
import pandas as pd


def _load_labeled_sample(press_path, coef_norm, norm):
    """Load one (pressure, mesh) training pair as a PyG ``Data`` object.

    Returns ``None`` when either the pressure file or its matching mesh
    file is missing, so callers can simply skip the sample.
    """
    mesh_path = press_path.replace('press', 'mesh').replace('npy', 'ply')
    # Require BOTH files: the previous `or` test only skipped when neither
    # existed, so a half-missing pair crashed np.load / meshio.read below.
    if not (os.path.exists(press_path) and os.path.exists(mesh_path)):
        return None

    press = np.load(press_path)
    points = meshio.read(mesh_path).points
    # Oversized pressure arrays carry 96 extra rows (indices 16..111);
    # drop them so the labels line up with the mesh points.
    # NOTE(review): 3586 looks like the expected node count — confirm
    # against the dataset specification.
    if len(press) > 3586:
        press = np.concatenate((press[0:16], press[112:]), axis=0)

    data = Data(x=torch.tensor(points), y=torch.tensor(press))
    if norm is True:
        min_in, max_in, mean_out, std_out = coef_norm
        # Inputs are min-max scaled; targets are standardized.
        data.x = ((data.x - min_in) / (max_in - min_in + 1e-8)).float()
        data.y = ((data.y - mean_out) / (std_out + 1e-8)).float()
    return data


def read_data_trackA(args, norm=True):
    """Load train/validation/test datasets for track A.

    Reads normalization constants from ``args.info_dir``, collects
    ``press_*`` samples from ``args.train_data_dir`` (plus, optionally,
    ``args.extra_data_dir``) and ``mesh_*`` samples from
    ``args.test_data_dir``.  Test meshes have no pressure labels.

    Returns:
        (train_dataset, val_dataset, test_dataset, coef_norm, test_index)
        where ``coef_norm`` is ``(min_in, max_in, mean_out, std_out)`` and
        ``test_index`` is the numeric id from each ``mesh_<id>.ply`` name.
    """
    # ---- normalization parameters ----
    with open(os.path.join(args.info_dir, 'watertight_global_bounds.txt'), "r") as fp:
        min_in = np.array([float(a) for a in fp.readline().split(" ")])
        max_in = np.array([float(a) for a in fp.readline().split(" ")])

    with open(os.path.join(args.info_dir, 'train_pressure_min_std.txt'), "r") as fp:
        mean_out = np.array([float(a) for a in fp.readline().split(" ")])
        std_out = np.array([float(a) for a in fp.readline().split(" ")])

    coef_norm = (min_in, max_in, mean_out, std_out)

    # ---- collect sample paths ----
    train_samples = [os.path.join(args.train_data_dir, f)
                     for f in os.listdir(args.train_data_dir)
                     if f.startswith('press_')]
    if args.extra_data_dir is not None:
        train_samples += [os.path.join(args.extra_data_dir, f)
                          for f in os.listdir(args.extra_data_dir)
                          if f.startswith('press_')]
    test_samples = [os.path.join(args.test_data_dir, f)
                    for f in os.listdir(args.test_data_dir)
                    if f.startswith('mesh_')]

    # Hold out the last 50 training samples for validation.
    val_samples = train_samples[-50:]
    train_samples = train_samples[:-50]

    # ---- training data ----
    train_dataset = []
    for s in train_samples:
        data = _load_labeled_sample(s, coef_norm, norm)
        if data is None:
            continue
        print(s, s.replace('press', 'mesh').replace('npy', 'ply'),
              len(data.y), len(data.x))
        train_dataset.append(data)

    # ---- validation data ----
    val_dataset = []
    for s in val_samples:
        data = _load_labeled_sample(s, coef_norm, norm)
        if data is not None:
            val_dataset.append(data)

    # ---- test data (meshes only, no pressure labels) ----
    test_dataset = []
    for s in test_samples:
        data = Data(x=torch.tensor(meshio.read(s).points))
        if norm is True:
            data.x = ((data.x - min_in) / (max_in - min_in + 1e-8)).float()
        test_dataset.append(data)

    # 'mesh_<id>.ply' -> <id>; splitext + slice is exact, unlike the
    # character-set semantics of str.lstrip/rstrip.
    test_index = [int(os.path.splitext(os.path.basename(p))[0][len('mesh_'):])
                  for p in test_samples]
    return train_dataset, val_dataset, test_dataset, coef_norm, test_index


def get_edges_index(mesh):
    """Build a symmetric (2, E) edge index from the first cell block of *mesh*.

    Every ordered vertex pair within a cell (self loops included) becomes
    an edge.  Vertices are re-identified through their coordinates, so
    geometrically duplicated points collapse onto a single node id.
    """
    connectivity = mesh.cells[0].data
    n_local = connectivity.shape[1]

    # All ordered vertex pairs inside every cell; product(..., repeat=2)
    # already yields both orientations, so the result is symmetric.
    pairs = set()
    for cell in connectivity:
        for a, b in itertools.product(range(n_local), repeat=2):
            pairs.add((cell[a], cell[b]))

    # Coordinates -> index map (for duplicated coordinates the last
    # occurrence wins), then re-express every pair through that map.
    coord_to_idx = {tuple(pt): i for i, pt in enumerate(mesh.points)}
    edges = {(coord_to_idx[tuple(mesh.points[u])],
              coord_to_idx[tuple(mesh.points[v])])
             for u, v in pairs}
    return np.array(list(edges)).T


def get_induced_graph(data, idx, num_hops):
    """Extract the ``num_hops``-hop subgraph around node ``idx`` as a new Data.

    Subgraph nodes are relabeled to 0..k-1; the label carried over is the
    single target value ``data.y[idx]``.
    """
    nodes, sub_edges, _, _ = k_hop_subgraph(
        node_idx=idx, num_hops=num_hops,
        edge_index=data.edge_index, relabel_nodes=True)
    return Data(x=data.x[nodes], y=data.y[idx], edge_index=sub_edges)


def pc_normalize(pc):
    """Center *pc* on its centroid and scale it into the unit sphere.

    The farthest point from the centroid ends up at distance 1.
    """
    centered = pc - pc.mean(dim=0)
    scale = centered.norm(dim=1).max()
    return centered / scale


def get_shape(data, max_n_point=3682, normalize=True):
    """Return a float copy of ``data.x``, subsampled to at most ``max_n_point``.

    Sampling (when needed) is uniform without replacement via the global
    ``random`` state; with ``normalize`` the cloud is centered/scaled by
    :func:`pc_normalize`.  The input tensor is never modified.
    """
    cloud = data.x
    if len(cloud) > max_n_point:
        chosen = np.array(random.sample(range(len(cloud)), max_n_point))
        cloud = cloud[chosen].clone()
    else:
        cloud = cloud.clone()
    if normalize:
        cloud = pc_normalize(cloud)
    return cloud.float()


def sampling(data, max_n_point=3682, normalize=False):
    """Downsample ``data.x``/``data.y`` in place to at most ``max_n_point`` rows.

    The same random row selection is applied to both tensors so points and
    labels stay aligned.  Returns the (mutated) ``data`` object.
    NOTE(review): the ``normalize`` branch runs pc_normalize on ``data.y``
    as well — verify callers only enable it when y is point-shaped.
    """
    n = len(data.x)
    if n > max_n_point:
        keep = np.array(random.sample(range(n), max_n_point))
        data.x = data.x[keep]
        data.y = data.y[keep]

    if normalize:
        data.x = pc_normalize(data.x)
        data.y = pc_normalize(data.y)

    return data


def create_edge_index_radius(data, r, max_neighbors=32):
    """Attach a radius-graph ``edge_index`` to *data* and return it.

    Connects each point in ``data.pos`` to all neighbors within radius
    ``r`` (self loops included, capped at ``max_neighbors`` per node).
    NOTE(review): the Data objects built in this file only set .x/.y —
    confirm callers populate .pos before taking this path.
    """
    data.edge_index = nng.radius_graph(x=data.pos, r=r, loop=True, max_num_neighbors=max_neighbors)
    return data


class GraphDataset(Dataset):
    """Thin in-memory Dataset over a list of PyG ``Data`` samples.

    With ``use_cfd_mesh=False`` each sample's connectivity is replaced,
    once at construction time, by a radius graph of radius ``r``.
    ``get`` returns the sample together with a freshly sampled global
    shape point cloud (see :func:`get_shape`).
    """

    def __init__(self, datalist, use_cfd_mesh=True, r=None):
        super().__init__()
        self.datalist = datalist
        if not use_cfd_mesh:
            # A radius is mandatory when rebuilding connectivity.
            assert r is not None
            for pos in range(len(self.datalist)):
                self.datalist[pos] = create_edge_index_radius(self.datalist[pos], r)

    def len(self):
        return len(self.datalist)

    def get(self, idx):
        sample = self.datalist[idx]
        return sample, get_shape(sample)


if __name__ == '__main__':
    # Script entry point stub; numpy is already imported at module level,
    # so the duplicate local `import numpy as np` was removed.
    # Default location of the training data when run as a script.
    root = './data/mlcfd_data/training_data'
