import os
import shutil
import time

import numpy as np
import torch
from sklearn.preprocessing import MinMaxScaler
from torch_geometric.data import Data, Dataset
from tqdm import tqdm

from utils import read_array_from_file


class ArchDataset(Dataset):
    """Graph dataset built from per-sample directories under ``<root>/waiting-room``.

    Each sample directory must contain four text files (``va.txt``, ``ea.txt``,
    ``ei.txt``, ``ii.txt``).  ``process`` converts every such directory into a
    saved ``Data`` object (``data_<timestamp><counter>.pt``) under
    ``processed_dir`` and then moves the source directory to
    ``processed/datasrc`` so it is not processed again.
    """

    def __init__(self, root, transform=None, pre_transform=None):
        super(ArchDataset, self).__init__(root, transform, pre_transform)
        # Explicitly re-run processing on every construction so samples that
        # arrived in 'waiting-room' after the processed dir was created are
        # still picked up (the base class only processes when the dir is new).
        self.process()

    @property
    def raw_file_names(self):
        # No raw files are tracked via torch_geometric's raw-file machinery;
        # process() scans 'waiting-room' directly instead.
        return []

    @property
    def processed_file_names(self):
        # exist_ok=True: avoid the exists()/makedirs() race of the original.
        os.makedirs(self.processed_dir, exist_ok=True)
        return sorted(
            name
            for name in os.listdir(self.processed_dir)
            if name.startswith('data_') and name.endswith('.pt')
        )

    def len(self):
        return len(self.processed_file_names)

    def get(self, idx):
        # NOTE(review): torch>=2.6 defaults torch.load(weights_only=True),
        # which cannot reconstruct Data objects — confirm the torch version
        # in use before upgrading.
        data = torch.load(os.path.join(self.processed_dir, self.processed_file_names[idx]))
        return data

    def process(self):
        """Convert every 'waiting-room' sample directory into a saved Data file."""
        waiting_room_path = os.path.join(self.root, 'waiting-room')
        os.makedirs(os.path.join(self.processed_dir, 'datasrc'), exist_ok=True)
        idx = 0
        for root, dirs, files in tqdm(os.walk(waiting_room_path)):
            for file in files:
                # 'va.txt' marks a complete sample directory.
                if file != 'va.txt':
                    continue
                try:
                    data_item = self.process_data_files(root)
                    # Filename scheme: 'data_<unix-seconds><counter>.pt'.
                    # The counter resets whenever the wall-clock second rolls
                    # over (previous filename no longer matches), keeping names
                    # unique within a single second.
                    if idx == 0:
                        filename = f'data_{int(time.time())}{idx}.pt'
                    else:
                        idx = 0 if filename != f'data_{int(time.time())}{idx - 1}.pt' else idx
                        filename = f'data_{int(time.time())}{idx}.pt'
                    torch.save(data_item, os.path.join(self.processed_dir, filename))
                    # Portable path (the original hard-coded a Windows
                    # backslash: r'processed\datasrc').
                    processed_path = root.replace(
                        'waiting-room', os.path.join('processed', 'datasrc'))
                    shutil.move(root, processed_path)
                    idx += 1
                except Exception as exc:
                    # Narrowed from a bare 'except:' so KeyboardInterrupt /
                    # SystemExit propagate; log the cause instead of hiding it.
                    print(f'fail: {root} ({exc})')
                    continue

    def process_data_files(self, root):
        """Build one Data object from the four text files in *root*.

        va.txt: per-node rows, column 0 is the label, the rest are features.
        ea.txt: edge attributes.  ei.txt: edge index.  ii.txt: neighbourhood
        groups used to derive the instruction adjacency.
        """
        data_va_file = os.path.join(root, 'va.txt')
        data_ea_file = os.path.join(root, 'ea.txt')
        data_ei_file = os.path.join(root, 'ei.txt')
        data_ii_file = os.path.join(root, 'ii.txt')

        data_va = np.array(read_array_from_file(data_va_file))
        data_ea = np.array(read_array_from_file(data_ea_file))
        data_ei = np.array(read_array_from_file(data_ei_file), dtype=np.int32)
        data_ii = read_array_from_file(data_ii_file)

        x = data_va[:, 1:]
        y = np.array(data_va[:, 0], dtype=np.int32)
        edge_attr = data_ea
        edge_index = data_ei
        ins_index = data_ii

        x, edge_attr = fit_transform(x, edge_attr)
        ins_adj = get_ins_adj(ins_index, len(x))

        x = torch.tensor(x, dtype=torch.float)
        # [y] adds a leading dimension: y has shape (1, num_nodes).
        y = torch.tensor([y], dtype=torch.long)
        edge_attr = torch.tensor(edge_attr, dtype=torch.float)
        edge_index = torch.tensor(edge_index, dtype=torch.long)
        ins_adj = torch.tensor(ins_adj, dtype=torch.long)

        return Data(x=x, y=y, edge_attr=edge_attr, edge_index=edge_index, ins_adj=ins_adj)


def fit_transform(x, edge_attr):
    """Min-max scale selected feature columns to [0, 1], in place.

    Node features: every column of ``x`` from index 2 onward is scaled.
    Edge features: the first two columns of ``edge_attr`` are scaled.

    Both arrays are mutated in place and also returned, matching the
    original sklearn-based behaviour (``MinMaxScaler((0, 1)).fit_transform``
    per slice).  The sklearn dependency is replaced by an equivalent numpy
    computation so the function has no fit/transform state at all.

    Returns:
        (x, edge_attr): the same array objects that were passed in.
    """
    # Nodes: skip the first two (unscaled) feature columns.
    x[:, 2:] = _minmax_scale(x[:, 2:])
    # Edges: only the first two attribute columns are scaled.
    edge_attr[:, 0:2] = _minmax_scale(edge_attr[:, 0:2])
    return x, edge_attr


def _minmax_scale(a):
    """Column-wise min-max scaling to [0, 1].

    Equivalent to ``sklearn.preprocessing.MinMaxScaler((0, 1)).fit_transform``
    for finite data: constant columns get a scale of 1 (so they map to 0)
    rather than dividing by zero, mirroring sklearn's zero-range handling.
    NOTE(review): unlike sklearn this does not ignore NaNs — confirm inputs
    are NaN-free (the original pipeline assumed the same in practice).
    """
    col_min = a.min(axis=0)
    col_range = a.max(axis=0) - col_min
    col_range[col_range == 0] = 1
    return (a - col_min) / col_range


def get_ins_adj(ins_index, n):
    """Build the flattened n*n instruction adjacency as a 0/1 list.

    Every ordered pair of *distinct* node indices that appear together in one
    neighbourhood group of ``ins_index`` is connected (symmetric by
    construction); self-links stay 0.

    Returns:
        A plain Python list of length n*n (row-major flattening).
    """
    adj = np.zeros((n, n), dtype=np.int32)

    for group in ins_index:
        members = np.asarray(group, dtype=np.intp)
        if members.size == 0:
            continue
        # Mark the full cartesian product of the group, then clear the
        # diagonal entries — equivalent to looping over pairs and skipping
        # i == j, but done with two vectorized assignments.
        adj[np.ix_(members, members)] = 1
        adj[members, members] = 0

    return adj.reshape(n * n).tolist()
