from slip_detector import DeltaEstimator, StreamTactileDataset
import torch
import torch.nn as nn
import os
import numpy as np
import random
import tqdm
import torch.nn.functional as F
import time
import pandas as pd
import re

class NetworkTrainee:
    """End-to-end training driver for the tactile slip DeltaEstimator.

    Scans a dataset library for experiment recordings, builds normalized
    train/validation DataLoaders, fine-tunes the estimator, and archives
    the training configuration and loss history to disk.
    """

    def __init__(self, res_dir=None, dataset_dir=None):
        """
        Initializes the NetworkTrainee class.

        Args:
            res_dir (str | None): Result directory handed to DeltaEstimator
                (checkpoints, saved statistics). None uses its default.
            dataset_dir (str | None): Root directory of the dataset library.
                Defaults to the value returned by self._get_dataset_dir().
        """
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.delta_estimator = DeltaEstimator(res_dir)

        print("[NetworkTrainee] model loaded successfully")

        if dataset_dir is None:
            dataset_dir = self._get_dataset_dir()
        self.dataset_dir = dataset_dir

        self.sample_meta = []   # list of (exp_dir, sensor_id, n_frames) tuples
        self.frame_cnt = None   # total number of frames over all samples
        self.train_loader = None
        self.val_loader = None
        self.train_result = None  # training-history dict produced by train()
        self.scanned_dir = None   # names of dataset dirs included in the scan
        self.mean, self.std = None, None  # normalization statistics

    def get_sample_meta(self):
        """
        Scan self.dataset_dir and populate self.sample_meta / self.frame_cnt.

        Only sub-directories named ``<objname>_dataset_<index>`` are scanned.
        For each experiment directory inside, one (exp_dir, sensor_id, length)
        entry is recorded per sensor whose label and tactile files both exist.
        Every scanned dataset directory is marked with an empty
        ``used_dataset`` file so it is known to have been used for training.
        """
        scanned_dir = []
        self.frame_cnt = 0
        # Reset so repeated scans do not accumulate duplicate entries.
        self.sample_meta = []
        for dataset_name in os.listdir(self.dataset_dir):
            dataset_path = os.path.join(self.dataset_dir, dataset_name)
            if not os.path.isdir(dataset_path):
                continue
            # Check dataset is named as <objname>_dataset_<index>, that is,
            # the middle part is "dataset"; validated via regular expression.
            if not re.match(r'^[a-zA-Z0-9_]+_dataset_[0-9]+$', dataset_name):
                continue

            scanned_dir.append(dataset_name)
            for exp_name in os.listdir(dataset_path):
                exp_dir = os.path.join(dataset_path, exp_name)
                # Skip stray files such as the "used_dataset" marker.
                if not os.path.isdir(exp_dir):
                    continue
                for sensor_id in [0, 1]:
                    label_path = os.path.join(exp_dir, f'true_delta_{sensor_id}.npy')
                    tac_path = os.path.join(exp_dir, f'tacdata_{sensor_id}.npy')
                    if not (os.path.exists(label_path) and os.path.exists(tac_path)):
                        continue

                    # Record metadata; load only the labels to get the length.
                    data_length = len(np.load(label_path))
                    self.sample_meta.append((exp_dir, sensor_id, data_length))
                    self.frame_cnt += data_length

            marker_path = os.path.join(dataset_path, "used_dataset")
            if not os.path.exists(marker_path):
                # Create an empty marker file to freeze the dataset.
                with open(marker_path, 'w'):
                    pass
                print(f"[NetworkTrainee] Freezed dataset {dataset_path} for training.")
        print(f"[NetworkTrainee] Scanned {scanned_dir} for samples.")
        print(f"[NetworkTrainee] Found {len(self.sample_meta)} samples in {len(scanned_dir)} datasets.")
        self.scanned_dir = scanned_dir

    def create_dataloader(self, train_ratio=0.8, batch_size=32):
        """
        Creates train/validation DataLoaders over the scanned samples.

        Args:
            train_ratio (float): Fraction of samples assigned to training.
            batch_size (int): Size of each batch.

        Side effects:
            Sets self.train_loader, self.val_loader, and the normalization
            statistics self.mean / self.std.
        """
        # Get and split the sample metadata.
        self.get_sample_meta()
        random.shuffle(self.sample_meta)
        split_index = int(len(self.sample_meta) * train_ratio)
        train_meta = self.sample_meta[:split_index]
        val_meta = self.sample_meta[split_index:]

        # Estimate normalization statistics from the TRAINING split only,
        # so no information from the validation set leaks into preprocessing.
        self.mean, self.std = self._estimate_mean_std(train_meta, sample_size=100)
        train_ds = StreamTactileDataset(train_meta, self.mean, self.std)
        val_ds = StreamTactileDataset(val_meta, self.mean, self.std)
        print(f"[NetworkTrainee] Dataset created with {len(train_ds)} training samples and {len(val_ds)} validation samples.")

        # Create DataLoaders.
        self.train_loader = torch.utils.data.DataLoader(
            train_ds, batch_size=batch_size, shuffle=True,
            num_workers=4, pin_memory=True)
        self.val_loader = torch.utils.data.DataLoader(
            val_ds, batch_size=batch_size, shuffle=False,
            num_workers=4, pin_memory=True)
        print("[NetworkTrainee] training set size:", len(self.train_loader.dataset))
        print("[NetworkTrainee] validation set size:", len(self.val_loader.dataset))

    def train(self, epochs=10, patience=5):
        """
        Fine-tune the estimator on the loaders built by create_dataloader().

        Unfreezes the last two layers, persists the normalization statistics
        and unfreeze configuration, then runs the trainer with early stopping.

        Args:
            epochs (int): Maximum number of training epochs.
            patience (int): Early-stopping patience in epochs.
        """
        self.delta_estimator.layer_unfreezer.unfreeze_n_layers(2)
        self.delta_estimator.save_mean_std(self.mean, self.std)
        self.delta_estimator.save_unfreeze_layer()
        self.train_result = self.delta_estimator.trainer.train(
            train_loader=self.train_loader,
            val_loader=self.val_loader,
            num_epochs=epochs,
            early_stopping_patience=patience,
        )

    def save_training_results(self, goal_dir="resultlib"):
        """
        Persist the training configuration and per-epoch loss history.

        Args:
            goal_dir (str): Parent directory; a timestamped sub-directory
                ``train_result_<YYYYmmdd_HHMMSS>`` is created inside it.
        """
        time_str = time.strftime("%Y%m%d_%H%M%S")
        out_dir = os.path.join(goal_dir, f"train_result_{time_str}")
        # exist_ok avoids the check-then-create race of the naive pattern.
        os.makedirs(out_dir, exist_ok=True)
        print(f"[NetworkTrainee] Saving training results to {out_dir}")

        # Save the training configuration.
        config_path = os.path.join(out_dir, "config.txt")
        with open(config_path, 'w') as f:
            f.write(f"Device: {self.device}\n")
            f.write(f"Model: {self.delta_estimator.model.__class__.__name__}\n")
            f.write(f"Scanned directories: {self.scanned_dir}\n")
            f.write(f"Count of experiments: {len(self.sample_meta)}\n")
            f.write(f"Epochs: {len(self.train_result['train_loss'])}\n")
            f.write(f"Batch Size: {self.train_loader.batch_size}\n")

        # Save the per-epoch training history.
        result_path = os.path.join(out_dir, "training_results.csv")
        pd.DataFrame(self.train_result).to_csv(result_path, index=False)

    def _get_dataset_dir(self):
        """
        Returns the default directory where the dataset library is stored.

        Data is expected under sub-directories named
        ``<objname>_dataset_<index>`` where index is an integer.
        """
        return "datalib"

    def _create_dataset(self, meta):
        """
        Load and concatenate tactile data and labels for the given metadata.

        Args:
            meta (list): (exp_dir, sensor_id, length) tuples.

        Returns:
            tuple: (tac_data, labels) as numpy arrays concatenated along
            the first axis over all entries of *meta*.
        """
        all_tacdata = []
        all_labels = []
        for exp_dir, sensor_id, _ in meta:
            label_path = os.path.join(exp_dir, f'true_delta_{sensor_id}.npy')
            tac_path = os.path.join(exp_dir, f'tacdata_{sensor_id}.npy')

            # Read the raw arrays from disk.
            labels = np.load(label_path)
            tac_data = np.load(tac_path)

            # Accumulate for the final concatenation.
            all_tacdata.append(tac_data)
            all_labels.append(labels)
        print("Created.")
        return np.concatenate(all_tacdata), np.concatenate(all_labels)

    def _calculate_mean_std(self, data):
        """
        Calculate the mean and standard deviation of the dataset.

        Args:
            data (np.ndarray): Data array; statistics are reduced over the
                first two axes (frame and time dimensions).
        Returns:
            tuple: Mean and standard deviation of the dataset.
        """
        mean = np.mean(data, axis=(0, 1))
        std = np.std(data, axis=(0, 1))
        return mean, std

    def _estimate_mean_std(self, metas, sample_size=100):
        """
        Estimate the dataset mean/std from a random subset of samples.

        Args:
            metas (list): Metadata tuples describing the candidate samples.
            sample_size (int): Number of samples drawn (capped at len(metas)).
        Returns:
            tuple: Estimated mean and standard deviation.
        """
        if len(metas) < sample_size:
            sample_size = len(metas)
            print(f"[NetworkTrainee] Sample size is larger than dataset size. Using {sample_size} samples.")
        sampled_metas = random.sample(metas, sample_size)
        all_tacdata, _ = self._create_dataset(sampled_metas)
        # Delegate to the shared helper so both code paths stay consistent.
        return self._calculate_mean_std(all_tacdata)

if __name__ == "__main__":
    # Example usage: train the delta estimator end-to-end and archive results.
    runner = NetworkTrainee()
    runner.create_dataloader()
    try:
        runner.train(epochs=100, patience=7)
    except KeyboardInterrupt:
        # Interrupted run: salvage whatever history the trainer accumulated
        # so the partial results can still be written out below.
        runner.train_result = runner.delta_estimator.trainer.dict_train_history
    runner.save_training_results(goal_dir="resultlib")