import lightning.pytorch as pl
import numpy as np
import json
from datetime import datetime
from lightning.pytorch.callbacks import Callback
import pandas as pd
import math

class Dynamic_K(Callback):
    """Step-decay schedule for ``pl_module.maxiter``.

    Every ``interval`` training epochs, subtracts ``step`` from
    ``pl_module.maxiter``, never letting it drop below ``mimiter``.
    The per-epoch schedule is written to a CSV when fitting ends.
    """

    def __init__(self, name, step=5, mimiter=20, interval=20):
        """
        Args:
            name: basename for the CSV log file written on fit end.
            step: amount subtracted from ``pl_module.maxiter`` at each decay.
            mimiter: lower bound (minimum iterations) for ``maxiter``.
            interval: decay once every ``interval`` epochs.
                (Previously hard-coded to 20; default preserves old behavior.)
        """
        super().__init__()
        self.name = name
        self.mimiter = mimiter
        self.step = step
        self.interval = interval
        self.log_info = {'epoch': [], 'itertimes': []}

    def on_train_epoch_start(self, trainer, pl_module):
        # Decay once every `interval` epochs, clamped below at `mimiter`.
        if (trainer.current_epoch + 1) % self.interval == 0:
            pl_module.maxiter = max(int(pl_module.maxiter - self.step), self.mimiter)
        # Record the schedule regardless of whether a decay happened.
        self.log_info['epoch'].append(trainer.current_epoch)
        self.log_info['itertimes'].append(pl_module.maxiter)

    def on_fit_end(self, trainer, pl_module):
        # Persist the per-epoch iteration schedule for offline analysis.
        # NOTE(review): hard-coded absolute output path — consider making it configurable.
        df = pd.DataFrame(self.log_info)
        df.to_csv(f'/home/bon/workspace/green-net/csv/{self.name}.csv')
        return super().on_fit_end(trainer, pl_module)

class Adaptive_Kmul(Callback):
    """Multiplicatively adapt ``pl_module.maxiter`` from validation feedback.

    Collects ``outputs["val_real"]`` over each validation epoch; when the
    epoch mean rises by >=25% vs. the previous mean, ``maxiter`` is scaled
    up by 1.8x (capped at ``maxiter``); when it falls by >=15%, scaled down
    by 0.8x (floored at ``mimiter``). Logs the schedule to CSV on fit end.
    """

    def __init__(self, name, maxiter=500, mimiter=20):
        """
        Args:
            name: basename for the CSV log file written on fit end.
            maxiter: upper bound for ``pl_module.maxiter``.
            mimiter: lower bound (minimum iterations) for ``pl_module.maxiter``.
        """
        super().__init__()
        self.val_metrics = []
        self.val_metric = None
        self.name = name
        self.maxiter = maxiter
        self.mimiter = mimiter
        self.log_info = {'epoch': [], 'itertimes': []}

    def on_validation_epoch_end(self, trainer, pl_module):
        # Guard: a validation epoch with no recorded batches would make
        # np.mean([]) return nan (with a RuntimeWarning); once val_metric is
        # nan every later comparison is False and adaptation silently freezes.
        if not self.val_metrics:
            return None
        new_val_metric = float(np.mean(self.val_metrics))
        if self.val_metric is None:
            # First observation: establish the baseline, no adjustment yet.
            self.val_metric = new_val_metric
            return None
        if new_val_metric >= 1.25 * self.val_metric:
            # Metric improved sharply: grow iteration budget, capped at maxiter.
            pl_module.maxiter = min(math.ceil(pl_module.maxiter * 1.8), self.maxiter)
        elif new_val_metric <= 0.85 * self.val_metric:
            # Metric dropped sharply: shrink iteration budget, floored at mimiter.
            pl_module.maxiter = max(math.floor(pl_module.maxiter * 0.8), self.mimiter)
        # Baseline always tracks the latest epoch mean (matches original behavior).
        self.val_metric = new_val_metric
        return None

    def on_train_epoch_start(self, trainer, pl_module):
        # Reset the per-epoch metric buffer and log the current schedule point.
        self.val_metrics = []
        self.log_info['epoch'].append(trainer.current_epoch)
        self.log_info['itertimes'].append(pl_module.maxiter)
        return None

    def on_validation_batch_end(
        self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx=0
    ):
        # Accumulate the scalar metric emitted by the validation step.
        self.val_metrics.append(outputs["val_real"].item())
        return None

    def on_fit_end(self, trainer, pl_module) -> None:
        # Persist the per-epoch iteration schedule for offline analysis.
        df = pd.DataFrame(self.log_info)
        df.to_csv(f'/home/bon/workspace/green-net/csv/{self.name}.csv')
        return super().on_fit_end(trainer, pl_module)

class Adaptive_Kadd(Callback):
    """Additively adapt ``pl_module.maxiter`` from validation feedback.

    Collects ``outputs["val_real"]`` over each validation epoch; when the
    epoch mean rises by >=25% vs. the previous mean, ``maxiter`` is increased
    by 5 (capped at ``maxiter``); when it falls by >=15%, decreased by 5
    (floored at ``mimiter``). Logs the schedule to CSV on fit end.
    """

    def __init__(self, name, maxiter=500, mimiter=20):
        """
        Args:
            name: basename for the CSV log file written on fit end.
            maxiter: upper bound for ``pl_module.maxiter``.
            mimiter: lower bound (minimum iterations) for ``pl_module.maxiter``.
        """
        super().__init__()
        self.val_metrics = []
        self.val_metric = None
        self.name = name
        self.maxiter = maxiter
        self.mimiter = mimiter
        self.log_info = {'epoch': [], 'itertimes': []}

    def on_validation_epoch_end(self, trainer, pl_module):
        # Guard: a validation epoch with no recorded batches would make
        # np.mean([]) return nan (with a RuntimeWarning); once val_metric is
        # nan every later comparison is False and adaptation silently freezes.
        if not self.val_metrics:
            return None
        new_val_metric = float(np.mean(self.val_metrics))
        if self.val_metric is None:
            # First observation: establish the baseline, no adjustment yet.
            self.val_metric = new_val_metric
            return None
        if new_val_metric >= 1.25 * self.val_metric:
            # Metric improved sharply: extend iteration budget, capped at maxiter.
            pl_module.maxiter = min(int(pl_module.maxiter + 5), self.maxiter)
        elif new_val_metric <= 0.85 * self.val_metric:
            # Metric dropped sharply: reduce iteration budget, floored at mimiter.
            pl_module.maxiter = max(int(pl_module.maxiter - 5), self.mimiter)
        # Baseline always tracks the latest epoch mean (matches original behavior).
        self.val_metric = new_val_metric
        return None

    def on_train_epoch_start(self, trainer, pl_module):
        # Reset the per-epoch metric buffer and log the current schedule point.
        self.val_metrics = []
        self.log_info['epoch'].append(trainer.current_epoch)
        self.log_info['itertimes'].append(pl_module.maxiter)
        return None

    def on_validation_batch_end(
        self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx=0
    ):
        # Accumulate the scalar metric emitted by the validation step.
        self.val_metrics.append(outputs["val_real"].item())
        return None

    def on_fit_end(self, trainer, pl_module) -> None:
        # Persist the per-epoch iteration schedule for offline analysis.
        df = pd.DataFrame(self.log_info)
        df.to_csv(f'/home/bon/workspace/green-net/csv/{self.name}.csv')
        return super().on_fit_end(trainer, pl_module)
class JsonLogger(Callback):
    """Record fit lifecycle events into a shared JSON log file.

    All entries are written under the top-level key ``name`` of the JSON
    document at ``js_log_path``: epoch count, fit start/end/elapsed times,
    exceptions, checkpoint paths, and teardown stage.
    The keyed sub-dict is assumed to already exist in the file.
    """

    # Timestamp format shared by all time fields (kept identical to original).
    _TIME_FMT = "%m/%d/%Y, %H:%M:%S"

    def __init__(self, js_log_path, name):
        """
        Args:
            js_log_path: path to the JSON log file (must already exist and
                contain an object with key ``name``).
            name: run identifier; all fields are stored under this key.
        """
        super().__init__()
        self.js_log_path = js_log_path
        self.name = name

    def _load(self):
        # Read the whole shared JSON document.
        with open(self.js_log_path, "r") as f:
            return json.load(f)

    def _save(self, js):
        # Write the whole document back (plain dumps, matching original output).
        with open(self.js_log_path, "w") as f:
            f.write(json.dumps(js))

    def _set(self, **fields):
        # Read-modify-write helper for simple field updates under self.name.
        js = self._load()
        js[self.name].update(fields)
        self._save(js)

    def on_train_epoch_end(self, trainer, pl_module) -> None:
        self._set(epochs=trainer.current_epoch)

    def on_fit_start(self, trainer, pl_module):
        self._set(fit_start_time=datetime.now().strftime(self._TIME_FMT))

    def on_fit_end(self, trainer, pl_module):
        # Needs the previously recorded start time, so it can't use _set alone.
        js = self._load()
        end_time = datetime.now()
        js[self.name]["fit_end_time"] = end_time.strftime(self._TIME_FMT)
        start_time = datetime.strptime(
            js[self.name]["fit_start_time"], self._TIME_FMT
        )
        td = end_time - start_time
        js[self.name]["fit_spend_time"] = str(td)
        self._save(js)

    def on_exception(self, trainer, pl_module, exception):
        self._set(exception=str(exception))

    def on_save_checkpoint(self, trainer, pl_module, checkpoint):
        self._set(
            best_model_ckpt=trainer.checkpoint_callback.best_model_path,
            last_model_ckpt=trainer.checkpoint_callback.last_model_path,
        )

    def teardown(self, trainer, pl_module, stage):
        self._set(teardown=str(stage))