from mindnlp.abc import Callback
import wandb
from utils.runcontext import Runcontext

class LogCallback(Callback):
    """Training/evaluation callback that mirrors metrics to Weights & Biases.

    When ``idx`` is given, every metric key is namespaced as ``modular{idx}/…``
    and logged against a dedicated ``modular{idx}/step`` metric (registered via
    ``wandb.define_metric``) so multiple modular runs can share one wandb run
    without their global steps colliding.
    """

    def __init__(self, args, prefix=None, is_eval: bool = False, idx=None,
                 log_interval: int = 10):
        """Initialize the callback.

        Args:
            args: Run configuration object; stored for later use.
            prefix: Optional string prepended to evaluation metric names
                (as ``"{prefix}_{name}"``).
            is_eval: If True, ``evaluate_end`` only writes the metric to
                ``wandb.summary`` instead of logging a time series.
            idx: Optional modular-run index; enables per-module metric
                namespacing in wandb.
            log_interval: Log training loss every N steps (default 10).
        """
        self.log_interval = log_interval
        self.best_metric = 0        # running max of the first eval metric
        self.step = 0               # local step counter, advanced per train step
        self.prefix = prefix
        self.is_eval = is_eval
        self.idx = idx
        self.args = args
        if self.idx is not None:
            # Route all modular{idx}/* metrics to their own step axis so
            # several modular trainings can interleave in a single wandb run.
            wandb.define_metric(f"modular{self.idx}/step")
            wandb.define_metric(f"modular{self.idx}/*", step_metric=f"modular{self.idx}/step")

    def train_step_end(self, run_context):
        """Log training loss/epoch every ``log_interval`` steps, then advance counters."""
        if self.step % self.log_interval == 0:
            log_metrics = {
                "loss": run_context.loss.item(),
                "epoch": run_context.cur_epoch_nums,
            }
            if self.idx is not None:
                # Namespace keys and attach the custom step axis for modular runs.
                log_metrics = {f"modular{self.idx}/{key}": value
                               for key, value in log_metrics.items()}
                log_metrics[f"modular{self.idx}/step"] = self.step
            # Explicit step only for non-modular runs; modular runs use the
            # modular{idx}/step metric defined in __init__.
            wandb.log(log_metrics, step=self.step if self.idx is None else None)
        self.step += 1
        Runcontext.step += 1  # shared global step across callbacks

    def evaluate_end(self, run_context):
        """Record the first evaluation metric and track its best value."""
        metric_value = run_context.metrics_values[0]
        metric_name = run_context.metrics_names[0]
        self.best_metric = max(self.best_metric, metric_value)
        metric_key = metric_name if self.prefix is None else f'{self.prefix}_{metric_name}'
        if self.is_eval:
            # Final evaluation: summary entry only, no time-series point.
            print("eval end")
            wandb.summary[metric_key] = metric_value
        else:
            best_metric_key = f'best_{metric_name}'
            if self.idx is not None:
                metric_key = f"modular{self.idx}/{metric_key}"
                best_metric_key = f"modular{self.idx}/{best_metric_key}"
            log_metrics = {metric_key: metric_value}
            if self.idx is not None:
                log_metrics[f"modular{self.idx}/step"] = self.step
            wandb.log(log_metrics, step=self.step if self.idx is None else None)
            wandb.summary[best_metric_key] = self.best_metric