import sys, os

import numpy as np


def loss_lambda(epoch):
    """Return the loss weight for *epoch* on a step schedule.

    ``None`` (epoch unknown) and any epoch <= 150 map to the smallest
    weight 0.0001; the weight then increases by a factor of 10 at each
    of the thresholds 150, 200, 250, and 300.
    """
    if epoch is None:
        return 0.0001
    # (exclusive lower bound, weight), checked from the highest bound down;
    # the first matching entry wins, mirroring the original if/elif ladder.
    schedule = ((300, 1), (250, 0.1), (200, 0.01), (150, 0.001))
    for bound, weight in schedule:
        if epoch > bound:
            return weight
    return 0.0001


class GlobMgr:
    """Process-wide registry of shared training state.

    Attributes live on the class itself (no instantiation needed), so all
    importers see and mutate the same objects.
    """

    # NOTE(review): "metric_loger" looks like a typo for "metric_logger";
    # renaming would break any external references, so it is left as-is.
    # Presumably holds a MetricLogger instance once training starts — confirm.
    metric_loger = None
    # Shared mutable dict; deliberately class-level so every user of GlobMgr
    # reads and writes the same parameter mapping.
    params = {}


class BaseMetricLogger:
    """Accumulate per-epoch metric samples and report their epoch means.

    Each of ``acc``, ``acc_color`` and ``loss`` maps an epoch number to the
    list of values recorded for that epoch via :meth:`update`.
    """

    def __init__(self):
        # epoch -> list of per-call metric values
        self.acc = {}
        self.acc_color = {}
        self.loss = {}
        # Last epoch passed to update(); None until the first update.
        self.current_epoch = None

    def update(self, acc: float, loss: float, acc_color: float, epoch: int):
        """Record one sample of each metric under *epoch*.

        Args:
            acc: accuracy value for this step.
            loss: loss value for this step.
            acc_color: color-accuracy value for this step.
            epoch: epoch the sample belongs to.
        """
        self.current_epoch = epoch
        # setdefault replaces the original's manual "if key missing, create
        # three empty lists" dance — one idiomatic line per metric.
        self.acc.setdefault(epoch, []).append(acc)
        self.acc_color.setdefault(epoch, []).append(acc_color)
        self.loss.setdefault(epoch, []).append(loss)

    def get_acc(self, epoch):
        """Mean accuracy over all samples recorded for *epoch*.

        Raises KeyError if nothing was recorded for that epoch.
        """
        return np.mean(self.acc[epoch])

    def get_acc_color(self, epoch):
        """Mean color accuracy over all samples recorded for *epoch*."""
        return np.mean(self.acc_color[epoch])

    def get_loss(self, epoch):
        """Mean loss over all samples recorded for *epoch*."""
        return np.mean(self.loss[epoch])


class MetricLogger(BaseMetricLogger):
    """Metric logger bound to a trainer.

    Identical to :class:`BaseMetricLogger` except that the epoch is read
    from ``trainer.current_epoch`` instead of being passed explicitly.
    """

    def __init__(self, trainer):
        super().__init__()
        self.trainer = trainer

    def update(self, acc: float, loss: float, acc_color: float):
        """Record one sample, tagged with the trainer's current epoch."""
        current = self.trainer.current_epoch
        super().update(acc, loss, acc_color, current)
