import torch
from easycore.common.registry import Registry


class SchedulerFactory:
    """Registry-backed factory for learning-rate schedulers.

    Scheduler classes register themselves via :meth:`register` (used as a
    class decorator) and are instantiated by :meth:`get` from a config dict
    whose ``'type'`` key names the registered class.
    """

    # Shared registry mapping scheduler type names to scheduler classes.
    registry = Registry("scheduler")

    @classmethod
    def register(cls, scheduler_type_name=None, obj=None):
        """Register a scheduler class in the shared registry.

        Called with no arguments it acts as a class decorator; presumably
        the class's own name is then used as the registry key (per easycore
        Registry semantics — confirm against easycore docs).
        """
        return cls.registry.register(scheduler_type_name, obj)

    @classmethod
    def get(cls, scheduler_config, optimizer):
        """Build the scheduler named by ``scheduler_config['type']``.

        Args:
            scheduler_config (dict): must contain ``'type'`` plus whatever
                keys the chosen scheduler's constructor reads.
            optimizer: the optimizer instance the scheduler will drive.

        Returns:
            The constructed scheduler instance.

        Raises:
            KeyError: if ``'type'`` is missing or not registered.
        """
        # Fix: the result is a scheduler, not an optimizer — the original
        # shadowed the `optimizer` parameter with a misleadingly named local.
        scheduler = cls.registry.get(scheduler_config['type'])(scheduler_config, optimizer)
        return scheduler


@SchedulerFactory.register()
class ReduceLROnPlateau(torch.optim.lr_scheduler.ReduceLROnPlateau):
    """Config-driven wrapper around torch's ReduceLROnPlateau.

    Adapts the factory's ``(scheduler_config, optimizer)`` construction
    convention and the message-dict ``step`` convention onto the torch
    scheduler.
    """

    def __init__(self, scheduler_config, optimizer):
        # Robustness: 'patience' is now optional — falls back to 10,
        # which is torch's own default for ReduceLROnPlateau.
        super().__init__(
            optimizer,
            patience=scheduler_config.get('patience', 10),
            verbose=True,
        )

    def step(self, message):
        """Step the scheduler on the monitored metric.

        Args:
            message (dict): must contain ``'val_loss'``, the value whose
                plateau triggers a learning-rate reduction.
        """
        super().step(message['val_loss'])


@SchedulerFactory.register()
class CosineAnnealingLR(torch.optim.lr_scheduler.CosineAnnealingLR):
    """Config-driven wrapper around torch's CosineAnnealingLR.

    Reads ``'T_max'`` (required) and ``'eta_min'`` (optional) from the
    config dict and accepts — but ignores — a message dict in ``step`` so
    all factory schedulers share one calling convention.
    """

    def __init__(self, scheduler_config, optimizer):
        # Robustness: 'eta_min' is now optional — defaults to 0, which is
        # torch's own default. 'T_max' stays required (no sensible default).
        super().__init__(
            optimizer,
            T_max=scheduler_config['T_max'],
            eta_min=scheduler_config.get('eta_min', 0),
        )

    def step(self, message):
        """Advance the schedule by one step.

        Args:
            message (dict): unused — cosine annealing steps on a fixed
                schedule, not on a monitored metric.
        """
        super().step()
