import torch.nn as nn
from torch.optim import AdamW
from torch.optim.lr_scheduler import ExponentialLR

from auxmodels.core.model import NNModel, NNLoss

from .net import CenterNet
from .loss import CenterLoss


class PretrainModel(NNModel):
    """Model configuration for the pretraining stage.

    Wires a CenterNet backbone with a 7-class head to the shared
    heatmap + bounding-box CenterLoss, AdamW, and exponential LR decay.
    """

    def init_network(self) -> nn.Module:
        """Build the CenterNet backbone used during pretraining."""
        network = CenterNet(base_channel=32, repeats=3, num_cls=7)
        return network

    def init_loss(self) -> NNLoss:
        """Equally-weighted heatmap and bounding-box loss."""
        loss = CenterLoss(sub_loss_names=["hm", "bbox"], sub_loss_weights=[1, 1])
        return loss

    def init_optimizer(self, lr):
        """AdamW over all network parameters at the given learning rate."""
        optimizer = AdamW(self.network.parameters(), lr=lr)
        return optimizer

    def init_lr_scheduler(self):
        """Exponential decay: LR is multiplied by 0.95 each scheduler step."""
        scheduler = ExponentialLR(self.optimizer, gamma=0.95)
        return scheduler

    def init_decoder(self):
        """No decoder is used during pretraining."""
        return None


class FinetuneModel(NNModel):
    """Model configuration for the finetuning stage.

    Same training recipe as pretraining (CenterLoss + AdamW + exponential
    decay), but the CenterNet head keeps its default class count.
    """

    def init_network(self) -> nn.Module:
        """Build the CenterNet backbone with the default number of classes."""
        net = CenterNet(base_channel=32, repeats=3)
        return net

    def init_loss(self) -> NNLoss:
        """Equally-weighted heatmap and bounding-box loss."""
        criterion = CenterLoss(sub_loss_names=["hm", "bbox"], sub_loss_weights=[1, 1])
        return criterion

    def init_optimizer(self, lr):
        """AdamW over all network parameters at the given learning rate."""
        opt = AdamW(self.network.parameters(), lr=lr)
        return opt

    def init_lr_scheduler(self):
        """Exponential decay: LR is multiplied by 0.95 each scheduler step."""
        sched = ExponentialLR(self.optimizer, gamma=0.95)
        return sched

    def init_decoder(self):
        """Finetuning runs without a decoder."""
        return None
