from typing import Union, List, Optional, Any

import torch
import torch.nn as nn
import torch.optim.lr_scheduler as lrs
from pytorch_lightning import LightningModule

from pytorch_lightning.utilities.types import STEP_OUTPUT


class DecoModel(LightningModule):
    """Decorator model wrapping a base classifier with standard Lightning plumbing.

    Bundles a cross-entropy loss, an Adam optimizer (with optional LR
    scheduler) and the training / validation / prediction hooks around a
    wrapped ``model``.  It can be used either by delegation (pass the model
    in) or via inheritance.

    Args:
        config: namespace-like object providing ``learning_rate``,
            ``weight_decay`` and ``lr_scheduler`` (``None``, ``'step'`` or
            ``'cosine'``); when a scheduler is selected it must also provide
            ``lr_decay_steps``, ``lr_decay_rate`` and ``lr_decay_min_lr``.
        model: the wrapped ``nn.Module``.  It is called as ``model(batch)``
            and is expected to return class logits of shape
            ``(batch_size, num_classes)``; batches are expected to expose a
            ``label`` attribute with the integer class targets.
    """

    def __init__(self, config, model):
        super().__init__()
        self.config = config
        self.model = model
        self.loss = nn.CrossEntropyLoss()

    def _shared_step(self, batch):
        """Run the model on a batch and compute per-batch loss and accuracy.

        Returns:
            dict with ``loss`` (differentiable cross-entropy scalar) and
            ``acc`` (fraction of correct predictions in the batch, float
            scalar tensor in [0, 1]).
        """
        logits = self.model(batch)
        loss = self.loss(logits, batch.label)
        # log-softmax is monotonic, so argmax over raw logits picks the same
        # class; no need to build a LogSoftmax module every step.
        preds = logits.argmax(dim=1)
        # Mean (not sum) so 'acc' is a true accuracy fraction; the previous
        # implementation logged the average *count* of correct predictions.
        acc = (preds == batch.label).float().mean()
        return dict(loss=loss, acc=acc)

    def training_step(self, batch, batch_idx) -> STEP_OUTPUT:
        return self._shared_step(batch)

    def validation_step(self, batch, batch_idx):
        return self._shared_step(batch)

    def configure_optimizers(self):
        """Build the Adam optimizer and, if configured, an LR scheduler.

        Returns:
            The bare optimizer when ``config.lr_scheduler`` is ``None``,
            otherwise the Lightning ``([optimizers], [schedulers])`` pair.

        Raises:
            ValueError: if ``config.lr_scheduler`` is neither ``None``,
                ``'step'`` nor ``'cosine'``.
        """
        optimizer = torch.optim.Adam(
            self.parameters(),
            lr=self.config.learning_rate,
            weight_decay=self.config.weight_decay)

        if self.config.lr_scheduler is None:
            return optimizer

        if self.config.lr_scheduler == 'step':
            scheduler = lrs.StepLR(optimizer,
                                   step_size=self.config.lr_decay_steps,
                                   gamma=self.config.lr_decay_rate)
        elif self.config.lr_scheduler == 'cosine':
            scheduler = lrs.CosineAnnealingLR(optimizer,
                                              T_max=self.config.lr_decay_steps,
                                              eta_min=self.config.lr_decay_min_lr)
        else:
            raise ValueError('Invalid lr_scheduler type!')
        return [optimizer], [scheduler]

    def on_validation_batch_end(self, outputs: Optional[STEP_OUTPUT], batch, batch_idx: int,
                                dataloader_idx: int) -> None:
        # Intentional no-op: per-batch results are aggregated in
        # validation_epoch_end instead.
        pass

    def training_epoch_end(self, outputs) -> None:
        """Aggregate per-batch training metrics and log epoch averages."""
        avg_loss = torch.stack([x['loss'] for x in outputs]).mean()
        avg_acc = torch.stack([x['acc'] for x in outputs]).mean()
        # Fixed copy-paste bug: these were previously logged as
        # 'val_loss'/'val_acc', colliding with the validation metrics.
        self.log('train_loss', avg_loss)
        self.log('train_acc', avg_acc)

    def validation_epoch_end(self, outputs):
        """Aggregate per-batch validation metrics and log epoch averages."""
        avg_loss = torch.stack([x['loss'] for x in outputs]).mean()
        avg_acc = torch.stack([x['acc'] for x in outputs]).mean()
        self.log('val_loss', avg_loss)
        self.log('val_acc', avg_acc)

    def predict_step(self, batch, batch_idx, dataloader_idx=0) -> Any:
        """Return the predicted class index for each sample in the batch."""
        logits = self.model(batch)
        # Argmax of logits equals argmax of (log-)softmax — scores themselves
        # are not needed for prediction.
        return logits.argmax(dim=1)
