import os
from typing import Any, Dict, List, Tuple, Union

import pytorch_lightning as pl
import torch
from torch.optim.lr_scheduler import LambdaLR, MultiStepLR
from torch.optim.sgd import SGD

from ..yolo.data import create_dataloader
from ..yolo.loss import YoloLoss
from ..yolo.yolo import YoloModel


class YoloSystem(pl.LightningModule):
    """Lightning wrapper around a YOLO detector.

    Owns the dataloader factory, the YOLO model, and the YOLO loss, and wires
    them into the standard Lightning train/validation loop with an SGD
    optimizer (per-step linear warmup + epoch-level step decay).
    """

    def __init__(
        self,
        backbone: str,
        lambda_coord: float,
        lambda_noobj: float,
        grid_size: int,
        bounding_box_num: int,
        data_path: List[str],
        data_series: str,
        data_version: List[str],
        auto_download: bool,
        batch_size: int,
        workers_num: Union[int, str],
    ) -> None:
        """Build dataloaders, model, and loss.

        Args:
            backbone: Backbone identifier forwarded to ``YoloModel``.
            lambda_coord: Coordinate-loss weight forwarded to ``YoloLoss``.
            lambda_noobj: No-object-loss weight forwarded to ``YoloLoss``.
            grid_size: YOLO output grid size (S).
            bounding_box_num: Boxes predicted per grid cell (B).
            data_path: Dataset root path(s).
            data_series: Dataset series name (e.g. a dataset family).
            data_version: Dataset version identifier(s).
            auto_download: Download the dataset if missing.
            batch_size: Batch size for the dataloaders.
            workers_num: Dataloader worker count; any non-int value (e.g.
                ``"auto"``) selects CPU count minus one.
        """
        super().__init__()
        # Non-int workers_num falls back to (cpu_count - 1). os.cpu_count()
        # may return None, and on a single-core box cpu_count - 1 would be 0
        # (which silently disables worker processes), so clamp to >= 1.
        if isinstance(workers_num, int):
            workers = workers_num
        else:
            workers = max(1, (os.cpu_count() or 2) - 1)
        self.dataloader, class_num = create_dataloader(
            data_path,
            data_series,
            data_version,
            auto_download,
            grid_size,
            bounding_box_num,
            batch_size,
            workers,
        )
        self.model = YoloModel(backbone, grid_size, bounding_box_num, class_num)
        self.loss = YoloLoss(lambda_coord, lambda_noobj, bounding_box_num, class_num)

    def on_fit_start(self) -> None:
        # Allow TF32 matmuls on Ampere+ GPUs for throughput at fit time.
        torch.set_float32_matmul_precision("high")

    def train_dataloader(self) -> Any:
        return self.dataloader.train_dataloader()

    def val_dataloader(self) -> Any:
        return self.dataloader.val_dataloader()

    def _shared_step(
        self, batch: Tuple[torch.Tensor, torch.Tensor], stage: str
    ) -> torch.Tensor:
        """Run forward + loss for one batch, log every loss component under
        ``{stage}/{key}``, and return the total loss tensor."""
        x, y = batch
        predict = self.model(x)
        loss = self.loss(predict, y)
        for key, value in loss.items():
            self.log(
                f"{stage}/{key}", value, sync_dist=True, on_epoch=True, on_step=False
            )
        return loss["loss"]

    def training_step(
        self, batch: Tuple[torch.Tensor, torch.Tensor], batch_idx: int
    ) -> torch.Tensor:
        """One optimization step; returns the scalar training loss."""
        return self._shared_step(batch, "train")

    def validation_step(
        self, batch: Tuple[torch.Tensor, torch.Tensor], batch_idx: int = 0
    ) -> torch.Tensor:
        """One validation step; returns the scalar validation loss."""
        return self._shared_step(batch, "val")

    def configure_optimizers(self):
        """SGD with a one-epoch linear warmup (per step) plus step decay at
        1/2 and 3/4 of ``max_epochs`` (per epoch)."""
        optimizer = SGD(self.parameters(), lr=0.001, momentum=0.9, weight_decay=0.0005)

        num_steps_per_epoch = len(self.train_dataloader())

        # NOTE(review): LambdaLR multiplies the *base* lr (0.001), so the
        # effective lr warms up 1e-6 -> 1e-5, not 0.001 -> 0.01 as these
        # constants suggest. Confirm whether an absolute warmup was intended.
        def lr_lambda(step):
            return min(1.0, step / num_steps_per_epoch) * 0.009 + 0.001

        scheduler1 = LambdaLR(optimizer, lr_lambda)
        scheduler2 = MultiStepLR(
            optimizer,
            milestones=[self.trainer.max_epochs // 2, self.trainer.max_epochs // 4 * 3],
            gamma=0.1,
        )

        return [optimizer], [
            {"scheduler": scheduler1, "interval": "step"},
            {"scheduler": scheduler2, "interval": "epoch"},
        ]
