from lightning import fabric
import lightning as L


class FabricTrainer:
    """Thin training harness built on ``lightning.fabric.Fabric``.

    Owns the Fabric object (device placement, precision, distributed
    strategy) plus the training-loop configuration, and wires model,
    optimizer and dataloaders onto the configured devices in :meth:`fit`.
    """

    def __init__(self,
                 accelerator,
                 strategy,
                 devices,
                 precision,
                 plugins,
                 callbacks,
                 loggers,
                 max_epochs,
                 max_steps,
                 gradient_accumulation_steps,
                 limit_train_batches,
                 limit_val_batches,
                 val_check_interval,
                 checkpoint_dir: str,
                 checkpoint_frequency: int,
                 use_distributed_sampler: bool = True,
                 ):
        """Create the Fabric object and record training configuration.

        The first seven arguments are forwarded verbatim to
        ``fabric.Fabric``; the rest configure the training loop and
        checkpointing.
        """
        self.fabric = fabric.Fabric(
            accelerator=accelerator,
            strategy=strategy,
            devices=devices,
            precision=precision,
            plugins=plugins,
            callbacks=callbacks,
            loggers=loggers,
        )
        # BUGFIX: these arguments were previously accepted but silently
        # discarded. Store them so the training loop / checkpointing code
        # can actually consume the caller-supplied configuration.
        self.max_epochs = max_epochs
        self.max_steps = max_steps
        self.gradient_accumulation_steps = gradient_accumulation_steps
        self.limit_train_batches = limit_train_batches
        self.limit_val_batches = limit_val_batches
        self.val_check_interval = val_check_interval
        self.checkpoint_dir = checkpoint_dir
        self.checkpoint_frequency = checkpoint_frequency
        self.use_distributed_sampler = use_distributed_sampler

    def fit(self, model, optimizer, scheduler, train_dataloader, val_dataloader, ckpt_path):
        """Launch Fabric processes and set up model/optimizer/dataloaders.

        Args:
            model: the model to train; moved to the right device(s) by Fabric.
            optimizer: optimizer to wrap together with the model.
            scheduler: LR scheduler. NOTE(review): unused in the visible
                code — presumably consumed by the training loop; confirm.
            train_dataloader: training dataloader, sharded across devices.
            val_dataloader: optional validation dataloader; may be ``None``.
            ckpt_path: checkpoint path to resume from. NOTE(review): unused
                in the visible code — confirm it is handled downstream.

        Raises:
            NotImplementedError: if the configured strategy is FSDP, which
                this trainer does not support.
        """
        print(
            "Setting up fabric.",
            "Global rank: ", self.fabric.global_rank,
            "Node rank:", self.fabric.node_rank,
            "Local rank: ", self.fabric.local_rank
        )
        self.fabric.launch()

        # Split dataloaders
        self.fabric.print("Splitting dataloaders into different devices.")
        train_dataloader = self.fabric.setup_dataloaders(
            train_dataloader,
            use_distributed_sampler=self.use_distributed_sampler
        )
        print(f"Node rank {self.fabric.node_rank} batch number of train dataloader {len(train_dataloader)}")

        if val_dataloader is not None:
            val_dataloader = self.fabric.setup_dataloaders(
                val_dataloader,
                use_distributed_sampler=self.use_distributed_sampler
            )

        # setup model and optimizer
        # BUGFIX: the guard previously tested DDPStrategy, contradicting
        # both this comment and the error message — it rejected plain DDP
        # (which fabric.setup handles fine) while letting FSDP through.
        if isinstance(self.fabric.strategy, L.fabric.strategies.FSDPStrategy):
            # currently, there is no way to support fsdp with model.configure_optimizers in fabric
            # as it would require fabric to hold a reference to the model, which we don't want to.
            raise NotImplementedError("BYOT currently does not support FSDP")

        model, optimizer = self.fabric.setup(model, optimizer)

