from accelerator_trainer import AccelerateTrainer
import torch
from transformers import get_scheduler
from lightning import seed_everything
# NOTE(review): DataLoaderShard is imported but never used in this file — confirm
# it isn't needed (e.g. for a side effect or re-export) before removing.
from accelerate.data_loader import DataLoaderShard
# Seed all RNGs (Python, NumPy, torch) at import time so the toy run is reproducible.
seed_everything(42)


def main():
    """Train a small toy MLP end-to-end through AccelerateTrainer.

    Builds a 1 -> 10 -> 5 -> 1 sigmoid MLP, an Adam optimizer with a linear
    warmup schedule, and runs `trainer.fit` using the toy dataloader for both
    training and validation.
    """
    from toy_data import toy_dataloader

    # Tiny regression network: two hidden sigmoid layers, scalar in/out.
    layers = [
        torch.nn.Linear(1, 10),
        torch.nn.Sigmoid(),
        torch.nn.Linear(10, 5),
        torch.nn.Sigmoid(),
        torch.nn.Linear(5, 1),
    ]
    model = torch.nn.Sequential(*layers)

    optimizer = torch.optim.Adam(model.parameters(), lr=0.1)
    # Linear warmup over the first 100 steps, then linear decay to step 1000.
    scheduler = get_scheduler(
        "linear",
        optimizer,
        num_warmup_steps=100,
        num_training_steps=1000,
    )

    # Trainer configuration kept in one dict for readability; values are the
    # toy-run defaults (W&B logging, periodic validation and checkpointing).
    trainer_kwargs = {
        "gradient_accumulation_steps": 2,
        "limit_train_batches": float("inf"),  # no cap on batches per epoch
        "limit_val_batches": float("inf"),
        "checkpoint_dir": "./checkpoints",
        "log_with": "wandb",
        "project_name": "test-mc-trainer",
        "experiment_args": {"batch_size": 32},
        "log_every_n_steps": 1,
        "valid_every_n_steps": 100,
        "max_steps": 1000,
        "max_epochs": 1000,
        "checkpoint_frequency": 100,
        "wandb_entity": "matwings",
        "valid_at_epoch_end": False,
    }
    trainer = AccelerateTrainer(**trainer_kwargs)

    # Same dataloader serves as both the train and validation loader in this toy run.
    # To resume from a checkpoint, pass e.g.
    # ckpt_path="./checkpoints/test-mc-trainer_epoch=3_step=200" to fit().
    trainer.fit(
        model,
        optimizer,
        scheduler,
        toy_dataloader,
        toy_dataloader,
    )


# Script entry point: run the toy training loop when executed directly.
if __name__ == '__main__':
    main()
