import json, pytorch_lightning as pl
from model import build_model,build_tokenizer
from model import delete_local_model
from data_module import build_dataloaders,build_val_loaders
from lightning_module import UnlearnLitModule
from pytorch_lightning.callbacks import ModelCheckpoint
from utils import save_metrics_to_csv
import gc
import torch

def _build_trainer(cfg):
    """Create a fresh Trainer with a min-val_loss checkpoint callback.

    A new Trainer (and callback) is built per run because Lightning
    trainers retain loop/callback state from previous fit/validate calls.

    Args:
        cfg: parsed config dict; reads ``num_train_epochs``, ``strategy``
            and ``check_val_every_n_epoch``.

    Returns:
        A configured ``pl.Trainer`` instance.
    """
    checkpoint_callback = ModelCheckpoint(
        monitor='val_loss',
        dirpath='checkpoints/',
        filename='gptneo-125m-{epoch:02d}-{val_loss:.2f}',
        save_top_k=1,
        mode='min',
        save_weights_only=True,
    )
    return pl.Trainer(
        accelerator="gpu", devices=1, precision=16, max_epochs=cfg["num_train_epochs"],
        strategy=cfg["strategy"], callbacks=[checkpoint_callback], logger=False,
        check_val_every_n_epoch=cfg["check_val_every_n_epoch"],
        num_sanity_val_steps=0,
    )


def main():
    """Run the unlearning sweep: for each forget-set size, validate, train
    (unlearn) on each candidate set, and dump before/after metrics to CSV.
    """
    pl.seed_everything(42)
    # Use a context manager so the config file handle is closed deterministically
    # (the original `json.load(open(...))` leaked it).
    with open("configs/example_unlearn.json") as f:
        cfg = json.load(f)
    train_sets_list = cfg["train_sets"]
    tokenizer = build_tokenizer()
    val_loaders = build_val_loaders(cfg, tokenizer)
    # Outer-loop index -> forget-set size label used in output filenames.
    # NOTE(review): presumably these sizes mirror how train_sets_list was built
    # upstream — confirm against the config generator.
    size_map = {0: 1, 1: 4, 2: 8, 3: 32, 4: 128}
    for i, train_sets in enumerate(train_sets_list):
        size = size_map[i]
        delete_local_model()
        model = build_model()
        module = UnlearnLitModule(model, tokenizer, cfg)
        trainer = _build_trainer(cfg)
        # 1. Validate the public datasets before training.
        #    Index 0 is the per-run train-set loader and is refreshed inside
        #    the inner loop, so skip it here.
        for index, val_loader in enumerate(val_loaders):
            if index == 0:
                continue
            trainer.validate(module, val_loader)
        filepath_before = f"outputs/val_metrics_before_{size}_public.csv"
        save_metrics_to_csv(
            filepath=filepath_before,
            metrics_dict=module.val_metrics,
            header=["val_el10", "val_ma", "val_acc"],
        )
        module.clear_val_metrics()
        for j, train_set in enumerate(train_sets):
            print(f"[INFO] start choose the dataset which size = {size} index = {j}")
            # Fresh trainer + fresh model per candidate set so runs stay independent.
            trainer = _build_trainer(cfg)
            model = build_model()
            cfg["valid_sets"][0] = train_set
            train_loader, val_loader_first = build_dataloaders(cfg, train_set, tokenizer)
            val_loaders[0] = val_loader_first
            module = UnlearnLitModule(model, tokenizer, cfg)
            # 1. Validate on the training set itself before training.
            trainer.validate(module, val_loaders[0])
            filepath_before = f"outputs/val_metrics_before_{size}_{j}.csv"
            save_metrics_to_csv(
                filepath=filepath_before,
                metrics_dict=module.val_metrics,
                header=["val_el10", "val_ma", "val_acc"],
            )
            module.clear_val_metrics()
            # Train (unlearn) on this candidate set.
            trainer.fit(module, train_loader, val_loaders[0])
            local_path = "./model/gpt-neo125m"
            module.model.save_pretrained(local_path)
            tokenizer.save_pretrained(local_path)
            print(f"[INFO] 模型和分词器已保存到 {local_path}，用于下轮加载")
            # Release the train loader and reclaim GPU memory before the
            # post-training validation pass.
            del train_loader
            gc.collect()
            torch.cuda.empty_cache()
            # 2. Validate after training (all loaders, including index 0).
            for val_loader in val_loaders:
                trainer.validate(module, val_loader)
            filepath_after = f"outputs/val_metrics_after_{size}_{j}.csv"
            save_metrics_to_csv(
                filepath=filepath_after,
                metrics_dict=module.val_metrics,
                header=["val_el10", "val_ma", "val_acc"],
            )
            module.clear_val_metrics()
    
    
# Script entry point: run the unlearning sweep only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()
