# -*- coding: utf-8 -*-
# @Time    : 2025/9/22 18:31
# @Author  : chenmh
# @File    : main.py
# @Desc: 主函数运行入口

from bst.model import BSTransformer
import torch
import numpy as np
from bst.dataset import train_loader, valid_loader, test_loader
from bst.config import BSTConfig, bst_config
from bst.trainer import Trainer
from transformers import get_linear_schedule_with_warmup
from utils import setup_logging, get_latest_pth_file, set_seed


def main():
    """Entry point: build the BST model, optimizer, LR scheduler and Trainer, then run training."""
    set_seed()

    # Model, loss and optimizer driven entirely by the shared bst_config.
    model = BSTransformer(config=bst_config)
    criterion = torch.nn.CrossEntropyLoss()
    optimizer = torch.optim.AdamW(params=model.parameters(), lr=bst_config.learning_rate)

    # Linear warmup then linear decay over the full training schedule.
    total_steps = len(train_loader) * bst_config.num_epochs
    warmup_steps = int(total_steps * bst_config.warmup_steps_rate)
    scheduler = get_linear_schedule_with_warmup(
        optimizer,
        num_warmup_steps=warmup_steps,
        num_training_steps=total_steps,
    )

    device = "cuda" if torch.cuda.is_available() else "cpu"
    patience = bst_config.patience
    best_model_name = bst_config.best_model_name
    save_path = bst_config.save_path

    # When resuming, reuse the timestamp of the latest checkpoint so logs append
    # to the same run; otherwise start a fresh log.
    log_time = get_latest_pth_file()[1] if bst_config.is_continued_train else ""
    logger, save_time = setup_logging(save_dir=save_path, log_file_name=best_model_name, log_time=log_time)

    trainer = Trainer(
        model=model,
        criterion=criterion,
        optimizer=optimizer,
        scheduler=scheduler,
        train_loader=train_loader,
        valid_loader=valid_loader,
        test_loader=test_loader,
        device=device,
        num_epochs=bst_config.num_epochs,
        patience=patience,
        best_model_name=best_model_name,
        save_path=save_path,
        save_time=save_time,
        logger=logger,
        is_continued_train=bst_config.is_continued_train,
        continued_timestamp=bst_config.continued_timestamp,
        early_stop_metric=bst_config.early_stop_metric,
    )

    trainer.train()


# Script entry guard: run training only when executed directly, not on import.
if __name__ == "__main__":
    main()
