import os
import torch
from torch.optim import Adam
from transformers import AutoModelForSequenceClassification
from dataloader_wrapper import get_dataloader,get_splited_dataset

# Route HTTP(S) traffic (e.g. the HuggingFace Hub download in get_model)
# through a local proxy.
# NOTE(review): hard-coded loopback proxy address — presumably a dev-machine
# setting; confirm before running in any other environment.
os.environ['HTTP_PROXY'] = 'http://127.0.0.1:10792'
os.environ['HTTPS_PROXY'] = 'http://127.0.0.1:10792'

def get_model():
    """Load the pretrained "hfl/rbt3" sequence-classification model.

    Returns:
        The model on GPU when CUDA is available, otherwise on CPU.
    """
    classifier = AutoModelForSequenceClassification.from_pretrained("hfl/rbt3")
    return classifier.cuda() if torch.cuda.is_available() else classifier

def get_optimizer(model=None):
    """Build an Adam optimizer (lr=2e-5) over ``model``'s parameters.

    Bug fix: the original always constructed a *new* model internally, so the
    returned optimizer updated the parameters of a throwaway model rather than
    the one being trained. Callers should now pass the model to optimize.

    Args:
        model: the model whose parameters should be optimized. When ``None``
            (the old zero-argument call shape), a fresh model is loaded via
            ``get_model()``, preserving the original behavior.

    Returns:
        A ``torch.optim.Adam`` instance over ``model.parameters()``.
    """
    if model is None:
        model = get_model()
    return Adam(model.parameters(), lr=2e-5)

def evaluate(model=None):
    """Compute accuracy on the validation split.

    Bug fix: the original always loaded a fresh pretrained model, so it could
    never measure the model actually being trained. Callers should pass the
    trained model explicitly.

    Args:
        model: model to evaluate. When ``None`` (the old zero-argument call
            shape), a fresh model is loaded via ``get_model()``.

    Returns:
        Validation accuracy as a float in [0, 1].
    """
    _, valid_dataloader = get_dataloader()
    _, validset = get_splited_dataset()
    if model is None:
        model = get_model()
    model.eval()
    acc_num = 0
    # inference_mode disables autograd bookkeeping for the whole loop.
    with torch.inference_mode():
        for batch in valid_dataloader:
            if torch.cuda.is_available():
                batch = {k: v.cuda() for k, v in batch.items()}
            output = model(**batch)
            pred = torch.argmax(output.logits, dim=-1)
            acc_num += (pred.long() == batch["labels"].long()).float().sum()
    # float() so callers get a plain number rather than a 0-d tensor.
    return float(acc_num) / len(validset)

def _run_validation(model):
    """Compute validation accuracy (in [0, 1]) for *model*.

    Private helper for ``train``: evaluates the model instance it is given,
    rather than loading a fresh pretrained model the way the module-level
    ``evaluate()`` does.
    """
    _, valid_dataloader = get_dataloader()
    _, validset = get_splited_dataset()
    model.eval()
    correct = 0
    with torch.inference_mode():
        for batch in valid_dataloader:
            if torch.cuda.is_available():
                batch = {k: v.cuda() for k, v in batch.items()}
            logits = model(**batch).logits
            pred = torch.argmax(logits, dim=-1)
            correct += (pred.long() == batch["labels"].long()).float().sum()
    return correct / len(validset)

def train(epoch=3, log_step=100):
    """Fine-tune the model for ``epoch`` epochs, logging every ``log_step`` steps.

    Args:
        epoch: number of passes over the training dataloader.
        log_step: print the current loss every this many optimizer steps.
    """
    model = get_model()
    # Bug fix: the original called get_optimizer(), which built its *own*
    # fresh model and optimized that model's parameters — the model trained
    # here was never updated by optimizer.step(). Build the optimizer over
    # this model's parameters instead.
    optimizer = Adam(model.parameters(), lr=2e-5)
    train_dataloader, _ = get_dataloader()  # also fixes the 'tain_' typo
    global_step = 0
    for ep in range(epoch):
        model.train()  # re-enable train mode after validation set eval()
        for batch in train_dataloader:
            if torch.cuda.is_available():
                batch = {k: v.cuda() for k, v in batch.items()}
            optimizer.zero_grad()
            output = model(**batch)
            output.loss.backward()
            optimizer.step()
            if global_step % log_step == 0:
                print(f"ep: {ep}, global_step: {global_step}, loss: {output.loss.item()}")
            global_step += 1
        # Bug fix: evaluate the model we just trained; the original called
        # evaluate(), which loaded a fresh pretrained model every epoch.
        acc = _run_validation(model)
        print(f"ep: {ep}, acc: {acc}")

# Script entry point: run the default fine-tuning loop (3 epochs).
if __name__ == "__main__":
    train()