# encoding: utf-8
# @Time:    :2025/2/6 21:36
import json
import time

import torch
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader

from model.model import ClassificationModel
from model.config import ModelConfig


class MyDataset(Dataset):
    """Dataset of token-id sequences read from a JSON-lines file.

    Each non-empty line of the file is a JSON list of token ids; every id
    except the last forms the input sequence, and the final id is the label.
    """

    def __init__(self, file_path: str):
        self.data = []   # input token-id sequences (python lists of ints)
        self.label = []  # one target token id per sequence
        self.load_data(file_path)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        sample = torch.tensor(self.data[idx], dtype=torch.long)
        target = torch.tensor(self.label[idx], dtype=torch.long)
        return sample, target

    def load_data(self, file_path: str):
        """Populate ``self.data`` / ``self.label`` from *file_path*."""
        with open(file_path, "r", encoding="utf-8") as f:
            for raw in f:
                stripped = raw.strip()
                if not stripped:
                    continue  # skip blank lines
                token_ids = json.loads(stripped)
                self.data.append(token_ids[:-1])
                self.label.append(token_ids[-1])


# NOTE(review): everything below runs at import time (not only under the
# __main__ guard) — importing this module builds the model and loads the
# full dataset as a side effect. Confirm whether that is intentional.
# device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
device = torch.device("cpu")  # CUDA selection is commented out above; training is pinned to CPU
config = ModelConfig()
model = ClassificationModel(config)
model.to(device)

# Training corpus: one JSON list of token ids per line (parsed by MyDataset).
train_path = "./datas/pretrain_data_demo.txt"
train_dataset = MyDataset(train_path)

if __name__ == "__main__":
    epochs = 10
    batch_size = 16
    # ignore_index=0 skips targets equal to the padding id when computing loss
    criterion = torch.nn.CrossEntropyLoss(ignore_index=0)
    learn_rate = 1e-4
    optimizer = torch.optim.Adam(model.parameters(), lr=learn_rate, weight_decay=1e-4)

    train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)

    for epoch in range(epochs):
        model.train()
        start = time.time()
        loss_sum = 0.0
        correct = 0
        for data, label in train_loader:
            data = data.to(device)
            label = label.to(device).long()
            optimizer.zero_grad()
            # BUG FIX: the original called model(data).long(), which truncates
            # the float logits to integers AND detaches them from the autograd
            # graph (integer tensors carry no grad_fn), so backward() could not
            # update the model. Logits must stay float for CrossEntropyLoss.
            outputs = model(data)
            loss = criterion(outputs.view(-1, outputs.size(-1)), label.view(-1))
            loss.backward()
            optimizer.step()
            # .item() is the supported way to read a scalar tensor
            # (replaces the deprecated .cpu().data.numpy() idiom).
            loss_sum += loss.item()
            correct += (outputs.argmax(1) == label).sum().item()

        end = time.time()
        # Report accuracy as a fraction, not a raw correct-count.
        train_acc = correct / max(len(train_dataset), 1)
        logs = f"""
        epoch:{epoch + 1}, train acc: {train_acc:.4f}, train loss: {loss_sum:.4f}, use time: {round(end - start, 3)}
        """
        # BUG FIX: the original built the log string but never emitted it.
        print(logs)
