import torch

import os
import sys

# Make this script's directory and its parent importable regardless of the
# current working directory, so `cs336_basics` resolves either way.
current_dir = os.path.dirname(os.path.abspath(__file__))
parent_dir = os.path.dirname(current_dir)
for _p in (parent_dir, current_dir):
    sys.path.insert(0, _p)
from cs336_basics.model import Transformer
from cs336_basics.tokenizer import Tokenizer
from cs336_basics.train import Config, load_config
from cs336_basics.checkpointing import load_checkpoint
from cs336_basics.decoding import decode


def main():
    """Load a trained Transformer checkpoint and print one sampled completion.

    The tokenizer files and the checkpoint path are hard-coded below; edit
    them to point at the artifacts you want to sample from. Sampling uses the
    prompt "The" with temperature 0.7 and top-p 0.99.
    """
    # TinyStories BPE tokenizer artifacts produced by training.
    vocab_path = "./out/ts-train-vocab.txt"
    merges_path = "./out/ts-train-merges.txt"

    tokenizer = Tokenizer.from_files(
        vocab_path, merges_path, special_tokens=["<|endoftext|>"]
    )

    # Model hyperparameters come from the default training config; they must
    # match the architecture the checkpoint was trained with.
    config = Config(load_config())

    # Fall back to CPU when CUDA is unavailable instead of crashing on
    # machines without a GPU.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model = Transformer(**config.model, device=device, dtype=torch.float32)

    # TinyStories checkpoint (val loss ~1.38 per the original author's note).
    checkpoint = r"C:\Users\cwj\Desktop\cs336\a1\assignment1-basics-main\cs336\cs336_basics\out\run_1758784033\checkpoints\checkpoint_5000.pt"
    load_checkpoint(checkpoint, model)

    print(
        decode(model, tokenizer, "The", max_new_tokens=512, temperature=0.7, top_p=0.99)
    )


# Run sampling only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
