from transformers import LlamaForCausalLM, CodeLlamaTokenizer, AutoTokenizer
from tqdm import tqdm
import torch
import os
from generate import gen_mid
from get_args import parser_eval
from solve import get_data, save_data

def main():
    """Run adapter-augmented LLaMA generation over a dataset and save results.

    Workflow: parse CLI args (``parser_eval``), load the base model in
    bfloat16 onto the requested device, attach the fine-tuned adapter,
    configure the tokenizer, generate one output per data item via
    ``gen_mid``, and persist everything with ``save_data``.

    All configuration (model path, adapter path, device, data locations)
    comes from the command-line arguments defined in ``parser_eval``.
    """
    parser = parser_eval()
    args = parser.parse_args()

    device = args.device

    # Load the base model in bfloat16 and attach the fine-tuned adapter
    # (adapter path resolved to absolute, as load_adapter expects).
    model = LlamaForCausalLM.from_pretrained(
        args.model_path, torch_dtype=torch.bfloat16
    ).to(device)
    model.load_adapter(os.path.abspath(args.adapter_path))

    tokenizer = AutoTokenizer.from_pretrained(args.model_path)
    # Reuse the UNK token for padding (LLaMA tokenizers ship without a pad
    # token). NOTE(review): this assumes unk_token is set — some newer Llama
    # tokenizers have unk_token=None, which would leave pad_token unset;
    # verify against the model actually used.
    tokenizer.pad_token = tokenizer.unk_token
    tokenizer.pad_token_id = tokenizer.unk_token_id

    data = get_data(args)

    # gen_mid expects a batch (list of items), so each item is wrapped in a
    # single-element list; one generation result is collected per item.
    output = [
        gen_mid([item], model, tokenizer, torch.bfloat16)
        for item in tqdm(data)
    ]

    save_data(output, args)
    
# Script entry point: run generation only when executed directly,
# not when imported as a module.
if __name__ == '__main__':
    main()