#include <chrono>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

#include <torch/script.h>
#include <torch/torch.h>

#include "nlohmann/json.hpp"
#include "tokenizers_cpp.h"

using namespace nlohmann;
int main(int argc, char* argv[]) {
    // 解析命令行参数
    std::string base_model = "../Llama-3.2-1B";
    std::string weights_path = "";
    std::string test_data_path = "/home/jack/code/llms/test.json";
    std::string results_dir = "test_results/llama_3.2b_result.json";
    
    // 参数解析逻辑
    
    // 加载模型
    torch::jit::script::Module model;
    try {
        model = torch::jit::load(base_model);
        model.eval();
    } catch (const c10::Error& e) {
        std::cerr << "Error loading the model\n";
        return -1;
    }
    
    // 加载分词器
    
    // 读取测试数据
    std::ifstream file(test_data_path);
    json val_data;
    file >> val_data;
    
    // 推理过程
    json result;
    const std::string STOP_TOKEN = "\n\nEND";
    
    auto start_time = std::chrono::high_resolution_clock::now();
    
    for (auto& d : val_data) {
        std::string prompt = d["input"];
        prompt += "\n\n###\n";
        
        // 分词和编码
        
        // 模型推理
        std::vector<torch::jit::IValue> inputs;
        
        at::Tensor output = model.forward(inputs).toTensor();
        
        // 解码
        std::string response = ""; // 解码逻辑
        
        // 处理响应
        if (response.find(STOP_TOKEN) != std::string::npos) {
            response.erase(response.find(STOP_TOKEN), STOP_TOKEN.length());
        }
        
        d["gpt3_completion"] = response;
        result.push_back(d);
    }
    
    // 保存结果
    std::ofstream out_file(results_dir);
    out_file << result.dump(4);
    
    auto end_time = std::chrono::high_resolution_clock::now();
    auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(end_time - start_time);
    
    std::cout << "Time consumed: " << duration.count() << " ms" << std::endl;
    
    json results;
    results["base_model"] = base_model;
    results["time"] = duration.count() / 1000.0;
    
    std::ofstream time_file(weights_path + "/interface_time.json");
    time_file << results.dump(4);
    
    return 0;
}