package com.apadog.study.ai;

import ai.djl.*;
import ai.djl.huggingface.tokenizers.Encoding;
import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.inference.Predictor;
import ai.djl.modality.nlp.bert.BertTokenizer;
import ai.djl.modality.nlp.preprocess.Tokenizer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.Batchifier;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson2.JSONObject;

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;

public class QwenChatbot {

    /**
     * Entry point: tokenizes a fixed Chinese prompt with a HuggingFace tokenizer,
     * wraps the encoded tensors as model inputs, and runs them through a local
     * ONNX model using DJL's OnnxRuntime engine, printing the raw result.
     *
     * @param args unused
     * @throws MalformedModelException if the model file on disk is malformed
     * @throws IOException             if the tokenizer or model files cannot be read
     * @throws TranslateException      if inference fails inside the predictor
     */
    public static void main(String[] args) throws MalformedModelException, IOException, TranslateException {

        // Redirect DJL's download/cache directory away from the default user home.
        System.setProperty("DJL_CACHE_DIR", "D:/djl_cache");

        String modelDir = "D:/models/Qwen3-1.7B/model/bert-base-cased-squad2";
        // Plain Paths.get(dir, file) replaces the hutool StrUtil.format round-trip.
        Path modelPath = Paths.get(modelDir, "model.onnx");
        Path tokenPath = Paths.get(modelDir, "tokenizer.json");

        // DJL NDManager owns native memory; try-with-resources guarantees release.
        try (NDManager manager = NDManager.newBaseManager()) {

            HuggingFaceTokenizer tokenizer = HuggingFaceTokenizer.newInstance(tokenPath);
            Encoding encode = tokenizer.encode("你好，请介绍一下你自己。");

            NDArray inputIds = manager.create(encode.getIds());
            NDArray attentionMask = manager.create(encode.getAttentionMask());
            // BUG FIX: was encode.getIds() — token_type_ids must come from getTypeIds(),
            // otherwise the segment-id tensor silently duplicates the input ids.
            NDArray tokenTypeIds = manager.create(encode.getTypeIds());

            JSONObject inputMap = new JSONObject();
            inputMap.put("input_ids", inputIds.expandDims(0));         // [1, seq_len]
            inputMap.put("attention_mask", attentionMask.expandDims(0));
            inputMap.put("token_type_ids", tokenTypeIds.expandDims(0));

            // NOTE(review): Criteria<JSONObject, String> is declared without a Translator.
            // OnnxRuntime cannot convert JSONObject <-> NDList by itself, so loadModel or
            // predict will likely fail at runtime — a custom Translator (e.g. one built
            // from QwenTranslatorConfig, previously constructed here but never used)
            // should be supplied via optTranslator(...). Confirm against the DJL docs.
            Criteria<JSONObject, String> criteria = Criteria.builder()
                    .setTypes(JSONObject.class, String.class)
                    .optModelPath(modelPath)
                    .optEngine("OnnxRuntime")
                    .build();

            try (ZooModel<JSONObject, String> model = ModelZoo.loadModel(criteria);
                 Predictor<JSONObject, String> predictor = model.newPredictor()) {

                String result = predictor.predict(inputMap);
                System.out.println("输出结果：" + result);
            } catch (ModelNotFoundException e) {
                // Preserve the cause; model lookup failure is unrecoverable here.
                throw new RuntimeException(e);
            }
        }
    }
}
