package com.apadog.study.ai;

import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Path;
import java.nio.file.Paths;

/**
 * Holds a {@link HuggingFaceTokenizer} loaded from disk and builds a DJL
 * {@link Translator} that converts token-id arrays into decoded text for a
 * Qwen-style model.
 *
 * <p>Instances are immutable after construction. Thread-safety of the
 * translator itself depends on the underlying tokenizer — NOTE(review):
 * confirm before sharing one translator across inference threads.
 */
public class QwenTranslatorConfig {

    private final HuggingFaceTokenizer tokenizer;

    /**
     * Loads the tokenizer from the given filesystem path.
     *
     * @param tokenizerPath path string to the tokenizer file/directory
     * @throws UncheckedIOException if the tokenizer cannot be loaded
     */
    public QwenTranslatorConfig(String tokenizerPath) {
        // Delegate to the Path constructor so the load/error logic lives in one place.
        this(Paths.get(tokenizerPath));
    }

    /**
     * Loads the tokenizer from the given filesystem path.
     *
     * @param tokenizerPath path to the tokenizer file/directory
     * @throws UncheckedIOException if the tokenizer cannot be loaded
     */
    public QwenTranslatorConfig(Path tokenizerPath) {
        try {
            this.tokenizer = HuggingFaceTokenizer.newInstance(tokenizerPath);
        } catch (IOException e) {
            // UncheckedIOException preserves the IOException cause and extends
            // RuntimeException, so existing callers catching RuntimeException
            // are unaffected; the message adds the failing path for diagnosis.
            throw new UncheckedIOException("Failed to load tokenizer from " + tokenizerPath, e);
        }
    }

    /**
     * Builds a translator that wraps a token-id array as a batched NDArray on
     * input, and decodes the model output back to a string.
     *
     * @return a translator mapping {@code long[]} token ids to decoded text
     */
    public Translator<long[], String> getTranslator() {
        return new Translator<long[], String>() {

            @Override
            public NDList processInput(TranslatorContext ctx, long[] input) {
                NDManager manager = ctx.getNDManager();
                // Add a leading batch dimension: [seq_len] -> [1, seq_len].
                NDArray inputArr = manager.create(input).expandDims(0); // [1, seq_len]
                return new NDList(inputArr);
            }

            @Override
            public String processOutput(TranslatorContext ctx, NDList list) {
                NDArray output = list.get(0);
                long[] outputIds;

                if (output.getShape().dimension() == 3) {
                    // 3-D output is treated as logits [1, seq_len, vocab_size]:
                    // greedy-pick the highest-probability token per position.
                    outputIds = output.argMax(-1).toLongArray();
                } else {
                    // Otherwise assume the model already emitted token ids [1, seq_len].
                    outputIds = output.toLongArray();
                }

                // Second arg true: skip special tokens (e.g. padding/eos) when decoding.
                return tokenizer.decode(outputIds, true);
            }

            @Override
            public Batchifier getBatchifier() {
                // Input is pre-batched in processInput, so no batchifier is needed.
                return null;
            }
        };
    }

    /** Returns the tokenizer loaded at construction time. */
    public HuggingFaceTokenizer getTokenizer() {
        return tokenizer;
    }

}
