package dev.alm.aiserviceintegration.controller;

import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.output.TokenUsage;
import jakarta.annotation.Resource;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("llm")
public class LLMController {

    // Chat model bean (DeepSeek) injected by field name via JSR-250 @Resource.
    @Resource
    private ChatModel deepseekChatModel;

    /**
     * Sends the user's message to the chat model and returns the model's reply
     * text with a trailing token-usage summary line appended.
     *
     * @param message the user prompt; defaults to "你是谁？" when the query
     *                parameter is absent
     * @return the model's reply text followed by a token-usage line
     */
    @GetMapping("chat")
    public String chat(@RequestParam(value = "message", defaultValue = "你是谁？") String message) {
        // Renamed from "chat" to avoid shadowing the method name.
        ChatResponse response = deepseekChatModel.chat(new UserMessage(message));
        String text = response.aiMessage().text();
        TokenUsage tokenUsage = response.tokenUsage();
        // Append token usage so the caller can see the cost of this request.
        return text + "\t\n" + "token使用量：" + tokenUsage;
    }

}
