package com.sense.controller;

import com.sense.common.lang.annotation.JsonResponse;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.web.bind.annotation.*;
import org.springframework.stereotype.Controller;

/**
 * @Author: jiangyong
 * @Date: 2025/02/08/16:37
 * @Description: Web controller exposing chat endpoints backed by an Ollama chat model.
 */
@Slf4j
@Controller
@AllArgsConstructor
public class DeepSeekController {

    // Injected through the Lombok-generated all-args constructor;
    // final so the dependency cannot be reassigned after construction.
    private final OllamaChatModel ollamaChatModel;

    /**
     * Entry page.
     *
     * @return the logical view name "chat" (resolved by the view layer;
     *         no {@code @ResponseBody} here on purpose)
     */
    @RequestMapping(value = "/")
    public String index() {
        return "chat";
    }

    /**
     * Answers a user question via the Ollama chat model.
     * A fixed system prompt (kept verbatim) is prepended to the question.
     *
     * @param request JSON body carrying the user's question
     * @return the raw model answer, written to the response body
     */
    @JsonResponse
    @PostMapping(value = "/ai/chat")
    @ResponseBody
    public String chat2(@RequestBody ChatRequest request) {
        String prompt = """
                所有的问题回复请说人话。
                """;
        String result = ollamaChatModel.call(prompt + ":" + request.getQuestion());
        // Constant format string: never pass model output as the SLF4J pattern.
        log.info("{}", result);
        return result;
    }

    /**
     * Demo endpoint: asks the model to translate a fixed sample paragraph.
     * Returns no response body; the result is only logged.
     */
    // Original chat method kept unchanged (behavior-wise)
    @RequestMapping(value = "/ai/ollama")
    @ResponseBody
    public void chat() {
        String prompt = """
                你是一个精通中文和英文的翻译大师。如果我给你英文就翻译成中文，给你中文就翻译成英文。
                """;
        String message = """
                Ollama now supports tool calling with popular models such as Llama 3.1.
                This enables a model to answer a given prompt using tool(s) it knows about,
                making it possible for models to perform more complex tasks or interact with the outside world.
                """;

        String result = ollamaChatModel.call(prompt + ":" + message);
        // Was System.out.println — route through the class logger instead.
        log.info("{}", result);
    }
}

// Request DTO for the chat endpoint.
/**
 * JSON request body accepted by {@code /ai/chat}.
 * Mutable bean with an implicit no-args constructor so Jackson can bind it.
 */
class ChatRequest {

    // The user's question text; bound from the "question" JSON field.
    private String question;

    /** @return the question supplied by the client (may be {@code null}) */
    public String getQuestion() {
        return question;
    }

    /** @param question the question supplied by the client */
    public void setQuestion(String question) {
        this.question = question;
    }
}
