package com.lyh.ai.controller;

import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/ollama")
public class OllamaController {

    /** Ollama model tag used for the explicit per-request override in {@link #chat2}. */
    private static final String QWEN_MODEL = "qwen:0.5b-chat";

    private final OllamaChatModel ollamaChatModel;

    @Autowired
    public OllamaController(OllamaChatModel ollamaChatModel) {
        this.ollamaChatModel = ollamaChatModel;
    }

    /**
     * Sends the user message to the application-default Ollama model.
     *
     * @param message the user prompt text (query parameter)
     * @return the model's reply as plain text
     */
    @GetMapping("/chat1")
    public String chat1(String message) {
        return ollamaChatModel.call(message);
    }

    /**
     * Sends the user message with an explicit per-request model override.
     *
     * @param message the user prompt text (query parameter)
     * @return the model's reply as plain text
     */
    @GetMapping("/chat2")
    public String chat2(String message) {
        // FIX: previously OllamaModel.valueOf("qwen:0.5b-chat") was used here.
        // Enum.valueOf expects an enum CONSTANT name (e.g. "QWEN_2_5_7B"); a tag
        // containing ':' and '.' can never match one, so every request threw
        // IllegalArgumentException. OllamaOptions.Builder#withModel(String)
        // accepts the raw Ollama model tag directly.
        ChatResponse chatResponse = ollamaChatModel.call(new Prompt(message,
                OllamaOptions
                        .builder()
                        .withModel(QWEN_MODEL)
                        .build())
        );
        return chatResponse.getResult().getOutput().getContent();
    }
}
