package com.cn.lp.ai.factory;

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.memory.InMemoryChatMemory;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.ai.document.Document;
import org.springframework.ai.qianfan.QianFanChatModel;
import org.springframework.ai.vectorstore.RedisVectorStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;
import java.util.Map;

/**
 * Demo controller showcasing Spring AI features: plain chat, streaming-style ChatClient calls,
 * vector storage/search, RAG, and chat memory. Disabled by default (the {@code @RestController}
 * annotation is commented out).
 *
 * <p>Difference between ChatModel and ChatClient: ChatClient is a single shared client that can
 * target any model, whereas ChatModel implementations are model-specific (here: QianFan).
 */
//@RestController
public class ChatDemoController {

    /** Model-agnostic client; configured elsewhere in the application context. */
    @Autowired
    private ChatClient chatClient;

    /** Model-specific client for Baidu QianFan. */
    @Autowired
    private QianFanChatModel chatModel;

    /** Sample audio clip used by the (commented-out) speech-to-text endpoint below. */
    @Value("classpath:/speech/jfk.flac")
    private Resource audioFile;

    /** Project service handling document loading and vector search. */
    @Autowired
    private DocumentService documentService;

    /** Redis-backed vector store (used by the commented-out evaluation endpoint). */
    @Autowired
    private RedisVectorStore vectorStore;

    /** In-memory conversation history backing the /ai/chatMemory endpoint. */
    private final InMemoryChatMemory chatMemory = new InMemoryChatMemory();

    /**
     * Simple one-shot chat call.
     *
     * <p>NOTE(review): the path says "openAiChat" but this delegates to the QianFan model —
     * confirm whether the naming is intentional.
     *
     * @param message user prompt (defaults to "Tell me a joke")
     * @return a single-entry map: {"generation": model reply}
     */
    @GetMapping("/ai/openAiChat")
    public Map<String, String> openAiChat(@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
        return Map.of("generation", chatModel.call(message));
    }

    /**
     * Simple one-shot chat call (same implementation as {@link #openAiChat}, different path).
     *
     * @param message user prompt (defaults to "Tell me a joke")
     * @return a single-entry map: {"generation": model reply}
     */
    @GetMapping("/ai/generate")
    public Map<String, String> generate(@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
        return Map.of("generation", chatModel.call(message));
    }

    /**
     * ChatClient usage with a parameterized system prompt.
     *
     * @param message user prompt (defaults to "Tell me a joke")
     * @param voice   value substituted for the {voice} placeholder in the system prompt template
     * @return a single-entry map: {"completion": model reply}
     */
    @GetMapping("/ai/chatClient")
    Map<String, String> completion(@RequestParam(value = "message", defaultValue = "Tell me a joke") String message, String voice) {
        return Map.of(
                "completion",
                chatClient.prompt()
                        .system(sp -> sp.param("voice", voice))
                        .user(message)
                        .call()
                        .content());
    }

    /**
     * Image generation (text-to-image) — kept as a disabled example.
     *
     * @param message prompt describing the image
     * @return map with the generated image URL
     */
//    @GetMapping("/ai/imageGeneration")
//    public Map imageGeneration(@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
//        OpenAiImageOptions imageOptions = OpenAiImageOptions.builder()
//                .withQuality("hd")
//                .withN(1)
//                .withHeight(1024)
//                .withWidth(1024).build();
//        ImagePrompt imagePrompt = new ImagePrompt(message, imageOptions);
//        ImageResponse response = imageModel.call(imagePrompt);
//        return Map.of("generation", response.getResult().getOutput().getUrl());
//    }

    /**
     * Multimodality — kept as a disabled example.
     *
     * <p>For example, to say "I have an apple" I can write it as text, speak it aloud, or draw it
     * as a picture — the same information expressed in multiple modalities.
     *
     * @param message user prompt about the image
     * @return model description of the image
     */
//    @GetMapping("/ai/multimodal")
//    public String Multimodal(@RequestParam(value = "message", defaultValue = "解释一下你在这张图片上看到了什么?") String message) throws MalformedURLException {
//        var userMessage = new UserMessage(message,
//                List.of(new Media(MimeTypeUtils.IMAGE_PNG,
//                        new URL("https://docs.spring.io/spring-ai/reference/1.0-SNAPSHOT/_images/multimodal.test.png"))));
//
//        ChatResponse response = chatModel.call(new Prompt(List.of(userMessage),
//                OpenAiChatOptions.builder().withModel(OpenAiApi.ChatModel.GPT_4_O.getValue()).build()));
//        return response.getResult().getOutput().getContent();
//    }

    /**
     * Speech-to-text transcription — kept as a disabled example.
     */
//    @GetMapping("/ai/audioTranscription")
//    private String audioTranscription(){
//        OpenAiAudioTranscriptionOptions transcriptionOptions = OpenAiAudioTranscriptionOptions.builder()
//                .withResponseFormat(OpenAiAudioApi.TranscriptResponseFormat.TEXT)
//                .withTemperature(0f)
//                .build();
//        AudioTranscriptionPrompt transcriptionRequest = new AudioTranscriptionPrompt(audioFile, transcriptionOptions);
//        AudioTranscriptionResponse response = openAiAudioTranscriptionModel.call(transcriptionRequest);
//        return response.getResult().getOutput();
//    }

    /**
     * Tool (function) calling — kept as a disabled example.
     */
//    @GetMapping("/ai/function")
//    public String function(@RequestParam String message) {
////        Prompt prompt = new Prompt(message, OpenAiChatOptions.builder().withFunction("dateService").build());
//        Prompt prompt = new Prompt(message, QianFanChatOptions.builder().withFunctionCallbacks(
//                List.of(FunctionCallbackWrapper.builder(new DateService())
//                        .withName("dateService")
//                        .withDescription("获取指定地点的当前时间").build())
//        ).build());
//        Generation generation = chatModel.call(prompt).getResult();
//        return (generation != null) ? generation.getOutput().getContent() : "";
//    }

    /**
     * Vector storage: loads documents via the document service (which is expected to persist them).
     *
     * @return a single-entry map: {"generation": loaded documents}
     */
    @GetMapping("/ai/vectorStore")
    public Map<String, List<Document>> vectorStore() {
        List<Document> documents = documentService.loadText();
        return Map.of("generation", documents);
    }

    /**
     * Vector similarity search.
     *
     * @param message query text
     * @return documents most similar to the query
     */
    @GetMapping("/ai/documentSearch")
    public List<Document> documentSearch(@RequestParam String message) {
        return documentService.search(message);
    }

    /**
     * Metadata-filtered vector search.
     *
     * @param message  metadata filter input (semantics defined by DocumentService)
     * @param question query text
     * @return matching documents
     */
    @GetMapping("/ai/metadataSearch")
    public List<Document> documentMetadataSearch(@RequestParam String message, @RequestParam String question) {
        return documentService.metadataSearch(message, question);
    }

    /**
     * RAG (retrieval-augmented generation): retrieve relevant documents, inject them into the
     * prompt, then ask the model.
     *
     * @param message user question
     * @return model answer grounded in the retrieved documents, or "" if the model returned no result
     */
    @GetMapping("/ai/customerService")
    public String customerService(@RequestParam String message) {

        // Vector search for context documents
        List<Document> documentList = documentService.search(message);

        // Prompt template: user question followed by the retrieved context
        PromptTemplate promptTemplate = new PromptTemplate("{userMessage}\n\n 用以下信息回答问题:\n {contents}");

        // Assemble the prompt
        Prompt prompt = promptTemplate.create(Map.of("userMessage", message, "contents", documentList));

        // Call the model; guard against a null result (same pattern as the function-calling example)
        var result = chatModel.call(prompt).getResult();
        return (result != null) ? result.getOutput().getContent() : "";
    }

    /**
     * Model evaluation — kept as a disabled example.
     */
//    @GetMapping("/ai/evaluation")
//    public EvaluationResponse evaluation(String message) {
//        //RAG
//        ChatResponse response = chatClient.prompt()
//                .advisors(new QuestionAnswerAdvisor(vectorStore, SearchRequest.defaults()))
//                .user(message)
//                .call()
//                .chatResponse();
//
//        // 评估器
//        var relevancyEvaluator = new RelevancyEvaluator(ChatClient.builder(chatModel));
//        // 评估请求
//        EvaluationRequest evaluationRequest = new EvaluationRequest(message,
//                (List<Content>) response.getMetadata().get(QuestionAnswerAdvisor.RETRIEVED_DOCUMENTS), response);
//        // 评估结果
//        EvaluationResponse evaluationResponse = relevancyEvaluator.evaluate(evaluationRequest);
//        return evaluationResponse;
//    }

    /**
     * Chat with per-user conversation memory.
     *
     * @param message user prompt
     * @param userId  conversation key used to isolate each user's history
     * @return model reply, or "" if the model returned no result
     */
    @GetMapping("/ai/chatMemory")
    public String chatMemory(String message, String userId) {
        ChatResponse response = chatClient.prompt()
                // Keep up to 100 messages of history for this user's conversation
                .advisors(new ResourceChatMemoryAdvisor(chatMemory, userId, 100))
                // Role definition example:
//                .system(sp -> sp.param("voice", "律师"))
                .user(message)
                .call()
                .chatResponse();
        // Guard against a null result (consistent with customerService)
        var result = response.getResult();
        return (result != null) ? result.getOutput().getContent() : "";
    }

}
