package com.xs.langchain4j_springboot.controller;

import com.xs.langchain4j_springboot.config.AiConfig;
import dev.langchain4j.community.model.dashscope.QwenChatModel;
import dev.langchain4j.community.model.dashscope.QwenEmbeddingModel;
import dev.langchain4j.community.model.dashscope.QwenStreamingChatModel;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.TokenStream;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.redis.RedisEmbeddingStore;
import jakarta.annotation.PostConstruct;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.MediaType;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import reactor.core.publisher.Flux;



import java.net.http.HttpClient;
import java.time.LocalDate;

@RestController
@RequestMapping("/ai")
public class ChatController {

    /** SSE media type; browsers' EventSource requires exactly "text/event-stream". */
    private static final String SSE_UTF8 = "text/event-stream;charset=UTF-8";

    @Autowired
    @Qualifier("ollamaStreamingChatModel")
    private StreamingChatLanguageModel qwen3Stream;

    @Autowired
    @Qualifier("ollamaChatModel")
    private ChatLanguageModel qwen3;

    @Autowired
    private EmbeddingModel embeddingModel;

    // Parameterized with TextSegment instead of the raw type (langchain4j stores segments).
    @Autowired
    private EmbeddingStore<TextSegment> embeddingStore;

    /** Assistant with memory, RAG and tools (see AiConfig). */
    @Autowired
    AiConfig.Assistant assistant;

    /** Assistant variant keyed by a per-user memory id (see AiConfig). */
    @Autowired
    AiConfig.AssistantUnique assistantUnique;

    /**
     * Blocking (non-streaming) chat: sends the message to the chat model and
     * returns the full answer in one response.
     *
     * @param message user prompt (defaults to "你是谁")
     * @return the model's complete reply
     */
    @CrossOrigin(origins = "*")
    @GetMapping("/chat")
    public String test(@RequestParam(defaultValue = "你是谁") String message) {
        return qwen3.chat(message);
    }

    /**
     * Plain streaming chat: forwards partial tokens from the streaming model
     * to the client as Server-Sent Events.
     *
     * @param message user prompt (defaults to "你是谁")
     * @return a Flux emitting partial response tokens
     */
    @CrossOrigin(origins = "*")
    @GetMapping(value = "/stream", produces = SSE_UTF8)
    public Flux<String> stream(@RequestParam(defaultValue = "你是谁") String message) {
        return Flux.create(sink ->
                qwen3Stream.chat(message, new StreamingChatResponseHandler() {
                    @Override
                    public void onPartialResponse(String partialResponse) {
                        sink.next(partialResponse);
                    }

                    @Override
                    public void onCompleteResponse(ChatResponse completeResponse) {
                        sink.complete();
                    }

                    @Override
                    public void onError(Throwable error) {
                        sink.error(error);
                    }
                }));
    }

    /**
     * Streaming chat with memory, RAG and tools; the current date is passed as
     * a second prompt parameter.
     *
     * @param message  user prompt (defaults to "我是谁")
     * @param response unused; kept for signature compatibility with existing callers
     * @return a Flux emitting partial response tokens as SSE
     */
    @CrossOrigin(origins = "*")
    @GetMapping(value = "/memory_stream_chat", produces = SSE_UTF8)
    public Flux<String> memoryStreamChat(@RequestParam(defaultValue = "我是谁") String message,
                                         HttpServletResponse response) {
        return toFlux(assistant.streamWithTwoParams(message, LocalDate.now().toString()));
    }

    /**
     * Streaming chat with conversation memory.
     * Note: the original declared {@code produces = "text/stream"}, which is not a
     * valid MIME type — SSE clients require {@code text/event-stream}.
     *
     * @param message user prompt (defaults to "我是谁")
     * @return a Flux emitting partial response tokens as SSE
     */
    @CrossOrigin(origins = "*")
    @GetMapping(value = "/memory_chat", produces = SSE_UTF8)
    public Flux<String> memoryStreamChat(@RequestParam(defaultValue = "我是谁") String message) {
        return toFlux(assistant.stream(message));
    }

    /**
     * Blocking chat with conversation memory.
     *
     * @param message user prompt (defaults to "我是谁")
     * @return the model's complete reply
     */
    @CrossOrigin(origins = "*")
    @GetMapping(value = "/memory_chat2")
    public String memoryStreamChat2(@RequestParam(defaultValue = "我是谁") String message) {
        String chat = assistant.chat(message);
        System.out.println(chat);
        return chat;
    }

    /**
     * Streaming chat with a per-user memory, so different users get separate
     * conversation histories.
     *
     * @param message user prompt (defaults to "我是谁")
     * @param userId  memory key distinguishing users; optional — null if absent
     *                (same binding behavior as the original implicit param)
     * @return a Flux emitting partial response tokens as SSE
     */
    @CrossOrigin(origins = "*")
    @GetMapping(value = "/memoryId_chat", produces = SSE_UTF8)
    public Flux<String> memoryChat(@RequestParam(defaultValue = "我是谁") String message,
                                   @RequestParam(required = false) Integer userId) {
        return toFlux(assistantUnique.stream(userId, message));
    }

    /**
     * Bridges a langchain4j {@link TokenStream} into a Reactor {@link Flux}:
     * partial tokens become {@code next} signals, completion completes the sink,
     * and errors are propagated. Shared by all TokenStream-based endpoints.
     */
    private static Flux<String> toFlux(TokenStream stream) {
        return Flux.create(sink -> stream
                .onPartialResponse(sink::next)
                .onCompleteResponse(ignored -> sink.complete())
                .onError(sink::error)
                .start());
    }

}
