package com.heiye.blog.ai.strategy.text.impl;

import com.heiye.blog.ai.advisor.DashScopeAdvisor;
import com.heiye.blog.ai.enums.TextModelTypeEnum;
import com.heiye.blog.ai.helper.DashScopeAdvisorHelper;
import com.heiye.blog.ai.model.dashscope.DashScopeChatClient;
import com.heiye.blog.ai.model.dashscope.DashScopeChatOptions;
import com.heiye.blog.ai.model.dto.AIChatRequest;
import com.heiye.blog.ai.model.vo.AIResponse;
import com.heiye.blog.ai.model.vo.AiChatReqVO;
import com.heiye.blog.ai.strategy.text.TextModelStrategy;
import jakarta.annotation.Resource;
import lombok.SneakyThrows;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import reactor.core.publisher.Flux;

import java.util.List;
import java.util.Objects;

/**
 * Text-model strategy that calls the DeepSeek-V3 model hosted on Alibaba
 * DashScope (Bailian). DeepSeek-V3 is a non-reasoning model, so responses
 * are consumed directly without a "thinking" phase.
 *
 * @author: heiye
 * @date: 2025/11/09 下午7:22
 * @version: v1.0.0
 * @description: 基于百炼模型调用 DeepSeek-V3 模型
 */
@Component
public class DashScopeDeepSeekV3Strategy implements TextModelStrategy {

    /** DashScope API key, injected from application configuration. */
    @Value("${spring.ai.dashscope.api-key}")
    private String apiKey;

    /** Builds request-scoped advisors attached to each chat client. */
    @Resource
    private DashScopeAdvisorHelper dashScopeAdvisorHelper;

    /**
     * Identifies the model this strategy handles, used for strategy dispatch.
     *
     * @return {@link TextModelTypeEnum#DASH_SCOPE_DEEPSEEK_V3}
     */
    @Override
    public TextModelTypeEnum getTextModelType() {
        return TextModelTypeEnum.DASH_SCOPE_DEEPSEEK_V3;
    }

    /**
     * Streams a chat completion from the DashScope-hosted DeepSeek-V3 model.
     *
     * @param aiChatRequest the chat request carrying the user message and
     *                      optional flags (network search, incremental output,
     *                      temperature); normalized in place before use
     * @return a reactive stream of {@link AIResponse} chunks; chunks whose
     *         extracted text is {@code null} are dropped by {@code mapNotNull}
     */
    @Override
    @SneakyThrows
    public Flux<AIResponse> handleStreamResponse(AIChatRequest aiChatRequest) {
        // Normalize the request for this model (disables deep thinking).
        reviewAndOptimizeRequest(aiChatRequest);

        // FIX: was Float.parseFloat(temperature.toString()) — converting
        // Double -> Float via String round-trip; floatValue() is direct.
        Double temperature = aiChatRequest.getTemperature();
        Float temperatureAsFloat = Objects.nonNull(temperature) ? temperature.floatValue() : null;

        // Build the chat client for this single request.
        DashScopeChatClient dashScopeChatClient = DashScopeChatClient.builder()
                .options(DashScopeChatOptions.builder()
                        .apiKey(apiKey)
                        .networkSearch(aiChatRequest.getNetworkSearch())
                        .model(getTextModelType().getType())
                        .incrementalOutput(aiChatRequest.getIncrementalOutput())
                        .temperature(temperatureAsFloat)
                        .build())
                .userMessage(aiChatRequest.getUserMessage())
                .build();

        // Attach advisors derived from the request (e.g. memory/context).
        List<DashScopeAdvisor> dashScopeAdvisors = dashScopeAdvisorHelper.buildDashScopeAdvisors(aiChatRequest);
        dashScopeChatClient.advisors(dashScopeAdvisors);

        return dashScopeChatClient.stream()
                .mapNotNull(generationResult -> {
                    // Non-reasoning model: read the answer text directly from
                    // the first choice, no thinking segment to strip.
                    String text = generationResult.getOutput().getChoices().get(0).getMessage().getContent();
                    return AIResponse.builder().v(text).build();
                });
    }

    /**
     * Normalizes the incoming request for this model before dispatch.
     *
     * @param aiChatRequest the request to adjust in place
     */
    @Override
    public void reviewAndOptimizeRequest(AIChatRequest aiChatRequest) {
        // DeepSeek-V3 does not support deep-thinking mode — always disable.
        aiChatRequest.setThinkEnable(false);
        // Intentionally no default temperature: this is V3 (not V3.2), so the
        // official recommended conversation temperature does not apply; a null
        // temperature lets the service use its own default.
    }
}
