package com.bruce.modelscope.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.time.Duration;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import jakarta.annotation.Resource;

@Configuration
public class ModelScopeConfig {

    /**
     * Externalized ModelScope settings (API key, base URL, model name, etc.).
     * Injected via the constructor so the dependency is final, mandatory,
     * and easy to supply in tests — preferred over field injection.
     */
    private final ModelScopeProperties modelScopeProperties;

    public ModelScopeConfig(ModelScopeProperties modelScopeProperties) {
        this.modelScopeProperties = modelScopeProperties;
    }

    /**
     * Blocking (non-streaming) chat model backed by ModelScope's
     * OpenAI-compatible endpoint.
     *
     * @return a configured {@link ChatLanguageModel}
     */
    @Bean
    public ChatLanguageModel modelScopeChatModel() {
        return OpenAiChatModel.builder()
               .apiKey(modelScopeProperties.getApiKey())
               .baseUrl(modelScopeProperties.getBaseUrl())
               .modelName(modelScopeProperties.getModelName())
               .temperature(modelScopeProperties.getTemperature())
               .maxTokens(modelScopeProperties.getMaxTokens())
               // NOTE(review): timeOut is interpreted as milliseconds — confirm
               // the property is documented/configured in ms, not seconds.
               .timeout(Duration.ofMillis(modelScopeProperties.getTimeOut()))
               .maxRetries(modelScopeProperties.getMaxRetries())
               .logRequests(modelScopeProperties.getLogRequests())
               .logResponses(modelScopeProperties.getLogResponses())
               .build();
    }

    /**
     * Streaming chat model (token-by-token output) backed by the same
     * ModelScope endpoint and properties as {@code modelScopeChatModel}.
     * The streaming builder does not expose {@code maxRetries}, hence the
     * intentional asymmetry with the blocking model above.
     *
     * @return a configured {@link StreamingChatLanguageModel}
     */
    @Bean
    public StreamingChatLanguageModel modelScopeStreamingChatModel() {
        return OpenAiStreamingChatModel.builder()
               .apiKey(modelScopeProperties.getApiKey())
               .baseUrl(modelScopeProperties.getBaseUrl())
               .modelName(modelScopeProperties.getModelName())
               .temperature(modelScopeProperties.getTemperature())
               .maxTokens(modelScopeProperties.getMaxTokens())
               .timeout(Duration.ofMillis(modelScopeProperties.getTimeOut()))
               .logRequests(modelScopeProperties.getLogRequests())
               .logResponses(modelScopeProperties.getLogResponses())
               .build();
    }

}
