package top.hyperplasma.kinare.service.impl;

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import top.hyperplasma.kinare.domain.dto.ChatRequest;
import top.hyperplasma.kinare.domain.dto.ChatResponse;
import top.hyperplasma.kinare.service.OpenAIService;

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * {@link OpenAIService} implementation backed by langchain4j's
 * {@link OpenAiChatModel}, pointed at the endpoint configured via
 * {@code openai.base-url} and authenticated with {@code openai.api-key}.
 *
 * <p>Thread-safety: this is a singleton Spring bean; model clients are
 * cached in a {@link ConcurrentMap}, so concurrent requests are safe.
 */
@Slf4j
@Service
public class OpenAIServiceImpl implements OpenAIService {

    @Value("${openai.api-key}")
    private String apiKey;

    @Value("${openai.base-url}")
    private String baseUrl;

    // One client per model name. The builder-produced client is reusable,
    // so rebuilding it on every request (as the original code did) is
    // wasted work; computeIfAbsent gives us a thread-safe lazy cache.
    private final ConcurrentMap<String, ChatLanguageModel> modelCache = new ConcurrentHashMap<>();

    /**
     * Returns a chat model bound to the given model name, creating and
     * caching it on first use.
     *
     * @param modelName OpenAI model identifier from the incoming request
     * @return a configured {@link ChatLanguageModel} for {@code modelName}
     */
    private ChatLanguageModel createChatModel(String modelName) {
        return modelCache.computeIfAbsent(modelName, name -> OpenAiChatModel.builder()
                .modelName(name)
                .apiKey(apiKey)
                .baseUrl(baseUrl)
                .logRequests(true)
                .logResponses(true)
                .build());
    }

    /**
     * Sends the request's prompt to the configured model and wraps the
     * completion in a {@link ChatResponse} with status {@code "success"}
     * and the current epoch-millis timestamp.
     *
     * @param request carries the prompt text and the model name to use
     * @return the model's reply wrapped in a {@link ChatResponse}
     * @throws RuntimeException if the underlying model call fails; the
     *         original exception is preserved as the cause
     */
    @Override
    public ChatResponse generateResponse(ChatRequest request) {
        log.debug("Generating response for prompt: {}", request.getPrompt());

        try {
            ChatLanguageModel model = createChatModel(request.getModelName());
            String response = model.generate(request.getPrompt());

            return ChatResponse.builder()
                    .message(response)
                    .model(request.getModelName())
                    .timestamp(System.currentTimeMillis())
                    .status("success")
                    .build();
        } catch (Exception e) {
            log.error("Error generating response", e);
            // Chain the cause instead of flattening it to a message string,
            // so callers/log aggregators keep the full stack trace.
            throw new RuntimeException("Failed to generate response: " + e.getMessage(), e);
        }
    }
}