package com.liuqi.openai.model;

import com.liuqi.openai.core.client.Executor;
import com.liuqi.openai.core.client.OpenAiRemoteException;
import com.liuqi.openai.core.client.StreamHandler;
import com.liuqi.openai.core.completion.CompletionChoice;
import com.liuqi.openai.core.completion.CompletionRequest;
import com.liuqi.openai.core.completion.CompletionResponse;
import com.liuqi.openai.core.shared.StreamOptions;
import com.liuqi.openai.model.common.AbstractOpenAiChatModel;
import com.liuqi.openai.model.completion.CompletionModel;
import com.liuqi.openai.model.completion.StreamingCompletionModel;
import com.liuqi.openai.model.handler.StreamingResponseHandler;
import com.liuqi.openai.model.output.Response;
import com.liuqi.openai.util.StringUtil;
import java.util.List;

/**
 * Text-generation (completions) AI model invocation.
 *
 * @author liuqi
 * @date 2025/7/20
 **/
public class OpenAiCompletionModel extends AbstractOpenAiChatModel implements CompletionModel, StreamingCompletionModel {

    private final String suffix;

    OpenAiCompletionModel(OpenAiCompletionModelBuilder builder) {
        super(builder);
        this.suffix = builder.suffix;
    }

    public static OpenAiCompletionModelBuilder builder() {
        return new OpenAiCompletionModelBuilder();
    }

    /**
     * 使用 builder 进行构建
     */
    public static class OpenAiCompletionModelBuilder
            extends AbstractOpenAiChatModelBuilder<OpenAiCompletionModel, OpenAiCompletionModelBuilder> {
        String suffix;
        public OpenAiCompletionModelBuilder suffix(String suffix) {
            this.suffix = suffix;
            return this;
        }
        @Override
        public OpenAiCompletionModel build() {
            return new OpenAiCompletionModel(this);
        }
    }

    @Override
    public Response<String> generate(String prompt) throws OpenAiModelException {
        return generate0(prompt, null);
    }

    @Override
    public Response<String> generate(String prompt, StreamingResponseHandler handler) throws OpenAiModelException {
        if (handler == null) {
            throw new OpenAiModelException("streaming response: handler cannot be null");
        }
        return generate0(prompt, handler);
    }

    private Response<String> generate0(String prompt, StreamingResponseHandler handler) throws OpenAiModelException {
        try {
            // 是否流式响应
            boolean stream = handler != null;

            // 构建请求报文
            CompletionRequest request = request(prompt, stream);

            // 创建执行器
            Executor<CompletionResponse> executor = openAiClient.completion(request);

            // 流式响应
            if (stream) {
                // 注入一个流式处理器
                executor.injectStreamHandler(new StreamHandler<CompletionResponse>() {
                    @Override
                    public void onResponse(CompletionResponse resp) {
                        List<CompletionChoice> choices = resp.getChoices();
                        if (choices != null && !choices.isEmpty()) {
                            String text = choices.get(0).getText();
                            if (StringUtil.isNotEmpty(text)) {
                                handler.onNext(text);
                            }
                        }
                    }

                    @Override
                    public void onComplete() {
                        handler.onComplete();
                    }

                    @Override
                    public void onError(Throwable t) {
                        handler.onError(t);
                    }
                });
            }

            // 发起请求
            CompletionResponse response = executor.execute();

            // choices 响应
            CompletionChoice completionChoice = response.getChoices().get(0);

            // 构建响应
            return Response.from(
                    completionChoice.getText(),
                    tokenUsageFrom(response.getUsage()),
                    finishReasonFrom(completionChoice.getFinishReason())
            );
        } catch (OpenAiRemoteException e) {
            throw new OpenAiModelException("OpenAiCompletionModel Error.", e);
        }
    }

    private CompletionRequest request(String prompt, boolean stream) {
        return CompletionRequest.builder()
                .model(modelName)
                .prompt(prompt)
                .suffix(suffix)
                .maxTokens(maxTokens)
                .temperature(temperature)
                .topP(topP)
                .n(n)
                .stream(stream)
                .streamOptions(stream ? StreamOptions.builder().includeUsage(true).build() : null)
                .stop(stop)
                .seed(seed)
                .presencePenalty(presencePenalty)
                .frequencyPenalty(frequencyPenalty)
                .build();
    }

}
