const OpenAI = require('openai');
// Per-device cache of OpenAI client instances, keyed by device_id,
// so each device does not re-create a client object on every request.
const device_open_obj = {};
module.exports = {
    // 插件名字
    name: "esp-ai-plugin-llm-deepseek",
    // 插件类型 LLM | TTS | IAT
    type: "LLM", 
    main({ devLog, device_id,is_pre_connect, llm_config, text, llmServerErrorCb, llm_init_messages = [], llm_historys = [], cb, llm_params_set, logWSServer, connectServerBeforeCb, connectServerCb,log }) {
        try {
            const { apiKey,model, ...other_config } = llm_config;
            if (!apiKey) return log.error(`请配给 deepseek 配置 apiKey 参数。`)
            if (!model) return log.error(`请配给 deepseek 配置 model 参数。`)
            // 预先连接函数
            async function preConnect() {
                const params = {
                    model: model,
                    apiKey: apiKey,
                    baseURL: 'https://api.deepseek.com',
                };
                const openai = new OpenAI(llm_params_set ? llm_params_set({ ...params }) : params); 
                await openai.chat.completions.create({ 
                    model: model, 
                    messages: [{ "role": "user", "content": "" }],
                    stream: false,
                }); 
            }
            if (is_pre_connect) {
                preConnect()
                return;
            }
            
            

            // 如果关闭后 message 还没有被关闭，需要定义一个标志控制
            let shouldClose = false;
            // 这个对象是固定写法，每个 TTS 都必须按这个结构定义
            const texts = {
                all_text: "",
                count_text: "",
                index: 0,
            } 
            // 告诉框架要开始连接 LLM 服务了
            connectServerBeforeCb();

            let openai = device_open_obj[device_id];
            if (!device_open_obj[device_id]) {
                connectServerBeforeCb();
                openai = new OpenAI({
                    apiKey: apiKey,
                    baseURL: 'https://api.deepseek.com',
                    model: model,
                });
            }


            async function main() {
                try {
                    const stream = await openai.chat.completions.create({
                        messages: [
                            ...llm_init_messages,
                            ...llm_historys,
                            {
                                "role": "user", "content": text
                            },
                        ],
                        model: model,
                        stream: true,
                        response_format:{
                            'type': 'json_object'
                        }
                    });
                    connectServerCb(true);
                    logWSServer({
                        close: () => {
                            connectServerCb(false);
                            stream.controller.abort()
                            shouldClose = true;  
                        }
                    })
                    for await (const part of stream) {
                        if (shouldClose) break;
                        const chunk_text = part.choices[0]?.delta?.content || '';
                        // console.log('LLM 输出 ：', chunk_text);
                        devLog === 2 && log.llm_info('LLM 输出 ：', chunk_text);
                        texts["count_text"] += chunk_text;
                        cb({ text, texts, chunk_text: chunk_text })
                    }
                    // process.stdout.write('\n');
                    
                    if (shouldClose) return;
                    cb({
                        text,
                        is_over: true,
                        texts,
                        shouldClose, 
                    })
                    connectServerCb(false);
                    // devLog && log.llm_info('\n===\n', httpResponse, '\n===\n')
                    devLog && log.llm_info('===')
                    devLog && log.llm_info(texts["count_text"])
                    devLog && log.llm_info('===')
                    devLog && log.llm_info('LLM connect close!\n')
                } catch (error) {
                    console.log(error);
                    llmServerErrorCb("deepseekLLM 报错: " + error)
                    connectServerCb(false);
                }

            }

            main();

        } catch (err) {
            console.log(err);
            log.error("deepseek LLM 插件错误：", err)
            connectServerCb(false);
        }

    }
}