// Simulates a streaming chat-API response by emitting canned chunks on a timer.
// Invokes onChunk once per chunk (every 150 ms), then onEnd when the script is
// exhausted. Returns a controller whose cancel() stops the stream.
const mockStreamResponse = (prompt, onChunk, onEnd) => {
    const responses = [
        "好的，正在为您分析。根据您提供的数据，我发现",
        "用户增长趋势在过去三个月呈现出明显的季节性波动。",
        "\n\n主要的增长点来自于",
        "**新推广渠道**的引入，",
        "特别是在第二季度的社交媒体活动。",
        "\n\n然而，",
        "值得注意的是，",
        "老用户的留存率有轻微下降。",
        "\n\n我为您生成了一个简单的表格总结：",
        "\n\n| 月份 | 新用户 | 活跃用户 |\n| --- | --- | --- |\n| 四月 | 1,200 | 8,500 |\n| 五月 | 1,800 | 9,200 |\n| 六月 | 1,500 | 8,900 |",
        "\n\n建议下一步对老用户进行定向回访和激励，以提升用户粘性。"
    ];

    // Work off a local queue so each tick simply dequeues the next piece.
    const queue = [...responses];

    // 150 ms per chunk approximates a typing cadence.
    const timer = setInterval(() => {
        if (queue.length === 0) {
            clearInterval(timer);
            onEnd({ id: 'end', done: true });
            return;
        }
        onChunk({
            id: `chunk-${Date.now()}`,
            done: false,
            content: queue.shift(),
        });
    }, 150);

    return {
        // Stops any further chunk/end callbacks from firing.
        cancel: () => clearInterval(timer)
    };
};


export default {
    /**
     * Mocks fetching a streaming response from a chat API.
     *
     * @param {string} prompt - The user's message.
     * @param {Array} history - The conversation history (logged only; unused by the mock).
     * @param {Object} callbacks - Streaming callbacks, destructured from a single options object.
     * @param {Function} callbacks.onChunk - Invoked with each chunk of the response.
     * @param {Function} callbacks.onEnd - Invoked once when the response is complete.
     * @returns {{cancel: Function}} Controller to cancel the in-flight stream.
     */
    getChatResponse(prompt, history, { onChunk, onEnd }) {
        console.log("Sending prompt to mock API:", prompt);
        console.log("With history:", history);

        // Return a controller to cancel the stream if needed
        return mockStreamResponse(prompt, onChunk, onEnd);
    }
};