// src/services/chatService.js
import apiClient from './api';

export const chatService = {
    /**
     * Send a message to the LLM and consume the streaming (SSE-style) reply.
     *
     * The endpoint emits newline-delimited `data: {...}` events of two types:
     *   - `thinking`        — incremental "reasoning" text
     *   - `partialResponse` — incremental answer text
     *
     * Fix over the previous version: network chunks are not line-aligned, so a
     * `data:` line may be split across two reads. Incomplete tails are now
     * buffered between reads instead of being dropped as JSON parse errors.
     *
     * @param {string}   message      - User message to send.
     * @param {Function} [onData]     - Invoked as thinking text accumulates:
     *                                  (thinkText, type, done, aiMsgId).
     * @param {Function} [onComplete] - Invoked as answer text accumulates:
     *                                  (accumulatedText, done, aiMsgId, thinkText, thinkingSeconds).
     * @param {*}        aiMsgId      - Caller-supplied id echoed back to both callbacks.
     * @param {Function} [onError]    - Invoked once if the request or stream fails.
     */
    sendMessageStream: async (message, onData, onComplete, aiMsgId, onError) => {
        try {
            const response = await fetch(`${import.meta.env.VITE_BASE_URL}/chatWithThink`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    'Authorization': `Bearer ${localStorage.getItem('authToken') || ''}`
                },
                body: JSON.stringify({ message })
            });

            if (!response.ok) {
                throw new Error(`HTTP error! status: ${response.status}`);
            }

            const reader = response.body.getReader();
            const decoder = new TextDecoder();

            let accumulatedText = '';
            let thinkText = '';
            let currentThinkingStart = Date.now();
            let finalThinkingTime = Date.now();
            // Holds the unfinished tail of the last chunk until the next read
            // completes the line.
            let buffer = '';

            // Parse one complete `data: {...}` line and fan it out to callbacks.
            const handleLine = (line, done) => {
                if (!line.startsWith('data:')) {
                    return;
                }
                const payload = line.substring(5).trim();
                if (!payload) {
                    return;
                }
                try {
                    const data = JSON.parse(payload);
                    if (data.type === 'thinking') {
                        // First thinking token starts the "thinking time" clock.
                        if (!thinkText) {
                            currentThinkingStart = Date.now();
                        }
                        thinkText += data.content;
                        onData && onData(thinkText, data.type, done, aiMsgId);
                    } else if (data.type === 'partialResponse') {
                        // First answer token stops the "thinking time" clock.
                        if (!accumulatedText) {
                            finalThinkingTime = Date.now();
                        }
                        accumulatedText += data.content;
                        onComplete && onComplete(
                            accumulatedText,
                            done,
                            aiMsgId,
                            thinkText,
                            (finalThinkingTime - currentThinkingStart) / 1000
                        );
                    }
                } catch (e) {
                    console.warn('解析流数据失败:', e, line);
                }
            };

            while (true) {
                const { done, value } = await reader.read();
                if (done) {
                    // Flush any bytes still held in the decoder's internal state,
                    // then process a final line that had no trailing newline.
                    buffer += decoder.decode();
                    if (buffer) {
                        handleLine(buffer, true);
                    }
                    break;
                }
                buffer += decoder.decode(value, { stream: true });
                const lines = buffer.split('\n');
                // Last element is '' when the chunk ended exactly on a newline,
                // otherwise it is an incomplete line — keep it for the next read.
                buffer = lines.pop();
                for (const line of lines) {
                    handleLine(line, false);
                }
            }
        } catch (error) {
            console.error('流式请求失败:', error);
            onError && onError(error);
        }
    },

    /**
     * Plain (non-streaming) chat request, kept as a fallback.
     *
     * @param {{content: string, conversationId: *}} message - Message to send.
     * @returns {Promise<*>} Payload returned by the `/chat` endpoint.
     * @throws Re-throws any request failure after logging it.
     */
    sendMessage: async (message) => {
        try {
            const response = await apiClient.post('/chat', {
                message: message.content,
                conversation_id: message.conversationId,
            });

            return response.data;
        } catch (error) {
            console.error('发送消息失败:', error);
            throw error;
        }
    },

};
export default chatService;