'use client';

import React, {
  useCallback,
  useEffect,
  useRef,
  useState,
} from 'react';

import clsx from 'clsx';
import Recorder from 'js-audio-recorder';

import { CharacterManager } from '@/app/lib/character';
import { Comm } from '@/app/lib/comm';
import {
  AI_THINK_MESSAGE,
  AUDIO_SUPPORT_ALERT,
} from '@/app/lib/constants';
import { LAppWavFileHandler } from '@/app/lib/live2d/lappwavfilehandler';
import {
  ChatMessage,
  ChatRole,
  InteractionMode,
  useChatRecordStore,
  useInteractionModeStore,
  useMuteStore,
  useShowStore,
} from '@/app/lib/store';
import { ConfirmAlert } from '@/app/ui/common/alert';

// Module-level singletons shared by the Chatbot component.
// NOTE(review): module scope means this state is shared by ALL mounted
// Chatbot instances and survives unmount/remount — presumably the component
// is only mounted once; confirm before reusing it elsewhere.
let micRecorder: Recorder | null = null;   // js-audio-recorder capture handle (created lazily on first mic click)
let isRecording: boolean = false;          // true while micRecorder is actively recording
let audioContext: AudioContext | null = null;  // Web Audio context used for waveform analysis
let analyser: AnalyserNode | null = null;  // source of time-domain samples for the waveform canvas
let dataArray: Uint8Array | null = null;   // latest getByteTimeDomainData() samples (128 = silence midpoint)
let animationFrameId: number | null = null;  // handle of the waveform draw loop, for cancellation
let timer: NodeJS.Timeout | null = null;   // playback-status polling interval started by sendAudio

/**
 * Voice/text chat panel driving a Live2D character.
 *
 * Two interaction paths:
 *  - Voice: mic click -> record + waveform monitor -> silence detection ->
 *    ASR -> agent SSE stream -> incremental TTS -> character audio queue.
 *  - Text: input box / canned questions -> agent SSE stream (same pipeline).
 *
 * Props:
 *  - showChatHistory: whether to render the transcript of past messages.
 */
export default function Chatbot(props: { showChatHistory: boolean }) {
    const { show } = useShowStore();
    const { showChatHistory } = props;
    const { chatRecord, addChatRecord, updateLastRecord } = useChatRecordStore();
    const { mute } = useMuteStore();
    const { mode } = useInteractionModeStore();

    const [micRecording, setMicRecording] = useState(false);
    const [micRecordAlert, setMicRecordAlert] = useState(false);
    const [isProcessing, setIsProcessing] = useState(false);
    const [isListening, setIsListening] = useState(false);
    const [hasDetectedAudio, setHasDetectedAudio] = useState(false);

    const inputRef = useRef<HTMLInputElement>(null);
    const chatbotRef = useRef<HTMLDivElement>(null);
    const canvasRef = useRef<HTMLCanvasElement>(null);
    const hasDetectedAudioRef = useRef(false);
    const lastChangeTimeRef = useRef(Date.now());
    const startTimeRef = useRef(Date.now());
    // Active getUserMedia stream. Kept so its tracks can be stopped when
    // listening ends; previously the stream leaked and the mic stayed open.
    const mediaStreamRef = useRef<MediaStream | null>(null);

    const wavFileHandler = LAppWavFileHandler.getInstance();

    useEffect(() => {
        console.log('OtherComponent show value changed:', show);
    }, [show]);

    // Mirror state into a ref so requestAnimationFrame callbacks (which
    // capture a stale closure) always see the current value.
    useEffect(() => {
        hasDetectedAudioRef.current = hasDetectedAudio;
    }, [hasDetectedAudio]);

    /** Requests mic access and starts sampling time-domain data for the waveform. */
    const startListening = useCallback(() => {
        if (!audioContext) {
            audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
        }
        // Non-null local: TS narrowing on the module `let` does not survive
        // into the promise callback below.
        const ctx = audioContext;

        navigator.mediaDevices.getUserMedia({ audio: true })
            .then(stream => {
                mediaStreamRef.current = stream;

                analyser = ctx.createAnalyser();
                analyser.fftSize = 2048;
                dataArray = new Uint8Array(analyser.frequencyBinCount);

                ctx.createMediaStreamSource(stream).connect(analyser);

                setIsListening(true);
                startTimeRef.current = Date.now();
                lastChangeTimeRef.current = Date.now();
                setHasDetectedAudio(false);
                hasDetectedAudioRef.current = false;

                // Continuously refresh dataArray; drawWaveform reads from it.
                const updateData = () => {
                    if (!analyser || !dataArray) return;
                    analyser.getByteTimeDomainData(dataArray);
                    requestAnimationFrame(updateData);
                };
                updateData();
            })
            .catch(err => {
                console.error('访问麦克风时出错:', err);
                setMicRecordAlert(true);
            });
    }, []);

    /** Stops waveform monitoring, releases the microphone, and halts recording. */
    const stopListening = useCallback(() => {
        setIsListening(false);
        if (audioContext) {
            // close() resolves with undefined; only log actual failures.
            audioContext.close().catch(err => { console.log("audioContext error", err); });
            audioContext = null;
        }
        if (animationFrameId) {
            cancelAnimationFrame(animationFrameId);
            animationFrameId = null;
        }
        // Release the mic hardware so the browser capture indicator turns off.
        if (mediaStreamRef.current) {
            mediaStreamRef.current.getTracks().forEach(track => track.stop());
            mediaStreamRef.current = null;
        }
        if (micRecorder) {
            micRecorder.stop();
        }
        isRecording = false;
        setMicRecording(false);
    }, []);

    /** (Re)starts WAV capture and the waveform monitor. No-op without a recorder. */
    const restartRecording = useCallback(() => {
        if (!micRecorder) return;
        micRecorder.start().then(
            () => {
                isRecording = true;
                setMicRecording(true);
                startListening();
            },
            (error) => {
                console.error(error);
                setMicRecordAlert(true);
            }
        );
    }, [startListening]);

    /**
     * Streams the agent's reply for `message` into the chat record via SSE,
     * and (when audio is enabled) sends completed sentence chunks to TTS,
     * pushing the resulting buffers to the character in arrival order.
     */
    const chatWithAI = useCallback((message: string) => {
        addChatRecord({ role: ChatRole.HUMAN, content: message });
        let responseText = "";
        let audioText = "";
        let chunkIndex = 0;
        let audioRecorderIndex = 0;
        // Value null marks "no audio for this chunk" so the ordered flush can skip it.
        const audioRecorderDict = new Map<number, ArrayBuffer | null>();
        addChatRecord({ role: ChatRole.AI, content: AI_THINK_MESSAGE });

        const evtSource = new EventSource(process.env.NEXT_PUBLIC_ADH_SERVER_IP + ":" + process.env.NEXT_PUBLIC_ADH_SERVER_PORT + '/adh/agent/v0/infer?prompt=' + message);

        // Accumulates one streamed fragment: updates the visible transcript and,
        // when speech is on, splits the pending text at the last punctuation
        // mark and ships the finished part to TTS.
        const processing = (index: number, data: string) => {
            responseText += data;

            updateLastRecord({ role: ChatRole.AI, content: responseText });
            if (!mute && mode != InteractionMode.CHATBOT) {
                audioText += data;
                const punc = ["。", ".", "！", "!", "？", "?", "；", ";", "，", ","];
                // Find the LAST punctuation of any kind. (The original broke out
                // of this loop on the first match and could split too early.)
                let lastPuncIndex = -1;
                for (const p of punc) {
                    const at = audioText.lastIndexOf(p);
                    if (at > lastPuncIndex) {
                        lastPuncIndex = at;
                    }
                }
                if (lastPuncIndex !== -1) {
                    const firstPart = audioText.slice(0, lastPuncIndex + 1);
                    audioText = audioText.slice(lastPuncIndex + 1);
                    console.log("tts:", firstPart);
                    Comm.getInstance().tts(firstPart).then(
                        (audio: ArrayBuffer) => {
                            // Record even empty results; otherwise a missing
                            // entry would stall the ordered flush forever.
                            audioRecorderDict.set(index, audio ?? null);
                            // Flush every contiguous ready chunk in order.
                            while (audioRecorderDict.has(audioRecorderIndex)) {
                                const buf = audioRecorderDict.get(audioRecorderIndex);
                                if (buf) {
                                    CharacterManager.getInstance().pushAudioQueue(buf);
                                    wavFileHandler.setPlayStatus(false);
                                }
                                audioRecorderIndex++;
                            }
                        }
                    );
                } else {
                    audioRecorderDict.set(index, null);
                }
            }
        };

        evtSource.addEventListener("open", function () {
            console.log('open successfully');
        });
        // Without this, the browser retries a failed stream indefinitely.
        evtSource.addEventListener("error", () => {
            evtSource.close();
        });
        evtSource.addEventListener('message', event => {
            // Server emits single-quoted pseudo-JSON; normalize before parsing.
            const data = event.data.replace(/'/g, '"');
            console.log(data);
            const json = JSON.parse(data);
            if (json.finish_status === 'true') {
                evtSource.close();
                setTimeout(() => {
                    wavFileHandler.setFinishStatus(true);
                }, 1000);
            }
            processing(chunkIndex, json.text);
            chunkIndex++;
        });
    }, [addChatRecord, updateLastRecord, mute, mode, wavFileHandler]);

    /**
     * Ends the current recording, runs ASR on it, forwards the transcript to
     * the agent, and polls playback status so the mic loop resumes once the
     * character has finished speaking.
     */
    const sendAudio = useCallback(() => {
        console.log('开始发送音频');
        // Block the mic button while the audio round-trip is in flight.
        setIsProcessing(true);
        timer = setInterval(function () {
            console.log("playStatus:", wavFileHandler.getPlayStatus());
            console.log("isRecording:", isRecording);

            if (wavFileHandler.getPlayStatus() && !isRecording) {
                console.log("继续监听，playStatus：", wavFileHandler.getPlayStatus());
                if (timer) {
                    clearInterval(timer);
                    timer = null;
                }
                setIsProcessing(false);
                wavFileHandler.setPlayStatus(false);
                restartRecording();
            }
        }, 1000);
        // Stop the waveform monitor before reading the recorded WAV.
        stopListening();
        if (micRecorder) {
            Comm.getInstance().asr(micRecorder.getWAVBlob()).then(
                (res) => {
                    if (res) {
                        console.log("asr: ", res);
                        chatWithAI(res);
                    } else {
                        // Nothing recognized — go straight back to listening.
                        restartRecording();
                    }
                }
            );
        }
    }, [chatWithAI, restartRecording, stopListening, wavFileHandler]);

    /**
     * Draws one waveform frame and applies the silence rules:
     *  - speech heard, then 2 s of silence  -> stop and send audio to ASR;
     *  - no speech at all for 10 s          -> stop listening and give up.
     */
    const drawWaveform = useCallback(() => {
        if (!canvasRef.current || !isListening) return;

        const canvas = canvasRef.current;
        const ctx = canvas.getContext('2d');
        if (!ctx || !dataArray) return;

        ctx.fillStyle = 'black';
        ctx.fillRect(0, 0, canvas.width, canvas.height);

        ctx.lineWidth = 2;
        ctx.strokeStyle = 'red';
        ctx.beginPath();

        const sliceWidth = canvas.width / dataArray.length;
        let x = 0;

        // 128 is the zero-signal midpoint of getByteTimeDomainData samples;
        // a deviation greater than 10 counts as audible input.
        let hasSignificantAudio = false;
        for (let i = 0; i < dataArray.length; i++) {
            const v = dataArray[i] / 128.0;
            const y = (v * canvas.height) / 2;

            if (i === 0) {
                ctx.moveTo(x, y);
            } else {
                ctx.lineTo(x, y);
            }
            if (Math.abs(dataArray[i] - 128) > 10) {
                hasSignificantAudio = true;
            }
            x += sliceWidth;
        }
        ctx.lineTo(canvas.width, canvas.height / 2);
        ctx.stroke();

        const currentTime = Date.now();
        if (hasSignificantAudio) {
            if (!hasDetectedAudioRef.current) {
                setHasDetectedAudio(true);
                hasDetectedAudioRef.current = true;
            }
            lastChangeTimeRef.current = currentTime;
        } else {
            const silenceDuration = currentTime - lastChangeTimeRef.current;
            const totalDuration = currentTime - startTimeRef.current;

            if (hasDetectedAudioRef.current && silenceDuration > 2000) {
                console.log("检测到音频后，2秒内没有变化，发送音频到tts");
                stopListening();
                sendAudio();
                // Stop the draw loop: the original scheduled another frame here,
                // which could re-trigger sendAudio before state settled.
                return;
            } else if (!hasDetectedAudioRef.current && totalDuration > 10000) {
                console.log("10秒内波形图一直没有变化，停止监听");
                stopListening();
                setIsProcessing(false);
                return;
            }
        }
        animationFrameId = requestAnimationFrame(drawWaveform);
    }, [isListening, sendAudio, stopListening]);

    // Run the draw loop while listening; cancel any pending frame on teardown.
    useEffect(() => {
        if (isListening) {
            drawWaveform();
        }

        return () => {
            if (animationFrameId) {
                cancelAnimationFrame(animationFrameId);
                animationFrameId = null;
            }
        };
    }, [isListening, drawWaveform]);

    /** Toggles microphone recording; creates the recorder lazily on first use. */
    const micClick = useCallback(() => {
        if (isProcessing) return;
        if (micRecorder == null) {
            micRecorder = new Recorder({
                sampleBits: 16,
                sampleRate: 16000,
                numChannels: 1,
            });
        }

        if (!isRecording) {
            restartRecording();
        } else {
            stopListening();
        }
    }, [isProcessing, restartRecording, stopListening]);

    /** Sends the typed message; keeps the UI locked until playback starts. */
    const sendClick = useCallback(() => {
        if (inputRef.current && inputRef.current.value !== "") {
            setIsProcessing(true);
            chatWithAI(inputRef.current.value);
            inputRef.current.value = "";
            const pollTimer = setInterval(function () {
                console.log("playStatus:", wavFileHandler.getPlayStatus());
                if (wavFileHandler.getPlayStatus()) {
                    clearInterval(pollTimer);
                    setIsProcessing(false);
                }
            }, 1000);
        }
    }, [chatWithAI, wavFileHandler]);

    // Click-handler factory for the canned starter questions below.
    const sendClickli = useCallback((val: string) => {
        return () => {
            setIsProcessing(true);
            chatWithAI(val);
            if (inputRef.current) {
                inputRef.current.value = "";
            }
            const pollTimer = setInterval(function () {
                console.log("playStatus:", wavFileHandler.getPlayStatus());
                if (wavFileHandler.getPlayStatus()) {
                    clearInterval(pollTimer);
                    setIsProcessing(false);
                }
            }, 1000);
        };
    }, [chatWithAI, wavFileHandler]);

    // Canned starter questions shown at the top of the chat.
    const items = [
        "径山镇有多少亩水稻？ ",
        "前溪村有稻田捕鱼活动吗？",
        "前往径山的交通方式有哪些？",
        "径山的气候和生态环境有何特点，有哪些独特的植被和动物资源？"
    ];

    /** Submits the input when Enter is pressed. */
    const enterPress = useCallback((e: React.KeyboardEvent<HTMLInputElement>) => {
        if (e.key === "Enter") {
            sendClick();
        }
    }, [sendClick]);

    // Keep the transcript scrolled to the newest message.
    useEffect(() => {
        if (chatbotRef.current) {
            chatbotRef.current.scrollTop = chatbotRef.current.scrollHeight;
        }
    }, [chatRecord]);

    return (
        <div className=" flex flex-col h-full">
            {micRecordAlert ? <ConfirmAlert message={AUDIO_SUPPORT_ALERT}/> : null}
            <div id="messages" ref={chatbotRef}
             className={`flex conheight flex-col space-y-4  overflow-y-auto no-scrollbar z-10 ${show ? '' : 'hide'}`}>
                 <ul className='myqus_ul'>
                         <li className='li1'><img src={"/icons/icon_user.png"} className="iconnew"/> <div className='qustit1'>你可以这样问我：</div></li>
                         {items.map((item, index) => (
                            <li  onClick={sendClickli(item)} key={index} className={clsx("myli",index % 2 === 0 ? 'even' : 'odd')}>
                                <p className='li_p'> {item}</p>
                                <img src={"/images/right.png"} className="right_icon"/>
                            </li>
                        ))}
                    </ul>
                {
                    showChatHistory ?
                        chatRecord.map((chat: ChatMessage, index: number) => (
                            <div className="chat-message p-3" key={index}>
                                <div className={clsx(
                                    "flex items-end",
                                    chat.role == ChatRole.HUMAN ? "" : "justify-end"
                                )}>
                                    <div className={clsx(
                                        "flex flex-col space-y-2 text-xs max-w-xs mx-2",
                                        chat.role == ChatRole.HUMAN ? "order-2 items-start" : "order-1 items-end"
                                    )}>
                                         <div><span
                                            className={clsx(
                                                "wz px-4 py-2 rounded-lg inline-block rounded-bl-none  text-gray-600",
                                                chat.role == ChatRole.HUMAN ? "human_bg" : "ai_bg"
                                            )}>{chat.content}</span>
                                        </div>
                                    </div>
                                    <img
                                        src={chat.role == ChatRole.HUMAN ? "/icons/icon_ai.png" : "/icons/icon_user.png"}
                                        className="iconnew w-6 h-6  order-1"/>
                                </div>
                            </div>
                        ))
                        :
                        <></>
                }
            </div>

            <div className="z-10 mybox">
                {/* waveform visualization canvas */}
                <canvas ref={canvasRef} width="600" height="0"/>
                <div className={`relative flex mydiv ${show ? '' : 'hide'}`}>
                    <div className="absolute inset-y-0 flex items-center">
                        <button type="button" onClick={micClick} disabled={isProcessing} className={clsx(
                            "inline-flex items-center justify-center rounded-full h-12 w-12  focus:outline-none",
                            micRecording ? "text-red-600" : "text-green-600",
                        )}>
                            {
                                micRecording ?
                                    <svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24"
                                         strokeWidth="2" stroke="currentColor" className="size-6">
                                        <path strokeLinecap="round" strokeLinejoin="round"
                                              d="M21 12a9 9 0 1 1-18 0 9 9 0 0 1 18 0Z"/>
                                        <path strokeLinecap="round" strokeLinejoin="round"
                                              d="M9 9.563C9 9.252 9.252 9 9.563 9h4.874c.311 0 .563.252.563.563v4.874c0 .311-.252.563-.563.563H9.564A.562.562 0 0 1 9 14.437V9.564Z"/>
                                    </svg>
                                    :
                                    <img src="/images/ly.png" className="ly" />
                            }
                        </button>
                    </div>
                    <input  enterKeyHint="send" type="text" disabled={isProcessing} placeholder="AI径山百晓生"
                           ref={inputRef} onKeyDown={enterPress}
                           className="myinput w-full focus:outline-none  pl-12  rounded-md py-3"/>
                    <div className="absolute right-0 items-center inset-y-0 flex">
                        <button type="button" onClick={sendClick} disabled={isProcessing}
                                className="mybtn inline-flex items-center justify-center ">
                            {
                                isProcessing ?
                                    <img  className='sendimg' src="/icons/send_no.png" alt="" />
                                    :
                                    <img  className='sendimg' src="/icons/send_ok.png" alt="" />
                            }
                        </button>
                    </div>
                </div>
            </div>
        </div>
    );
}