<template>
    <!-- Full-screen chat page: glass card with header, scrollable body, footer -->
    <div class="ai-voice-chat">
        <div class="glass-card">
            <div class="chat-header">AI 语音助手</div>
            <div class="chat-body">
                <!-- Quick-start suggestions; clicking one fills the input and sends it -->
                <div class="suggestions-card">
                    <div class="suggestions-list">
                    <span class="suggestions-title">猜你想了解</span>
                        <p class="suggestion-item" @click="inputValue='项目概述';sendMessage()">项目概述</p>
                        <p class="suggestion-item" @click="inputValue='项目总览';sendMessage()">项目总览</p>
                        <p class="suggestion-item" @click="inputValue='项目规划';sendMessage()">项目规划</p>
                    </div>
                </div>
                <!-- Chat transcript: bubbles aligned left (AI) or right (user) by role -->
                <div v-for="(msg, idx) in messages" :key="idx"
                    :class="['msg-row', msg.role === 'user' ? 'msg-row-user' : 'msg-row-ai']">
                    <!-- NOTE(review): v-html renders markdown-derived HTML unsanitized;
                         sanitize before rendering if message content can ever be untrusted -->
                    <div :class="['msg-bubble', msg.role === 'user' ? 'user-bubble' : 'ai-bubble']"
                        v-html="renderMarkdown(msg.content)"></div>
                </div>
                
            </div>
            <!-- Footer: toggles between text input and push-to-talk voice input -->
            <div class="chat-footer">
                <el-button @click="textOrVoice = !textOrVoice">按钮</el-button>
                <div class="text-input" v-if="textOrVoice">
                    <el-input class="chat-input" placeholder="请输入内容..." v-model="inputValue"
                        @keyup.enter="sendMessage" />
                    <el-button type="primary" size="large" @click="sendMessage">发送</el-button>
                </div>
                <!-- Hold-to-record button; handles both touch and mouse gestures -->
                <div v-if="!textOrVoice" class="voice-input" style="flex:1;text-align:center;">
                    <div class="voice-btn" @touchstart="onVoiceTouchStart" @touchmove="onVoiceTouchMove"
                        @touchend="onVoiceTouchEnd" @touchcancel="onVoiceTouchCancel" @mousedown="onVoiceMouseDown"
                        @mousemove="onVoiceMouseMove" @mouseup="onVoiceMouseUp" @mouseleave="onVoiceMouseLeave"
                        style="width:100%;height:44px;line-height:44px;background:#43cea2;color:#fff;border-radius:22px;user-select:none;">
                        {{ voiceTip }}
                    </div>
                </div>
            </div>
        </div>
    </div>
</template>
<script setup>
import { ElMessage } from 'element-plus'
import { WaveFile } from 'wavefile';
import { ref } from 'vue'
import { ASR, chatWithAI } from '../service/aiVoiceChat'
import { marked } from 'marked'
import { el } from 'element-plus/es/locales.mjs';
// UI mode: true = text input, false = push-to-talk voice input.
const textOrVoice = ref(true)
// Hint text shown on the voice button (idle / recording / error states).
const voiceTip = ref('按住说话，松开发送')
// Pointer Y coordinate captured when a press starts (for swipe-up detection).
let startY = 0
// Whether a press-and-hold recording gesture is currently in progress.
let isRecording = false
// Set when the user has swiped up past the threshold; passed to stopRecognition().
let isSend = false
// Active MediaRecorder instance while recording (set in onVoiceMouseDown).
let recognition = null

// Touch entry point for push-to-talk; delegates to onVoiceMouseDown.
// Bug fix: a TouchEvent has no clientY — previously `startY` became
// undefined and swipe-up detection never fired. Forward the first touch
// point's Y so the shared handler records a valid start position.
function onVoiceTouchStart(e) {
    onVoiceMouseDown({ clientY: e.touches[0].clientY })
}
// Track finger movement while recording: swiping up more than 60px arms
// "immediate send" mode and updates the hint text to match.
function onVoiceTouchMove(e) {
    if (!isRecording) return
    const currentY = e.touches[0].clientY
    const swipedUp = startY - currentY > 60
    isSend = swipedUp
    voiceTip.value = swipedUp ? '松开立即发送语音' : '松开发送语音'
}
// Finger lifted: stop the recorder (honouring the swipe-up flag) and reset the UI.
function onVoiceTouchEnd() {
    if (!isRecording) return
    stopRecognition(isSend)
    isRecording = false
    voiceTip.value = '按住说话，向上滑动发送'
}
// Gesture interrupted by the system: stop recording without sending.
function onVoiceTouchCancel() {
    if (!isRecording) return
    stopRecognition(false)
    isRecording = false
    voiceTip.value = '按住说话，向上滑动发送'
}

// Press handler for push-to-talk (mouse on PC; touch delegates here too).
// Records microphone audio with MediaRecorder; when the recording stops it
// decodes the compressed audio, resamples it to 16 kHz mono PCM, wraps it
// in a WAV container and hands the blob to sendAudioMessage() for ASR.
function onVoiceMouseDown(e) {
    // Remember where the press started so the move handlers can detect a swipe-up.
    startY = e.clientY
    isRecording = true
    isSend = false
    voiceTip.value = '松开发送语音'

    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices.getUserMedia({ audio: true })
            .then(stream => {
                const mediaRecorder = new MediaRecorder(stream);
                const audioChunks = [];

                mediaRecorder.ondataavailable = (event) => {
                    audioChunks.push(event.data);
                };

                mediaRecorder.onstop = async () => {
                    // 1. Assemble the raw recording (WebM/Opus).
                    const audioBlob = new Blob(audioChunks, { type: 'audio/webm' });
                    try {
                        // 2. Decode the compressed audio into float samples.
                        const arrayBuffer = await audioBlob.arrayBuffer();
                        const audioContext = new (window.AudioContext || window.webkitAudioContext)();
                        const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);

                        // 3. Resample to 16 kHz (the rate the ASR service expects).
                        const targetSampleRate = 16000;
                        const resampledBuffer = await resampleAudio(audioBuffer, targetSampleRate);

                        // 4. Convert float samples to 16-bit PCM.
                        //    Bug fix: clamp to [-1, 1] first — decoded samples can
                        //    overshoot slightly and would overflow Int16 otherwise.
                        const pcmData = resampledBuffer.getChannelData(0); // mono
                        const int16Data = new Int16Array(pcmData.length);
                        for (let i = 0; i < pcmData.length; i++) {
                            const s = Math.max(-1, Math.min(1, pcmData[i]));
                            int16Data[i] = s < 0 ? s * 32768 : s * 32767;
                        }

                        // 5. Wrap the PCM data in a WAV container at 16 kHz.
                        const wav = new WaveFile();
                        wav.fromScratch(
                            1,                 // mono
                            targetSampleRate,  // 16 kHz sample rate
                            '16',              // bit depth
                            [int16Data]        // audio data
                        );

                        // 6. Ship the finished WAV off for speech recognition.
                        const wavBlob = new Blob([wav.toBuffer()], { type: 'audio/wav' });
                        sendAudioMessage(wavBlob);
                        // Bug fix: do NOT call mediaRecorder.stop() here — the
                        // recorder is already inactive when onstop fires, and the
                        // resulting InvalidStateError was caught below, skipping
                        // the microphone-release cleanup entirely.
                    } catch (error) {
                        console.error('音频处理失败:', error);
                    } finally {
                        // Always release the microphone, even when decoding fails.
                        stream.getTracks().forEach(track => track.stop());
                    }
                };

                // Start recording; keep a handle so stopRecognition() can end it.
                mediaRecorder.start();
                recognition = mediaRecorder;
            })
            .catch(error => {
                console.error('录音权限获取失败:', error);
                voiceTip.value = '无法获取麦克风权限';
            });
    } else {
        console.error('浏览器不支持录音功能');
        voiceTip.value = '设备不支持录音';
    }

    window.addEventListener('mousemove', onVoiceMouseMove)
    window.addEventListener('mouseup', onVoiceMouseUp)
}
/**
 * Resample an AudioBuffer to the given sample rate by rendering it through
 * an OfflineAudioContext configured at the target rate. The channel count
 * of the input buffer is preserved.
 */
async function resampleAudio(audioBuffer, targetSampleRate) {
    const frameCount = Math.ceil(audioBuffer.duration * targetSampleRate)
    const ctx = new OfflineAudioContext(
        audioBuffer.numberOfChannels,
        frameCount,
        targetSampleRate
    )

    const src = ctx.createBufferSource()
    src.buffer = audioBuffer
    src.connect(ctx.destination)
    src.start()

    // startRendering resolves with the resampled AudioBuffer.
    return ctx.startRendering()
}
// Mouse analogue of onVoiceTouchMove: dragging up past 60px arms
// immediate-send mode and swaps the hint text.
function onVoiceMouseMove(e) {
    if (!isRecording) return
    const draggedUp = startY - e.clientY > 60
    isSend = draggedUp
    voiceTip.value = draggedUp ? '松开立即发送语音' : '松开发送语音'
}
// Mouse released: finish the recording and detach the window-level
// listeners that onVoiceMouseDown installed.
function onVoiceMouseUp() {
    if (!isRecording) return
    stopRecognition(isSend)
    isRecording = false
    voiceTip.value = '按住说话，向上滑动发送'
    window.removeEventListener('mousemove', onVoiceMouseMove)
    window.removeEventListener('mouseup', onVoiceMouseUp)
}
// Pointer left the voice button mid-press: treat it as a cancel and
// detach the window-level listeners.
function onVoiceMouseLeave() {
    if (!isRecording) return
    stopRecognition(false)
    isRecording = false
    voiceTip.value = '按住说话，向上滑动发送'
    window.removeEventListener('mousemove', onVoiceMouseMove)
    window.removeEventListener('mouseup', onVoiceMouseUp)
}
// Speech recognition (unused stub).
// NOTE(review): recording is actually started in onVoiceMouseDown via
// MediaRecorder — confirm this placeholder can be removed.
function startRecognition() {

}
// Stop the active MediaRecorder (which triggers its onstop pipeline).
// When `send` is false the pending input text is discarded.
// NOTE(review): the recorder's onstop handler still calls sendAudioMessage
// afterwards, so a "cancelled" recording may still be transcribed and sent —
// confirm whether that is intended.
function stopRecognition(send) {
    if (!recognition) return
    recognition.stop()
    if (!send) inputValue.value = ''
}
// Current contents of the text input (also receives ASR transcriptions).
const inputValue = ref('')
// Chat transcript rendered by the template; roles are 'user' and 'ai'.
const messages = ref([
    { role: 'ai', content: '你好，我是年年2.0turbo，有什么可以帮您？' },
])

// Convert a markdown message body to HTML for v-html rendering.
// NOTE(review): the output is injected unsanitized via v-html — if message
// content can ever be untrusted, sanitize (e.g. DOMPurify) before rendering.
function renderMarkdown(content) {
    return marked(content)
}

/**
 * Send the current input text to the AI and stream the reply into the chat.
 * Pushes the user message plus a placeholder AI bubble, then appends each
 * streamed chunk to that bubble while keeping the view scrolled to the bottom.
 * On failure the placeholder is replaced with an error message.
 */
async function sendMessage() {
    const content = inputValue.value.trim()
    if (!content) return
    messages.value.push({ role: 'user', content })
    inputValue.value = ''
    // Placeholder bubble that the streamed reply will overwrite.
    messages.value.push({ role: 'ai', content: 'AI正在思考中...' })
    const aiIndex = messages.value.length - 1
    try {
        messages.value[aiIndex].content = ''
        await chatWithAI(content, (msg) => {
            messages.value[aiIndex].content += msg
            // Bug fix: guard against the container being absent (e.g. voice
            // mode active or component unmounted mid-stream) — the previous
            // unconditional access threw inside the streaming callback.
            const chatBody = document.querySelector('.chat-body');
            if (chatBody) chatBody.scrollTop = chatBody.scrollHeight;
        })
    } catch (e) {
        console.error(e)
        messages.value[aiIndex].content = 'AI回复失败，请稍后重试。'
    }
}
// Transcribe a recorded audio blob via ASR and, on success, submit the text
// as a chat message. Shows a warning toast when recognition fails or yields
// an empty result.
let sendAudioMessage = async (audioBlob) => {
    try {
        const text = await ASR(audioBlob);
        console.log('识别结果:', text);
        if (text.length == 0) {
            ElMessage({
                message: '语音识别失败',
                type: 'warning',
            });
        } else {
            // Feed the transcription through the normal send path.
            inputValue.value = text
            sendMessage()
        }
    } catch (error) {
        console.error('语音识别失败:', error);
        ElMessage({
            message: '语音识别失败',
            type: 'warning',
        });
        // Bug fix: do not rethrow — the only caller (MediaRecorder onstop)
        // invokes this fire-and-forget, so the rethrow surfaced as an
        // unhandled promise rejection after the error was already handled.
    }
}
</script>

<style scoped>
/* Page backdrop: fills the viewport and centers the chat card
   over a soft green gradient. */
.ai-voice-chat {
    min-height: 100vh;
    width: 100vw;
    background: linear-gradient(135deg, #e9f5e1 0%, #b7d7b0 100%);
    display: flex;
    align-items: center;
    justify-content: center;
    padding: 2vw;
    box-sizing: border-box;
}

/* The main chat card: translucent panel with header/body/footer stacked. */
.glass-card {
    width: 100%;
    max-width: 480px;
    min-width: 260px;
    min-height: 80vh;
    border-radius: 18px;
    background: rgba(255, 255, 245, 0.97);
    box-shadow: 0 4px 24px 0 #b7d7b044;
    border: 1.5px solid #b7d7b0;
    padding: 0;
    color: #3e4d2c;
    display: flex;
    flex-direction: column;
}

/* Title bar at the top of the card. */
.chat-header {
    width: 100%;
    background: #e3e6d9;
    color: #4b6e3c;
    font-size: 1.3rem;
    text-align: center;
    padding: 1.2em 0 1em 0;
    border-bottom: 1px solid #b7d7b0;
    letter-spacing: 2px;
    text-shadow: none;
}

/* Scrollable message area; sendMessage() keeps scrollTop pinned to bottom. */
.chat-body {
    width: 100%;
    height: 80vh;
    flex: 1;
    padding: 1em 0.5em;
    background: transparent;
    display: flex;
    flex-direction: column;
    gap: 0.7em;
    overflow-y: auto;
    max-height: 80vh;
}

/* One transcript row; alignment variants below pick the side by role. */
.msg-row {
    display: flex;
    align-items: flex-end;
}

.msg-row-user {
    justify-content: flex-end;
}

.msg-row-ai {
    justify-content: flex-start;
}

/* Shared bubble look; user/ai variants override colors further down. */
.msg-bubble {
    max-width: 70%;
    padding: 0.7em 1em;
    border-radius: 14px;
    font-size: 1em;
    line-height: 1.5;
    word-break: break-all;
    box-shadow: 0 2px 8px #b7d7b044;
    border: 1px solid #b7d7b0;
    background: #f6fbe9;
}

/* "Guess what you want to know" suggestion panel at the top of the chat. */
.suggestions-card {
    background-color: #e9f5e1;
    border-radius: 8px;
    padding: 10px;
    box-shadow: 0 2px 8px #b7d7b044;
    margin-top: 10px;
}
.suggestions-title {
    color: #4b6e3c;
    font-size: 1.2em;
    margin-bottom: 8px;
}
.suggestions-list {
    display: flex;
    flex-direction: column;
    gap: 5px;
}
.suggestion-item {
    color: #3e4d2c;
    padding: 5px;
    border-radius: 4px;
    transition: background-color 0.3s;
}
.suggestion-item:hover {
    background-color: #cbe6b6;
    cursor: pointer;
}
/* Role-specific bubble colors. */
.user-bubble {
    background: linear-gradient(135deg, #b7d7b0 0%, #e9cba7 100%);
    color: #3e4d2c;
    border-bottom-right-radius: 6px;
    box-shadow: 0 0 8px #b7d7b099;
}

.ai-bubble {
    background: #e3e6d9;
    color: #4b6e3c;
    border-bottom-left-radius: 6px;
    box-shadow: 0 0 8px #b7d7b099;
}

/* Input bar pinned to the bottom of the card. */
.chat-footer {
    width: 100%;
    display: flex;
    align-items: center;
    gap: 0.5em;
    background: #e3e6d9;
    padding: 0.7em 0.5em;
    border-top: 1px solid #b7d7b0;
    position: sticky;
    bottom: 0;
    flex-wrap: wrap;
}

/* Text-mode input row (el-input + send button). */
.text-input {
    flex: 1;
    min-width: 0;
    display: flex;
    gap: 0.5em;
}

.chat-input {
    flex: 1;
    border-radius: 18px;
    background: #fff;
    height: 40px;
    color: #3e4d2c;
    border: 1px solid #b7d7b0;
}

/* Push-to-talk button.
   NOTE(review): the template's inline style (width:100%, solid background)
   overrides the width/background declared here — confirm which is intended. */
.voice-btn {
    width: 44px;
    height: 44px;
    font-size: 1.1em;
    background: linear-gradient(90deg, #b7d7b0 0%, #e9cba7 100%);
    color: #4b6e3c;
    box-shadow: 0 0 8px #b7d7b099;
    border: none;
    border-radius: 22px;
    transition: box-shadow 0.2s, background 0.2s;
}

.voice-btn:hover {
    box-shadow: 0 0 16px #b7d7b0;
    background: #cbe6b6;
}

.icon-mic {
    vertical-align: middle;
}

/* Narrow-screen tweaks: near-fullscreen card, smaller text, wider bubbles. */
@media (max-width: 600px) {
    .glass-card {
        max-width: 98vw;
        min-height: 90vh;
        padding: 0;
    }
    .chat-header {
        font-size: 1.1rem;
        padding: 1em 0 0.8em 0;
    }
    .msg-bubble {
        max-width: 90%;
        font-size: 0.98em;
    }
}
</style>
