<script setup>
import { Fragment as _Fragment, createTextVNode as _createTextVNode, createVNode as _createVNode } from "vue";
import { Chat, Radio, RadioGroup } from '@kousum/semi-ui-vue';
import { defineComponent, ref,onMounted,watch,nextTick  } from 'vue';
import voiceButton from "./voiceButton.vue"
import aiassistantView from "./aiassistantView.vue"
import Recorder from 'recorder-core';
import 'recorder-core/src/engine/wav';
import 'recorder-core/src/extensions/waveview';
import axios from 'axios';
import InputMassage from "./InputMassage.vue"
import { AudioPlayer } from '@kousum/semi-ui-vue';
// Module-scoped state shared by the recording / chat / TTS helpers below.
let rec = null;  // recorder-core instance; created by recOpen(), released in recStop's error path
const voiceUrl = ref("https://yubinux.cn/yyfile/tts/downloaded_audio.wav");  // wav URL fed to <AudioPlayer>
const ChatResult = ref("");   // speech-to-text transcription, passed to <InputMassage>
const SystemResult = ref("");  // last completed assistant reply, forwarded to TTS
const now = new Date();  // timestamp captured once at module load; used for the seed chat message
// 打开录音
// Open the recorder: build a wav Recorder instance (16 kHz / 16-bit) and
// request microphone permission. Resolves once access is granted; rejects
// with a descriptive Error when the browser lacks support or access fails.
const recOpen = () => {
    return new Promise((resolve, reject) => {
        // A fresh recorder is created on every call.
        rec = Recorder({
            type: 'wav',
            sampleRate: 16000,
            bitRate: 16,
        });

        if (!rec) {
            reject(new Error('当前浏览器不支持录音功能'));
            return;
        }

        // Named callbacks adapting recorder-core's callback API to a Promise.
        const onOpened = () => {
            console.log('麦克风权限已获取');
            resolve();
        };
        const onFailed = (msg, isUserNotAllow) => {
            const errorMsg = isUserNotAllow ? '用户拒绝了麦克风权限' : '无法访问麦克风: ' + msg;
            reject(new Error(errorMsg));
        };
        rec.open(onOpened, onFailed);
    });
}


// 开始录音
// Begin capturing audio. Requires a prior successful recOpen();
// throws (after logging) when no recorder instance exists.
const recStart = () => {
    if (rec) {
        rec.start();
        console.log('已开始录音');
        return;
    }
    const error = new Error('未打开录音');
    console.error(error);
    throw error;
}
// 结束录音
// Stop the current recording. On success the captured wav blob is handed to
// upload(); the recorder stays open so start() can be called again without
// re-requesting the microphone. On failure the recorder is closed and released.
const recStop = () => {
    if (!rec) {
        console.error('未打开录音');
        return;
    }
    const onStopped = (blob, duration) => {
        const localUrl = (window.URL || window.webkitURL).createObjectURL(blob);
        console.log('录音成功', blob, localUrl, '时长:' + duration + 'ms');
        upload(blob)
        // Deliberately NOT closing here — keeping the instance allows
        // consecutive start() calls without reopening the mic.
    };
    const onStopError = (err) => {
        console.error('结束录音出错：' + err);
        rec.close(); // close the recorder and release its resources
        rec = null;
    };
    rec.stop(onStopped, onStopError);
}


// 上传录音
// Upload a recorded wav blob to the local speech-recognition endpoint and
// store the transcription in ChatResult (consumed by <InputMassage>).
//
// Bug fix: the original wrapped the axios call in try/catch, but a rejected
// promise is asynchronous and never reaches a synchronous catch — network
// failures became unhandled rejections. The handler is now attached with
// .catch() on the promise chain.
//
// @param {Blob} blob - wav audio produced by recStop(); must be non-null.
// @throws {Error} when blob is falsy.
const upload = (blob) => {
    if (!blob) {
        throw new Error('无效的录音文件');
    }

    const formData = new FormData();
    formData.append('vfile', blob, 'recording.wav');
    // Renamed from `voiceUrl` to avoid shadowing the module-level voiceUrl ref.
    const apiUrl = "http://127.0.0.1:5005/voiceapi";
    axios.post(apiUrl, formData, {
        headers: { 'Content-Type': 'multipart/form-data' },
    }).then(res => {
        console.log('上传成功!');
        console.log(res.data);
        ChatResult.value = res.data; // transcribed text
    }).catch(error => {
        console.error('上传失败:', error);
    });
}


onMounted(() => {
    // Initialize the recorder / request mic permission on mount.
    // Bug fix: recOpen() returns a Promise whose rejection (e.g. the user
    // denying the permission prompt) was previously left floating; log it
    // explicitly so failures are visible instead of an unhandled rejection.
    recOpen().catch((err) => {
        console.error('录音初始化失败:', err);
    });
});

// Render the module-load timestamp as "YYYY-MM-DD HH:mm:ss" in the zh-CN
// locale (24-hour clock); zh-CN emits "/" as the date separator, which is
// swapped for "-".
const timeFormatOptions = {
  year: 'numeric',
  month: '2-digit',
  day: '2-digit',
  hour: '2-digit',
  minute: '2-digit',
  second: '2-digit',
  hour12: false, // force 24-hour clock
};
const formattedTime = now
  .toLocaleString('zh-CN', timeFormatOptions)
  .replace(/\//g, '-');

// Seed conversation shown when the chat first mounts.
// Bug fix: the third message reused id '1' (same as the first system
// message). Chat lists key their messages by id, so duplicates can cause
// rendering/update glitches; it now gets a unique id '3'.
const defaultMessage = [{
    role: 'system',
    id: '1',
    createAt: 1715676751919,
    content: "Hello,你好？请问有什么可以帮助您的？."
    }, {
    role: 'user',
    id: '2',
    createAt: 1715676751919,
    content: "现在的时间是？"
    },
    {
    role: 'system',
    id: '3', // was '1' — duplicate of the first message's id
    createAt: 1715676751919,
    content: "Now："+formattedTime
    }
];

    // Display name + avatar per chat role, passed to <Chat roleConfig>.
    const roleInfo = {
    user: {
        name: 'User',
        avatar: 'https://yubinux.cn/yyfile/images/user.jpg'
    },
    system: {
        name: 'AI Assistant',
        avatar: 'https://yubinux.cn/yyfile/images/system.png'
    }
    };

    // Outer container style applied to the <Chat> component.
    const commonOuterStyle = {
    border: '1px solid var(--semi-color-border)',
    borderRadius: '16px',
    margin: '8px 16px',
    height: '550px'
    };
    // File-upload target for the chat's attachment button.
    // NOTE(review): this still points at the Semi demo endpoint — confirm intended.
    const uploadProps = {
    action: 'https://api.semi.design/upload'
    };
    const uploadTipProps = {
    content: '自定义上传按钮提示信息'
    };
    // Monotonic id source for messages appended at runtime (numeric ids;
    // the seed messages use string ids, so they do not collide).
    let count = 0

    // Inline chat component: renders mode/alignment radio groups plus the
    // <Chat> widget, and streams assistant replies over SSE.
    const DefaultChat = defineComponent(() => {
    const chats = ref(defaultMessage);      // live message list
    const mode = ref('bubble');             // bubble / noBubble / userBubble
    const align = ref('leftRight');         // leftRight / leftAlign
    // Called by <Chat> after the user's message is appended. Pushes an empty
    // system placeholder, then streams its content from the backend via SSE.
    const onMessageSend = (message) => {
    chats.value.push({
        role: 'system',
        id: count++,
        createAt: new Date().getTime(),
        content: '',
        status: 'loading',
    });
    const eventSource = new EventSource(
        `http://localhost:8081/aiapi/stream?message=${encodeURIComponent(message)}`
    );
    eventSource.onmessage = (event) => {
        let response = event.data;
        // Server signals end-of-stream with a literal 'end' payload.
        if (response == 'end') {
            chats.value[chats.value.length - 1].status = 'complete';
            SystemResult.value = chats.value[chats.value.length - 1].content
            uploadtexttotts(SystemResult.value)  // speak the finished reply
            console.log(SystemResult.value);
        close();
        return;
        }
        // Each non-final event is a JSON object carrying a content delta.
        response = JSON.parse(response).content;
        chats.value[chats.value.length - 1].status = 'incomplete';
        chats.value[chats.value.length - 1].content += response;
    };
        eventSource.onerror = (error) => {
        console.log(error);
        chats.value[chats.value.length - 1].status = 'error';
        close();
    };

    // Declared after the handlers but only invoked at runtime, so the
    // reference is safe (const in the same closure).
    const close = () => {
        eventSource.close();
    };
    };
    
    // <Chat> owns the list; mirror its changes back into our ref.
    const onChatsChange = chatss => {
        chats.value = chatss;
    };
        // Mock "regenerate" handler: after a short delay, replace the last
        // message with a canned completed message.
        const onMessageReset = e => {
        console.log(e);
        
        setTimeout(() => {
        const lastMessage = chats.value[chats.value.length - 1];
        const newLastMessage = {
            ...lastMessage,
            status: 'complete',
            content: 'This is a mock reset message.'
        };
        chats.value = [...chats.value.slice(0, -1), newLastMessage];
        }, 200);
        };
    
    // Hand-written render function (pre-compiled VNode style).
    // NOTE(review): the two RadioGroups are not bound to `mode`/`align`
    // (no value/onChange props), so selecting a radio appears to have no
    // effect on the Chat below — confirm whether binding was intended.
    return () => _createVNode(_Fragment, null, [_createVNode("span", {
        "style": {
        display: 'flex',
        flexDirection: 'column',
        rowGap: '8px'
        }
    }, [_createVNode("span", {
        "style": {
        display: 'flex',
        alignItems: 'center',
        columnGap: '10px'
        }
    }, [_createVNode(RadioGroup, {
        default: () => [_createVNode(Radio, {
        "value": 'bubble'
        }, {
        default: () => [_createTextVNode("\u6C14\u6CE1")]
        }), _createVNode(Radio, {
        "value": 'noBubble'
        }, {
        default: () => [_createTextVNode("\u975E\u6C14\u6CE1")]
        }), _createVNode(Radio, {
        "value": 'userBubble'
        }, {
        default: () => [_createTextVNode("\u7528\u6237\u4F1A\u8BDD\u6C14\u6CE1")]
        })]
    })]), _createVNode("span", {
        "style": {
        display: 'flex',
        alignItems: 'center',
        columnGap: '10px'
        }
    }, [_createVNode(RadioGroup, {
        default: () => [_createVNode(Radio, {
        "value": 'leftRight'
        }, {
        default: () => [_createTextVNode("\u5DE6\u53F3\u5206\u5E03")]
        }), _createVNode(Radio, {
        "value": 'leftAlign'
        }, {
        default: () => [_createTextVNode("\u5DE6\u5BF9\u9F50")]
        })]
    })])]), _createVNode(Chat, {
        "key": align.value + mode.value,
        "align": align.value,
        "mode": mode.value,
        "uploadProps": uploadProps,
        "style": commonOuterStyle,
        "chats": chats.value,
        "roleConfig": roleInfo,
        "onChatsChange": onChatsChange,
        "onMessageSend": onMessageSend,
        "onMessageReset": onMessageReset,
        "uploadTipProps": uploadTipProps
    }, null)]);
    });

// Send assistant reply text to the TTS service, point the audio player at
// the generated wav file, and remount <AudioPlayer> so autoplay fires again.
//
// Bug fix: nextTick() was called without await, so showAudio flipped
// false→true synchronously within the same tick — Vue batched the two
// writes into no-op and the component never actually remounted. The then
// handler is now async and awaits the DOM flush between the toggles.
//
// @param {string} text - reply text to synthesize.
const uploadtexttotts = (text) => {

    console.log(text);
    axios.get("https://earthquake.yubinux.cn/voiceapi/tts", {
        params: {
            text: text // axios URL-encodes query params automatically
        },
        headers: {
            'Content-Type': 'application/json'
        }
    }).then(async res => {
        console.log('响应数据:', res.data);
        // The service returns a server-side path; keep only the filename.
        const filename = res.data;
        const parts = filename.split('/');
        voiceUrl.value = "https://yubinux.cn/yyfile/tts/" + parts[parts.length - 1];
        shouldAutoPlay.value = true;

        // Destroy, flush the DOM, then recreate the player.
        showAudio.value = false;
        await nextTick();
        showAudio.value = true;
    }).catch(error => {
        console.error('请求失败:', error);
    });

}


// Whether the next audio URL is allowed to autoplay; reset by the player's
// @ended handler in the template and re-armed whenever a new URL arrives.
const shouldAutoPlay = ref(true)
watch(voiceUrl, () => {
  shouldAutoPlay.value = true; // a fresh URL may autoplay once
});

// v-if flag used to destroy/recreate <AudioPlayer> (see uploadtexttotts).
const showAudio = ref(true)
// NOTE(review): imports placed at the end of <script setup> are hoisted by
// the SFC compiler, but moving them to the top with the others would be tidier.
import cardView from "./cardView.vue";
import earthView from "./earthView.vue";
</script>

<template>
    <div class="box">
        <!-- Rotating globe widget, pinned to the top-right corner -->
        <div class="earth_box">
            <earthView />
        </div>
        <!-- Main chat panel (DefaultChat component defined in the script above) -->
        <div class="chat-container">
            <DefaultChat />
        </div>
        <!-- Press to start microphone capture -->
        <div class="start_voice">
            <voiceButton @click="recStart" />
        </div>
        <!-- Press to stop capture and upload the recording -->
        <div class="end_voice">
            <aiassistantView @click="recStop" />
        </div>
        <!-- Shows the speech-to-text transcription result -->
        <div class="detect_voice">
            <InputMassage :chat-result="ChatResult" />
        </div>
        <div class="cardview">
            <cardView/> 
        </div>

    <!-- TTS playback; v-if lets uploadtexttotts remount the player to re-trigger autoplay -->
    <div v-if="showAudio" class="voice_box">
    <AudioPlayer
      :audioUrl="voiceUrl"
      :autoPlay="true"
      theme="light"
      @ended="shouldAutoPlay = false"
    />
        </div>

    </div>


</template>


<style>
/* NOTE(review): this style block is unscoped — these class rules leak into
   other components; consider <style scoped> if that is unintended. */

/* Globe widget: top-right overlay */
.earth_box{
    position: absolute;
    right: 3%;
    top: 3%;
    z-index: 100;
}
/* Info card: top-left overlay */
.cardview{
    position: absolute;
    left: 2%;
    top: 2%;
}
/* Page root: clip absolutely-positioned children */
.box{
    overflow: hidden;

}
/* Transcription display, right side above the stop button */
.detect_voice{
    position:absolute;
    bottom: 25%;
    right: 2%;
}
/* Chat panel, offset from the left edge */
.chat-container {
position: absolute;
    left: 25%;
    align-content: center;
    justify-content: center;
    background-color: white;
    overflow: hidden;
    margin: 0 auto;
    padding: 20px;
    width: 100%;
    height: 100%;
}

/* Stop-recording button */
.end_voice{
    position: absolute;
    bottom: 15%;
    right: 6%;

}
/* Start-recording button */
.start_voice{
    position: absolute;
    bottom: -1%;
    right: 10%;


}

/* Audio player overlay, top-center */
.voice_box{
    position: absolute;
    top: 2%;
    left: 30%;
    z-index: 100;
}



</style>