<template>
    <!-- Speaking-practice question: play a prompt, then record the user's answer. -->
    <div class="type1">
        <!-- Hint pointing at the play-audio button -->
        <div class="finger">
            点击这里
            <img src="@/static/finger.svg" alt="">
        </div>
        <!-- Hint pointing at the record button; revealed once the prompt audio ends (see enddd) -->
        <div class="fingerbtn" v-show="fingerbtn">
            点击这里录音和结束录音
            <img src="@/static/finger.svg" alt="">
        </div>
        <!-- Tapping the speaker icon plays the prompt through the hidden <video> below -->
        <img src="@/static/voice.png" alt="" class="voice" @click="playaudio">
        <video ref="audioRef" style="height: 0px;" :src="mymp3" @ended="enddd"></video>
        <!-- Question media: rendered as an image for .png/.jpg sources, otherwise as a video player -->
        <img :src="mypic" alt="" class="topic" v-if="endsWithAny(mypic.toLowerCase(), ['.png', '.jpg'])">
        <div class="processdetail1" v-else> <video controls class="videoPath" :src="mypic"></video></div>
        <!-- NOTE(review): :disabled has no effect on <img>; clicks still fire while recording — confirm intent -->
        <img src="@/static/record.png" alt="" :class="voice" @click="toggleRecording" :disabled="isRecording">
    </div>
</template>

<script setup>
import { ref, defineProps, onMounted, defineEmits } from 'vue';
import { message } from 'ant-design-vue';
import { useRoute } from 'vue-router';
import axios from 'axios';
import api from '@/api/user'
// Elapsed answer time in milliseconds, incremented every 100 ms by enddd().
const time = ref(0);
// Interval id for the answer timer; cleared in getconsolidate().
const timer = ref(null);
// Fired when the prompt audio finishes playing: hide the player controls,
// reveal the "tap to record" hint, and start timing how long the user takes
// to answer (time accumulates in ms and is sent with the answer).
// Fixes: removed duplicated `fingerbtn.value = true;` line, and clear any
// previously-started interval so replaying the audio cannot leak timers.
const enddd = () => {
    audioRef.value.controls = false;
    fingerbtn.value = true;
    if (timer.value) {
        clearInterval(timer.value);
    }
    timer.value = setInterval(() => {
        time.value += 100;
    }, 100);
}
const fingerbtn = ref(false); // whether the "tap to record" hint is visible (set true when the prompt audio ends)
const voice = ref('voice')    // CSS class for the record button; becomes 'voice change' while recording
const correct = ref();        // latest grading result returned by the upload endpoint
const route = useRoute();     // used to read `processnum` from the query string in getconsolidate()
// Question payload passed in by the parent view.
const props = defineProps({
    // Image URL for the question (not referenced in this file's template — TODO confirm still needed).
    img: {
        type: String,
        required: true
    },
    // URL of the prompt audio, played through the hidden <video> element.
    mymp3: {
        type: String,
        required: true
    },
    // Level identifier, forwarded with the recording upload.
    level: {
        type: String,
        required: true
    },
    // Question media URL; shown as an image for .png/.jpg, otherwise as a video.
    mypic: {
        type: String,
        required: true
    },
    // Question id, sent when saving the answer.
    id: {
        type: String,
        required: true
    }
});
import Recorder from 'recorder-core';
//引入mp3格式支持文件；如果需要多个格式支持，把这些格式的编码引擎js文件放到后面统统引入进来即可
import 'recorder-core/src/engine/mp3';
import 'recorder-core/src/engine/mp3-engine';
//录制wav格式的用这一句就行
import 'recorder-core/src/engine/wav';
//可选的插件支持项，这个是波形可视化插件
import 'recorder-core/src/extensions/waveview';
//ts import 提示：npm包内已自带了.d.ts声明文件（不过是any类型）
// Truthy while audio capture is in progress (toggled by toggleRecording).
const isRecording = ref(false);
// Template ref for the hidden <video> element that plays the prompt audio.
const audioRef = ref();
/**
 * Report whether `str` ends with any of the given suffixes.
 * Used by the template to decide image-vs-video rendering of `mypic`.
 * @param {string} str - string to inspect (caller lowercases it first)
 * @param {string[]} suffixes - candidate endings, e.g. ['.png', '.jpg']
 * @returns {boolean} true when at least one suffix matches
 */
function endsWithAny(str, suffixes) {
    for (const candidate of suffixes) {
        if (str.endsWith(candidate)) {
            return true;
        }
    }
    return false;
}
// Play the prompt audio via the hidden <video> element; playback failures
// (e.g. browser autoplay restrictions) are logged rather than thrown.
// Fix: the original logged `audioRef.value.src` BEFORE the null check,
// which throws when the ref is not yet attached.
const playaudio = () => {
    if (audioRef.value) {
        console.log(audioRef.value.src);
        audioRef.value.play().catch(error => {
            console.error('Error playing audio:', error);
        });
    }
};

const emits = defineEmits(['end'], ['correct']);
// Signal the parent that this question is finished ('end', payload 1)
// after a one-second delay.
const run = () => {
    const ADVANCE_DELAY_MS = 1000;
    setTimeout(() => emits('end', 1), ADVANCE_DELAY_MS);
};

// Toggle the recorder between idle and recording on each tap of the button.
// Fixes: isRecording was assigned the number 1 (never a proper boolean) and
// was never reset after stopping — so after the first stop every subsequent
// click fell into the else branch and called recStop() on an already-closed
// recorder instead of starting a new recording.
const toggleRecording = () => {
    if (!isRecording.value) {
        isRecording.value = true;
        message.success('开始录音');
        recStart()
    } else {
        recStop();
        isRecording.value = false;
        message.success('结束录音');
    }
};
let rec;                   // active Recorder instance; set to null after close
let recBlob;               // most recent finished recording as a Blob
let wave;                  // optional WaveView visualizer instance
const recwave = ref(null); // template ref for the waveform container (none in this template — TODO confirm)
// Open the recorder: build a Recorder instance (mp3, 16 kHz, 16 kbps),
// request microphone permission, and attach the optional waveform view.
function recOpen() {
    const recorderOptions = {
        type: 'mp3', // output format; other engines (e.g. wav) can be used instead
        sampleRate: 16000, // sampling rate — higher captures more detail
        bitRate: 16, // bit rate — higher means better quality
        // Realtime callback (~12x/sec): feed the newest buffer to the
        // waveform visualizer once it exists. Could also stream data here.
        onProcess: (
            buffers,
            powerLevel,
            bufferDuration,
            bufferSampleRate,
            newBufferIdx,
            asyncEnd,
        ) => {
            if (wave) {
                wave.input(buffers[buffers.length - 1], powerLevel, bufferSampleRate);
            }
        },
    };
    rec = Recorder(recorderOptions);
    if (!rec) {
        alert('当前浏览器不支持录音功能！');
        return;
    }
    const handleOpened = () => {
        console.log('录音已打开');
        if (recwave.value) {
            // Create the audio-visualization drawing object.
            wave = Recorder.WaveView({ elem: recwave.value });
        }
    };
    const handleOpenError = (msg, isUserNotAllow) => {
        // Permission denied, or the browser cannot record.
        console.log((isUserNotAllow ? 'UserNotAllow，' : '') + '无法录音:' + msg);
    };
    rec.open(handleOpened, handleOpenError);
}
// Begin capturing audio and restyle the record button via the `voice` class.
function recStart() {
    if (rec) {
        rec.start();
        console.log('已开始录音');
        voice.value = "voice change"
    } else {
        console.error('未打开录音');
    }
}
// Stop capturing. On success: keep the Blob, log a local object URL (usable
// for playback/download while the page lives), upload the file, then release
// the recorder. The recorder is released on failure too.
function recStop() {
    if (!rec) {
        console.error('未打开录音');
        return;
    }
    const handleStopped = (blob, duration) => {
        // `blob` is the finished recording — uploadable or locally playable.
        recBlob = blob;
        const localUrl = (window.URL || window.webkitURL).createObjectURL(blob);
        console.log('录音成功', blob, localUrl, '时长:' + duration + 'ms');
        upload(blob); // push the recording to the server
        rec.close(); // release recorder resources; could stay open for reuse
        rec = null;
    };
    const handleStopError = (err) => {
        console.error('结束录音出错：' + err);
        rec.close(); // release recorder resources even on failure
        rec = null;
    };
    rec.stop(handleStopped, handleStopError);
}
// Upload the recorded audio to the grading endpoint. On success, publish the
// score via the 'correct' event and persist the answer through accuracys().
// Fix: the original promise chain had no .catch — a network/server failure
// was silently swallowed and the user got no feedback.
const upload = (blob) => {
    const formData = new FormData();
    formData.append('file', blob, "recording.mp3"); // filename; adjust if format changes
    formData.append('level', props.level);
    axios({
        url: "/interface/shasha/upload",
        method: "post",
        headers: {
            Authorization: localStorage.getItem('token'),
            'Content-Type': 'multipart/form-data'
        },
        data: formData
    })
        .then(async (res) => {
            correct.value = res.data.correct;
            console.log(correct.value);
            emits('correct', correct.value);
            await accuracys(res.data)
        })
        .catch((err) => {
            console.error('上传录音失败:', err);
            message.error('上传失败，请重试');
        });
}
// Persist the grading result for this answer.
// Fix: this function is declared async and awaited by upload(), but the
// original never awaited getconsolidate() — so awaiting accuracys() resolved
// immediately, before the save actually completed.
const accuracys = async (x) => {
    await getconsolidate(x);
}
// Record the achievement reward, then tell the parent to advance ('end')
// after a one-second pause.
const addacheive = async () => {
    const res = await api.addachieve({ "rewardId": 1 });
    console.log(res);
    setTimeout(() => emits('end', 1), 1000);
}
// Stop the answer timer and save this answer (score, level, question id,
// server-side file path, and elapsed time), then record the achievement.
const getconsolidate = async (x) => {
    clearInterval(timer.value);
    const answerRecord = {
        "correct": x.correct,
        "levels": route.query.processnum,
        "questionId": props.id,
        "answerPath": x.fileName,
        "selectAnswerDuration": String(time.value)
    };
    await api.addconsolidate(answerRecord);
    await addacheive();
}
// On mount: log the question id and pre-open the recorder so the microphone
// permission prompt appears before the user taps the record button.
onMounted(() => {
    console.log("level" + props.id);
    recOpen();

});
</script>

<style lang="scss" scoped>
// Hint bubble pointing at the play-audio button.
.finger {
    position: absolute;
    display: flex;
    left: 50%;
    top: 50%;
    transform: translate(-18rem, -21rem);
    justify-content: center;
    align-items: center;
    font-size: 2rem;
    color: red;
    font-weight: 900;

    img {
        transform: rotate(90deg);
    }
}

// Hint bubble pointing at the record button (shown after the prompt ends).
.fingerbtn {
    left: 50%;
    top: 50%;
    transform: translate(-23rem, 17rem);
    position: absolute;
    display: flex;
    justify-content: center;
    align-items: center;
    font-size: 2rem;
    color: red;
    font-weight: 900;
    width: 18rem;

    img {
        transform: rotate(90deg);
    }
}

// Speaker icon / default record-button styling.
.voice {
    height: 8rem;
    width: 8rem;
    cursor: pointer;
}

// Recolors the record button while recording ('voice change' class state).
.change {
    filter: invert(1) sepia(1) saturate(10) hue-rotate(40deg);
    /* Adjust the values to get the desired color */

}

// NOTE(review): .videoPath is declared twice in this block; the later
// declaration (width: 120%) overrides this one — confirm which is intended.
.videoPath {
    z-index: 3;
    height: 100%;
    width: 100%;
}

// Container for the question video when `mypic` is not an image.
.processdetail1 {
    // background-image: url("@/static/processdetail2.jpg");
    background-size: cover;
    height: 30.5rem;
    width: 35rem;
    display: flex;
    justify-content: center;
    align-items: center;
    flex-direction: column;
    gap: 3rem;

    img {
        height: 90%;
        width: 90%;
    }
}

// Duplicate of the earlier .videoPath rule; this one wins (width: 120%).
.videoPath {
    z-index: 3;
    height: 100%;
    width: 120%;
}

// Question image sizing.
.topic {
    height: 30rem;
    width: 30rem;
}

// Root layout: vertically stacked, centered column.
.type1 {
    display: flex;
    flex-direction: column;
    justify-content: center;
    align-items: center;
}
</style>