<template>
    <!-- Voice-call modal for a station loudspeaker. UI state is driven by
         state.callStatus: '1' dialing, '2' connected, '3' error. -->
    <!-- NOTE(review): the title "语音通话12" looks like it carries a leftover
         debug suffix ("12") — confirm the intended label. -->
    <a-modal v-model:visible="state.visible" :maskClosable="false" width="400px" title="语音通话12" 
        wrapClassName="ipCallModal"
        @cancel="closeIPCall"
    >
        <div class="ipCallCon">
            <div class="startSpeek">
                <!-- 15 audio bars; each span's --d CSS variable is its animation
                     delay. The middle (8th) bar has delay 0 so the wave ripples
                     outward from the center. -->
            <!-- <div class="container" v-show="state.isSpeeking">

                <span style="--d: 7"></span>
                <span style="--d: 6"></span>
                <span style="--d: 5"></span>
                <span style="--d: 4"></span>
                <span style="--d: 3"></span>
                <span style="--d: 2"></span>
                <span style="--d: 1"></span>
                <span style="--d: 0"></span>
                <span style="--d: 1"></span>
                <span style="--d: 2"></span>
                <span style="--d: 3"></span>
                <span style="--d: 4"></span>
                <span style="--d: 5"></span>
                <span style="--d: 6"></span>
                <span style="--d: 7"></span>
            </div> -->
                <!-- @mousedown="speekmousedown" 
                @mouseup = 'speekmouseup' -->
            <!-- <a-button type="primary"  

                @click="speekClick">喊话</a-button> -->
            </div>
            <div style="padding-bottom: 20px;">
                <img :src="require(`@/assets/images/overviewData_camera.png`)" alt="">
                <span class='iptitle'>
                    {{ state.siteInfo.yardStationName }}
                </span>
            </div>
            <!-- Dialing state -->
            <div v-show="state.callStatus == '1'">
                <div class="ipt1">正在呼叫中....</div>
            <!-- <div class="ipt2">等待对方接受</div> -->
            </div>
            <!-- Connected state: show elapsed call time -->
            <div  v-show="state.callStatus == '2'">
                <div class="ipt1">{{formatTimeStr(state.startTime) }}</div>
                <div class="ipt2">已接通</div>
            </div>
            <!-- Error state -->
            <div  v-show="state.callStatus == '3'">
                <div class="ipt1">呼叫异常</div>
                <div class="ipt2">{{ state.errorMessage }}</div>
            </div>
        </div>
        <template #footer>
            <a-button v-show="state.callStatus == '1' || state.callStatus == '2'" key="back" @click="closeIPCall">挂断</a-button>
            <a-button v-show="state.callStatus == '3'"  type="primary"  @click="handleReset">重拨</a-button>
        </template>
    </a-modal>
    <!-- </div> -->
</template>
<script setup>

import { defineComponent, reactive, ref, defineExpose } from 'vue';
import {message} from "ant-design-vue";
// Interval id for the 1-second call-duration counter (see startUpTime).
let timer = null;
const state = reactive({
    siteInfo: {},      // station info passed to showModal; expects loudspeakerCode and yardStationName
    startTime:0,       // elapsed call time in seconds once connected
    visible:false,     // whether the modal is shown
    errorMessage:'',   // human-readable error shown in the '3' (error) state

    callStatus:'1',  // call status: '1' dialing, '2' in call, '3' error

    isSpeeking:false   // whether push-to-talk is active (related UI is currently commented out)
})
/**
 * Open the call modal for a station and start dialing.
 * @param {Object} obj station info; must contain `loudspeakerCode` (the
 *        loudspeaker device id used as the WebSocket channel name) and
 *        `yardStationName` for display.
 */
const showModal = (obj) => {
    // NOTE(review): the incoming-audio player is never instantiated — its
    // setup is commented out, so ws.onmessage has nothing to feed. Re-enable
    // once PCMPlayer is available:
    // player = new PCMPlayer({ encoding: '16bitInt', channels: 1, sampleRate: 8000, flushingTime: 300 });
    if (!obj.loudspeakerCode) {
        message.warning('请维护场站扩音柱设备！')
        return
    }
    state.siteInfo = { ...obj };
    // Reset call state before (re)dialing.
    if (timer) clearInterval(timer);
    state.callStatus = '1'
    state.startTime = 0;
    state.errorMessage = ''
    state.visible = true;
    begin();
};
// // Mouse pressed — start push-to-talk
// const speekmousedown= () => {
//     state.isSpeeking= true
// }
// // Mouse released — stop push-to-talk
// const speekmouseup= () => {
//     state.isSpeeking= false
// }
// Toggle the push-to-talk flag (the button wired to this is currently commented out).
const speekClick = () => {
    state.isSpeeking = !state.isSpeeking;
};

// Placeholder OK handler; unused because the modal footer is overridden
// with custom hang-up/redial buttons in the template.
const handleOk = () => {

};

/** Redial: reset call state back to "dialing" and start a new call. */
const handleReset = () => {
    if (timer) {
        clearInterval(timer);
    }
    state.errorMessage = '';
    state.startTime = 0;
    state.callStatus = '1';
    begin();
};
// const handleCancel = () => {
//   state.visible = false;
//   end();
// };

/** Mark the call as connected and start counting elapsed seconds. */
const startUpTime = () => {
    state.callStatus = '2';
    if (timer) {
        clearInterval(timer);
    }
    timer = setInterval(() => {
        state.startTime = state.startTime + 1;
    }, 1000);
};

/** Switch the UI into the error state and stop/clear the duration counter. */
const errstopUpTime = () => {
    state.callStatus = '3';
    if (timer) {
        clearInterval(timer);
    }
    state.startTime = 0;
};

/** Hang up: destroy the incoming-audio player, hide the modal, close the session. */
const closeIPCall = () => {
    const activePlayer = player;
    player = null;
    if (activePlayer) {
        activePlayer.destroy();
    }
    state.visible = false;
    end();
};

/**
 * Format a duration in seconds as "MM:SS", zero-padding each part to 2 digits.
 * @param {number} time elapsed seconds (non-negative integer)
 * @returns {string} e.g. 75 -> "01:15"
 */
const formatTimeStr = (time) => {
    const pad = (n) => (n > 9 ? `${n}` : `0${n}`);
    const minutes = Math.floor(time / 60);
    const seconds = time % 60;
    return `${pad(minutes)}:${pad(seconds)}`;
};

// ----------------------- voice / talk code

let ws = null; // WebSocket used to stream mic audio to the server (see useWebSocket)
let record = null; // Recorder instance wrapping the microphone stream
let timeInte; // interval id created in ws.onopen
let player =null // incoming-audio PCM player; NOTE(review): never instantiated — its creation is commented out in showModal
// Store the recorder instance once the microphone stream is available.
function init(rec) {
    record = rec;
}
// Recorder: wraps a microphone MediaStream. A ScriptProcessor captures
// float samples, which are downsampled to 8 kHz 16-bit little-endian PCM
// and streamed in 1024-byte packets over the module-level WebSocket `ws`.
let Recorder = function (stream) {
    let sampleBits = 16; // output sample size in bits: 8 or 16
    let sampleRate = 8000; // output sample rate (Hz)
    let context = new AudioContext();
    let audioInput = context.createMediaStreamSource(stream);
    // NOTE(review): createScriptProcessor is deprecated in favor of AudioWorklet.
    let recorder = context.createScriptProcessor(4096, 1, 1);
    let audioData = {
        size: 0, // total number of buffered samples
        buffer: [], // captured Float32Array chunks
        inputSampleRate: 48000, // input sample rate; assumes the page default of 48000 — TODO confirm via context.sampleRate
        inputSampleBits: 16, // input sample size in bits: 8 or 16
        outputSampleRate: sampleRate, // output sample rate (Hz)
        oututSampleBits: sampleBits, // output sample size in bits (field name keeps the original "outut" typo)
        // Discard all buffered audio.
        clear: function () {
            this.buffer = [];
            this.size = 0;
        },
        // Append one chunk of captured samples.
        input: function (data) {
            this.buffer.push(new Float32Array(data));
            this.size += data.length;
        },
        compress: function () { // merge buffered chunks, then downsample
            // merge all chunks into one Float32Array
            let data = new Float32Array(this.size);
            let offset = 0;
            for (let i = 0; i < this.buffer.length; i++) {
                data.set(this.buffer[i], offset);
                offset += this.buffer[i].length;
            }
            // downsample by keeping every `compression`-th sample
            let compression = parseInt(this.inputSampleRate / this.outputSampleRate);
            let length = data.length / compression;
            let result = new Float32Array(length);
            let index = 0,
                j = 0;
            while (index < length) {
                result[index] = data[j];
                j += compression;
                index++;
            }
            return result;
        },
        encodePCM: function () { // encode as 16-bit LE PCM; any further format handling is left to the server
            let sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate);
            let sampleBits = Math.min(this.inputSampleBits, this.oututSampleBits);
            let bytes = this.compress();
            let dataLength = bytes.length * (sampleBits / 8);
            let buffer = new ArrayBuffer(dataLength);
            let data = new DataView(buffer);
            let offset = 0;
            // Clamp each float sample to [-1, 1] and scale to signed 16-bit.
            for (let i = 0; i < bytes.length; i++, offset += 2) {
                let s = Math.max(-1, Math.min(1, bytes[i]));
                data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
            }

            return new Blob([data]);
        }

    };

    let sendData = function () { // split the encoded PCM into 1024-byte packets and send them over ws
        let reader = new FileReader();
        reader.onload = e => {
            let outbuffer = e.target.result;
            //ws.send(outbuffer);
            //console.log(outbuffer);
            let arr = new Int8Array(outbuffer);
            if (arr.length > 0) {
                //let tmparr = new Int8Array(arr.length);
                //ws.send(tmparr);
                let tmparr = new Int8Array(1024);
                // console.log(tmparr);
                let j = 0;
                for (let i = 0; i < arr.byteLength; i++) {
                    tmparr[j++] = arr[i];
                    if (((i + 1) % 1024) == 0) {
                        // A full 1024-byte packet is ready — send it, then
                        // allocate the next (possibly short, final) packet.
                        ws.send(tmparr);
                        if (arr.byteLength - i - 1 >= 1024) {
                            tmparr = new Int8Array(1024);
                        } else {
                            tmparr = new Int8Array(arr.byteLength - i - 1);
                        }
                        j = 0;
                    }
                    if ((i + 1 == arr.byteLength) && ((i + 1) % 1024) != 0) {
                        // Trailing partial packet at end of buffer.
                        ws.send(tmparr);
                    }
                }
            }
        };
        reader.readAsArrayBuffer(audioData.encodePCM());
        audioData.clear();// drop old data after each send
    };

    // Connect the audio graph: mic -> processor -> destination (starts capture).
    this.start = function () {
        audioInput.connect(recorder);
        recorder.connect(context.destination);
    }

    // Stop capturing: detach the processing callback and disconnect the node.
    this.stop = function () {
        recorder.onaudioprocess = function (e) {
        }
        recorder.disconnect();
    }

    // Return the current buffer encoded as PCM (without clearing it).
    this.getBlob = function () {
        return audioData.encodePCM();
    }

    // Discard any buffered audio.
    this.clear = function () {
        audioData.clear();
    }

    // Per-buffer capture callback: buffer the samples and stream them out.
    recorder.onaudioprocess = function (e) {
        // if(!state.isSpeeking){
        //     return
        // }
        let inputBuffer = e.inputBuffer.getChannelData(0);
        audioData.input(inputBuffer);
        sendData();
    }
}



/*
 * Open the talk WebSocket for the current station's loudspeaker channel and
 * wire up audio streaming in both directions.
 */
const useWebSocket = () => {
    // In production the talk endpoint is served from the same host; in dev it
    // points at a hard-coded test server.
    let url = process.env.NODE_ENV === 'production' ? `ws://${window.location.host}/talk/` : 'ws://223.244.82.69:8777/talk/'
    let chn = state.siteInfo.loudspeakerCode;
    // ?type=1 selects the talk mode expected by the server.
    ws = new WebSocket(url + chn + '?type=1');
    ws.binaryType = 'arraybuffer'; // binary audio frames arrive as ArrayBuffer

    ws.onopen = (event) => {
        console.log('握手成功');
        startUpTime();
        // Fix: previously record.start() ran on a 180 ms interval, repeatedly
        // reconnecting the same audio nodes. Connecting once is sufficient.
        record.start();
    };

    ws.onmessage = function (msg) {
        // Text frames are control messages — ignore them here.
        if (msg.data.constructor == String) {
            return;
        }
        // Fix: guard against a missing player — it is only created when the
        // PCMPlayer setup in showModal is enabled; without the guard the
        // first binary frame threw a TypeError.
        if (player) {
            player.feed(msg.data); // play audio coming from the remote side
        }
    }

    ws.onclose = (msg) => {
        // Harmless if timeInte was never set (clearInterval ignores undefined).
        clearInterval(timeInte);
    }

    ws.onerror = (err) => {
        console.info(err)
    }
}

// Holds the getUserMedia MediaStream so the microphone can be released on close.
let mediaStreamAA;
/*
 * Start the talk session: acquire the microphone, build the Recorder and
 * open the WebSocket. On failure, switch the UI to the error state with a
 * human-readable message.
 */
const begin = function () {
    // Success path: remember the stream, wrap it in a Recorder, connect.
    const onStream = function (mediaStream) {
        init(new Recorder(mediaStream));
        mediaStreamAA = mediaStream
        console.log('开始对讲');
        useWebSocket();
    };
    // Failure path: map the error to a user-facing message.
    const onError = function (error) {
        console.log(error, 'error');
        errstopUpTime()
        // Fix: match on error.name first — error.message is free-form prose
        // and would almost never equal the case labels below, so the named
        // cases were effectively unreachable.
        switch (error.name || error.message) {
            case 'PERMISSION_DENIED':
            case 'PermissionDeniedError':
            case 'NotAllowedError': // modern name for a denied permission
                state.errorMessage = '用户拒绝提供信息。'
                console.info('用户拒绝提供信息。');
                break;
            case 'NOT_SUPPORTED_ERROR':
            case 'NotSupportedError':
                state.errorMessage = '浏览器不支持硬件设备。'
                console.info('浏览器不支持硬件设备。');
                break;
            case 'MANDATORY_UNSATISFIED_ERROR':
            case 'MandatoryUnsatisfiedError':
            case 'NotFoundError': // modern name when no matching device exists
                state.errorMessage = '无法发现指定的硬件设备。'
                console.info('无法发现指定的硬件设备。');
                break;
            default:
                state.errorMessage = '无法打开麦克风。异常信息:' + (error.code || error.name)
                console.info('无法打开麦克风。异常信息:' + (error.code || error.name));
                break;
        }
    };

    // Fix: prefer the standard promise-based API — the legacy
    // navigator.getUserMedia (and its prefixed variants) has been removed
    // from modern browsers, so the old code wrongly reported "unsupported".
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices.getUserMedia({ audio: true }).then(onStream).catch(onError);
        return;
    }
    // Legacy callback API fallback for very old browsers.
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
    if (!navigator.getUserMedia) {
        alert('浏览器不支持音频输入');
    } else {
        navigator.getUserMedia({ audio: true }, onStream, onError);
    }
}

/*
 * Stop the talk session: disconnect the recorder, close the WebSocket and
 * release the microphone so the browser's capture indicator goes away.
 */
const end = () => {
    if (ws) {
        if (record) {
            record.stop();
        }
        ws.close();
        ws = null

        // Fix: MediaStream.stop() was removed from the spec and throws in
        // modern browsers (leaving the mic live) — stop each track instead.
        if (mediaStreamAA) {
            mediaStreamAA.getTracks().forEach((track) => track.stop());
            mediaStreamAA = null;
        }
        console.log('关闭对讲以及WebSocket', record);
    }
}
defineExpose({ showModal })
</script>
<style lang="less" scoped>
    /* Audio-wave visualization (used by the commented-out .container markup
       in the template). --h is the full bar height used by the keyframes. */
    .container {
      --h: 60px; /* fix: the keyframes reference var(--h), which was never defined */
      display: flex;
      align-items: center;
      position: relative;
      margin-bottom: 4px;
      height: var(--h);
    }
    .container span {
      width: 4px;
      height: 20%;
      border-radius: calc(var(--h) * 0.2 * 0.5);
      margin-right: 4px;
      background: linear-gradient(to top, #d299c2 0%, #00cccc 100%);
      /* looping height animation */
      animation: loading 2s infinite linear;
      /* per-bar delay (--d set inline on each span) creates the ripple effect */
      animation-delay: calc(0.2s * var(--d));
    }
    .container span:last-child {
      /* last bar needs no right margin */
      margin-right: 0;
    }
    @keyframes loading {
      0% {
        height: 20%;
        border-radius: calc(var(--h) * 0.2 * 0.5);
        background: linear-gradient(to right, #fa709a 0%, #fee140 100%);
      }
      50% {
        height: 100%;
        border-radius: calc(var(--h) * 1 * 0.5);
        background: linear-gradient(to top, #d2d2d2 0%, #00cccc 100%);
      }
      100% {
        height: 20%;
        border-radius: calc(var(--h) * 0.2 * 0.5);
        background: linear-gradient(to top, #a8edea 0%, #fed6e3 100%);
      }
    }
.ipCallCon{
    position: relative;
    /* Anchor for the (currently disabled) push-to-talk button */
    .startSpeek{
        position: absolute;
        bottom: -10px;
        right: 0;
        width: 120px;
        background-color: transparent;
        text-align: center;
    }
}
img {
  width: 32px;
  vertical-align: bottom;
}

.iptitle {
  font-size: 24px;
}

.ipt1 {
  font-size: 20px;
}

.ipt2 {
  color: #ccc;
}
</style>