// Vue (Options API) component: records audio from the microphone, previews the
// take, and uploads it to become a section "fragment" or "reminder".
let recorderComponent = {
    template: `
        <div class="recorder page">
            <div class="head">
                <div class="left" @click="back">
                    <span class="iconfont icon-ali-fanhui"></span>
                </div>
                <div class="title">CREATE SECTION</div>
                <div class="right"></div>
            </div>
            <div class="main" v-show="recordState!==3">
    
                <div class="time">
                    {{parseTime(time)}}
                </div>
                <canvas id="my-canvas" style="width: 100%;height: 300px;"></canvas>
            </div>
             <div class="main" v-show="recordState === 3">
                         <audio :src="url" @timeupdate="updateTime" @ended="playState = false" @canplay="canplay"></audio>
                         <canvas id="my-canvas1" style="width: 100%;height: 300px;"></canvas>
                <div class="play-time">
                    <span class="currentTime">{{parseTime(currentTime)}}</span>
                    <span>&nbsp;|&nbsp;</span>
                    <span class="duration">{{parseTime(duration)}}</span>
                </div>
             
            </div>
             <div class="controls" v-if="recordState!==3">
                <div class="btn">
                    <span @click="pause" v-show="recordState === 1" class="iconfont icon-ali-caozuo-bofang-zanting"></span>
                    <span @click="resume" v-show="recordState === 2" class="iconfont icon-ali-bofangzanting"></span>
                </div>
                <div class="btn">
                    <span @click="start" v-show="recordState === 0" class="iconfont icon-ali-a-smlsicon_luzhi"></span>
                    <span @click="finish" v-show="recordState !== 0" class="iconfont icon-ali-duihao"></span>
                </div>
                <div class="btn" v-show="recordState === 0">
                    <span @click="upload"  class="iconfont icon-ali-shangchuan"></span>
                </div>
                <div class="btn"><span @click="cancel" v-show="recordState !== 0" class="iconfont icon-ali-fork"></span></div>
             </div>
             

             <div class="controls" v-else>
                <div class="btn">
                    <span @click="destroy"  class="iconfont icon-ali-fork"></span>
                </div>
                <div class="btn">
                    <span @click="prev"  class="iconfont icon-ali-ai18"></span>
                </div>
                <div class="btn">
                    <span @click="play" v-show="!playState" class="iconfont icon-ali-bofangzanting"></span>
                    <span @click="play" v-show="playState" class="iconfont icon-ali-caozuo-bofang-zanting"></span>
                </div>
                <div class="btn">
                    <span @click="next"  class="iconfont icon-ali-ai19"></span>
                </div>
                <div class="btn"><span @click="save" v-show="recordState !== 0" class="iconfont icon-ali-duihao"></span></div>
             </div>
                             <input type="file" id="fileInput" style="display: none">
        </div>
    `,
    data() {
        return {
            recordState: 0, // 0 = idle, 1 = recording, 2 = paused, 3 = finished
            playState:false, // whether the playback <audio> is currently playing
            volumeList: [], // recent peak volumes; one vertical bar each on the canvas
            time:0, // recording duration in ms (driven by the interval in start())
            currentTime:0, // playback position in ms
            duration:0, // length of the finished clip in ms
            url:null // object URL of the recorded/selected audio blob
        }

    },
    computed: {},
    watch: {},
    mounted() {
        // Wire up: the live-waveform canvas, the playback <audio> element,
        // a MediaRecorder for capture, and a ScriptProcessor for volume
        // metering. Requires microphone permission.
        this.canvas = document.getElementById('my-canvas')
        this.canvas.width = this.canvas.clientWidth
        this.canvas.height = this.canvas.clientHeight
        this.context = this.canvas.getContext('2d')

        this.audio = document.querySelector('audio')

        navigator.mediaDevices.getUserMedia({ audio: true })
            .then((stream) =>{
                // Keep the stream so beforeRouteLeave can stop its tracks.
                this.stream = stream

                this.mediaRecorder = new MediaRecorder(stream);
                this.mediaRecorder.addEventListener("dataavailable", (event) =>{
                    // Fires when the recorder stops; only keep the data when the
                    // user confirmed the take (finish() sets recordState = 3,
                    // cancel() sets 0 before stopping).
                    if(this.recordState === 3){
                                console.log(event.data.arrayBuffer())
                         let blob = this.blob = new Blob([event.data], { type: "audio/wav" });
                        this.audioFile = new File([blob], `music.wav`, { type: blob.type });
                        this.url = URL.createObjectURL(blob);
                        this.currentTime =0
                        // NOTE(review): duration comes from the ms tick counter
                        // (+10 fudge), not from decoding the audio — confirm.
                        this.duration = this.time + 10
                        // this.createWave()
                    }
                    this.time = 0
                })

                let audioContext = new AudioContext
                // Feed the microphone input into the audio graph
                let mediaStreamSource = audioContext.createMediaStreamSource(stream)
                // Create an analysis node: 4096-sample buffer, mono in / mono out
                let scriptProcessor = audioContext.createScriptProcessor(4096, 1, 1)
                // Connect the analysis node to the microphone audio
                mediaStreamSource.connect(scriptProcessor)
                // No audible effect; only needed to work around a Chrome quirk
                // (onaudioprocess does not fire otherwise)
                scriptProcessor.connect(audioContext.destination)
                // Start processing audio


                let lineWidth = 5

                let lineOffset = 2
                let size =  this.canvas.width / (lineWidth + lineOffset)

                scriptProcessor.onaudioprocess = (e) =>{
                    // Input audio from the buffer: a 32-bit float array of PCM channel data
                    let buffer = e.inputBuffer.getChannelData(0)
                    // Peak volume value within this buffer
                    let maxVal = Math.max.apply(Math, buffer)
                    // Display the volume value
                    // console.log('volume:')
                    //

                    let volume = Math.round(maxVal * 100)
                    // console.log(Math.round(maxVal * 100))
                    // Truthy when there is sound; used to tell whether the mic picks anything up

                    if(this.recordState === 1){
                        // Scroll the peak list and redraw one vertical bar per slot.
                        if(this.volumeList.length > size ){
                            this.volumeList.shift()
                        }
                        this.volumeList.push(maxVal)
                        this.context.clearRect(0, 0, this.canvas.width, this.canvas.height)
                        for(let i =0; i < size ; i ++){

                            if(this.volumeList[i]){
                                let volume = this.volumeList[i]
                                this.context.fillStyle = '#000000'
                                this.context.fillRect(i * (lineWidth + lineOffset), (this.canvas.height - this.canvas.height * volume) / 2 , lineWidth, this.canvas.height * volume + 1)
                            }else{
                                // No sample for this slot yet: draw a thin red baseline tick.
                                this.context.fillStyle = 'red'
                                this.context.fillRect(i * (lineWidth + lineOffset), this.canvas.height  / 2 , lineWidth,  2)
                            }
                        }
                    }

                }
            })
            .catch(function(error) {
                console.error(error);
            });


    },
    activated() {
        // Intentionally empty keep-alive hook; nothing to refresh when the
        // cached component is re-activated.
    },
    methods: {
        async createWave(){
            let canvas = document.getElementById('my-canvas1')
            canvas.width = canvas.clientWidth
            canvas.height = canvas.clientHeight
            let context = this.canvas.getContext('2d')
            let audioContext = new AudioContext
            // 将麦克风的声音输入这个对象
            let mediaStreamSource = audioContext.createBufferSource()
            const audioBuffer = await audioContext.decodeAudioData(await this.blob.arrayBuffer(), function(
                decodeData
            ) {
                return decodeData;
            });
            mediaStreamSource.buffer = audioBuffer
            // 创建一个音频分析对象，采样的缓冲区大小为4096，输入和输出都是单声道
            let scriptProcessor = audioContext.createScriptProcessor(4096, 1, 1)
            // 将该分析对象与麦克风音频进行连接
            mediaStreamSource.connect(scriptProcessor)
            // 此举无甚效果，仅仅是因为解决 Chrome 自身的 bug
            scriptProcessor.connect(audioContext.destination)
            // 开始处理音频


            let lineWidth = 5

            let lineOffset = 2
            let size =  canvas.width / (lineWidth + lineOffset)
            this.volumeList = []
            scriptProcessor.onaudioprocess = (e) =>{
                // 获得缓冲区的输入音频，转换为包含了PCM通道数据的32位浮点数组
                let buffer = e.inputBuffer.getChannelData(0)
                // 获取缓冲区中最大的音量值
                let maxVal = Math.max.apply(Math, buffer)
                // 显示音量值
                // console.log('显示音量：')
                //

                let volume = Math.round(maxVal * 100)
                // console.log(Math.round(maxVal * 100))
                // 如果有声音的话，值为true,通过语音流去判断话筒是否有声音


                if(this.volumeList.length > size ){
                    this.volumeList.shift()
                }
                this.volumeList.push(maxVal)
                context.clearRect(0, 0, canvas.width, canvas.height)
                for(let i =0; i < size ; i ++){

                    if(this.volumeList[i]){
                        let volume = this.volumeList[i]
                        context.fillStyle = '#000000'
                        context.fillRect(i * (lineWidth + lineOffset), (canvas.height - canvas.height * volume) / 2 , lineWidth, canvas.height * volume + 1)
                    }else{
                        context.fillStyle = 'red'
                        context.fillRect(i * (lineWidth + lineOffset), canvas.height  / 2 , lineWidth,  2)
                    }
                }

            }


        },
        upload(){
            let fileUpload = document.getElementById('fileInput')
            fileUpload.onchange = (e)=>{
                console.log(e)
                this.recordState = 3
                this.audioFile = e.currentTarget.files[0]
                let audio = document.createElement('audio')
                audio.oncanplay=()=>{
                    console.log(audio.duration)
                    this.duration = audio.duration * 1000
                }
                audio.src = this.url = URL.createObjectURL(this.audioFile)
                this.$nextTick(()=>{
                    console.log(this.audio)
                    this.currentTime =0
                })

            }
            fileUpload.click()
        },
        prev(){
            this.audio.currentTime -= 3
        },
        next(){
            this.audio.currentTime += 3
        },
        canplay(e){
            // Intentionally empty: bound to the <audio> canplay event in the
            // template, but no action is currently required.
        },
        updateTime(e) {
            this.currentTime = e.target.currentTime * 1000
        },
        play(){
            if(this.playState){
                this.audio.pause()
            } else {
                this.audio.play()
            }
            this.playState = !this.playState
        },
        parseTime(msTime){
            let time = msTime / 1000;

            let hour = Math.floor(time / 60 / 60);

            hour = hour.toString().padStart(2, "0");

            let minute = Math.floor(time / 60) % 60;

            minute = minute.toString().padStart(2, "0");

            let second = Math.ceil(time) % 60;

            second = second.toString().padStart(2, "0");

            return `${hour}:${minute}:${second}`;
        },
        back(){
          this.$router.back()
        },
        stop(){
            this.context.clearRect(0, 0, this.canvas.width, this.canvas.height)
            clearInterval(this.timer)
            this.mediaRecorder.stop()
        },
        finish(){
            this.recordState = 3
            this.stop()
        },
        save(){
            let form = new FormData()
            form.append('audio',this.audioFile)
            form.append('mediaPath',user.user.id)
            form.append('name',Date.now())
            const l = this.$loading({
                lock: true,
                text: 'Please wait while uploading.',
                background: 'rgba(0,0,0,0.7)'
            })
            section.uploadAudio(form).then((result)=>{
                l.close()
                section.body.timeout = this.duration
                if(this.$route.query.recordType === 'reminder'){
                    section.body.reminderId = result.id
                    section.body.reminderUrl = result.url
                    section.body.id = this.$route.query.sectionId
                    section.update(section.body).then((r)=>{
                        this.$router.replace({
                            name: 'sections',
                            query:{
                                sessionId: this.$route.query.sessionId
                            }
                        })
                    })

                } else {
                    section.body.fragmentId = result.id
                    section.body.fragmentUrl = result.url
                    section.create(JSON.stringify(section.body)).then((r)=>{
                        let sortList = []
                        this.sectionList = [...section.bodys,r]
                        for(let i = 0; i < this.sectionList.length;i++){
                            let item = this.sectionList[i]
                            if(this.sectionList[i+1]){
                                let next = this.sectionList[i+1]
                                sortList.push({
                                    sectionId: item.id,
                                    nextId: next.id,
                                    type: 'normal'
                                })
                            }
                        }
                        sectionFlow.put(this.$route.query.sessionId,sortList).then(()=>{
                            this.$router.replace({
                                name: 'createSection',
                                query:{
                                    sectionId: r.id,
                                    sessionId: this.$route.query.sessionId
                                }
                            })
                        })

                    })
                }

            })

        },
        destroy(){
            this.recordState = 0
        },
        cancel(){
            this.recordState = 0
            this.stop()
        },
        async start(){
            this.volumeList = []
            let lineWidth = 5

            let lineOffset = 2

            let size =  this.canvas.width / (lineWidth + lineOffset)
            for(let i = 0; i< size;i++){
                this.volumeList.push(0.01)
            }
            await this.mediaRecorder.start()

            this.recordState = 1
            this.timer = setInterval(()=>{
                if(this.recordState === 1){
                    this.time += 10
                }
            },10)

        },
        async resume(){
            await this.mediaRecorder.resume();
            this.recordState = 1
        },
        async pause(){
            await this.mediaRecorder.pause();
            this.recordState = 2
        }
    },
    components: {},
    beforeRouteLeave(to, from, next) {
        if(this.stream){
            this.stream.getTracks().forEach((track)=>{
                track.stop()
            })
        }
        next();
    }
}

