import React from 'react';
import { message } from 'antd';
import { CloseOutlined, SyncOutlined } from '@ant-design/icons';
// import Recorder from 'recorder-js';
import Recorder from 'recorder-core';
// Load every audio-format encoder engine the recorder will need (wav below).
// import 'recorder-core/src/engine/mp3';
// import 'recorder-core/src/engine/mp3-engine';
// import Recorder from 'recorder-core/recorder.mp3.min' //已包含recorder-core和mp3格式支持
import 'recorder-core/src/engine/wav';
import 'recorder-core/src/extensions/waveview'; //可选的扩展支持项
import * as Request from '../../../services/search/index';
import Css from './index.less';

class SearchComponent extends React.Component{
    constructor(props){
        super(props);
        this.state = {
            isRecordeing: false
        }
        this.audioRef = React.createRef();
        this.blob = null;
        this.rec = null;
        this.wave = null;
    }

    componentDidMount(){
        const that = this;
        this.recOpen(function(){
            that.wave = Recorder.WaveView({ //创建wave对象，写这里面浏览器妥妥的
                elem: "#record-area"
                ,width: 200
                ,height: 50
                ,scale:2 //缩放系数，应为正整数，使用2(3? no!)倍宽高进行绘制，避免移动端绘制模糊
                ,speed:8 //移动速度系数，越大越快
                ,lineWidth:3 //线条基础粗细
                ,linear1:[0,"rgba(150,96,238,1)",0.2,"rgba(170,79,249,1)",1,"rgba(53,199,253,1)"] //线条渐变色1，从左到右
                ,linear2:[0,"rgba(209,130,255,0.6)",1,"rgba(53,199,255,0.6)"] //线条渐变色2，从左到右
                ,linearBg:[0,"rgba(255,255,255,0.2)",1,"rgba(54,197,252,0.2)"] //背景渐变色，从上到下
            }); 
        });
    }

    /**调用open打开录音请求好录音权限**/
    recOpen(success){//一般在显示出录音按钮或相关的录音界面时进行此方法调用，后面用户点击开始录音时就能畅通无阻了
        const that = this;
        this.rec = Recorder({
            type:"wav",sampleRate:16000,bitRate:16 //mp3格式，指定采样率hz、比特率kbps，其他参数使用默认配置；注意：是数字的参数必须提供数字，不要用字符串；需要使用的type类型，需提前把格式支持文件加载进来，比如使用wav格式需要提前加载wav.js编码引擎
            ,onProcess:function(buffers,powerLevel,bufferDuration,bufferSampleRate,newBufferIdx,asyncEnd){
                //录音实时回调，大约1秒调用12次本回调
                //可利用extensions/waveview.js扩展实时绘制波形
                //可利用extensions/sonic.js扩展实时变速变调，此扩展计算量巨大，onProcess需要返回true开启异步模式
                that.wave.input(buffers[buffers.length-1],powerLevel,bufferSampleRate);//输入音频数据，更新显示波形
            }
        });
        //var dialog=createDelayDialog(); 我们可以选择性的弹一个对话框：为了防止移动端浏览器存在第三种情况：用户忽略，并且（或者国产系统UC系）浏览器没有任何回调，此处demo省略了弹窗的代码
        this.rec.open(function(){//打开麦克风授权获得相关资源
            //dialog&&dialog.Cancel(); 如果开启了弹框，此处需要取消
            //rec.start() 此处可以立即开始录音，但不建议这样编写，因为open是一个延迟漫长的操作，通过两次用户操作来分别调用open和start是推荐的最佳流程
            success && success();
        },function(msg,isUserNotAllow){//用户拒绝未授权或不支持
            //dialog&&dialog.Cancel(); 如果开启了弹框，此处需要取消
            console.log((isUserNotAllow?"UserNotAllow，":"")+"无法录音:"+msg);
        });
    };

    //开始录音
    handleTouchStart(){
        this.setState({ isRecordeing: true });
        this.rec.start();
    }

    //结束录音
    handleTouchEnd(){
        const that = this;
        this.rec.stop(function(blob,duration){
            that.setState({ isRecordeing: false });
            console.log(blob,(window.URL).createObjectURL(blob),"时长:"+duration+"ms");
            // that.rec.close();//释放录音资源，当然可以不释放，后面可以连续调用start；但不释放时系统或浏览器会一直提示在录音，最佳操作是录完就close掉
            // that.rec=null;
            setTimeout(() => {
                // console.log(blob.slice(44,blob.size,"audio/pcm")); //wav转换成pcm格式
                that.audioRef.current.src = window.URL.createObjectURL(blob);
                // that.uploadVoiceFile(blob);
            }, 1000);
        },function(msg){
            console.log("录音失败:"+msg);
            that.rec.close();//可以通过stop方法的第3个参数来自动调用close
            that.rec=null;
        });
    }

    //录音文件上传
    uploadVoiceFile(file){
        Request.uploadVoiceFile({ file }).then(res => {
            console.log(res);
        });
    }

    componentWillUnmount(){
        this.setState = (state, callback) => { //页面退出，避免内存溢出
            return ;
        }
    }

    render(){
        const { searchValue, sInputChange } = this.props;
        const { isRecordeing } = this.state;
        return (
            <>
                <div className={isRecordeing ? Css['mask'] : Css['mask']+' '+Css['hide']}></div>
                <div className={isRecordeing ? Css['record-icon'] : Css['record-icon']+' '+Css['none']} id="record-area"></div>
                <div className={isRecordeing ? Css['record-close'] : Css['record-close']+' '+Css['hide']} onClick={() => this.handleTouchEnd()}>
                    <CloseOutlined style={{fontSize:'20px'}} />
                </div>
                <div className={Css['search-wrapper']} id="aaa">
                    <div className={Css['input-wrapper']}>
                        <input type="text" value={searchValue} onChange={(e) => sInputChange(e)} />
                        <>
                            <div className={Css['voice']} onClick={() => this.handleTouchStart()}></div>
                            <div className={Css['camera']}></div>
                        </>
                    </div>
                    <div className={Css['search-btn']}>搜索</div>
                    <audio ref={this.audioRef} controls autoPlay className={Css['hide']}></audio>
                </div>
            </>
        )
    }
}

export default SearchComponent;