package me.jessyan.mvparms.demo.mvp.ui.xunfeisdk.xunfei;

import android.content.Context;

import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.support.annotation.Keep;
import android.util.Log;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;



import com.iflytek.cloud.ErrorCode;
import com.iflytek.cloud.InitListener;
import com.iflytek.cloud.RecognizerListener;
import com.iflytek.cloud.RecognizerResult;
import com.iflytek.cloud.RequestListener;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.SpeechError;
import com.iflytek.cloud.SpeechEvent;
import com.iflytek.cloud.SpeechRecognizer;
import com.iflytek.cloud.VoiceWakeuper;
import com.iflytek.cloud.WakeuperListener;
import com.iflytek.cloud.WakeuperResult;
import com.iflytek.cloud.ui.RecognizerDialog;
import com.iflytek.cloud.ui.RecognizerDialogListener;
import com.iflytek.cloud.util.ResourceUtil;
import com.platform.ai.control.R;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
import org.simple.eventbus.EventBus;

import me.jessyan.mvparms.demo.mvp.model.entity.MessageEvent;
import me.jessyan.mvparms.demo.mvp.model.logic.TextUtil;
import me.jessyan.mvparms.demo.mvp.model.logic.api.OkHttpUtils;
import me.jessyan.mvparms.demo.mvp.ui.activity.MainActivity;
import me.jessyan.mvparms.demo.mvp.ui.xunfeisdk.core.SpeechDialogListener;


/**
 * Wrapper around the iFlytek (讯飞) speech SDK: drives the built-in
 * {@link RecognizerDialog} UI, a headless {@link SpeechRecognizer} session,
 * and the {@link VoiceWakeuper} wake-word engine. Recognized text is parsed
 * from the SDK's JSON payload and routed either to a caller-supplied
 * {@link Handler} (music / TV commands) or onto the {@link EventBus}.
 *
 * <p>Not thread-safe; intended to be used from the UI thread only
 * (NOTE(review): assumed from the Toast/Dialog usage — confirm).
 *
 * @Author 35574
 * @Date 2020/7/8
 * @Description iFlytek built-in speech recognition dialog wrapper
 */
@Keep
public class XunFeiSpeechDialogWrap {
    private RecognizerDialog mDialog;
    private SpeechDialogListener mListener;
    private SpeechRecognizer mIat;
    // Engine type: cloud recognition by default.
    private String mEngineType = SpeechConstant.TYPE_CLOUD;
    private Context mContext;
    private VoiceViewListener mVoiceListener;
    private SharedPreferences mSharedPreferences;

    /**
     * SDK initialization listener. Only failures are surfaced (via Toast);
     * success is silent.
     */
    private InitListener mInitListener = new InitListener() {
        @Override
        public void onInit(int code) {
            if (code != ErrorCode.SUCCESS) {
                Toast.makeText(mContext, "初始化失败，错误码：" + code, Toast.LENGTH_SHORT).show();
            }
        }
    };

    /** When true, the recognizer dialog is dismissed as soon as a result/error arrives. */
    private boolean autoCancel = true;

    public XunFeiSpeechDialogWrap(Context context) {
        this.mContext = context;
    }

    /**
     * Lazily creates and shows the recognizer dialog. Recognition results are
     * forwarded to {@link #xunfeiSpendMessage(String, Handler)} for parsing
     * and dispatch.
     *
     * @param context context used to construct the dialog (may differ from the
     *                constructor context — NOTE(review): confirm this is intended)
     * @param handler receives parsed results as {@link Message}s
     */
    public void initDialog(Context context, Handler handler) {
        this.mHandler = handler;
        if (mDialog == null) {
            mDialog = new RecognizerDialog(context, new SpeechInitListener(context));
            mDialog.setCanceledOnTouchOutside(false);
            mDialog.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
            mDialog.setParameter(SpeechConstant.ACCENT, "mandarin");
            // Punctuation: "0" = no punctuation in results, "1" = with punctuation.
            mDialog.setParameter(SpeechConstant.ASR_PTT, "0");
            // Front-of-speech silence timeout (ms): how long the user may stay
            // silent before the session times out. Valid range {1000~10000}.
            mDialog.setParameter(SpeechConstant.VAD_BOS, "5000");
            // End-of-speech silence timeout (ms): how long after the user stops
            // talking recording auto-stops. Valid range {0~10000}.
            mDialog.setParameter(SpeechConstant.VAD_EOS, "1000");
            mDialog.setListener(new SpeechDialogListener() {
                @Override
                public void onResult(String result) {
                    // Intentionally unused: results arrive via the
                    // RecognizerResult overload below.
                }

                @Override
                public void onError(me.jessyan.mvparms.demo.mvp.ui.xunfeisdk.core.SpeechError speechError) {
                    // Intentionally ignored (best-effort UI flow).
                }

                @Override
                public void onResult(RecognizerResult recognizerResult, boolean b) {
                    xunfeiSpendMessage(recognizerResult.getResultString(), mHandler);
                }

                @Override
                public void onError(SpeechError speechError) {
                    // Intentionally ignored (best-effort UI flow).
                }
            });
        }
        mDialog.show();
    }

    /** Shows the dialog if it has been initialized via {@link #initDialog}. */
    public void dialogShow() {
        if (mDialog != null) {
            mDialog.show();
        }
    }

    /**
     * Installs a recognition listener on the dialog. Passing {@code null}
     * clears the current listener.
     *
     * @param speechDialogListener callback for parsed recognition results
     */
    public void setListener(SpeechDialogListener speechDialogListener) {
        mListener = speechDialogListener;
        if (mDialog == null) {
            // Dialog not created yet; nothing to attach the listener to.
            return;
        }
        if (mListener != null) {
            mDialog.setListener(new RecognizerDialogListener() {
                @Override
                public void onResult(RecognizerResult recognizerResult, boolean b) {
                    if (autoCancel) {
                        mDialog.cancel();
                    }
                    if (recognizerResult != null) {
                        mListener.onResult(ResultParser.parseResult(recognizerResult.getResultString()));
                    }
                    // A null result is silently dropped; the wrapped listener's
                    // onError is not invoked here.
                }

                @Override
                public void onError(SpeechError speechError) {
                    if (autoCancel) {
                        mDialog.cancel();
                    }
                    // Errors are swallowed; the wrapped listener is not notified.
                }
            });
        } else {
            mDialog.setListener(null);
        }
    }

    /** Shows the dialog if it has been initialized via {@link #initDialog}. */
    public void show() {
        if (mDialog != null) {
            mDialog.show();
        }
    }

    /**
     * Controls whether the dialog auto-closes once recognition completes.
     *
     * @param autoCancel true to dismiss automatically on result/error
     */
    public void setAutoCancel(boolean autoCancel) {
        this.autoCancel = autoCancel;
    }

    /** @return true if the dialog exists and is currently showing. */
    public boolean isShowing() {
        return mDialog != null && mDialog.isShowing();
    }

    /**
     * Creates and configures the headless recognizer and immediately starts
     * listening. No dialog UI is shown for this session.
     *
     * @param mVoiceListener callback notified of view changes (stored only)
     */
    public void initData(VoiceViewListener mVoiceListener) {
        this.mVoiceListener = mVoiceListener;
        mIat = com.iflytek.cloud.SpeechRecognizer.createRecognizer(mContext, mInitListener);
        mIat.setParameter(SpeechConstant.PARAMS, null); // clear any previous parameters
        mIat.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType); // dictation engine
        mIat.setParameter(SpeechConstant.RESULT_TYPE, "json"); // result format
        // Language.
        mIat.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
        // Accent. Fixed: was "zh_cn" (a language code); valid accent values are
        // "mandarin"/"cantonese"/..., matching the dialog configuration above.
        mIat.setParameter(SpeechConstant.ACCENT, "mandarin");
        // Front-of-speech silence timeout (ms).
        mIat.setParameter(SpeechConstant.VAD_BOS, "1000");
        // End-of-speech silence timeout (ms); recording stops automatically.
        mIat.setParameter(SpeechConstant.VAD_EOS, "1000");
        // Punctuation: "0" = none, "1" = include punctuation.
        mIat.setParameter(SpeechConstant.ASR_PTT, "0");
        // Audio capture path (pcm/wav supported). Writing to external storage
        // requires WRITE_EXTERNAL_STORAGE permission.
        mIat.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
        mIat.setParameter(SpeechConstant.ASR_AUDIO_PATH, Environment.getExternalStorageDirectory() + "/msc/iat.wav");
        int ret = mIat.startListening(mRecognizerListener);
        if (ret != ErrorCode.SUCCESS) {
            Toast.makeText(mContext, "听写失败,错误码：" + ret, Toast.LENGTH_SHORT).show();
        }
    }

    /** Restarts the headless dictation session (stop, then start again). */
    private void startIat() {
        mIat.stopListening();
        int ret = mIat.startListening(mRecognizerListener);
        if (ret != ErrorCode.SUCCESS) {
            Toast.makeText(mContext, "听写失败,错误码：" + ret, Toast.LENGTH_SHORT).show();
        }
    }

    /**
     * Dictation listener for the headless recognizer. On end-of-speech it
     * restarts listening, producing a continuous dictation loop.
     */
    private RecognizerListener mRecognizerListener = new RecognizerListener() {
        @Override
        public void onBeginOfSpeech() {
            // SDK recorder is ready; the user may start speaking.
        }

        @Override
        public void onError(SpeechError error) {
            // Tip: error 10118 ("no speech") often means the RECORD_AUDIO
            // permission is denied — prompt the user to enable it.
            Toast.makeText(mContext, error.getPlainDescription(true), Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onEndOfSpeech() {
            // Trailing silence detected; recognition has begun and no more
            // audio is accepted — immediately restart for continuous listening.
            startIat();
        }

        @Override
        public void onResult(RecognizerResult results, boolean isLast) {
            Log.i("lzd", "唤醒识别：" + results.getResultString());
        }

        @Override
        public void onVolumeChanged(int volume, byte[] data) {
            // Volume updates are ignored.
        }

        @Override
        public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {
            // SpeechEvent.EVENT_SESSION_ID can be read here to obtain the cloud
            // session id for support/diagnostics; null for local engines.
        }
    };

    /** Callback used to push recognized text back to the view layer. */
    public interface VoiceViewListener {
        void notifyViewChange(String request);
    }

    // ---- Voice wake-up ----
    // Wake-up engine instance.
    private VoiceWakeuper mIvw;
    // Last wake-up result payload.
    private String resultString;

    // Wake-up threshold: lower values make the engine easier to trigger.
    private final static int MAX = 3000;
    private final static int MIN = 0;
    private int curThresh = 1450;
    private String threshStr = "门限值：";
    private String keep_alive = "1";
    private String ivwNetMode = "0";

    /**
     * Initializes the wake-word engine and starts listening for the wake word.
     * On wake-up the recognizer dialog is opened (see {@link #mWakeuperListener}).
     *
     * @param context used for resource lookup and Toasts
     * @param handler forwarded to {@link #initDialog} on wake-up
     */
    public void wakeUpInit(Context context, Handler handler) {
        this.mHandler = handler;
        // Create the wake-up engine, then re-fetch it; the null check below
        // guards against failed initialization.
        mIvw = VoiceWakeuper.createWakeuper(context, null);
        mIvw = VoiceWakeuper.getWakeuper();
        if (mIvw != null) {
            resultString = "";
            // Clear any previous parameters.
            mIvw.setParameter(SpeechConstant.PARAMS, null);
            // Threshold per wake word, formatted "id:threshold;id:threshold".
            mIvw.setParameter(SpeechConstant.IVW_THRESHOLD, "0:" + curThresh);
            // Wake-up mode.
            mIvw.setParameter(SpeechConstant.IVW_SST, "wakeup");
            // Keep listening after each wake-up ("1" = continuous).
            mIvw.setParameter(SpeechConstant.KEEP_ALIVE, keep_alive);
            // Closed-loop optimization network mode.
            mIvw.setParameter(SpeechConstant.IVW_NET_MODE, ivwNetMode);
            // Wake-up resource path.
            mIvw.setParameter(SpeechConstant.IVW_RES_PATH, getResource(context));
            // Save the most recent minute of wake-up audio.
            // NOTE(review): getExternalFilesDir can return null when external
            // storage is unavailable — would NPE here; confirm and guard upstream.
            mIvw.setParameter(SpeechConstant.IVW_AUDIO_PATH,
                    context.getExternalFilesDir("msc").getAbsolutePath() + "/ivw.wav");
            mIvw.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
            // Set NOTIFY_RECORD_DATA to "1" to stream raw audio via onEvent.
            mIvw.startListening(mWakeuperListener);
        } else {
            Toast.makeText(context, "唤醒未初始化", Toast.LENGTH_SHORT).show();
        }
    }

    Handler mHandler;

    /**
     * Wake-word listener: on wake-up, opens the recognizer dialog (when
     * keep-alive is enabled) using the constructor context and the handler
     * stored by {@link #wakeUpInit}.
     */
    private WakeuperListener mWakeuperListener = new WakeuperListener() {

        @Override
        public void onResult(WakeuperResult result) {
            Log.i("lzd", "唤醒：" + result.getResultString());
            if (!"1".equalsIgnoreCase(keep_alive)) {
                Toast.makeText(mContext, "语音唤醒已断开", Toast.LENGTH_SHORT).show();
            } else {
                initDialog(mContext, mHandler);
            }
        }

        @Override
        public void onError(SpeechError error) {
            Log.i("lzd", "错误分析：" + error.getErrorDescription());
        }

        @Override
        public void onBeginOfSpeech() {
        }

        @Override
        public void onEvent(int eventType, int isLast, int arg2, Bundle obj) {
            // EVENT_RECORD_DATA fires only when NOTIFY_RECORD_DATA is enabled.
            switch (eventType) {
                case SpeechEvent.EVENT_RECORD_DATA:
                    final byte[] audio = obj.getByteArray(SpeechEvent.KEY_EVENT_RECORD_DATA);
                    break;
            }
        }

        @Override
        public void onVolumeChanged(int volume) {
        }
    };

    /**
     * Resolves the wake-up resource path from the bundled assets.
     *
     * @param context context used for asset resolution
     * @return SDK-formatted resource path string
     */
    private String getResource(Context context) {
        final String resPath = ResourceUtil.generateResourcePath(context, ResourceUtil.RESOURCE_TYPE.assets, "ivw/9a0faeb7.jet");
        Log.i("lzd", "respath:" + resPath);
        return resPath;
    }

    /**
     * Parses the iFlytek JSON dictation result ({@code ws[].cw[].w} words),
     * concatenates the words, and dispatches:
     * <ul>
     *   <li>contains "播"/"放" — treated as a music command; searches the song
     *       service and posts {@code what=200} with the song id to the handler;</li>
     *   <li>contains "电"/"视" — posts {@code what=101} with the raw JSON;</li>
     *   <li>otherwise — posts the plain text on the EventBus.</li>
     * </ul>
     *
     * @param request raw JSON result string from the SDK
     * @param mHandler target for dispatched messages
     */
    private void xunfeiSpendMessage(String request, Handler mHandler) {
        StringBuilder sb = new StringBuilder();
        try {
            JSONObject jsonRequest = new JSONObject(request);
            JSONArray ws = jsonRequest.getJSONArray("ws");
            if (ws != null) {
                for (int i = 0; i < ws.length(); i++) {
                    JSONArray cw = ws.getJSONObject(i).getJSONArray("cw");
                    for (int j = 0; j < cw.length(); j++) {
                        String w = cw.getJSONObject(j).getString("w");
                        if (!TextUtil.isEmpty(w)) {
                            sb.append(w);
                        }
                    }
                }
            }
        } catch (JSONException e) {
            // Best-effort: malformed results fall through with whatever was parsed.
            e.printStackTrace();
        }
        final String voiceRequest = sb.toString();
        Log.i("lzd", "听写结果：" + voiceRequest);
        if (TextUtil.checkContains(voiceRequest, new String[]{"播", "放"})) {
            // NOTE(review): substring starts AT "放", so the keyword includes the
            // "放" character itself — confirm the search service expects that.
            String keywords = voiceRequest.substring(voiceRequest.indexOf("放"));
            MainActivity.MUSIC_LABEL = keywords.trim();
            // NOTE(review): hard-coded plain-HTTP endpoint; consider moving to
            // configuration and using HTTPS.
            new OkHttpUtils().getSyn("http://49.233.50.221:4000/search?keywords=" + keywords.trim(), new OkHttpUtils.OkHttpCallBackListener() {
                @Override
                public void onSuccess(String result) {
                    Log.i("lzd", "result:" + result);
                    try {
                        JSONObject jsonObject = new JSONObject(result);
                        JSONObject jsonResult = jsonObject.getJSONObject("result");
                        JSONArray songs = jsonResult.getJSONArray("songs");
                        if (songs != null && songs.length() > 0) {
                            String id = songs.getJSONObject(0).getString("id");
                            MainActivity.MUSIC_LABEL = songs.getJSONObject(0).getString("name");
                            if (!TextUtil.isEmpty(id)) {
                                // Message.obtain() reuses the global pool
                                // instead of allocating a new Message.
                                Message msg = Message.obtain();
                                msg.what = 200;
                                Bundle b = new Bundle();
                                b.putString("id", id);
                                msg.setData(b);
                                mHandler.sendMessage(msg);
                            }
                        }
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onFail(String result) {
                    // Network failure: silently dropped (best-effort).
                }
            });
        } else if (TextUtil.checkContains(voiceRequest, new String[]{"电", "视"})) {
            Message msg = Message.obtain();
            msg.what = 101;
            Bundle bundle = new Bundle();
            bundle.putString("result", request);
            msg.setData(bundle);
            mHandler.sendMessage(msg);
        } else {
            EventBus.getDefault().post(new MessageEvent(voiceRequest));
        }
    }


}
