package com.yinchuang.aiuidemo;

import android.content.Context;
import android.content.res.AssetManager;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import android.text.TextUtils;
import android.util.Log;
import android.widget.TextView;
import android.widget.Toast;

import com.google.gson.Gson;
import com.iflytek.aiui.AIUIAgent;
import com.iflytek.aiui.AIUIConstant;
import com.iflytek.aiui.AIUIEvent;
import com.iflytek.aiui.AIUIListener;
import com.iflytek.aiui.AIUIMessage;

import org.json.JSONObject;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

/**
 * Created by lingchen on 2019/5/27. 14:22
 * mail:lingchen52@foxmail.com
 */
public class AIUImanager {

    public static AIUImanager sAIUImanager;
    private Context mContext;
    public static String TAG = "AIUImanager";

    private AIUIAgent mAIUIAgent;
    //AIUI当前录音状态，避免连续两次startRecordAudio时的录音失败
    private boolean mAudioRecording = false;
    private boolean mMSCInitialized = false;
    public static int mAIUIState = AIUIConstant.STATE_IDLE;
    private Toast mToast;

    private TextView mNlpText;

    public static AIUImanager getInstance(Context context) {
        if (sAIUImanager == null) {
            sAIUImanager = new AIUImanager(context);
        }
        return sAIUImanager;
    }

    public AIUImanager(Context context) {
        mContext = context;
        mToast = Toast.makeText(context, "", Toast.LENGTH_SHORT);
    }

    public void setText(TextView text) {
        mNlpText = text;
    }

    public void initAIUIAgent() {
        mAudioRecording = false;
        //创建AIUIAgent
        if (mAIUIAgent == null) {
            mAIUIAgent = AIUIAgent.createAgent(mContext, getAIUIParams(), mAIUIListener);
        }
        startRecord();
    }

    public String getAIUIParams() {
        String params = "";
        AssetManager assetManager = mContext.getResources().getAssets();
        try {
            InputStream ins = assetManager.open("cfg/aiui_phone.cfg");
            byte[] buffer = new byte[ins.available()];

            ins.read(buffer);
            ins.close();

            params = new String(buffer);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return params;
    }

    private AIUIListener mAIUIListener = new AIUIListener() {

        @Override
        public void onEvent(AIUIEvent event) {
            Log.d(TAG, "event.eventType:" + event.eventType);
            switch (event.eventType) {
                //唤醒事件
                case AIUIConstant.EVENT_WAKEUP:
                    break;
                //结果事件（包含听写，语义，离线语法结果）
                case AIUIConstant.EVENT_RESULT:
                    Log.d(TAG, "EVENT_RESULT");
                    Log.d(TAG, event.info);
                    Log.d(TAG, "event.data:" + event.data);
                    Log.d(TAG, "event.arg1:" + event.arg1);
                    Log.d(TAG, "event.arg2:" + event.arg2);
                    //结果解析事件
                    try {
                        JSONObject bizParamJson = new JSONObject(event.info);
                        JSONObject data = bizParamJson.getJSONArray("data").getJSONObject(0);
                        JSONObject params = data.getJSONObject("params");
                        JSONObject content = data.getJSONArray("content").getJSONObject(0);

                        if (content.has("cnt_id")) {
                            String cnt_id = content.getString("cnt_id");
                            String cntStr = new String(event.data.getByteArray(cnt_id), "utf-8");

                            // 获取该路会话的id，将其提供给支持人员，有助于问题排查
                            // 也可以从Json结果中看到
                            String sid = event.data.getString("sid");
                            String tag = event.data.getString("tag");

//                            showTip("tag=" + tag);

                            // 获取从数据发送完到获取结果的耗时，单位：ms
                            // 也可以通过键名"bos_rslt"获取从开始发送数据到获取结果的耗时
                            long eosRsltTime = event.data.getLong("eos_rslt", -1);
//                            mTimeSpentText.setText(eosRsltTime + "ms");

                            if (TextUtils.isEmpty(cntStr)) {
                                return;
                            }

                            JSONObject cntJson = new JSONObject(cntStr);
                            ResultTextEntity resultTextEntity = new Gson().fromJson(cntStr, ResultTextEntity.class);
                            if (mNlpText.getLineCount() > 1000) {
                                mNlpText.setText("");
                            }

                            mNlpText.append("\n");
                            for (int i = 0; i < resultTextEntity.getText().getWs().size(); i++) {
                                for (int j = 0; j < resultTextEntity.getText().getWs().get(i).getCw().size(); j++) {
                                    mNlpText.append(resultTextEntity.getText().getWs().get(i).getCw().get(j).getW());
                                }
                            }

//                            mNlpText.append(cntJson.toString());
//                            mNlpText.setSelection(mNlpText.getText().length());

                            String sub = params.optString("sub");
                            if ("nlp".equals(sub)) {
                                // 解析得到语义结果
                                String resultStr = cntJson.optString("intent");
                                Log.i(TAG, resultStr);
                            }
                        }
                    } catch (Throwable e) {
                        e.printStackTrace();
                        mNlpText.append("\n");
                        mNlpText.append(e.getLocalizedMessage());
                    }

                    mNlpText.append("\n");
                    break;
                //休眠事件
                case AIUIConstant.EVENT_SLEEP:
                    break;
                // 状态事件
                case AIUIConstant.EVENT_STATE:
                    mAIUIState = event.arg1;
                    if (AIUIConstant.STATE_IDLE == mAIUIState) {
                        // 闲置状态，AIUI未开启
                    } else if (AIUIConstant.STATE_READY == mAIUIState) {
                        // AIUI已就绪，等待唤醒
                    } else if (AIUIConstant.STATE_WORKING == mAIUIState) {
                        // AIUI工作中，可进行交互
                    }
                    break;
                //错误事件
                case AIUIConstant.EVENT_ERROR:
                    mAudioRecording = false;
                    Log.i(TAG, "event_error: " + event.eventType);
                    break;
                case AIUIConstant.EVENT_VAD:
                    if (AIUIConstant.VAD_BOS == event.arg1) {
                        //语音前端点
                    } else if (AIUIConstant.VAD_EOS == event.arg1) {
                        //语音后端点
                    }
                    break;
                case AIUIConstant.EVENT_START_RECORD:
                    Log.i(TAG, "on event: " + event.eventType);
                    //开始录音
                    break;
                case AIUIConstant.EVENT_STOP_RECORD:
                    Log.i(TAG, "on event: " + event.eventType);
                    // 停止录音
                    break;
                case AIUIConstant.EVENT_CONNECTED_TO_SERVER:
                    Log.i(TAG, "on event: " + event.eventType);
                    break;
                default:
                    break;
            }
        }
    };

    public void startRecord() {
        if (!mAudioRecording) {
            mAudioRecording = true;
            String params = "sample_rate=16000,data_type=audio,dwa=wpgs,tag=audio-tag";
            sendMessage(new AIUIMessage(AIUIConstant.CMD_START_RECORD, 0, 0, params, null));
        }
    }

    public void stopRecord() {
        if (mAudioRecording) {
            String params = "sample_rate=16000,data_type=audio,tag=audio-tag";
            sendMessage(new AIUIMessage(AIUIConstant.CMD_STOP_RECORD, 0, 0, params, null));
            mAudioRecording = false;
        }
    }

    /**
     * 发送AIUI消息
     *
     * @param message
     */
    public void sendMessage(AIUIMessage message) {
        if (mAIUIAgent != null) {
//            mAudioRecording = true;
            //确保AIUI处于唤醒状态
            Log.d(TAG, "mAIUIState:" + mAIUIState);
            if (mAIUIState != AIUIConstant.STATE_WORKING) {
                mAIUIAgent.sendMessage(new AIUIMessage(AIUIConstant.CMD_WAKEUP, 0, 0, "", null));
            }
            mAIUIAgent.sendMessage(message);
        }
    }

    private static AIUIResultEvent mAIUIResultEvent;

    public interface AIUIResultEvent {
        void onSuccess(String result);
    }

    public static void setAIUIResultEvent(AIUIResultEvent aiuiResultEvent) {
        mAIUIResultEvent = aiuiResultEvent;
    }

    private void showTip(final String str) {
        new Handler().post(new Runnable() {
            @Override
            public void run() {
                mToast.setText(str);
                mToast.show();
            }
        });
    }

    public void weakup() {
        if (mAIUIState != AIUIConstant.STATE_WORKING) {
            mAIUIAgent.sendMessage(new AIUIMessage(AIUIConstant.CMD_WAKEUP, 0, 0, "", null));
        }
    }

    private AudioRecord audioRecord;
    private static int audioSource = MediaRecorder.AudioSource.MIC;
    private static int sampleRateInHz = 16000;//16000Hz
    private static int channelConfig = AudioFormat.CHANNEL_IN_MONO;//AudioFormat.CHANNEL_CONFIGURATION_MONO;CHANNEL_IN_MONO
    private static int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    public static int bufferSizeInBytesAudioRecord;
    public volatile boolean isRecord = false;

    public AudioRecord getAudioRecord() {
        return audioRecord;
    }

    public void creatAudioRecord() {
        bufferSizeInBytesAudioRecord = AudioRecord.getMinBufferSize(sampleRateInHz,
                channelConfig, audioFormat);
        Log.e(TAG, "AudioRecord getMinBufferSize" + bufferSizeInBytesAudioRecord);
        audioRecord = new AudioRecord(audioSource, sampleRateInHz,
                channelConfig, audioFormat, bufferSizeInBytesAudioRecord);
        if (audioRecord == null) {
            Log.d(TAG, "can't open audioRecord");
        }
    }

    /**
     * 开始录音
     */
    public void startRecordAudio(byte[] audioData) {
        if (!mAudioRecording) {
            String params = "data_type=audio,sample_rate=16000";
            //流式识别
            params += ",dwa=wpgs";
            Log.d(TAG, "sendMessage");
            sendMessage(new AIUIMessage(AIUIConstant.CMD_WRITE, 0, 0, params, audioData));
            mAudioRecording = true;
        }
    }

    /**
     * 停止录音
     */
    public void stopRecordAudio() {
        if (mAudioRecording) {
            mAIUIAgent.sendMessage(new AIUIMessage(AIUIConstant.CMD_STOP_WRITE, 0, 0, "data_type=audio", null));
            mAudioRecording = false;
        }
    }


}
