package com.good.child.ui.activity.ai;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.media.AudioManager;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.PopupWindow;
import android.widget.TextView;

import com.blankj.utilcode.util.ToastUtils;
import com.bumptech.glide.Glide;
import com.bytedance.speech.speechengine.SpeechEngine;
import com.bytedance.speech.speechengine.SpeechEngineDefines;
import com.bytedance.speech.speechengine.SpeechEngineGenerator;
import com.good.child.MainActivity;
import com.good.child.R;
import com.good.child.bean.GenerateBean;
import com.good.child.bean.ai.AIHelperHistoryBean;
import com.good.child.http.AIMessageLogical;
import com.good.child.speechUtils.SensitiveDefines;
import com.good.child.speechUtils.SpeechDemoDefines;
import com.good.child.speechUtils.SpeechStreamPlayer;
import com.good.child.speechUtils.SpeechStreamRecorder;
import com.good.child.ui.adapter.ai.AiGeneratePictureAdapter;
import com.good.child.ui.service.ForegroundService;
import com.good.child.ui.utils.AIHelpUtil;
import com.good.child.ui.utils.DimensionUtil;
import com.good.child.widgets.BubblePopupWindow;
import com.good.child.widgets.SoftKeyBoardListener;
import com.good.child.widgets.dialog.PreviewDialog;
import com.yimidida.library.base.BaseActivity;
import com.yimidida.library.base.BasePresenter;
import com.yimidida.library.base.ICallBackResultListener;
import com.yimidida.library.tool.TActivityUtils;
import com.yimidida.library.utils.LogUtils;
import com.yimidida.library.utils.ToastUtil;

import org.json.JSONException;
import org.json.JSONObject;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import androidx.annotation.NonNull;
import androidx.constraintlayout.widget.ConstraintLayout;
import androidx.constraintlayout.widget.ConstraintSet;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.swiperefreshlayout.widget.SwipeRefreshLayout;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;

/**
 * AI text-to-image chat screen ("AI文生图"): the user submits a prompt either
 * by typing or via push-to-talk speech recognition, and the generated images
 * are displayed in a chat-style list.
 */
public class AiGeneratePictureActivity extends BaseActivity implements SpeechEngine.SpeechListener {
    private static final int CODE_PERMISSION_REQUEST = 999;
    // Runtime permissions required before ASR recording can start.
    private static final List<String> ASR_PERMISSIONS = Collections.singletonList(
            Manifest.permission.RECORD_AUDIO
    );

    // View bindings (ButterKnife).
    @BindView(R.id.recyclerview)
    RecyclerView recyclerView;
    @BindView(R.id.im_add)
    ImageView im_add;
    @BindView(R.id.im_switch)
    ImageView im_switch;
    @BindView(R.id.et_send)
    EditText et_send;
    @BindView(R.id.et_show)
    EditText et_show;
    @BindView(R.id.tv_play2)
    TextView tv_play2;
    @BindView(R.id.image_play)
    ImageView playImage;
    @BindView(R.id.constraint_layout)
    ConstraintLayout constraintLayout;
    @BindView(R.id.input)
    ConstraintLayout input;
    @BindView(R.id.swp)
    SwipeRefreshLayout swp;
    // Chat list adapter and its backing data.
    private AiGeneratePictureAdapter aiMessageAdapter;
    private List<AIHelperHistoryBean> dataList;

    // ASR speech engine instance (created lazily in initEngine()).
    private SpeechEngine mSpeechEngine = null;

    private SpeechStreamRecorder mStreamRecorder;
    // Directory for SDK debug/log output; resolved by getDebugPath().
    private String mDebugPath = "";
    // Timestamp of the moment the user released the talk button; used to
    // measure latency until the final ASR result (-1 / 0 = not measuring).
    private long mFinishTalkingTimestamp = -1;

    // true while a push-to-talk recording session is active.
    private boolean recordIsRunning = false;
    // Handler/runnable pair implementing the 500ms long-press start delay.
    private Handler recordHandler = null;
    private Runnable recordRunnable = null;
    // NOTE(review): the three flags below are never read in this file section —
    // confirm usage elsewhere before removing.
    private boolean mEngineStarted = false;
    private boolean mConnectionCreated = false;
    private boolean mPlayerPaused = false;
    // History paging state.
    private int pageNum = 1, pageSize = 10;
    private Map<String, AIHelperHistoryBean> map = new HashMap<>();
    // Popups: bottom input popup (unused, see initPop) and message bubble menu.
    private PopupWindow mPopupWindow;
    private BubblePopupWindow bubblePopupWindow;
    private View bubbleView;
    // Bubble menu actions: copy / refresh / like / dislike.
    private TextView tv_copy, tv_ref, tv_z, tv_c;
    private boolean openSound = true;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_ai_generate_picture);
        // Bind the @BindView fields before initView() touches them.
        ButterKnife.bind(this);
        // The speech SDK requires its environment to be prepared before any
        // engine instance is created (see initEngine()).
        SpeechEngineGenerator.PrepareEnvironment(getApplicationContext(), getApplication());
        initView();
    }

    // This screen does not use the MVP presenter machinery of BaseActivity.
    @Override
    protected BasePresenter initPresenter() {
        return null;
    }

    @SuppressLint("ClickableViewAccessibility")
    @Override
    protected void initView() {
        im_add.setVisibility(View.GONE);
        // Toolbar: the back arrow simply closes the screen.
        setStatusBar(true, true, v -> finish());
        setAndroidNativeLightStatusBar(this, true);
        setTitle("文字生图");

        // Resolve the speech SDK debug/log directory once.
        // (Bug fix: the original ran this identical check twice back-to-back.)
        if (mDebugPath.isEmpty()) {
            mDebugPath = getDebugPath();
        }
        Log.i("SpeechDemo", "当前调试路径：" + mDebugPath);

        // Chat list setup.
        dataList = new ArrayList<>();
        initClickPop();
        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        aiMessageAdapter = new AiGeneratePictureAdapter(this);
        // Tapping a generated image opens it in a full-screen preview dialog.
        aiMessageAdapter.setOnItemClick(new AiGeneratePictureAdapter.onItemClick() {
            @Override
            public void onClickView(AIHelperHistoryBean bean, View view) {
                PreviewDialog.showDialog(getSupportFragmentManager(), bean.getImageUrl(), true);
            }
        });
        recyclerView.setAdapter(aiMessageAdapter);
        aiMessageAdapter.setData(dataList);

        if (mStreamRecorder == null) {
            mStreamRecorder = new SpeechStreamRecorder();
        }
        if (mStreamPlayer == null) {
            mStreamPlayer = new SpeechStreamPlayer();
        }

        // Push-to-talk button: show the "speaking" GIF while the finger is
        // down; both UP and CANCEL end the recording identically.
        tv_play2.setOnTouchListener((v, event) -> {
            switch (event.getAction()) {
                case MotionEvent.ACTION_DOWN:
                    Glide.with(this)
                            .asGif()
                            .load(R.mipmap.icon_speach)
                            .into(playImage);
                    playImage.setVisibility(View.VISIBLE);
                    tv_play2.setVisibility(View.GONE);
                    recordBtnTouchDown();
                    return true;
                case MotionEvent.ACTION_UP:
                case MotionEvent.ACTION_CANCEL:
                    playImage.setVisibility(View.GONE);
                    tv_play2.setVisibility(View.VISIBLE);
                    recordBtnTouchUp();
                    return true;
                default:
                    return false;
            }
        });
        mAudioManager = (AudioManager) getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
        // Defer engine creation slightly so the first layout pass is not blocked.
        tv_play2.postDelayed(() -> {
            initEngine();
            initEngineTTS();
        }, 1000);

        // Keep the process alive while image generation runs in the background.
        Intent serviceIntent = new Intent(this, ForegroundService.class);
        serviceIntent.putExtra("inputExtra", "Foreground Service Example in Android");
        ContextCompat.startForegroundService(this, serviceIntent);

        // IME "send" action submits the typed prompt.
        et_show.setOnEditorActionListener((textView, actionId, keyEvent) -> {
            if (actionId == EditorInfo.IME_ACTION_SEND) {
                if (TextUtils.isEmpty(et_show.getText().toString())) {
                    ToastUtil.showShort("请输入发送的内容");
                    return false;
                }
                input.setVisibility(View.GONE);
                closekeyboard();
                setResultText(et_show.getText().toString());
                et_show.setText("");
            }
            return false;
        });

        getData(true);
        // Pull-to-refresh loads the next (older) history page.
        swp.setOnRefreshListener(() -> getData(false));

        // Anchor the floating input bar just above the soft keyboard.
        SoftKeyBoardListener.setListener(this, new SoftKeyBoardListener.OnSoftKeyBoardChangeListener() {
            @Override
            public void keyBoardShow(int height) {
                if (input.getVisibility() == View.VISIBLE) {
                    return;
                }
                input.setVisibility(View.VISIBLE);
                ConstraintSet constraintSet = new ConstraintSet();
                constraintSet.clone(constraintLayout);
                constraintSet.connect(input.getId(), ConstraintSet.BOTTOM, ConstraintSet.PARENT_ID, ConstraintSet.BOTTOM, height);
                constraintLayout.requestLayout();
                constraintSet.applyTo(constraintLayout);
                et_show.requestFocus();
            }

            @Override
            public void keyBoardHide(int height) {
                input.setVisibility(View.GONE);
            }
        });

        // Mirror the floating editor's text into the inline editor.
        et_show.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void afterTextChanged(Editable editable) {
                et_send.setText(editable.toString());
            }
        });
    }

    /**
     * Builds the bubble popup with copy / refresh / like / dislike actions.
     * Every action is currently a toast-only placeholder.
     */
    public void initClickPop() {
        bubblePopupWindow = new BubblePopupWindow(this);
        bubbleView = getLayoutInflater().inflate(R.layout.ai_pop, null);
        tv_copy = bubbleView.findViewById(R.id.tv_copy);
        tv_ref = bubbleView.findViewById(R.id.tv_ref);
        tv_z = bubbleView.findViewById(R.id.tv_z);
        tv_c = bubbleView.findViewById(R.id.tv_c);
        bubblePopupWindow.setBubbleView(bubbleView);
        bubblePopupWindow.setWidth(DimensionUtil.dpToPx(172));
        bubblePopupWindow.setHeight(DimensionUtil.dpToPx(45));
        tv_copy.setOnClickListener(v -> ToastUtils.showLong("复制"));
        tv_ref.setOnClickListener(v -> ToastUtils.showLong("刷新"));
        tv_z.setOnClickListener(v -> ToastUtils.showLong("赞ta"));
        tv_c.setOnClickListener(v -> ToastUtils.showLong("踩ta"));
    }


    /** Returns the system status-bar height in pixels, or 0 if the dimen is missing. */
    private int getStatusBarHeight() {
        int resId = getResources().getIdentifier("status_bar_height", "dimen", "android");
        return resId > 0 ? getResources().getDimensionPixelSize(resId) : 0;
    }

    /**
     * Builds the bottom popup window. Its only call site in initView() is
     * currently commented out, so this is effectively dormant.
     */
    public void initPop() {
        View content = LayoutInflater.from(this).inflate(R.layout.ai_pop, null);
        mPopupWindow = new PopupWindow(this);
        mPopupWindow.setContentView(content);
        mPopupWindow.setWidth(ViewGroup.LayoutParams.MATCH_PARENT);
        mPopupWindow.setHeight(ViewGroup.LayoutParams.WRAP_CONTENT);
        // Transparent background so only the inflated layout is visible and
        // outside touches can dismiss the window.
        mPopupWindow.setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
        mPopupWindow.setFocusable(true);
        mPopupWindow.setAnimationStyle(R.style.mypopwindow_anim_style);
    }

    /**
     * Loads one page of AI text-to-image history.
     * <p>
     * Bug fix: the old code cleared {@code dataList} immediately on refresh
     * without notifying the adapter, leaving the RecyclerView bound to removed
     * items until the async callback returned (and blanking history on a
     * failed refresh). The list is now only cleared once fresh rows arrive.
     *
     * @param isRefresh true → reload from page 1; false → fetch the next page
     *                  (the page counter is rolled back if that fetch fails).
     */
    public void getData(boolean isRefresh) {
        if (isRefresh) {
            pageNum = 1;
        } else {
            pageNum++;
        }
        AIMessageLogical.getInstance().requestAIHistory(this, String.valueOf(pageNum), String.valueOf(pageSize), "1", new ICallBackResultListener() {
            @Override
            public void onCallBack(Object result) {
                if (result != null) {
                    AIHelperHistoryBean helperHistoryBean = (AIHelperHistoryBean) result;
                    if (helperHistoryBean.rows != null && !helperHistoryBean.rows.isEmpty()) {
                        if (isRefresh) {
                            dataList.clear();
                        }
                        // Server returns newest-first; reverse and prepend so the
                        // list reads oldest → newest top to bottom.
                        Collections.reverse(helperHistoryBean.rows);
                        dataList.addAll(0, helperHistoryBean.rows);
                        aiMessageAdapter.notifyDataSetChanged();
                        if (pageNum == 1) {
                            recyclerView.scrollToPosition(dataList.size() - 1);
                        }
                    }
                } else if (!isRefresh) {
                    // Failed page fetch: undo the speculative increment.
                    pageNum--;
                }
                swp.setRefreshing(false);
            }
        });
    }

    // NOTE(review): appears unused in this file section — confirm before removing.
    AIHelperHistoryBean bean;
    // true = voice (hold-to-talk) input mode; false = keyboard input mode.
    // NOTE(review): boxed Boolean; a primitive boolean would avoid accidental null.
    Boolean isVoice = true;
    private BubblePopupWindow leftTopWindow;

    /** Click handling for the add / image / input-mode-switch buttons. */
    @OnClick({R.id.im_switch, R.id.im_add, R.id.im})
    public void onClickView(View view) {
        int id = view.getId();
        if (id == R.id.im_add) {
            // Attachment feature not implemented yet.
            ToastUtils.showLong("敬请期待");
        } else if (id == R.id.im) {
            // Collapse the floating input bar and hide the keyboard.
            input.setVisibility(View.GONE);
            closekeyboard();
        } else if (id == R.id.im_switch) {
            // Toggle between hold-to-talk and keyboard input modes.
            isVoice = !isVoice;
            et_send.setVisibility(isVoice ? View.GONE : View.VISIBLE);
            tv_play2.setVisibility(isVoice ? View.VISIBLE : View.GONE);
            im_switch.setImageResource(isVoice ? R.mipmap.ai_update_keyboard : R.mipmap.ai_update_voice);
            if (isVoice) {
                closekeyboard();
            } else {
                et_send.requestFocus();
                showKeyBoard(et_send);
            }
        }
    }

    /**
     * Main-thread handler that drives the chat flow:
     * what == 1003 → user prompt confirmed: show it plus a loading bubble, start generation;
     * what == 1005 → generation finished: append the AI image, drop loading bubbles;
     * what == 200  → re-trigger generation for a raw prompt string.
     * <p>
     * Bug fix: explicitly bound to the main looper (the no-arg Handler()
     * constructor is deprecated and depends on the creating thread); the dead
     * {@code msg == null} check contradicting {@code @NonNull} was removed.
     */
    @SuppressLint("HandlerLeak")
    Handler handler = new Handler(Looper.getMainLooper()) {
        @Override
        public void handleMessage(@NonNull Message msg) {
            if (msg.what == 1003) {
                AIHelperHistoryBean userBean = (AIHelperHistoryBean) msg.obj;
                dataList.add(userBean);
                // Placeholder bubble shown while the image is being generated.
                AIHelperHistoryBean loading = new AIHelperHistoryBean();
                loading.setLoading(true);
                dataList.add(loading);
                aiMessageAdapter.notifyDataSetChanged();
                recyclerView.scrollToPosition(dataList.size() - 1);
                requestImage(userBean.getContent());
            } else if (msg.what == 1005) {
                GenerateBean generateBean = (GenerateBean) msg.obj;
                AIHelperHistoryBean aiBean = new AIHelperHistoryBean();
                aiBean.setCreateBy("ai");
                aiBean.setModeType(2);
                aiBean.setImageUrl(generateBean.getFileUrl());
                dataList.add(aiBean);
                // Remove every loading placeholder (iterate backwards so the
                // indices stay valid while removing).
                for (int i = dataList.size() - 1; i >= 0; i--) {
                    if (dataList.get(i).isLoading()) {
                        dataList.remove(i);
                    }
                }
                aiMessageAdapter.notifyDataSetChanged();
                recyclerView.scrollToPosition(dataList.size() - 1);
            } else if (msg.what == 200) {
                requestImage((String) msg.obj);
            }
        }
    };

    /**
     * Wraps the given prompt text into a "user" chat entry and posts it to the
     * main-thread handler (msg.what == 1003) for display and image generation.
     */
    public void setResultText(final String text) {
        AIHelperHistoryBean userBean = new AIHelperHistoryBean();
        userBean.setContent(text);
        userBean.setCreateBy("user");
        Message message = Message.obtain();
        message.what = 1003;
        message.obj = userBean;
        handler.sendMessage(message);
    }


    @Override
    protected void onDestroy() {
        Log.i(SpeechDemoDefines.TAG, "Asr onDestroy");
        // Stop any TTS stream playback before tearing the engines down.
        if (mStreamPlayer != null) {
            mStreamPlayer.Stop();
        }
        uninitEngine();
        // Stop the keep-alive foreground service started in initView().
        Intent serviceIntent = new Intent(this, ForegroundService.class);
        stopService(serviceIntent);
        super.onDestroy();

    }

    /** Destroys the ASR and TTS engines (if created) and clears their references. */
    private void uninitEngine() {
        if (mSpeechEngine != null) {
            Log.i(SpeechDemoDefines.TAG, "引擎析构.");
            mSpeechEngine.destroyEngine();
            mSpeechEngine = null;
            Log.i(SpeechDemoDefines.TAG, "引擎析构完成!");
        }
        if (mSpeechEngineTTS != null) {
            Log.i(SpeechDemoDefines.TAG, "引擎析构.");
            mSpeechEngineTTS.destroyEngine();
            mSpeechEngineTTS = null;
            Log.i(SpeechDemoDefines.TAG, "引擎析构完成!");
        }
    }

    /**
     * Creates (at most once) and initializes the ASR speech engine, then
     * registers this activity as the SDK callback listener. On failure the
     * error path goes through {@link #speechEngineInitFailed(String)}.
     */
    private void initEngine() {
        if (mSpeechEngine == null) {
            LogUtils.showLog("SpeechDemo", "创建引擎.");
            mSpeechEngine = SpeechEngineGenerator.getInstance();
            mSpeechEngine.createEngine();
            mSpeechEngine.setContext(getApplicationContext());
        }
        LogUtils.showLog("SpeechDemo", "SDK 版本号: " + mSpeechEngine.getVersion());
        LogUtils.showLog("SpeechDemo", "配置初始化参数.");
        configInitParams();

        LogUtils.showLog("SpeechDemo", "引擎初始化.");
        int ret = mSpeechEngine.initEngine();
        // Bug fix: the "初始化失败" message used to be logged unconditionally,
        // even when initialization succeeded; it is now logged only on failure.
        if (ret != SpeechEngineDefines.ERR_NO_ERROR) {
            String errMessage = "初始化失败，返回值: " + ret;
            LogUtils.showLog("SpeechDemo", errMessage);
            speechEngineInitFailed(errMessage);
            return;
        }
        mSpeechEngine.setListener(this);
        speechEnginInitucceeded();
    }

    /**
     * Called when engine initialization fails; currently only logs the reason.
     * The UI-thread hook is kept as a placeholder for future error UI.
     */
    public void speechEngineInitFailed(String tipText) {
        LogUtils.showLog("SpeechDemo", "引擎初始化失败: " + tipText);
        runOnUiThread(() -> {
            // Intentionally empty: no failure UI on this screen yet.
        });
    }

    /**
     * Engine initialization success hook: hands the ready engine to the
     * stream recorder. The demo button/status UI code was removed.
     */
    public void speechEnginInitucceeded() {
        LogUtils.showLog("SpeechDemo", "引擎初始化成功!");
        mStreamRecorder.SetSpeechEngine(SpeechDemoDefines.ASR_VIEW, mSpeechEngine);
        runOnUiThread(() -> {
            // Intentionally empty: no UI state to update on this screen.
        });
    }

    /**
     * Applies the one-time engine initialization parameters (engine name,
     * logging, audio source, server address and auth credentials). Must run
     * before {@code mSpeechEngine.initEngine()} — see initEngine().
     */
    private void configInitParams() {
        // [Required] Engine name: this screen uses the ASR (recognition) engine.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ENGINE_NAME_STRING, SpeechEngineDefines.ASR_ENGINE);
        // [Optional] Debug output directory and log level.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_DEBUG_PATH_STRING, mDebugPath);
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_LOG_LEVEL_STRING, SpeechEngineDefines.LOG_LEVEL_TRACE);
        // [Optional] User ID, used to help locate this user's requests server-side.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_UID_STRING, SensitiveDefines.UID);
        // [Required] Audio source: the SDK's built-in recorder.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_RECORDER_TYPE_STRING, SpeechEngineDefines.RECORDER_TYPE_RECORDER);

//        if (mSettings.getBoolean(R.string.config_asr_rec_save)) {
//            //【可选配置】录音文件保存路径，如配置，SDK会将录音保存到该路径下，文件格式为 .wav
//            mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ASR_REC_PATH_STRING, mDebugPath);
//        }

        // [Optional] Audio sample rate, default 16000.
//        mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_SAMPLE_RATE_INT, mSettings.getInt(R.string.config_sample_rate));
        // [Optional] Channel count, default 1 (valid: 1 or 2).
//        mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_CHANNEL_NUM_INT, mSettings.getInt(R.string.config_channel));
        // [Optional] Channel count uploaded to the service; usually equal to
        // PARAMS_KEY_CHANNEL_NUM_INT.
//        mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_UP_CHANNEL_NUM_INT, mSettings.getInt(R.string.config_channel));

        // When the audio source is RECORDER_TYPE_STREAM and the input is not
        // 16kHz mono, the SDK-internal resampler must be configured.
//        if (mSettings.getOptionsValue(R.string.config_recorder_type, this).equals(SpeechEngineDefines.RECORDER_TYPE_STREAM)) {
        if (mStreamRecorder.GetStreamSampleRate() != 16000 || mStreamRecorder.GetStreamChannel() != 1) {
            // Enable the internal resampler and tell it the actual sample rate
            // and channel count the app layer supplies.
            mSpeechEngine.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_ENABLE_RESAMPLER_BOOL, true);
            mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_CUSTOM_SAMPLE_RATE_INT, mStreamRecorder.GetStreamSampleRate());
            mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_CUSTOM_CHANNEL_INT, mStreamRecorder.GetStreamChannel());
        }
//        }

//        String address = mSettings.getString(R.string.config_address);
//        if (address.isEmpty()) {
        String address = SensitiveDefines.DEFAULT_ADDRESS;
//        }
        Log.i("SpeechDemo", "Current address: " + address);
        // [Required] Recognition service domain.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ASR_ADDRESS_STRING, address);

//        String uri = mSettings.getString(R.string.config_uri);
//        if (uri.isEmpty()) {
        String uri = SensitiveDefines.ASR_DEFAULT_URI;
//        }
        Log.i("SpeechDemo", "Current uri: " + uri);
        // [Required] Recognition service URI.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ASR_URI_STRING, uri);

//        String appid = mSettings.getString(R.string.config_app_id);
//        if (appid.isEmpty()) {
        String appid = SensitiveDefines.APPID;
//        }
        Log.i("SpeechDemo", "Current appid: " + appid);
        // [Required] Auth: App ID.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_APP_ID_STRING, appid);

//        String token = mSettings.getString(R.string.config_token);
        String token = SensitiveDefines.TOKEN;
        // NOTE(review): logging the auth token leaks a credential into logcat —
        // consider removing this line in release builds.
        Log.i("SpeechDemo", "Current token: " + token);
        // [Required] Auth: token.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_APP_TOKEN_STRING, token);

//        String cluster = mSettings.getString(R.string.config_cluster);
//        if (cluster.isEmpty()) {
        String cluster = SensitiveDefines.ASR_DEFAULT_CLUSTER;
//        }
        Log.i("SpeechDemo", "Current cluster: " + cluster);
        // [Required] Service cluster used for recognition.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ASR_CLUSTER_STRING, cluster);

        // [Optional] Connection / receive timeouts; defaults are usually fine.
        mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_ASR_CONN_TIMEOUT_INT, 3000);
        mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_ASR_RECV_TIMEOUT_INT, 5000);

        // [Optional] Reconnect attempts after a dropped connection (default 0).
        mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_ASR_MAX_RETRY_TIMES_INT, 1);
    }

    /**
     * Speech SDK callback dispatcher: decodes the payload and routes each
     * message type to the matching handler method.
     *
     * @param type  one of {@code SpeechEngineDefines.MESSAGE_TYPE_*}
     * @param bytes message payload (JSON/text; assumed UTF-8 — confirm with
     *              the SDK documentation)
     * @param i1    payload length (unused here)
     */
    @Override
    public void onSpeechMessage(int type, byte[] bytes, int i1) {
        // Bug fix: decode explicitly as UTF-8 instead of the platform default
        // charset so Chinese result text cannot be garbled on some devices.
        String stdData = new String(bytes, StandardCharsets.UTF_8);
        switch (type) {
            case SpeechEngineDefines.MESSAGE_TYPE_ENGINE_START:
                // Engine started successfully.
                Log.i("SpeechDemo", "Callback: 引擎启动成功: data: " + stdData);
                speechStart();
                break;
            case SpeechEngineDefines.MESSAGE_TYPE_ENGINE_STOP:
                // Engine stopped.
                Log.i("SpeechDemo", "Callback: 引擎关闭: data: " + stdData);
                speechStop();
                break;
            case SpeechEngineDefines.MESSAGE_TYPE_ENGINE_ERROR:
                // Error payload (JSON with err_code / err_msg).
                Log.e("SpeechDemo", "Callback: 错误信息: " + stdData);
                speechError(stdData);
                break;
            case SpeechEngineDefines.MESSAGE_TYPE_CONNECTION_CONNECTED:
                Log.i("SpeechDemo", "Callback: 建连成功: data: " + stdData);
                break;
            case SpeechEngineDefines.MESSAGE_TYPE_PARTIAL_RESULT:
                // Partial ASR hypothesis for the current request.
                Log.d("SpeechDemo", "Callback: ASR 当前请求的部分结果");
                speechAsrResult(stdData, false);
                break;
            case SpeechEngineDefines.MESSAGE_TYPE_FINAL_RESULT:
                // Final ASR result for the current request.
                Log.i("SpeechDemo", "Callback: ASR 当前请求最终结果");
                speechAsrResult(stdData, true);
                break;
            case SpeechEngineDefines.MESSAGE_TYPE_VOLUME_LEVEL:
                // Recording volume updates; not surfaced in this UI.
                Log.d("SpeechDemo", "Callback: 录音音量");
                break;
            default:
                break;
        }
    }

    /** Engine-start callback hook; intentionally a no-op on this screen. */
    public void speechStart() {
        // The demo button/status UI this used to update was removed.
    }

    /** Engine-stop callback hook; intentionally a no-op on this screen. */
    public void speechStop() {
        // The demo button/status UI this used to update was removed.
    }

    /**
     * Handles an engine error callback. The payload is JSON carrying
     * "err_code" / "err_msg"; the error is parsed but not yet surfaced.
     */
    public void speechError(final String data) {
        runOnUiThread(() -> {
            try {
                JSONObject json = new JSONObject(data);
                if (json.has("err_code") && json.has("err_msg")) {
                    // Parsed successfully; no user-facing handling yet.
                }
            } catch (JSONException e) {
                e.printStackTrace();
            }
        });
    }

    /**
     * Handles ASR partial/final result callbacks. Only the final hypothesis is
     * forwarded into the chat flow via {@link #setResultText(String)}.
     *
     * @param data    JSON payload; the recognized text lives at result[0].text
     * @param isFinal true when this is the final result of the request
     */
    public void speechAsrResult(final String data, boolean isFinal) {
        Log.i(SpeechDemoDefines.TAG, "isFinal" + isFinal);
        Log.i(SpeechDemoDefines.TAG, "mFinishTalkingTimestamp" + mFinishTalkingTimestamp);
        if (isFinal && mFinishTalkingTimestamp > 0) {
            // Reset so a stale touch-up timestamp is not reused by the next
            // request. (The delay value it enabled was only consumed by
            // commented-out debug output, so the unused local was removed.)
            mFinishTalkingTimestamp = 0;
        }
        this.runOnUiThread(() -> {
            try {
                JSONObject reader = new JSONObject(data);
                if (!reader.has("result")) {
                    return;
                }
                String text = reader.getJSONArray("result").getJSONObject(0).getString("text");
                if (text.isEmpty()) {
                    return;
                }
                if (isFinal) {
                    setResultText(text);
                }
                Log.i(SpeechDemoDefines.TAG, "result" + text);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        });
    }

    /**
     * Resolves (and caches in mDebugPath) the app-external directory used for
     * SDK debug/log output.
     *
     * @return the absolute directory path, or "" when external storage is not
     *         mounted or the directory cannot be created
     */
    public String getDebugPath() {
        if (!Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) {
            Log.e("SpeechDemo", "External storage can't write.");
            return "";
        }
        Log.d("SpeechDemo", "External storage can be read and write.");
        File debugDir = getExternalFilesDir(null);
        if (debugDir == null) {
            return "";
        }
        if (!debugDir.exists()) {
            if (!debugDir.mkdirs()) {
                Log.e("SpeechDemo", "Failed to create debug path.");
                return "";
            }
            Log.d("SpeechDemo", "Create debug path successfully.");
        }
        mDebugPath = debugDir.getAbsolutePath();
        return mDebugPath;
    }

    /**
     * Touch-up handler for the push-to-talk button: finishes an active
     * recording, or cancels the still-pending start runnable if the finger
     * lifted before the long-press delay elapsed.
     */
    private void recordBtnTouchUp() {
        if (recordIsRunning) {
            recordIsRunning = false;
            Log.i("SpeechDemo", "AsrTouch: Finish");
            // Timestamp used to measure final-result latency in speechAsrResult().
            mFinishTalkingTimestamp = System.currentTimeMillis();
            // Directive: end the user's audio input.
            Log.i("SpeechDemo", "Directive: DIRECTIVE_FINISH_TALKING");
            mSpeechEngine.sendDirective(SpeechEngineDefines.DIRECTIVE_FINISH_TALKING, "");
            mStreamRecorder.Stop();
            return;
        }
        if (recordRunnable != null) {
            // Released before the delayed start fired: abort it.
            Log.i("SpeechDemo", "AsrTouch: Cancel");
            recordHandler.removeCallbacks(recordRunnable);
            recordRunnable = null;
        }
    }

    /**
     * Touch-down handler for the push-to-talk button. The actual engine start
     * is deferred by 500ms so an accidental tap does not spin up the engine;
     * a touch-up before the delay cancels the pending runnable (see
     * {@link #recordBtnTouchUp()}).
     */
    private void recordBtnTouchDown() {
        recordIsRunning = false;
        // Bug fix: bind the handler explicitly to the main looper instead of
        // relying on the deprecated no-arg Handler() constructor.
        recordHandler = new Handler(Looper.getMainLooper());
        recordRunnable = () -> {
            recordIsRunning = true;

            Log.i(SpeechDemoDefines.TAG, "配置启动参数.");
            configStartAsrParams();
            // Long-press mode: recording ends on finger lift, so disable the
            // cloud-side automatic end-of-speech detection.
            mSpeechEngine.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_ASR_AUTO_STOP_BOOL, false);

            // Synchronously stop any previous request before starting a new one.
            Log.i(SpeechDemoDefines.TAG, "关闭引擎（同步）");
            Log.i(SpeechDemoDefines.TAG, "Directive: DIRECTIVE_SYNC_STOP_ENGINE");
            int ret = mSpeechEngine.sendDirective(SpeechEngineDefines.DIRECTIVE_SYNC_STOP_ENGINE, "");
            if (ret != SpeechEngineDefines.ERR_NO_ERROR) {
                Log.e(SpeechDemoDefines.TAG, "send directive syncstop failed, " + ret);
            } else {
                Log.i(SpeechDemoDefines.TAG, "启动引擎");
                Log.i(SpeechDemoDefines.TAG, "Directive: DIRECTIVE_START_ENGINE");
                ret = mSpeechEngine.sendDirective(SpeechEngineDefines.DIRECTIVE_START_ENGINE, "");
                Log.i(SpeechDemoDefines.TAG, "启动引擎 ret = " + ret);
                if (ret == SpeechEngineDefines.ERR_REC_CHECK_ENVIRONMENT_FAILED) {
                    // Recorder environment check failed: request RECORD_AUDIO.
                    requestPermission(ASR_PERMISSIONS);
                } else if (ret != SpeechEngineDefines.ERR_NO_ERROR) {
                    Log.e(SpeechDemoDefines.TAG, "send directive start failed, " + ret);
                }
            }
        };
        recordHandler.postDelayed(recordRunnable, 500);
    }

    /**
     * Applies per-request ASR (speech recognition) options on mSpeechEngine.
     * Called immediately before the engine is started for each recognition
     * pass (see recordBtnTouchDown).
     */
    private void configStartAsrParams() {
        // [Optional] Enable disfluency removal (DDC).
        mSpeechEngine.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_ASR_ENABLE_DDC_BOOL, true);
        // [Optional] Enable inverse text normalization (ITN, spoken words -> digits).
//        mSpeechEngine.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_ASR_ENABLE_ITN_BOOL, mSettings.getBoolean(R.string.config_asr_enable_itn));
        // [Optional] Show punctuation in recognition results.
        mSpeechEngine.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_ASR_SHOW_NLU_PUNC_BOOL, true);
        // [Optional] Recognition language.
//        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ASR_LANGUAGE_STRING, mSettings.getString(R.string.config_asr_language));
        // [Optional] Hide the sentence-final punctuation mark.
        mSpeechEngine.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_ASR_DISABLE_END_PUNC_BOOL, true);
        // [Optional] Pass a custom ASR request JSON directly; callers must ensure it is well-formed.
//        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ASR_REQ_PARAMS_STRING, mSettings.getString(R.string.config_asr_req_params));

        // [Optional] Result delivery mode: full text every callback vs. incremental; full is the default.
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ASR_RESULT_TYPE_STRING, SpeechEngineDefines.ASR_RESULT_TYPE_SINGLE);

        // [Optional] VAD leading-silence duration: how long without speech counts as empty audio.
//        mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_ASR_VAD_START_SILENCE_TIME_INT, mSettings.getInt(R.string.config_asr_vad_start_silence_time));
        // [Optional] VAD trailing-silence duration: pause length treated as end of speech (auto-stop).
//        mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_ASR_VAD_END_SILENCE_TIME_INT, mSettings.getInt(R.string.config_asr_vad_end_silence_time));
        // [Optional] VAD mode for custom VAD scenarios; empty by default.
//        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ASR_VAD_MODE_STRING, mSettings.getString(R.string.config_asr_vad_mode));
        // [Optional] Max user-audio duration in ms (one-sentence recognition only); default 60000 ms.
        mSpeechEngine.setOptionInt(SpeechEngineDefines.PARAMS_KEY_VAD_MAX_SPEECH_DURATION_INT, 60000);

        // [Optional] Whether to report recording volume; enable when the app shows a waveform.
        mSpeechEngine.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_ENABLE_GET_VOLUME_BOOL, false);

        // [Optional] Correction table: when a key appears in a recognition
        // result it is replaced by its value, e.g. "{\"古爱玲\":\"谷爱凌\"}".
        mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_ASR_CORRECT_WORDS_STRING, "{\"古爱玲\":\"谷爱凌\"}");


        // [Optional] Update ASR hot words.
//        if (!mSettings.getString(R.string.config_asr_hotwords).isEmpty()) {
//            Log.d(SpeechDemoDefines.TAG, "Set hotwords.");
//            setHotWords(mSettings.getString(R.string.config_asr_hotwords));
//        }

//        if (mSettings.getOptionsValue(R.string.config_recorder_type, this).equals(SpeechEngineDefines.RECORDER_TYPE_STREAM)) {
//            if (!mStreamRecorder.Start()) {
//                requestPermission(ASR_PERMISSIONS);
//            }
//        } else if (mSettings.getOptionsValue(R.string.config_recorder_type, this).equals(SpeechEngineDefines.RECORDER_TYPE_FILE)) {
//            // When recognizing from an audio file, the absolute file path must be set.
//            String test_file_path = mDebugPath + "/asr_rec_file.pcm";
//            Log.d(SpeechDemoDefines.TAG, "输入的音频文件路径: " + test_file_path);
//            // [Required] when recognizing from an audio file, otherwise not needed.
//            mSpeechEngine.setOptionString(SpeechEngineDefines.PARAMS_KEY_RECORDER_FILE_STRING, test_file_path);
//        }
    }

    /**
     * Ensures the given runtime permissions are granted.
     *
     * On Android M and above, each permission is checked and any that are not
     * yet granted are requested via {@code ActivityCompat.requestPermissions};
     * below M, permissions are granted at install time so nothing is done.
     *
     * @param permissions permissions to check and, if needed, request
     * @return true if every permission is already granted; false if a request
     *         was dispatched (result arrives in onRequestPermissionsResult)
     */
    public boolean requestPermission(List<String> permissions) {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            // Pre-M: the runtime-permission model does not apply.
            return true;
        }
        ArrayList<String> pending = new ArrayList<>();
        for (String permission : permissions) {
            boolean granted = ContextCompat.checkSelfPermission(this, permission)
                    == PackageManager.PERMISSION_GRANTED;
            if (!granted) {
                pending.add(permission);
            }
        }
        if (pending.isEmpty()) {
            return true;
        }
        ActivityCompat.requestPermissions(this, pending.toArray(new String[0]), CODE_PERMISSION_REQUEST);
        return false;
    }


///////////////// Text-to-speech (TTS) section /////////////////////////////////////////////////////////////////////////

    // App-side AudioTrack-based player, used because the SDK's built-in player
    // is disabled in configStartTtsParams (audio arrives via data callbacks).
    private SpeechStreamPlayer mStreamPlayer = null;
    // Dedicated engine instance for TTS (separate from the ASR engine).
    private SpeechEngine mSpeechEngineTTS = null;
    // When true, a new player object is created on every Start Engine.
    private Boolean mDisablePlayerReuse = false;

    // System audio-focus plumbing for playback.
    private AudioManager mAudioManager = null;
    private AudioManager.OnAudioFocusChangeListener mAFChangeListener = null;

    // True while this app holds audio focus and may play audio.
    private boolean mPlaybackNowAuthorized = false;
    // private String  mCurTtsText = "愿中国青年都摆脱冷气，只是向上走，不必听自暴自弃者流的话。能做事的做事，能发声的发声。有一分热，发一分光。就令萤火一般，也可以在黑暗里发一点光，不必等候炬火。此后如竟没有炬火：我便是唯一的光。";
    // Text to synthesize next; a default passage is used when this is empty.
    private String mCurTtsText = "";
    // Engine state flags.
    private boolean mEngineInited = false;
    private boolean mEngineErrorOccurred = false;

    /**
     * Entry point for TTS engine initialization. Runs in pure-online mode, so
     * no offline synthesis resources need to be downloaded first.
     */
    private void initEngineTTS() {
//        mCurTtsWorkMode = mTtsWorkModeArray[mSettings.getOptions(R.string.tts_work_mode_title).chooseIdx];
//        Log.i(SpeechDemoDefines.TAG, "调用初始化接口前的语音合成工作模式为 " + mCurTtsWorkMode);
        // Pure-online mode: offline synthesis resources are not required.
        initEngineInternal();
    }

    /**
     * Creates (if needed) and initializes the TTS speech engine.
     *
     * Steps: lazily create the engine singleton, apply init-time parameters
     * (configInitParamsTTS), call initEngine(), then attach the message
     * listener. Calls speechEngineInitFailed(...) on failure and
     * speechEnginInitSucceeded(...) on success.
     *
     * Fixes vs. the previous version: the dead "create engine failed" check
     * (ret was never assigned by creation) is removed, and the failure message
     * is now only logged at error level when initEngine() actually fails —
     * previously it was logged unconditionally, even on success.
     */
    private void initEngineInternal() {
        if (mSpeechEngineTTS == null) {
            Log.i(SpeechDemoDefines.TAG, "创建引擎.");
            mSpeechEngineTTS = SpeechEngineGenerator.getInstance();
            mSpeechEngineTTS.createEngine();
            mSpeechEngineTTS.setContext(getApplicationContext());
        }
        Log.d(SpeechDemoDefines.TAG, "SDK 版本号: " + mSpeechEngineTTS.getVersion());

        Log.i(SpeechDemoDefines.TAG, "配置初始化参数.");
        configInitParamsTTS();

//        if (mSettings.getBoolean(R.string.config_demo_player) && mStreamPlayer != null) {
//            mStreamPlayer.SetPlayerSampleRate(24000);
//        }

        long startInitTimestamp = System.currentTimeMillis();
        Log.i(SpeechDemoDefines.TAG, "引擎初始化.");
        int ret = mSpeechEngineTTS.initEngine();
        if (ret != SpeechEngineDefines.ERR_NO_ERROR) {
            // Only report failure when initEngine() actually failed.
            String errMessage = "初始化失败，返回值: " + ret;
            Log.e(SpeechDemoDefines.TAG, errMessage);
            speechEngineInitFailed(errMessage);
            return;
        }
        Log.i(SpeechDemoDefines.TAG, "设置消息监听");
        mSpeechEngineTTS.setListener(speechListener);

        long cost = System.currentTimeMillis() - startInitTimestamp;
        Log.d(SpeechDemoDefines.TAG, String.format("初始化耗时 %d 毫秒", cost));
        speechEnginInitSucceeded(cost);
    }

    /**
     * Applies init-time TTS options on mSpeechEngineTTS. Must run before
     * initEngine(); several options (e.g. the audio stream type while player
     * reuse is enabled) only take effect when set at this stage.
     */
    private void configInitParamsTTS() {
        // [Required] Engine name: select the TTS engine.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_ENGINE_NAME_STRING,
                SpeechEngineDefines.TTS_ENGINE);

        // [Required] Work mode, one of:
        // SpeechEngineDefines.TTS_WORK_MODE_ONLINE: online synthesis only; offline params not needed;
        // SpeechEngineDefines.TTS_WORK_MODE_OFFLINE: offline synthesis only; online params not needed;
        // SpeechEngineDefines.TTS_WORK_MODE_ALTERNATE: try online first; on failure (network timeout) fall back to the offline engine.
        mSpeechEngineTTS.setOptionInt(SpeechEngineDefines.PARAMS_KEY_TTS_WORK_MODE_INT, SpeechEngineDefines.TTS_WORK_MODE_ONLINE);

        // [Optional] Debug & log output.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_DEBUG_PATH_STRING, mDebugPath);
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_LOG_LEVEL_STRING, SpeechEngineDefines.LOG_LEVEL_TRACE);

        // [Optional] User/device IDs (help diagnose per-user issues server-side).
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_UID_STRING, SensitiveDefines.UID);
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_DEVICE_ID_STRING, SensitiveDefines.DID);

        // [Optional] Whether to save synthesized audio to the device; only takes
        // effect when PARAMS_KEY_TTS_AUDIO_PATH_STRING is also configured.
        mSpeechEngineTTS.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_TTS_ENABLE_DUMP_BOOL,
                false);
        // Directory for dumped TTS audio; must exist and be writable by the app
        // before synthesis. Files are named tts_{reqid}.wav, where {reqid} is
        // the request id of the synthesis. [Required] when the dump switch
        // above is true, otherwise [Optional].
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_AUDIO_PATH_STRING, mDebugPath);

//        mDisablePlayerReuse = mSettings.getBoolean(R.string.config_disable_player_reuse);
        // [Optional] Disable reuse of the player object; when disabled, a new
        // player is created on every Start Engine.
        mSpeechEngineTTS.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_PLAYER_DISABLE_REUSE_BOOL, false);
        // [Optional] Audio stream type used by the SDK player (media by default).
        // With player reuse enabled this must be set before SDK init to take effect.
        mSpeechEngineTTS.setOptionInt(SpeechEngineDefines.PARAMS_KEY_AUDIO_STREAM_TYPE_INT,
                SpeechEngineDefines.AUDIO_STREAM_TYPE_MEDIA);

        // [Optional] Sample rate of the synthesized audio; default 24000.
        mSpeechEngineTTS.setOptionInt(SpeechEngineDefines.PARAMS_KEY_TTS_SAMPLE_RATE_INT,
                24000);
        // [Optional] Fade-out duration (ms) when playback is interrupted; 0 = no fade-out.
        mSpeechEngineTTS.setOptionInt(SpeechEngineDefines.PARAMS_KEY_AUDIO_FADEOUT_DURATION_INT,
                20);

        // ------------------------ Online synthesis options -----------------------

        // [Required] Online auth: Appid.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_APP_ID_STRING, SensitiveDefines.APPID);
        // [Required] Online auth: Token.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_APP_TOKEN_STRING, SensitiveDefines.TOKEN);
        // [Required] TTS service domain.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_ADDRESS_STRING, SensitiveDefines.DEFAULT_ADDRESS);
        // [Required] TTS service URI.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_URI_STRING, SensitiveDefines.TTS_DEFAULT_URI);
        // [Required] TTS service cluster.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_CLUSTER_STRING, SensitiveDefines.TTS_DEFAULT_CLUSTER);
        // [Optional] Compression ratio of the opus-ogg audio returned by the online service.
        mSpeechEngineTTS.setOptionInt(SpeechEngineDefines.PARAMS_KEY_TTS_COMPRESSION_RATE_INT, 10);

    }

    /**
     * Records that TTS engine initialization succeeded.
     *
     * @param initCost how long initialization took, in milliseconds (currently
     *                 only used by the logging in the caller)
     */
    private void speechEnginInitSucceeded(long initCost) {
        Log.i(SpeechDemoDefines.TAG, "引擎初始化成功!");
        runOnUiThread(() -> mEngineInited = true);
    }

    /**
     * Starts a TTS session if one is not already running.
     *
     * Acquires audio focus first (no playback without it), issues a
     * synchronous stop to terminate any previous request, applies the
     * per-request TTS parameters, then starts the engine.
     */
    private void startEngineBtnClicked() {
        Log.d(SpeechDemoDefines.TAG, "Start engine, current status: " + mEngineStarted);
        if (mEngineStarted) {
            return;
        }
        AcquireAudioFocus();
        if (!mPlaybackNowAuthorized) {
            Log.w(SpeechDemoDefines.TAG, "Acquire audio focus failed, can't play audio");
            return;
        }
        mEngineErrorOccurred = false;
        // A synchronous stop guarantees that the previous request has ended.
        Log.i(SpeechDemoDefines.TAG, "关闭引擎（同步）");
        Log.i(SpeechDemoDefines.TAG, "Directive: DIRECTIVE_SYNC_STOP_ENGINE");
        int directiveResult = mSpeechEngineTTS.sendDirective(SpeechEngineDefines.DIRECTIVE_SYNC_STOP_ENGINE, "");
        if (directiveResult != SpeechEngineDefines.ERR_NO_ERROR) {
            Log.e(SpeechDemoDefines.TAG, "send directive syncstop failed, " + directiveResult);
            return;
        }
        configStartTtsParams();
        Log.i(SpeechDemoDefines.TAG, "启动引擎");
        Log.i(SpeechDemoDefines.TAG, "Directive: DIRECTIVE_START_ENGINE");
        directiveResult = mSpeechEngineTTS.sendDirective(SpeechEngineDefines.DIRECTIVE_START_ENGINE, "");
        if (directiveResult != SpeechEngineDefines.ERR_NO_ERROR) {
            sendStartEngineDirectiveFailed("发送启动引擎指令失败, " + directiveResult);
        }
    }

    /**
     * Requests audio focus for music playback and records the outcome in
     * mPlaybackNowAuthorized. On any other result the flag is left unchanged.
     *
     * NOTE(review): this uses the requestAudioFocus(listener, stream, gain)
     * overload, which is deprecated since API 26 — consider migrating to
     * AudioFocusRequest; confirm minSdk before changing.
     */
    private void AcquireAudioFocus() {
        // Ask the system for audio focus and record the result.
        int res = mAudioManager.requestAudioFocus(mAFChangeListener, AudioManager.STREAM_MUSIC,
                AudioManager.AUDIOFOCUS_GAIN);
        if (res == AudioManager.AUDIOFOCUS_REQUEST_FAILED) {
            mPlaybackNowAuthorized = false;
        } else if (res == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
            mPlaybackNowAuthorized = true;
        }
    }

    /**
     * Handles a failed START_ENGINE directive: logs the error and, on the UI
     * thread, shows the message and clears the started flag.
     *
     * @param tipText human-readable failure description to display
     */
    private void sendStartEngineDirectiveFailed(String tipText) {
        Log.e(SpeechDemoDefines.TAG, tipText);
        runOnUiThread(() -> {
            setResultText(tipText);
            mEngineStarted = false;
        });
    }

    /**
     * Applies per-request TTS options on mSpeechEngineTTS. Called after the
     * synchronous stop and before DIRECTIVE_START_ENGINE for every synthesis.
     */
    private void configStartTtsParams() {
        // [Required] TTS usage scenario.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_SCENARIO_STRING, SpeechEngineDefines.TTS_SCENARIO_TYPE_NORMAL);

//        String ttsText = mReferText.getText().toString();
//        if (!ttsText.isEmpty()) {
//            mCurTtsText = ttsText;
//        } else {
        // Fall back to a default passage when no text has been set.
        if (TextUtils.isEmpty(mCurTtsText)) {
            mCurTtsText = "愿中国青年都摆脱冷气，只是向上走，不必听自暴自弃者流的话。能做事的做事，能发声的发声。有一分热，发一分光。就令萤火一般，也可以在黑暗里发一点光，不必等候炬火。此后如竟没有炬火：我便是唯一的光。";
        }
        //        }
        // [Required] Text to synthesize; must not exceed 80 characters.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_TEXT_STRING, mCurTtsText);
        // [Optional] Text type: plain text (TTS_TEXT_TYPE_PLAIN) or SSML (TTS_TEXT_TYPE_SSML).
        // NOTE(review): the literal "Plain" is passed here — confirm it matches
        // the SDK constant TTS_TEXT_TYPE_PLAIN.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_TEXT_TYPE_STRING,
                "Plain");
        // [Optional] Speech rate; see the Volcano Engine TTS parameter docs for the valid range.
        mSpeechEngineTTS.setOptionDouble(SpeechEngineDefines.PARAMS_KEY_TTS_SPEED_RATIO_DOUBLE, 1.0);
        // [Optional] Volume; see the Volcano Engine TTS parameter docs for the valid range.
        mSpeechEngineTTS.setOptionDouble(SpeechEngineDefines.PARAMS_KEY_TTS_VOLUME_RATIO_DOUBLE, 1.0);
        // [Optional] Pitch; see the Volcano Engine TTS parameter docs for the valid range.
        mSpeechEngineTTS.setOptionDouble(SpeechEngineDefines.PARAMS_KEY_TTS_PITCH_RATIO_DOUBLE, 1.0);
        // [Optional] Silence appended after each sentence, in ms; default 0 ms.
        mSpeechEngineTTS.setOptionInt(SpeechEngineDefines.PARAMS_KEY_TTS_SILENCE_DURATION_INT, 0);

        if (mDisablePlayerReuse) {
            // [Optional] Audio stream type of the SDK player (media by default).
            // Setting it before Start Engine only works when player reuse is disabled.
            mSpeechEngineTTS.setOptionInt(SpeechEngineDefines.PARAMS_KEY_AUDIO_STREAM_TYPE_INT,
                    SpeechEngineDefines.AUDIO_STREAM_TYPE_MEDIA);
        }
        // [Optional] Play synthesized audio with the SDK's built-in player; default true.
        mSpeechEngineTTS.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_TTS_ENABLE_PLAYER_BOOL,
                false);
        // [Optional] Have the SDK stream audio data back via callbacks (off by default).
        // When enabled, MESSAGE_TYPE_TTS_AUDIO_DATA_END signals that all audio
        // for the current synthesis has been delivered.
        mSpeechEngineTTS.setOptionInt(SpeechEngineDefines.PARAMS_KEY_TTS_DATA_CALLBACK_MODE_INT, 2);

        // ------------------------ Online synthesis options -----------------------
        Log.d(SpeechDemoDefines.TAG, "Current online voice: BV700_V2_streaming");
        // [Required] Online voice code.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_VOICE_ONLINE_STRING, SensitiveDefines.TTS_DEFAULT_ONLINE_VOICE_TYPE);
        // [Required] Online voice-type code.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_VOICE_TYPE_ONLINE_STRING,
                SensitiveDefines.TTS_DEFAULT_ONLINE_VOICE_TYPE);
        // [Optional] Set to 1 when detailed playback progress or resume-from-breakpoint is needed, else 0/unset.
        mSpeechEngineTTS.setOptionInt(SpeechEngineDefines.PARAMS_KEY_TTS_WITH_FRONTEND_INT, 1);
        // [Optional] Use a cloned (voice-clone) voice.
        mSpeechEngineTTS.setOptionBoolean(SpeechEngineDefines.PARAMS_KEY_TTS_USE_VOICECLONE_BOOL, true);
        // [Optional] Backend cluster for the cloned voice when the switch above is on.
        mSpeechEngineTTS.setOptionString(SpeechEngineDefines.PARAMS_KEY_TTS_BACKEND_CLUSTER_STRING, SensitiveDefines.TTS_DEFAULT_CLUSTER);

    }

    /**
     * Listener for all TTS engine callbacks: engine lifecycle, synthesis and
     * playback progress, and streamed audio data. Each message type is routed
     * to the matching speech* handler.
     *
     * Fixes vs. the previous version: non-error callbacks were logged with
     * Log.e (error level); they now use Log.i. The audio-data log reported
     * stdData.length() — the char count of the bytes decoded as a String —
     * while the message claims a byte count; it now reports data.length.
     */
    private SpeechEngine.SpeechListener speechListener = new SpeechEngine.SpeechListener() {
        @Override
        public void onSpeechMessage(int type, byte[] data, int i1) {
            String stdData = new String(data);

            switch (type) {
                case SpeechEngineDefines.MESSAGE_TYPE_ENGINE_START:
                    // Engine started successfully.
                    Log.i(SpeechDemoDefines.TAG, "Callback: 引擎启动成功: data: " + stdData);
                    speechStartTTS(stdData);
                    break;
                case SpeechEngineDefines.MESSAGE_TYPE_ENGINE_STOP:
                    // Engine stopped.
                    Log.i(SpeechDemoDefines.TAG, "Callback: 引擎关闭: data: " + stdData);
                    speechStopTTS(stdData);
                    break;
                case SpeechEngineDefines.MESSAGE_TYPE_ENGINE_ERROR:
                    // Error message — the only genuinely error-level callback.
                    Log.e(SpeechDemoDefines.TAG, "Callback: 错误信息: " + stdData);
                    speechError(stdData);
                    break;
                case SpeechEngineDefines.MESSAGE_TYPE_TTS_SYNTHESIS_BEGIN:
                    // Synthesis started.
                    Log.i(SpeechDemoDefines.TAG, "Callback: 合成开始: " + stdData);
                    speechStartSynthesis(stdData);
                    break;
                case SpeechEngineDefines.MESSAGE_TYPE_TTS_SYNTHESIS_END:
                    // Synthesis finished.
                    Log.i(SpeechDemoDefines.TAG, "Callback: 合成结束: " + stdData);
                    speechFinishSynthesis(stdData);
                    break;
                case SpeechEngineDefines.MESSAGE_TYPE_TTS_START_PLAYING:
                    // Playback started.
                    Log.i(SpeechDemoDefines.TAG, "Callback: 播放开始: " + stdData);
                    speechStartPlaying(stdData);
                    break;
                case SpeechEngineDefines.MESSAGE_TYPE_TTS_PLAYBACK_PROGRESS:
                    // Playback progress update.
                    Log.i(SpeechDemoDefines.TAG, "Callback: 播放进度");
                    speechPlayingProgress(stdData);
                    break;
                case SpeechEngineDefines.MESSAGE_TYPE_TTS_FINISH_PLAYING:
                    // Playback finished.
                    Log.i(SpeechDemoDefines.TAG, "Callback: 播放结束: " + stdData);
                    speechFinishPlaying(stdData);
                    break;
                case SpeechEngineDefines.MESSAGE_TYPE_TTS_AUDIO_DATA:
                    // Streamed audio chunk: forward the raw bytes to the player.
                    Log.i(SpeechDemoDefines.TAG, String.format("Callback: 音频数据，长度 %d 字节", data.length));
                    speechTtsAudioData(data, false);
                    break;
                case SpeechEngineDefines.MESSAGE_TYPE_TTS_AUDIO_DATA_END:
                    // All audio for this synthesis has been delivered.
                    Log.i(SpeechDemoDefines.TAG, String.format("Callback: 音频数据，长度 %d 字节", data.length));
                    speechTtsAudioData(new byte[0], true);
                    break;
                default:
                    break;
            }
        }
    };

    /**
     * ENGINE_START callback: marks the engine as running and starts the demo
     * stream player (if one is in use) so it can accept audio data.
     *
     * @param data raw callback payload from the engine (unused)
     */
    private void speechStartTTS(final String data) {
        mEngineStarted = true;
        SpeechStreamPlayer player = mStreamPlayer;
        if (player != null) {
            player.Start();
        }
    }

    /**
     * ENGINE_STOP callback: flushes and stops the demo stream player, resets
     * UI-side session flags, and releases audio focus.
     *
     * @param data raw callback payload from the engine (unused)
     */
    private void speechStopTTS(final String data) {
        mEngineStarted = false;
        SpeechStreamPlayer player = mStreamPlayer;
        if (player != null) {
            // Feed an empty final buffer to flush, then block until playback ends.
            player.Feed(new byte[0], true);
            player.WaitPlayerStop();
        }
        runOnUiThread(() -> {
            mConnectionCreated = false;
            mPlayerPaused = false;
        });

        // Playback is complete; hand the audio focus back to the system.
        mAudioManager.abandonAudioFocus(mAFChangeListener);
        mPlaybackNowAuthorized = false;
    }

    /** TTS_SYNTHESIS_BEGIN callback — intentionally a no-op here. */
    private void speechStartSynthesis(final String data) {
    }

    /** TTS_SYNTHESIS_END callback — intentionally a no-op here. */
    private void speechFinishSynthesis(final String data) {
    }

    /** TTS_START_PLAYING callback — intentionally a no-op here. */
    private void speechStartPlaying(final String data) {
    }

    /**
     * TTS_PLAYBACK_PROGRESS callback: parses the JSON payload and logs the
     * request id and playback percentage.
     *
     * @param data JSON string expected to contain "reqid" and "progress"
     */
    private void speechPlayingProgress(final String data) {
        try {
            JSONObject reader = new JSONObject(data);
            if (!reader.has("reqid") || !reader.has("progress")) {
                Log.w(SpeechDemoDefines.TAG, "Can't find necessary field in progress callback. ");
                return;
            }
            double percentage = reader.getDouble("progress");
            String reqid = reader.getString("reqid");
            Log.d(SpeechDemoDefines.TAG, "当前播放的文本对应的 reqid: " + reqid + ", 播放进度：" + percentage);
        } catch (JSONException e) {
            // Malformed progress payload: log via the app logger (with the
            // exception as cause) instead of printStackTrace to stderr.
            Log.e(SpeechDemoDefines.TAG, "Failed to parse playback progress: " + data, e);
        }
    }

    /** TTS_FINISH_PLAYING callback — intentionally a no-op here. */
    private void speechFinishPlaying(final String data) {
    }

    /**
     * Forwards a chunk of synthesized audio to the demo stream player.
     *
     * @param data    audio bytes for this chunk (may be empty on the final call)
     * @param isFinal true when this is the last chunk of the current synthesis
     */
    private void speechTtsAudioData(byte[] data, boolean isFinal) {
        SpeechStreamPlayer player = mStreamPlayer;
        if (player == null) {
            return;
        }
        player.Feed(data, isFinal);
    }

    /**
     * Hides the soft keyboard if a window currently has focus.
     *
     * Fixes vs. the previous version: the static constant is referenced via
     * {@code Context.INPUT_METHOD_SERVICE} instead of through {@code this},
     * and the input-method service is null-guarded before use.
     */
    public void closekeyboard() {
        final View v = getWindow().peekDecorView();
        if (v != null && v.getWindowToken() != null) {
            InputMethodManager imm = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
            if (imm != null) {
                imm.hideSoftInputFromWindow(v.getWindowToken(), 0);
            }
        }
    }

    /**
     * Requests the soft keyboard for the given input field.
     *
     * Fixes vs. the previous version: the input-method service is null-guarded
     * before use.
     *
     * @param editText the field that should receive keyboard input
     */
    public void showKeyBoard(EditText editText) {
        InputMethodManager imm = (InputMethodManager) getSystemService(INPUT_METHOD_SERVICE);
        if (imm != null) {
            imm.showSoftInput(editText, InputMethodManager.SHOW_IMPLICIT);
        }
    }


    /**
     * Requests an AI-generated image for the given prompt; when a result
     * arrives it is posted to the activity handler as message 1005.
     *
     * @param prompt text prompt describing the image to generate
     */
    private void requestImage(String prompt) {
        AIHelpUtil.getInstance().requestText2Image(prompt, new AIHelpUtil.Text2TextCallBack() {
            @Override
            public void onDataBack(GenerateBean generateBean) {
                if (generateBean == null) {
                    // Nothing to deliver; ignore empty results.
                    return;
                }
                Message msg = Message.obtain();
                msg.what = 1005;
                msg.obj = generateBean;
                handler.sendMessage(msg);
            }
        });
    }
    /**
     * Intercepts key events; on HOME, jumps back to MainActivity.
     *
     * NOTE(review): Android does not normally deliver KEYCODE_HOME to an
     * activity's onKeyDown, so this branch likely never fires on standard
     * devices — verify on the target hardware.
     */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_HOME) {
            TActivityUtils.jumpToActivity(getBaseContext(), MainActivity.class);
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

}