package com.miao.xfspeech;

import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.widget.Toast;

import androidx.annotation.NonNull;

import com.iflytek.cloud.ErrorCode;
import com.iflytek.cloud.InitListener;
import com.iflytek.cloud.RecognizerListener;
import com.iflytek.cloud.RecognizerResult;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.SpeechError;
import com.iflytek.cloud.SpeechEvent;
import com.iflytek.cloud.SpeechRecognizer;
import com.iflytek.cloud.SpeechUtility;
import com.iflytek.cloud.VoiceWakeuper;
import com.iflytek.cloud.WakeuperListener;
import com.iflytek.cloud.WakeuperResult;
import com.iflytek.cloud.ui.RecognizerDialog;
import com.iflytek.cloud.ui.RecognizerDialogListener;
import com.iflytek.cloud.util.ResourceUtil;
import com.miao.xfspeech.xfutil.JsonParser;

import org.json.JSONException;
import org.json.JSONObject;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.embedding.engine.plugins.activity.ActivityAware;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.plugin.common.EventChannel;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;


/** XfspeechPlugin */
/**
 * XfspeechPlugin — Flutter plugin bridging the iFlytek (xfyun) speech SDK.
 *
 * Exposes two capabilities over a MethodChannel:
 *   - speech dictation (ASR) via {@link SpeechRecognizer}
 *   - voice wake-up via {@link VoiceWakeuper}
 * Results and errors are streamed back to Dart through an EventChannel sink
 * as maps with keys {"transResult", "result"/"resultString", "success", "type"}
 * where type "1" = wake-up events and type "2" = dictation events.
 */
public class XfspeechPlugin implements FlutterPlugin, MethodCallHandler, EventChannel.StreamHandler, ActivityAware {
  /// The MethodChannel that will the communication between Flutter and native Android
  ///
  /// This local reference serves to register the plugin with the Flutter Engine and unregister it
  /// when the Flutter Engine is detached from the Activity
  private MethodChannel channel;
  private Context mContext = null;
  private Activity mactivity = null;
  // Sink for streaming results to Dart; null until onListen fires.
  // NOTE(review): static and publicly writable — assumes a single engine
  // instance per process; confirm before embedding in multiple engines.
  public static EventChannel.EventSink mEventSink = null;
  private EventChannel eventChannel;
  private static final String CHANNEL_METHOD_LOCATION = "flutter_xfspeech_plugin";
  private static final String CHANNEL_STREAM_LOCATION = "flutter_xfspeech_plugin_stream";

  // Voice wake-up object.
  VoiceWakeuper mIvw;
  // "1" keeps the wakeuper listening after each wake-up hit.
  private String keep_alive = "1";
  // Closed-loop network-optimization mode for wake-up ("0" = off).
  private String ivwNetMode = "0";
  // Latest formatted wake-up result.
  private String resultString;

  // Speech dictation (ASR) object.
  private SpeechRecognizer mIat;
  // Dictation UI dialog (created but not shown; kept for the with-UI path).
  private RecognizerDialog mIatDialog;
  // Accumulates dictation results keyed by sentence id.
  private HashMap<String, String> mIatResults = new LinkedHashMap<>();
  private SharedPreferences mSharedPreferences;
  private Toast mToast;
  private String mEngineType = "local";

  @Override
  public void onAttachedToEngine(@NonNull FlutterPluginBinding flutterPluginBinding) {
    if (null == mContext) {
      mContext = flutterPluginBinding.getApplicationContext();
    }

    // Method-call channel (Dart -> native).
    channel = new MethodChannel(flutterPluginBinding.getBinaryMessenger(), CHANNEL_METHOD_LOCATION);
    channel.setMethodCallHandler(this);

    // Event-stream channel (native -> Dart callbacks).
    eventChannel = new EventChannel(flutterPluginBinding.getBinaryMessenger(), CHANNEL_STREAM_LOCATION);
    eventChannel.setStreamHandler(this);
  }

  @Override
  public void onMethodCall(@NonNull MethodCall call, @NonNull Result result) {
    switch (call.method) {
      case "getPlatformVersion":
        result.success("Android " + android.os.Build.VERSION.RELEASE);
        break;
      case "init":
        init(call);
        // Every Flutter method call must be answered, otherwise an awaiting
        // Dart Future never completes.
        result.success(true);
        break;
      case "startIvw":
        startIvw(call);
        result.success(true);
        break;
      case "ivwCancel":
        ivwCancel(result);
        break;
      case "ivwStop":
        ivwStop(result);
        break;
      case "startIat":
        startIat(call);
        result.success(true);
        break;
      case "iatStop":
        iatStop(result);
        break;
      default:
        result.notImplemented();
    }
  }

  /** Initializes the iFlytek SDK with the app id supplied from Dart. */
  public void init(MethodCall call) {
    SpeechUtility.createUtility(mContext, SpeechConstant.APPID + "=" + call.argument("APP_ID"));
  }

  int ret = 0; // Return code of the last SDK call.

  /**
   * Starts speech dictation without showing the UI dialog.
   * Engine type and language are read from the call arguments.
   */
  public void startIat(MethodCall call) {
    mIat = SpeechRecognizer.createRecognizer(mContext, mInitListener);

    // Dialog is created for the with-UI path; not shown below. See the SDK's
    // notice.txt for the layout/image resources it requires.
    mIatDialog = new RecognizerDialog(mContext, mInitListener);
    mSharedPreferences = mContext.getSharedPreferences("com.miao.xfspeech_example", Activity.MODE_PRIVATE);
    // Configure recognition parameters.
    setParam(call.argument("mEngineType"), call.argument("language"));
    // Listen without the dictation dialog.
    ret = mIat.startListening(mRecognizerListener);
    if (ret != ErrorCode.SUCCESS) {
      Log.d("TAG","听写失败,错误码：" + ret + ",请点击网址https://www.xfyun.cn/document/error-code查询解决方案");
    } else {
      Log.d("TAG","请开始说话");
    }
  }

  /** Stops dictation. Safe to call even if dictation was never started. */
  public void iatStop(Result result) {
    if (mIat != null) {
      mIat.stopListening();
    }
    result.success(true);
  }

  /**
   * SDK initialization listener; logs failures with the error-code lookup URL.
   */
  private InitListener mInitListener = new InitListener() {

    @Override
    public void onInit(int code) {
      Log.d("TAG", "SpeechRecognizer init() code = " + code);
      if (code != ErrorCode.SUCCESS) {
        Log.d("TAG", "初始化失败，错误码：" + code + ",请点击网址https://www.xfyun.cn/document/error-code查询解决方案");
      }
    }
  };

  /**
   * Configures dictation parameters on {@link #mIat}.
   *
   * @param engineType "local" or "cloud" (SpeechConstant.TYPE_LOCAL / TYPE_CLOUD)
   * @param language   "en_us" for English; any other value is treated as a
   *                   zh_cn accent id (e.g. "mandarin")
   */
  public void setParam(String engineType, String language) {
    // Clear previous parameters.
    mIat.setParameter(SpeechConstant.PARAMS, null);
    // BUGFIX: record the requested engine BEFORE writing ENGINE_TYPE; the old
    // code passed the stale previous value, so the first call always ran on
    // the default engine regardless of what the caller asked for.
    mEngineType = engineType;
    mIat.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType);
    // Return results as JSON.
    mIat.setParameter(SpeechConstant.RESULT_TYPE, "json");

    // Constant-first equals avoids an NPE when engineType is null.
    if (SpeechConstant.TYPE_LOCAL.equals(mEngineType)) {
      // Offline recognition resources.
      mIat.setParameter(ResourceUtil.ASR_RES_PATH, getResourcePath());
    }
    // Cloud dictation supports more minority languages; see the SDK's speechDemo.
    if ("en_us".equals(language)) {
      mIat.setParameter(SpeechConstant.LANGUAGE, "en_us");
      mIat.setParameter(SpeechConstant.ACCENT, null);
    } else {
      mIat.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
      // Accent/region within zh_cn.
      mIat.setParameter(SpeechConstant.ACCENT, language);
    }

    // Leading-silence timeout: how long the user may stay silent before a timeout.
    mIat.setParameter(SpeechConstant.VAD_BOS, mSharedPreferences.getString("iat_vadbos_preference", "4000"));

    // Trailing-silence timeout: silence length that ends recording automatically.
    mIat.setParameter(SpeechConstant.VAD_EOS, mSharedPreferences.getString("iat_vadeos_preference", "2000"));

    // Punctuation: "1" = punctuated results, "0" = none.
    mIat.setParameter(SpeechConstant.ASR_PTT, mSharedPreferences.getString("iat_punc_preference", "1"));

    // Save the captured audio (wav) under the app-private msc directory.
    mIat.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
    mIat.setParameter(SpeechConstant.ASR_AUDIO_PATH,
            mContext.getExternalFilesDir("msc").getAbsolutePath() + "/iat.wav");
  }

  /** Forwards an event map to Dart, tolerating a not-yet-attached sink. */
  private void emitToDart(Map<String, Object> map) {
    if (mEventSink != null) {
      mEventSink.success(map);
    }
  }

  /**
   * Dictation listener; forwards partial/final results and errors to Dart
   * with type "2".
   */
  private RecognizerListener mRecognizerListener = new RecognizerListener() {

    @Override
    public void onBeginOfSpeech() {
      // The SDK recorder is ready; the user may start speaking.
      Log.d("TAG","开始说话");
    }

    @Override
    public void onError(SpeechError error) {
      // Tip: error 10118 ("you did not speak") often means the RECORD_AUDIO
      // permission is denied — prompt the user to grant it.
      Log.d("TAG", error.getPlainDescription(true));
      Map<String, Object> map = new HashMap<>();
      map.put("transResult", error.getPlainDescription(false));
      map.put("resultString", null);
      map.put("success", false);
      map.put("type", "2");
      emitToDart(map);
    }

    @Override
    public void onEndOfSpeech() {
      // Trailing end-point detected; recognition continues, input is closed.
      Log.d("TAG","结束说话");
    }

    @Override
    public void onResult(RecognizerResult results, boolean isLast) {
      String text = JsonParser.parseIatResult(results.getResultString());
      Map<String, Object> map = new HashMap<>();
      map.put("transResult", text);
      map.put("result", results.getResultString());
      map.put("success", true);
      map.put("type", "2");
      emitToDart(map);
      if (isLast) {
        //TODO final result handling
      }
    }

    @Override
    public void onVolumeChanged(int volume, byte[] data) {
      // Intentionally quiet; enable for volume debugging.
    }

    @Override
    public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {
      // Cloud session id, useful for support when diagnosing failures.
      // Local engine sessions have a null id.
      if (SpeechEvent.EVENT_SESSION_ID == eventType) {
        // BUGFIX: the session id lives under KEY_EVENT_SESSION_ID; the old
        // code read KEY_EVENT_AUDIO_URL and always logged the wrong value.
        String sid = obj.getString(SpeechEvent.KEY_EVENT_SESSION_ID);
        Log.d("TAG", "session id =" + sid);
      }
    }
  };

  /** Cancels wake-up listening. Safe if the wakeuper was never created. */
  public void ivwCancel(Result result) {
    if (mIvw != null) {
      mIvw.cancel();
    }
    result.success(true);
  }

  /** Stops wake-up listening. Safe if the wakeuper was never created. */
  public void ivwStop(Result result) {
    if (mIvw != null) {
      mIvw.stopListening();
    }
    result.success(true);
  }

  /**
   * Configures and starts voice wake-up.
   * Optional call arguments: "keep_alive", "ivwNetMode"; required: "APP_ID"
   * (names the bundled ivw/&lt;APP_ID&gt;.jet resource).
   */
  public void startIvw(MethodCall call) {
    resultString = "";
    // Create (or fetch the existing) wakeuper instance.
    mIvw = VoiceWakeuper.createWakeuper(mContext, null);
    mIvw = VoiceWakeuper.getWakeuper();
    if (mIvw != null) {
      if (null != call.argument("keep_alive")) {
        keep_alive = call.argument("keep_alive");
      }
      if (null != call.argument("ivwNetMode")) {
        // BUGFIX: this previously assigned to keep_alive (copy-paste error),
        // so the net-mode argument clobbered keep_alive and IVW_NET_MODE
        // never changed from its default.
        ivwNetMode = call.argument("ivwNetMode");
      }
      // Clear previous parameters.
      mIvw.setParameter(SpeechConstant.PARAMS, null);
      // Wake-up thresholds could be set as "id:threshold;id:threshold":
      //mIvw.setParameter(SpeechConstant.IVW_THRESHOLD, "0:" + curThresh);
      // Wake-up-only mode.
      mIvw.setParameter(SpeechConstant.IVW_SST, "wakeup");
      // Keep listening after each wake-up hit.
      mIvw.setParameter(SpeechConstant.KEEP_ALIVE, keep_alive);
      // Closed-loop network-optimization mode.
      mIvw.setParameter(SpeechConstant.IVW_NET_MODE, ivwNetMode);
      // Wake-up resource bundled under assets/ivw/<APP_ID>.jet.
      mIvw.setParameter(SpeechConstant.IVW_RES_PATH, getResource(call.argument("APP_ID")));
      // Save the last minute of wake-up audio.
      mIvw.setParameter(SpeechConstant.IVW_AUDIO_PATH,
              mContext.getExternalFilesDir("msc").getAbsolutePath() + "/ivw.wav");
      mIvw.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
      // Set NOTIFY_RECORD_DATA to receive raw audio bytes via onEvent:
      //mIvw.setParameter(SpeechConstant.NOTIFY_RECORD_DATA, "1");
      mIvw.startListening(mWakeuperListener);
    } else {
      Toast.makeText(mContext, "唤醒未初始化", Toast.LENGTH_SHORT).show();
    }
  }

  /** Builds the ';'-joined offline ASR resource path (common + 16k sms). */
  private String getResourcePath() {
    StringBuffer tempBuffer = new StringBuffer();
    // Common recognition resource.
    tempBuffer.append(ResourceUtil.generateResourcePath(mContext, ResourceUtil.RESOURCE_TYPE.assets, "iat/common.jet"));
    tempBuffer.append(";");
    tempBuffer.append(ResourceUtil.generateResourcePath(mContext, ResourceUtil.RESOURCE_TYPE.assets, "iat/sms_16k.jet"));
    // For 8k audio, append the 8k resource here instead.
    return tempBuffer.toString();
  }

  /** Resolves the wake-up resource path for the given app id. */
  private String getResource(String appId) {
    final String resPath = ResourceUtil.generateResourcePath(mContext, ResourceUtil.RESOURCE_TYPE.assets, "ivw/" + appId + ".jet");
    Log.d("TAG", "resPath: " + resPath);
    return resPath;
  }

  /**
   * Wake-up listener; parses the JSON result and forwards a formatted summary
   * to Dart with type "1".
   */
  private WakeuperListener mWakeuperListener = new WakeuperListener() {

    @Override
    public void onResult(WakeuperResult result) {
      Log.d("TAG", "onResult");
      try {
        String text = result.getResultString();
        JSONObject object = new JSONObject(text);
        StringBuffer buffer = new StringBuffer();
        buffer.append("【RAW】 " + text);
        buffer.append("\n");
        buffer.append("【操作类型】" + object.optString("sst"));
        buffer.append("\n");
        buffer.append("【唤醒词id】" + object.optString("id"));
        buffer.append("\n");
        buffer.append("【得分】" + object.optString("score"));
        buffer.append("\n");
        buffer.append("【前端点】" + object.optString("bos"));
        buffer.append("\n");
        buffer.append("【尾端点】" + object.optString("eos"));
        resultString = buffer.toString();
        Map<String, Object> map = new HashMap<>();
        map.put("transResult", resultString);
        map.put("result", result.getResultString());
        map.put("success", true);
        map.put("type", "1");
        emitToDart(map);
      } catch (JSONException e) {
        resultString = "结果解析出错";
        Map<String, Object> map = new HashMap<>();
        map.put("transResult", "");
        map.put("result", e.getMessage());
        map.put("success", false);
        map.put("type", "1");
        emitToDart(map);
        e.printStackTrace();
      }
    }

    @Override
    public void onError(SpeechError error) {
      Toast.makeText(mContext, error.getPlainDescription(true), Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onBeginOfSpeech() {
    }

    @Override
    public void onEvent(int eventType, int isLast, int arg2, Bundle obj) {
      switch (eventType) {
        // EVENT_RECORD_DATA only fires when NOTIFY_RECORD_DATA is enabled.
        case SpeechEvent.EVENT_RECORD_DATA:
          final byte[] audio = obj.getByteArray(SpeechEvent.KEY_EVENT_RECORD_DATA);
          // Guard: Bundle.getByteArray may return null.
          if (audio != null) {
            Log.i("TAG", "ivw audio length: " + audio.length);
          }
          break;
      }
    }

    @Override
    public void onVolumeChanged(int volume) {
    }
  };

  @Override
  public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
    channel.setMethodCallHandler(null);
    // Also unregister the stream handler so the dead plugin stops receiving
    // onListen/onCancel callbacks.
    if (null != eventChannel) {
      eventChannel.setStreamHandler(null);
    }
    if (null != mIvw) {
      // Release SDK resources on teardown.
      mIvw.cancel();
      mIvw.destroy();
    }
    if (null != mIat) {
      // Release SDK resources on teardown.
      mIat.cancel();
      mIat.destroy();
    }
  }

  @Override
  public void onListen(Object arguments, EventChannel.EventSink events) {
    mEventSink = events;
  }

  @Override
  public void onCancel(Object arguments) {
    // Drop the sink so listeners stop pushing into a cancelled stream.
    mEventSink = null;
  }

  @Override
  public void onAttachedToActivity(@NonNull ActivityPluginBinding binding) {
    mactivity = binding.getActivity();
  }

  @Override
  public void onDetachedFromActivityForConfigChanges() {
  }

  @Override
  public void onReattachedToActivityForConfigChanges(@NonNull ActivityPluginBinding binding) {
    mactivity = binding.getActivity();
  }

  @Override
  public void onDetachedFromActivity() {
    // Release the Activity reference to avoid leaking it.
    mactivity = null;
  }
}
