package org.tensorflow.lite.examples.detection.fragement;

import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.text.SpannableStringBuilder;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import com.android.volley.DefaultRetryPolicy;
import com.android.volley.NetworkResponse;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.RetryPolicy;
import com.android.volley.VolleyError;
import com.android.volley.VolleyLog;
import com.android.volley.toolbox.HttpHeaderParser;
import com.android.volley.toolbox.StringRequest;
import com.android.volley.toolbox.Volley;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iflytek.cloud.ErrorCode;
import com.iflytek.cloud.InitListener;
import com.iflytek.cloud.RecognizerListener;
import com.iflytek.cloud.RecognizerResult;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.SpeechError;
import com.iflytek.cloud.SpeechRecognizer;
import com.iflytek.cloud.SpeechSynthesizer;
import com.iflytek.cloud.ui.RecognizerDialog;
import com.iflytek.cloud.ui.RecognizerDialogListener;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.tensorflow.lite.examples.detection.R;
import org.tensorflow.lite.examples.detection.gpt.domain.ChatGptResponse;
import org.tensorflow.lite.examples.detection.speech.manager.TTSManager;
import org.tensorflow.lite.examples.detection.speech.setting.IatSettings;
import org.tensorflow.lite.examples.detection.speech.utils.JsonParser;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * A {@link Fragment} that implements a voice chat loop: it records the user's
 * speech (iFlytek IAT speech-to-text), sends the transcript to the OpenAI chat
 * API, and speaks the answer back via {@link TTSManager}.
 * 主要参考speechDemo中的IatDemo编写 (based on IatDemo from the iFlytek speech demo).
 */
public class ChattingFragment extends Fragment implements TTSManager.OnSpeakProgressListener {

    private static final String TAG = ChattingFragment.class.getSimpleName();

    // =============================
    // Shared state
    // =============================
    private Toast mToast;
    private SharedPreferences mSharedPreferences;
    // Recognition engine type (cloud vs. local).
    private String mEngineType = SpeechConstant.TYPE_CLOUD;

    private Activity activity;
    private Context context;
    private TTSManager ttsManager;

    // =============================
    // Speech-to-text (IAT) state
    // =============================
    // Recognizer without built-in UI.
    private SpeechRecognizer mIat;
    // Recognizer with the SDK-provided dialog UI.
    private RecognizerDialog mIatDialog;
    // Accumulates partial recognition results, keyed by sentence id ("sn");
    // LinkedHashMap preserves arrival order so the transcript reads correctly.
    private HashMap<String, String> mIatResults = new LinkedHashMap<>();
    private EditText mResultText;
    private EditText mLLMText;
    private EditText showContacts;
    private TextView languageText;
    private String[] languageEntries;
    private String[] languageValues;
    private String language = "zh_cn";
    private int selectedNum = 0;
    // Result format requested from the SDK: "json" or "plain".
    private String resultType = "json";
    private StringBuffer buffer = new StringBuffer();

    // =============================
    // View components
    // =============================
    private Button talkButton;
    private Button talkFlowButton;

    // Return code of the most recent SDK call.
    int ret = 0;

    public ChattingFragment() {
        // Required empty public constructor.
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        return inflater.inflate(R.layout.iatdemo, container, false);
    }

    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        activity = getActivity();
        context = getContext();

        if (activity != null && context != null) {
            mSharedPreferences = activity.getSharedPreferences(IatSettings.PREFER_NAME, Activity.MODE_PRIVATE);
        } else {
            // Fragment is detached; the listeners below will not be usable.
            Log.d(TAG,"无法获取Activity对象或者Context对象");
        }

        // Bind views.
        mResultText = view.findViewById(R.id.iat_text);
        showContacts = view.findViewById(R.id.iat_contacts);
        mLLMText = view.findViewById(R.id.LLM_text);
        talkButton = view.findViewById(R.id.iat_recognize);
        talkFlowButton = view.findViewById(R.id.iat_recognize_stream);

        // Recognizer without UI; results are delivered to mRecognizerListener.
        mIat = SpeechRecognizer.createRecognizer(context, mInitListener);
        // Dialog-based recognizer; only needed when the "show dialog" pref is on.
        mIatDialog = new RecognizerDialog(context, mInitListener);
        // Text-to-speech engine used to read answers back to the user.
        ttsManager = new TTSManager(context, activity, mSharedPreferences);
        ttsManager.setOnSpeakProgressListener(this);

        talkBtnClick(talkButton);
    }

    /**
     * Wires the "start listening" button: resets transcript state, applies the
     * IAT parameters and starts either the dialog-based or silent recognizer.
     *
     * @param talkButton the button that triggers recognition
     */
    private void talkBtnClick(Button talkButton) {
        talkButton.setOnClickListener(v -> {
            // Disable until the session finishes so taps can't overlap.
            talkButton.setEnabled(false);
            talkButton.setText("我在听，您请说……");
            // Reset accumulated transcript state.
            buffer.setLength(0);
            mResultText.setText(null);
            mIatResults.clear();
            setIatParam();

            boolean isShowDialog = mSharedPreferences.getBoolean(
                    getString(R.string.pref_key_iat_show), false);
            if (isShowDialog) {
                // Use the SDK dialog UI.
                mIatDialog.setListener(mRecognizerDialogListener);
                mIatDialog.show();
                showTip(getString(R.string.text_begin));
            } else {
                // Silent recognition with our own listener.
                ret = mIat.startListening(mRecognizerListener);
                if (ret != ErrorCode.SUCCESS) {
                    showTip("听写失败,错误码：" + ret + ",请点击网址https://www.xfyun.cn/document/error-code查询解决方案");
                } else {
                    showTip(getString(R.string.text_begin));
                }
            }
        });
    }

    /**
     * Init listener for the iFlytek SDK; logs failures to logcat
     * (the original used System.out, which is invisible on Android).
     */
    private InitListener mInitListener = code -> {
        Log.d(TAG, "SpeechRecognizer init() code = " + code);
        if (code != ErrorCode.SUCCESS) {
            Log.e(TAG, "初始化失败，错误码：" + code + ",请点击网址https://www.xfyun.cn/document/error-code查询解决方案");
        }
    };

    /**
     * Configures the speech recognizer (engine, language, endpointing,
     * punctuation and audio dump path) from shared preferences.
     */
    public void setIatParam() {
        // Clear previous parameters.
        mIat.setParameter(SpeechConstant.PARAMS, null);
        // Engine type (cloud/local).
        mIat.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType);
        // Result format ("json" or "plain").
        mIat.setParameter(SpeechConstant.RESULT_TYPE, resultType);

        if (language.equals("zh_cn")) {
            String lag = mSharedPreferences.getString("iat_language_preference",
                    "mandarin");
            Log.e(TAG, "language = " + language);
            mIat.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
            // Regional accent (e.g. mandarin).
            mIat.setParameter(SpeechConstant.ACCENT, lag);
        } else {
            mIat.setParameter(SpeechConstant.LANGUAGE, language);
        }
        Log.e(TAG, "last language:" + mIat.getParameter(SpeechConstant.LANGUAGE));

        // Leading silence timeout: how long the user may stay quiet before
        // the session is considered timed out.
        mIat.setParameter(SpeechConstant.VAD_BOS, mSharedPreferences.getString("iat_vadbos_preference", "4000"));

        // Trailing silence timeout: how long after the user stops talking
        // recording is automatically ended.
        mIat.setParameter(SpeechConstant.VAD_EOS, mSharedPreferences.getString("iat_vadeos_preference", "1000"));

        // Punctuation: "0" = none, "1" = punctuated results.
        mIat.setParameter(SpeechConstant.ASR_PTT, mSharedPreferences.getString("iat_punc_preference", "1"));

        // Dump the recorded audio as WAV for debugging.
        mIat.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");

        // NOTE: `assert` is disabled on Android at runtime, so the original
        // `assert dir != null` never protected against a null dir.
        File dir = context.getExternalFilesDir("msc");
        if (dir != null) {
            mIat.setParameter(SpeechConstant.ASR_AUDIO_PATH,
                    dir.getAbsolutePath() + "/iat.wav");
        } else {
            Log.w(TAG, "getExternalFilesDir returned null; audio will not be saved");
        }
    }

    /**
     * Recognition listener for silent (no-dialog) mode. Accumulates partial
     * results and, on the final chunk, forwards the full transcript to the LLM.
     */
    private RecognizerListener mRecognizerListener = new RecognizerListener() {

        // Full transcript accumulated so far (json result mode).
        private String lastIatText = null;

        @Override
        public void onBeginOfSpeech() {
            // SDK recorder is ready; the user may start speaking.
            showTip("开始说话");
        }

        @Override
        public void onEndOfSpeech() {
            // Trailing endpoint detected; recognition continues, input stops.
            showTip("结束说话");
            ttsManager.textToSpeech("了解您的问题！请稍等……");
        }

        /**
         * See https://www.xfyun.cn/doc/mscapi/Android/androidrecognizer.html#onresult
         * @param results partial or final recognition result
         * @param isLast  true when this is the final chunk of the session
         */
        @Override
        public void onResult(RecognizerResult results, boolean isLast) {
            Log.d(TAG, results.getResultString());

            // BUG FIX: parse the current chunk BEFORE acting on isLast. The
            // original sent the question first and only parsed the final chunk
            // afterwards, so the user's last words were dropped from the request.
            if (resultType.equals("json")) {
                lastIatText = printResult(results);
            } else if (resultType.equals("plain")) {
                buffer.append(results.getResultString());
                mResultText.setText(buffer.toString());
                mResultText.setSelection(mResultText.length());
            }

            if (isLast) {
                Log.d(TAG, "已经是最后一次结果");
                askLLM(lastIatText);
                talkButton.setEnabled(true);
                talkButton.setText("点击说话");
            }
        }

        @Override
        public void onVolumeChanged(int volume, byte[] data) {
            // Intentionally silent; enable for a volume tip if needed:
            // showTip("当前正在说话，音量大小 = " + volume + " 返回音频数据 = " + data.length);
        }

        @Override
        public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {
            // The cloud session id can be extracted here for support/debugging:
            //	if (SpeechEvent.EVENT_SESSION_ID == eventType) {
            //		String sid = obj.getString(SpeechEvent.KEY_EVENT_SESSION_ID);
            //		Log.d(TAG, "session id =" + sid);
            //	}
        }

        @Override
        public void onError(SpeechError error) {
            // Error 10118 ("you did not speak") usually means the RECORD_AUDIO
            // permission is denied; prompt the user to grant it.
            Log.d(TAG, "onError " + error.getPlainDescription(true));
            showTip(error.getPlainDescription(true));
        }
    };

    /**
     * Sends the recognized question to the OpenAI chat-completions endpoint.
     * The response is handled on the main thread by {@link #handleLLMResponse}.
     *
     * @param question the transcript to ask about (may be null if nothing was heard)
     */
    private void askLLM(String question) {
        try {
            RequestQueue requestQueue = Volley.newRequestQueue(context);
            String url = "https://api.openai.com/v1/chat/completions";
            JSONObject jsonBody = new JSONObject();
            jsonBody.put("model", "gpt-3.5-turbo");
            jsonBody.put("messages", LLMprompt(question));

            final String requestBody = jsonBody.toString();

            StringRequest stringRequest = new StringRequest(Request.Method.POST, url,
                    this::handleLLMResponse,
                    error -> Log.i("VOLLEY", String.valueOf(error))) {
                @Override
                public String getBodyContentType() {
                    return "application/json; charset=utf-8";
                }

                @Override
                public Map<String, String> getHeaders() {
                    Map<String, String> headers = new HashMap<>();
                    headers.put("Content-Type", "application/json");
                    // SECURITY: hard-coded API key checked into source. It should
                    // be revoked and loaded from secure config (e.g. BuildConfig
                    // from local.properties), never committed.
                    headers.put("Authorization", "Bearer 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");
                    return headers;
                }

                @Override
                public byte[] getBody() {
                    // StandardCharsets avoids the checked UnsupportedEncodingException.
                    return requestBody.getBytes(StandardCharsets.UTF_8);
                }

                @Override
                protected Response<String> parseNetworkResponse(NetworkResponse response) {
                    // Force UTF-8 decoding; Volley's default charset fallback
                    // (ISO-8859-1) garbles Chinese text. All UI/TTS work is done
                    // in the listener on the main thread, not here on a worker.
                    String body = new String(response.data, StandardCharsets.UTF_8);
                    return Response.success(body, HttpHeaderParser.parseCacheHeaders(response));
                }
            };

            // BUG FIX: the original anonymous RetryPolicy reported a retry COUNT
            // of 50000, causing effectively unbounded silent retries. Use the
            // standard policy: 50 s timeout, 1 retry, no backoff multiplier.
            stringRequest.setRetryPolicy(new DefaultRetryPolicy(50000, 1, 1f));
            requestQueue.add(stringRequest);

        } catch (JSONException e) {
            e.printStackTrace();
        }
    }

    /**
     * Parses the chat-completions JSON, shows the answer and speaks it.
     * Runs on the main thread (Volley delivers responses there).
     *
     * @param responseString raw UTF-8 JSON body from the API
     */
    private void handleLLMResponse(String responseString) {
        Log.i("VOLLEY", "请求成功！");
        ObjectMapper mapper = new ObjectMapper();
        try {
            ChatGptResponse resp = mapper.readValue(responseString, ChatGptResponse.class);
            String aiResponse = resp.getChoices().get(0).getMessage().getContent();
            mLLMText.setText(aiResponse);
            // Read the answer aloud.
            ttsManager.textToSpeech(aiResponse);
        } catch (IOException e) {
            // Malformed/unexpected payload: log instead of crashing the app
            // (the original threw RuntimeException from a worker thread).
            Log.e(TAG, "Failed to parse LLM response", e);
        }
    }

    /**
     * Dialog-mode recognition listener.
     */
    private RecognizerDialogListener mRecognizerDialogListener = new RecognizerDialogListener() {
        // Recognition result callback (currently unused in dialog mode).
        public void onResult(RecognizerResult results, boolean isLast) {
//            printResult(results);
        }

        // Recognition error callback.
        public void onError(SpeechError error) {
            showTip(error.getPlainDescription(true));
        }

    };

    /**
     * Merges one recognition chunk into the accumulated transcript, shows the
     * updated transcript and returns it.
     *
     * @param results one chunk of json-format recognition output
     * @return the full transcript accumulated so far
     */
    private String printResult(RecognizerResult results) {
        String text = JsonParser.parseIatResult(results.getResultString());
        String sn = null;
        // "sn" is the sentence sequence number used as the merge key.
        try {
            JSONObject resultJson = new JSONObject(results.getResultString());
            sn = resultJson.optString("sn");
        } catch (JSONException e) {
            e.printStackTrace();
        }

        mIatResults.put(sn, text);

        StringBuilder resultBuffer = new StringBuilder();
        for (String key : mIatResults.keySet()) {
            resultBuffer.append(mIatResults.get(key));
        }

        String iatResult = resultBuffer.toString();
        // FIX: set the text before moving the cursor; the original moved the
        // cursor against the OLD text and only then replaced it.
        mResultText.setText(iatResult);
        mResultText.setSelection(mResultText.length());
        return iatResult;
    }

    /**
     * Builds the chat-completions "messages" array: a system persona, one
     * few-shot exchange, and the user's question.
     *
     * @param question the user's transcribed question
     * @return the messages JSONArray for the request body
     */
    private JSONArray LLMprompt(String question) {
        try {
            JSONArray jsonArray = new JSONArray();

            JSONObject systemObject = new JSONObject();
            systemObject.put("role", "system");
            systemObject.put("content", "你是一位名叫AI看见助手的一款聊天机器人、主要帮助盲人去识人、辨物、避障、导航等。你只会说中文。");
            jsonArray.put(systemObject);

            JSONObject userObject_1 = new JSONObject();
            userObject_1.put("role", "user");
            userObject_1.put("content", "你叫什么名字？");
            jsonArray.put(userObject_1);

            JSONObject assistantObject = new JSONObject();
            assistantObject.put("role", "assistant");
            assistantObject.put("content", "我叫AI看见助手,是一名专注于帮助盲人的聊天机器人");
            jsonArray.put(assistantObject);

            JSONObject userQuestion = new JSONObject();
            userQuestion.put("role", "user");
            userQuestion.put("content", question);
            jsonArray.put(userQuestion);

            return jsonArray;
        } catch (JSONException e) {
            Log.i("ERROR", "JSONObject操作失败!");
            throw new RuntimeException(e);
        }
    }

    /**
     * TTS progress callback: highlights the portion currently being spoken.
     */
    @Override
    public void onSpeakProgress(int percent, int beginPos, int endPos, SpannableStringBuilder style) {
        mLLMText.setText(style);
    }

    /**
     * Appends raw bytes to a file, creating it if necessary.
     * Uses try-with-resources so the handle is closed even on write failure
     * (the original leaked it when write() threw).
     *
     * @param file   destination file
     * @param buffer bytes to append
     */
    private void appendFile(File file, byte[] buffer) {
        try {
            if (!file.exists() && !file.createNewFile()) {
                Log.w(TAG, "Could not create file: " + file);
            }
            try (RandomAccessFile randomFile = new RandomAccessFile(file, "rw")) {
                randomFile.seek(file.length());
                randomFile.write(buffer);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Shows a short toast, cancelling any toast still on screen.
     *
     * @param str message to display
     */
    private void showTip(final String str) {
        if (activity == null) {
            // Fragment detached; nothing to show the toast on.
            return;
        }
        Context appContext = activity.getApplicationContext();
        if (mToast != null) {
            mToast.cancel();
        }
        mToast = Toast.makeText(appContext, str, Toast.LENGTH_SHORT);
        mToast.show();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Release the recognizer session and its resources.
        if (null != mIat) {
            mIat.cancel();
            mIat.destroy();
        }
    }

}