//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
// <code>
package com.vieewer.btdemo.view.microsoft.audio;

import android.graphics.Paint;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.RequiresApi;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v4.view.animation.LinearOutSlowInInterpolator;
import android.view.KeyEvent;
import android.widget.*;
import com.lindroid.androidutilskt.extension.SpUtil;
import com.lindroid.androidutilskt.extension.ToastUtil;
import com.vieewer.btdemo.R;
import android.util.Log;
import android.view.View;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.microsoft.cognitiveservices.speech.*;
import com.microsoft.cognitiveservices.speech.translation.SpeechTranslationConfig;
import com.microsoft.cognitiveservices.speech.translation.TranslationRecognizer;
import com.vieewer.btdemo.base.BaseActivity;
import com.vieewer.btdemo.bean.TranslateBean;
import com.vieewer.btdemo.utils.XClickUtil;
import com.vieewer.btdemo.widget.WaveView;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static android.Manifest.permission.INTERNET;
import static android.Manifest.permission.RECORD_AUDIO;
import static com.vieewer.btdemo.view.microsoft.audio.TranslateHelper.prettify;

/**
 * Demo activity for Microsoft Cognitive Services speech translation.
 *
 * Supports two modes:
 *  - single-shot: {@link #onSpeechButtonClicked(View)} recognizes one utterance and
 *    translates it through the REST endpoint ({@link TranslateHelper});
 *  - continuous: {@link #onContinueRecognizeClicked(View)} / a hardware ENTER key
 *    starts a microphone session whose results stream in via SDK event listeners.
 *
 * All UI updates from worker threads and SDK callbacks go through {@link MyHandler}.
 */
public class TranslateAudioActivity extends BaseActivity {

    private static final String TAG = "yjj";

    // SECURITY NOTE(review): subscription keys should not be hard-coded in source
    // (and the old comments leaked several). Move them to BuildConfig / secure
    // storage before shipping. Replace below with your own subscription key.
    private static String speechSubscriptionKey = "32a88974600a4fc8bfcf078b4318852f"; // East Asia
    // Replace below with your own service region (e.g., "westus").
    private static String serviceRegion = "eastasia"; // East Asia

    // Last single-shot recognition result; read by translate().
    private SpeechRecognitionResult result;
    // Continuous-translation recognizer; created lazily, released in onDestroy().
    private TranslationRecognizer recognizer;
    // Result view; public because MyHandler (static nested class) writes to it.
    public TextView txt;
    private EditText edt;
    private WaveView mWaveView;
    private ImageView mStartTranslateImg;
    private boolean mCurViewFocused = false;
    private boolean mIsTranslating = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_translate);
        edt = (EditText) findViewById(R.id.edt);
        txt = (TextView) findViewById(R.id.txt_result);
        mStartTranslateImg = findViewById(R.id.btn_continuous_speech_translation);

        // Microphone and network permissions are both required for speech translation.
        final int requestCode = 5; // unique code for the permission request
        ActivityCompat.requestPermissions(this, new String[]{RECORD_AUDIO, INTERNET}, requestCode);

        initWaveView();
    }

    /** Configures the ripple animation shown while a translation session is active. */
    private void initWaveView() {
        mWaveView = (WaveView) findViewById(R.id.wave_view);
        mWaveView.setDuration(5000);
        mWaveView.setStyle(Paint.Style.FILL);
        mWaveView.setColor(ContextCompat.getColor(this, R.color.main_color));
        mWaveView.setInterpolator(new LinearOutSlowInInterpolator());
    }

    /**
     * Single-shot recognition: recognizes one utterance, then translates it.
     *
     * FIX: task.get() used to block the UI thread (the original comment admitted
     * as much); the SDK call now runs on a worker thread and the error message is
     * posted back via runOnUiThread. The recognizer was also leaked on the error
     * path — it is now closed in a finally block.
     */
    @RequiresApi(api = Build.VERSION_CODES.KITKAT)
    public void onSpeechButtonClicked(View v) {
        new Thread(() -> {
            SpeechRecognizer reco = null;
            try {
                SpeechConfig config = SpeechConfig.fromSubscription(speechSubscriptionKey, serviceRegion);
                config.setSpeechRecognitionLanguage("zh-CN");

                reco = new SpeechRecognizer(config);
                Future<SpeechRecognitionResult> task = reco.recognizeOnceAsync();
                result = task.get();

                if (result.getReason() == ResultReason.RecognizedSpeech) {
                    Log.i(TAG, "1111" + result.getText());
                    translate();
                } else {
                    final String error = "Error recognizing. Did you update the subscription info?"
                            + System.lineSeparator() + result.toString();
                    runOnUiThread(() -> txt.setText(error));
                }
            } catch (Exception ex) {
                // FIX: was assert(false) — asserts are disabled on Android; log instead.
                Log.e("SpeechSDKDemo", "unexpected " + ex.getMessage(), ex);
            } finally {
                if (reco != null) {
                    reco.close();
                }
            }
        }).start();
    }

    public void onTransButtonClicked(View v) {
        translate();
    }

    /**
     * Sends the last recognition result to the translation REST endpoint on a
     * worker thread (network on the main thread would throw
     * NetworkOnMainThreadException) and posts the first translation back to the
     * UI via mHandler (msg.what == 1).
     */
    private void translate() {
        new Thread(() -> {
            try {
                TranslateHelper translateRequest = new TranslateHelper();
                String content = "[{\n\t\"Text\": \"" + result.getText() + "\"\n}]";
                String response = translateRequest.Post(content);
                // FIX: the local was named "result", shadowing the field of the same name.
                String prettyJson = prettify(response);
                Log.i(TAG, prettyJson);
                List<TranslateBean> list =
                        new Gson().fromJson(prettyJson, new TypeToken<List<TranslateBean>>() {}.getType());

                if (list != null && !list.isEmpty()) {
                    String translatedStr = list.get(0).getTranslations().get(0).getText();
                    Message msg = mHandler.obtainMessage();
                    msg.what = 1;
                    msg.obj = translatedStr;
                    mHandler.sendMessage(msg);
                    Log.i(TAG, translatedStr);
                }
            } catch (Exception e) {
                // FIX: was System.out.print(e) — use the Android log, with stack trace.
                Log.e(TAG, "translate failed", e);
            }
        }).start();
    }

    public void onContinueRecognizeClicked(View v) {
        onStartTranslate();
    }

    /**
     * Starts a continuous translation session, debounced to one start per
     * 3 seconds via XClickUtil.
     *
     * FIX: the original worker thread immediately hopped back onto the UI thread
     * with runOnUiThread, so the blocking startContinuousRecognitionAsync().get()
     * still ran on the UI thread — defeating its own "prevent ANR" comment. The
     * SDK setup now genuinely runs on the worker thread.
     */
    private void onStartTranslate() {
        if (!XClickUtil.isFastDoubleClick(mStartTranslateImg, 3 * 1000)) {
            mWaveView.start();

            new Thread(() -> {
                try {
                    translationWithMicrophoneAsync();
                } catch (Exception ex) {
                    Log.e(TAG, "Unexpected exception: " + ex.getMessage(), ex);
                }
            }).start();
        } else {
            ToastUtil.shortToast("正在翻译");
        }
    }

    /**
     * Creates a TranslationRecognizer bound to the microphone, wires up the SDK
     * event listeners, and starts continuous recognition.
     *
     * Source/target languages come from SharedPreferences. Results reach the UI
     * through mHandler: what == 2 translated text, 3 "start speaking" toast,
     * 4 session ended. Call from a worker thread —
     * startContinuousRecognitionAsync().get() blocks until the session starts.
     *
     * @throws InterruptedException if the blocking start is interrupted
     * @throws ExecutionException   if the SDK fails to start the session
     * @throws IOException          declared for interface compatibility
     */
    public void translationWithMicrophoneAsync() throws InterruptedException, ExecutionException, IOException {
        SpeechTranslationConfig config = SpeechTranslationConfig.fromSubscription(speechSubscriptionKey, serviceRegion);

        // Source and target languages, persisted by the language-selection screen.
        String fromLanguage = SpUtil.getSpString("SP_KEY_ORIGIN_LANGUAGE", "en-US");
        String targetLanguage = SpUtil.getSpString("SP_KEY_TARGET_LANGUAGE", "zh-Hans");
        Log.i(TAG, "语音翻译 fromLanguage" + fromLanguage + ", targetLanguage =" + targetLanguage);

        config.setSpeechRecognitionLanguage(fromLanguage);
        config.addTargetLanguage(targetLanguage);

        // Voice used for synthesized audio output.
        String germanVoice = "Microsoft Server Speech Text to Speech Voice (de-DE, Hedda)";
        config.setVoiceName(germanVoice);

        recognizer = new TranslationRecognizer(config);

        // Partial hypotheses — logged only.
        recognizer.recognizing.addEventListener((s, e) -> {
            Log.i(TAG, "RECOGNIZING in '" + fromLanguage + "': Text=" + e.getResult().getText());
            Map<String, String> map = e.getResult().getTranslations();
            for (Map.Entry<String, String> entry : map.entrySet()) {
                Log.i(TAG, "    TRANSLATING into '" + entry.getKey() + "': " + entry.getValue());
            }
        });

        // Final results — concatenate every non-empty target translation and show it.
        recognizer.recognized.addEventListener((s, e) -> {
            if (e.getResult().getReason() == ResultReason.TranslatedSpeech) {
                Log.i(TAG, "RECOGNIZED in '" + fromLanguage + "': Text=" + e.getResult().getText());

                Map<String, String> map = e.getResult().getTranslations();
                StringBuilder sb = new StringBuilder();
                for (Map.Entry<String, String> entry : map.entrySet()) {
                    if (!entry.getValue().equals("")) {
                        sb.append(entry.getValue());
                    }
                    Log.i(TAG, "    TRANSLATED into '" + entry.getKey() + "': " + entry.getValue());
                }
                // FIX: the single-entry case used map.values().toString(), which
                // wrapped the text in "[...]" brackets (hence the old "[]" check);
                // all sizes now go through the same StringBuilder path.
                String content = sb.toString();

                if (!content.equals("")) {
                    Message msg = mHandler.obtainMessage();
                    msg.what = 2;
                    msg.obj = content;
                    mHandler.sendMessage(msg);
                    Log.i(TAG, "continue recognize success" + content);
                }
                Log.i(TAG, " size >>>>>> " + map.size());
            }
            if (e.getResult().getReason() == ResultReason.RecognizedSpeech) {
                Log.i(TAG, "RECOGNIZED: Text=" + e.getResult().getText());
                Log.i(TAG, "    Speech not translated.");
            } else if (e.getResult().getReason() == ResultReason.NoMatch) {
                Log.i(TAG, "NOMATCH: Speech could not be recognized.");
            }
        });

        recognizer.synthesizing.addEventListener((s, e) -> {
            Log.i(TAG, "Synthesis result received. Size of audio data: " + e.getResult().getAudio().length);
        });

        recognizer.canceled.addEventListener((s, e) -> {
            Log.i(TAG, "CANCELED: Reason=" + e.getReason());
            if (e.getReason() == CancellationReason.Error) {
                Log.i(TAG, "CANCELED: ErrorCode=" + e.getErrorCode());
                Log.i(TAG, "CANCELED: ErrorDetails=" + e.getErrorDetails());
                Log.i(TAG, "CANCELED: Did you update the subscription info?");
            }
        });

        recognizer.sessionStarted.addEventListener((s, e) -> {
            Log.i(TAG, "\nSession started event.");
        });

        recognizer.sessionStopped.addEventListener((s, e) -> {
            Log.i(TAG, "\nSession stopped event.");
            // Reset the result view once the session is over.
            Message msg = mHandler.obtainMessage();
            msg.what = 4;
            mHandler.sendMessage(msg);
        });

        Log.i(TAG, "Say something...");

        // Toast on the UI thread telling the user which language to speak.
        Message msg = mHandler.obtainMessage();
        msg.what = 3;
        mHandler.sendMessage(msg);

        // Blocks until the session is up; stopped via stopContinuousRecognitionAsync().
        recognizer.startContinuousRecognitionAsync().get();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        Log.i(TAG, "Press any key to stop");
        // FIX: stop the animation before dropping the reference (was only nulled).
        if (mWaveView != null) {
            mWaveView.stop();
            mWaveView = null;
        }
        if (recognizer != null) {
            recognizer.stopContinuousRecognitionAsync();
            recognizer.close();
            recognizer = null;
        }
    }

    /**
     * Stops the running translation session, debounced like onStartTranslate.
     * The blocking stop call runs on a worker thread to avoid an ANR.
     */
    private void onStopTranslate() {
        if (!XClickUtil.isFastDoubleClick(txt, 3 * 1000)) {
            if (mWaveView != null) {
                mWaveView.stop();
            }
            if (recognizer != null) {
                new Thread(() -> {
                    try {
                        Log.i(TAG, "stop translate ~~~~");
                        recognizer.stopContinuousRecognitionAsync().get();
                    } catch (Exception e) {
                        // FIX: was printStackTrace() in three separate catches.
                        Log.e(TAG, "stop translate failed", e);
                    }
                }).start();
            }
        } else {
            ToastUtil.shortToast("正在结束翻译");
        }
    }

    private final MyHandler mHandler = new MyHandler(this);

    /**
     * Static handler with a WeakReference to the activity so queued messages
     * cannot leak it.
     *
     * FIX: messages 3 and 4 used mActivity.get() without a null check and could
     * NPE once the activity had been collected; all messages are now dropped
     * when the activity is gone.
     */
    private static class MyHandler extends Handler {
        private final WeakReference<TranslateAudioActivity> mActivity;

        MyHandler(TranslateAudioActivity activity) {
            mActivity = new WeakReference<>(activity);
        }

        @Override
        public void handleMessage(Message msg) {
            TranslateAudioActivity activity = mActivity.get();
            if (activity == null) {
                return; // Activity already destroyed; drop the message.
            }
            switch (msg.what) {
                case 1: // single-shot translation result
                case 2: // continuous translation result
                    activity.txt.setText(msg.obj.toString());
                    break;
                case 3: // session starting: tell the user which language to speak
                    String speakLanguage = SpUtil.getSpString("SP_KEY_ORIGIN_LANGUAGE_CNA", "英文");
                    Toast.makeText(activity, "请说" + speakLanguage, Toast.LENGTH_SHORT).show();
                    break;
                case 4: // session ended: reset the result view
                    activity.txt.setText("翻译结果：");
                    break;
                default:
                    break;
            }
        }
    }

    @Override
    public void onWindowFocusChanged(boolean hasFocus) {
        super.onWindowFocusChanged(hasFocus);
        mCurViewFocused = hasFocus;
    }

    /** A hardware ENTER key toggles translation while the window has focus. */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // FIX: 66 was a magic number — use the named constant.
        if (keyCode == KeyEvent.KEYCODE_ENTER && mCurViewFocused) {
            operateTranslate();
        }
        return super.onKeyDown(keyCode, event);
    }

    /** Toggles between starting and stopping a continuous translation session. */
    private void operateTranslate() {
        Log.i(TAG, "onKeyDown TranslateActivity" + mIsTranslating);
        if (mIsTranslating) {
            onStopTranslate();
        } else {
            onStartTranslate();
        }
        mIsTranslating = !mIsTranslating;
    }
}

