package com.zhanlv.socketnet;

import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;

import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;

import com.aibote.human.ZhanLvBote;

/**
 * Activity that hosts a "digital human" avatar: initializes the ZhanLvBote SDK,
 * receives rendered frame bitmaps through {@link #humanHandler}, and sends
 * text-to-speech requests on demand.
 */
public class DHActivity extends AppCompatActivity {

    // Was "HomeActivity" — mismatched this class and made logcat filtering misleading.
    private static final String TAG = "DHActivity";

    /** msg.what posted by the SDK when a rendered frame Bitmap is ready (0x102). */
    private static final int MSG_GOT_BITMAP = 258;
    /** Second high-frequency per-frame message; filtered from logging like MSG_GOT_BITMAP. */
    private static final int MSG_FRAME_TICK = 260;

    ImageView dhImageView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_home);

        dhImageView = findViewById(R.id.dh_imageview);

        initDigitalHuman();

        findViewById(R.id.btn_test).setOnClickListener(view -> speak("你好啊 世界"));
    }

    /**
     * Sends {@code text} to the digital-human TTS pipeline.
     * Runs on a worker thread because {@code ZhanLvBote.humanSpeak} performs
     * network/synthesis work that must not block the main thread.
     *
     * @param text the sentence the avatar should speak
     */
    void speak(String text) {
        new Thread(() -> {
            Log.i(TAG, "========= 开始说话 " + System.currentTimeMillis() + " ==========");
            Log.i(TAG, "execute text start: " + text); // fixed "excute" typo
            ZhanLvBote.humanSpeak(
                    "test_audio_" + System.currentTimeMillis() + ".mp3", // output audio file name
                    text,
                    "zh-cn",                            // language
                    "zh-cn-XiaoxiaoMultilingualNeural", // voice
                    0,
                    "Newscast",                         // speaking style
                    1,
                    true);
            Log.i(TAG, "execute text end: " + text);
        }).start();
    }

    // Explicitly bound to the main looper: the no-arg Handler() constructor is
    // deprecated (API 30) and binds to whichever thread constructs the activity,
    // while setImageBitmap below must run on the UI thread.
    // NOTE(review): as an anonymous inner class this handler keeps an implicit
    // reference to the Activity; messages delivered after destroy can leak it.
    // Consider a static handler + WeakReference and calling
    // humanHandler.removeCallbacksAndMessages(null) in onDestroy().
    Handler humanHandler = new Handler(Looper.getMainLooper()) {
        @Override
        public void handleMessage(@NonNull Message msg) {
            super.handleMessage(msg);

            // 258/260 arrive every frame — skip logging those to avoid log spam.
            if (msg.what != MSG_GOT_BITMAP && msg.what != MSG_FRAME_TICK) {
                Log.i("wooran", "human handler msg: " + msg.what + " - " + msg.obj);
            }

            if (msg.what == MSG_GOT_BITMAP) {
                // Latest rendered avatar frame — display it.
                dhImageView.setImageBitmap((Bitmap) msg.obj);
            }
        }
    };

    /**
     * Initializes the digital-human SDK on a worker thread ({@code ZhanLvBote.init}
     * presumably downloads the model and handshakes with the servers — blocking work
     * that must stay off the main thread).
     */
    void initDigitalHuman() {
        // 139.196.17.31 is the remote host; 127.0.0.1 for local testing.
        final String serverIp = "127.0.0.1"; // TTS + GPT server IP
        final int speakPort = 6667;          // TTS port
        final int gptPort = 6666;            // GPT port
        // Alternative models:
        //   https://qn.ieasysell.com/virtual_human/1.3/dayaobaiqun/metahumanMode.bin
        //   https://qn.ieasysell.com/virtual_human/1.3/meilin/metahumanMode.bin
        final String modelUrl =
                "https://qn.ieasysell.com/virtual_human/1.3/dayaobaiqun/metahumanMode.bin";
        final String modelName = "dayaobaiqun";
        final String speechKey = "3da4b16c14cd4a1ea8040ab8b1ef2300"; // Microsoft Speech key
        final String region = "southeastasia"; // key region

        new Thread(() -> {
            // Kept as Boolean: ZhanLvBote.init's signature is not visible here and a
            // boxed null return would NPE on unboxing — TODO confirm and switch to boolean.
            Boolean res = ZhanLvBote.init(DHActivity.this, humanHandler, serverIp, speakPort, gptPort,
                    modelUrl, modelName, speechKey, region, 0.7, 0.7);

            Log.i("wooran", "数字人初始化 结果: " + res + " " + ZhanLvBote.getUserId());
        }).start();
    }
}