package com.lhd.hms.face;

import androidx.appcompat.app.AppCompatActivity;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import com.huawei.hmf.tasks.OnFailureListener;
import com.huawei.hmf.tasks.OnSuccessListener;
import com.huawei.hmf.tasks.Task;
import com.huawei.hms.mlsdk.MLAnalyzerFactory;
import com.huawei.hms.mlsdk.common.MLFrame;
import com.huawei.hms.mlsdk.face.MLFace;
import com.huawei.hms.mlsdk.face.MLFaceAnalyzer;
import com.huawei.hms.mlsdk.face.MLFaceAnalyzerSetting;
import com.huawei.hms.mlsdk.face.MLFaceEmotion;
import com.lhd.hms.R;

import java.util.List;

/**
 * Demo screen that runs HMS ML Kit face detection on a bundled sample image
 * ({@code R.drawable.head}) and shows the detected age and emotion labels.
 */
public class FaceAnalyzerActivity extends AppCompatActivity {
    private static final String TAG = "FaceAnalyzerActivity";
    /** Probability above which an emotion is considered present. */
    private static final float EMOTION_THRESHOLD = 0.5f;

    TextView mTvResult;
    ImageView mIvImage;
    Button mBtnAnalyzer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_face_analyzer);
        initView();
    }

    /**
     * Binds views and wires the analyze button: decodes the sample bitmap,
     * runs the face analyzer on it asynchronously, and renders the result
     * into {@code mTvResult}.
     */
    private void initView() {
        mTvResult = findViewById(R.id.tv_result);
        mBtnAnalyzer = findViewById(R.id.btn_analyzer);
        // FIX: mIvImage was declared but never initialized; any use would NPE.
        // NOTE(review): assumes the layout declares an ImageView with id iv_image — confirm.
        mIvImage = findViewById(R.id.iv_image);

        mBtnAnalyzer.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                final MLFaceAnalyzer analyzer = MLAnalyzerFactory.getInstance().getFaceAnalyzer();
                Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.head);
                if (mIvImage != null) {
                    // Show the image that is being analyzed.
                    mIvImage.setImageBitmap(bitmap);
                }
                MLFrame frame = MLFrame.fromBitmap(bitmap);
                Task<List<MLFace>> task = analyzer.asyncAnalyseFrame(frame);
                task.addOnSuccessListener(new OnSuccessListener<List<MLFace>>() {
                    @Override
                    public void onSuccess(List<MLFace> mlFaces) {
                        // FIX: report the "no face found" case instead of silently doing nothing.
                        if (mlFaces.isEmpty()) {
                            mTvResult.setText("未检测到人脸");
                        } else {
                            MLFace mlFace = mlFaces.get(0);
                            MLFaceEmotion emotions = mlFace.getEmotions();
                            // FIX: build the result once instead of two setText calls in a row.
                            mTvResult.setText("年龄:" + mlFace.getFeatures().getAge()
                                    + "\n" + "表情:" + getEmotion(emotions));
                        }
                        releaseAnalyzer(analyzer);
                    }
                }).addOnFailureListener(new OnFailureListener() {
                    @Override
                    public void onFailure(Exception e) {
                        Toast.makeText(FaceAnalyzerActivity.this, "FAIL", Toast.LENGTH_SHORT).show();
                        // FIX: log via Log.e (already imported) instead of printStackTrace().
                        Log.e(TAG, "Face analysis failed", e);
                        releaseAnalyzer(analyzer);
                    }
                });
            }
        });
    }

    /**
     * Releases the analyzer's detection resources.
     * FIX: the original never called stop(), leaking analyzer resources on
     * every button press; HMS requires stop() once analysis is finished.
     */
    private void releaseAnalyzer(MLFaceAnalyzer analyzer) {
        try {
            analyzer.stop();
        } catch (IOException e) {
            Log.e(TAG, "Failed to stop face analyzer", e);
        }
    }

    /**
     * Maps emotion probabilities to a human-readable label string.
     *
     * @param emotions per-emotion probabilities from ML Kit
     * @return concatenation of every emotion label whose probability exceeds
     *         {@link #EMOTION_THRESHOLD}; may contain several labels or be empty
     */
    private String getEmotion(MLFaceEmotion emotions) {
        StringBuilder emotion = new StringBuilder();
        if (emotions.getAngryProbability() > EMOTION_THRESHOLD) {
            emotion.append("生气");
        }
        if (emotions.getDisgustProbability() > EMOTION_THRESHOLD) {
            emotion.append("讨厌");
        }
        if (emotions.getFearProbability() > EMOTION_THRESHOLD) {
            emotion.append("害怕");
        }
        if (emotions.getNeutralProbability() > EMOTION_THRESHOLD) {
            emotion.append("无表情");
        }
        if (emotions.getSadProbability() > EMOTION_THRESHOLD) {
            emotion.append("伤心");
        }
        if (emotions.getSmilingProbability() > EMOTION_THRESHOLD) {
            emotion.append("微笑");
        }
        if (emotions.getSurpriseProbability() > EMOTION_THRESHOLD) {
            emotion.append("惊讶");
        }
        return emotion.toString();
    }
}