package com.taobao.android.mnndemo;

import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.drawable.Drawable;
import android.hardware.SensorManager;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.OrientationEventListener;
import android.view.View;
import android.view.ViewStub;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;

import com.taobao.android.mnn.MNNForwardType;
import com.taobao.android.mnn.MNNImageProcess;
import com.taobao.android.mnn.MNNNetInstance;
import com.taobao.android.utils.BitmapToByteArray;
import com.taobao.android.utils.Common;
import com.taobao.android.utils.TxtFileReader;

import java.io.File;
import java.text.DecimalFormat;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

public class VideoActivity extends AppCompatActivity implements AdapterView.OnItemSelectedListener {

    private final String TAG = "VideoActivity";
    // Sanity bound on the output vector length; anything larger suggests the
    // wrong model file was loaded.
    private final int MAX_CLZ_SIZE = 1000;

    // Asset paths for the classification demos (currently unused — see prepareModels()).
    private final String MobileModelFileName = "MobileNet/v2/mobilenet_v2.caffe.mnn";
    private final String MobileWordsFileName = "MobileNet/synset_words.txt";

    private final String SqueezeModelFileName = "SqueezeNet/v1.1/squeezenet_v1.1.caffe.mnn";
    private final String SqueezeWordsFileName = "SqueezeNet/squeezenet.txt";

    private String mMobileModelPath;
    private List<String> mMobileTaiWords;
    private String mSqueezeModelPath;
    private List<String> mSqueezeTaiWords;

    private final MNNNetInstance.Config mConfig = new MNNNetInstance.Config();// session config

    private CameraView mCameraView;
    private Spinner mForwardTypeSpinner;
    private Spinner mThreadNumSpinner;
    private Spinner mModelSpinner;
    private Spinner mMoreDemoSpinner;

    private TextView mFirstResult;
    private TextView mSecondResult;
    private TextView mThirdResult;
    private TextView mTimeTextView;

    // SqueezeNet input size — unused by the MoveNet path but kept for the other demos.
    int SqueezeInputWidth = 227;
    int SqueezeInputHeight = 227;

    // Worker thread that owns every MNN call (create / run / release).
    HandlerThread mThread;
    Handler mHandle;

    // True while the net is being (re)built: preview frames are ignored.
    private AtomicBoolean mLockUIRender = new AtomicBoolean(false);
    // True while a frame is in flight: newer frames are dropped instead of queued.
    private AtomicBoolean mDrop = new AtomicBoolean(false);

    private MNNNetInstance mNetInstance;
    private MNNNetInstance.Session mSession;
    private MNNNetInstance.Session.Tensor mInputTensor;

    private int mRotateDegree;// 0/90/180/270 (was documented as "360", which mRotateDegree can never be)

    /**
     * Listen for device rotation, quantized to the four cardinal angles,
     * and keep {@link #mRotateDegree} up to date.
     */
    void detectScreenRotate() {
        OrientationEventListener orientationListener = new OrientationEventListener(this,
                SensorManager.SENSOR_DELAY_NORMAL) {
            @Override
            public void onOrientationChanged(int orientation) {

                if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
                    return;  // device lying flat: no valid angle can be detected
                }

                // Snap to the nearest multiple of 90°; only the four cardinal
                // orientations matter for frame rotation.
                orientation = (orientation + 45) / 90 * 90;
                mRotateDegree = orientation % 360;
            }
        };

        // NOTE(review): the listener is local and never disable()d on destroy;
        // acceptable for a demo but a lifecycle leak candidate in production.
        if (orientationListener.canDetectOrientation()) {
            orientationListener.enable();
        } else {
            orientationListener.disable();
        }
    }

    /**
     * Placeholder: the original asset-copy logic for the MobileNet/SqueezeNet
     * demos was disabled when this activity was switched to the MoveNet model
     * (see {@link #prepareNet()}). Intentionally a no-op.
     */
    private void prepareModels() {
        // No-op. The MobileNet/SqueezeNet model copying previously lived here;
        // restore it from version control if those demos are re-enabled.
    }


    /**
     * (Re)creates the MNN net instance and session for the MoveNet model on the
     * worker thread, then unlocks rendering. Any previous session/instance is
     * released first.
     */
    private void prepareNet() {
        Log.d(TAG, "prepareNet() called");
        if (null != mSession) {
            mSession.release();
            mSession = null;
        }
        if (mNetInstance != null) {
            mNetInstance.release();
            mNetInstance = null;
        }

        // NOTE(review): nothing in this activity copies the model into the
        // cache dir — it must already be present (TODO confirm who places it).
        String modelPath = getCacheDir() + "/movenet-tensorflow2-singlepose-thunder-v4.mnn";
        Log.d(TAG, "prepareNet() called modelPath:" + modelPath);

        // create net instance
        mNetInstance = MNNNetInstance.createFromFile(modelPath);
        if (mNetInstance == null) {
            // BUGFIX: createFromFile returns null when the model file is missing
            // or unreadable; previously this fell through to an NPE below.
            // Keep rendering locked so frames are simply dropped.
            Log.e(TAG, "prepareNet: failed to load model from " + modelPath);
            return;
        }

        mSession = mNetInstance.createSession(mConfig);

        // get input tensor (null name == first/default input)
        mInputTensor = mSession.getInput(null);

        int[] dimensions = mInputTensor.getDimensions();
        dimensions[0] = 1; // force batch = 1 (first dim is batch regardless of NCHW/NHWC layout)
        mInputTensor.reshape(dimensions);
        mSession.reshape();
        Log.d(TAG, "prepareNet() finished");
        mLockUIRender.set(false);
    }

    SkeletonImageView imageView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
                WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_main);

        detectScreenRotate();

        mConfig.numThread = 4;
        mConfig.forwardType = MNNForwardType.FORWARD_VULKAN.type;

        // prepare mnn net models (currently a no-op, see prepareModels())
        prepareModels();

        mForwardTypeSpinner = (Spinner) findViewById(R.id.forwardTypeSpinner);
        mThreadNumSpinner = (Spinner) findViewById(R.id.threadsSpinner);
        mThreadNumSpinner.setSelection(2);
        mModelSpinner = (Spinner) findViewById(R.id.modelTypeSpinner);
        mMoreDemoSpinner = (Spinner) findViewById(R.id.MoreDemo);

        mFirstResult = findViewById(R.id.firstResult);
        mSecondResult = findViewById(R.id.secondResult);
        mThirdResult = findViewById(R.id.thirdResult);
        mTimeTextView = findViewById(R.id.timeTextView);

        mForwardTypeSpinner.setSelection(2);
        mForwardTypeSpinner.setOnItemSelectedListener(VideoActivity.this);
        mThreadNumSpinner.setOnItemSelectedListener(VideoActivity.this);
        mModelSpinner.setOnItemSelectedListener(VideoActivity.this);
        mMoreDemoSpinner.setOnItemSelectedListener(VideoActivity.this);

        // block frame processing until the net is ready
        mLockUIRender.set(true);
        clearUIForPrepareNet();

        ViewStub stub = (ViewStub) findViewById(R.id.stub);
        stub.inflate();
        imageView = findViewById(R.id.imageView);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            if (checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                requestPermissions(new String[]{Manifest.permission.CAMERA}, 10);
            } else {
                handlePreViewCallBack();
            }
        } else {
            handlePreViewCallBack();
        }

        // worker thread that owns all MNN calls
        mThread = new HandlerThread("MNNNet");
        mThread.start();
        mHandle = new Handler(mThread.getLooper());

        mHandle.post(new Runnable() {
            @Override
            public void run() {
                prepareNet();
            }
        });

    }


    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);

        // 10 == camera permission request issued from onCreate()
        if (10 == requestCode) {
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                handlePreViewCallBack();
            } else {
                Toast.makeText(this, "没有获得必要的权限", Toast.LENGTH_SHORT).show();
            }
        }

    }

    // When true, a bundled drawable is fed through the pipeline on a timer
    // instead of live camera frames (debug/demo mode).
    boolean useImage = true;

    /**
     * Wires the frame source (camera preview or the static test image) to the
     * MNN inference pipeline. Inference runs on {@link #mHandle}'s thread; all
     * view updates are posted back to the UI thread.
     */
    private void handlePreViewCallBack() {
        mCameraView = (CameraView) findViewById(R.id.camera_view);

        CameraView.PreviewCallback previewCallback = new CameraView.PreviewCallback() {
            @Override
            public void onGetPreviewOptimalSize(int optimalWidth, int optimalHeight) {

                // adjust video preview size according to screen size
                DisplayMetrics metric = new DisplayMetrics();
                getWindowManager().getDefaultDisplay().getMetrics(metric);
                int fixedVideoHeight = metric.widthPixels * optimalWidth / optimalHeight;

                FrameLayout layoutVideo = findViewById(R.id.videoLayout);
                RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) layoutVideo.getLayoutParams();
                params.height = fixedVideoHeight;
                layoutVideo.setLayoutParams(params);
            }

            @Override
            public void onPreviewFrame(final byte[] data, final int imageWidth, final int imageHeight, final int angle) {

                if (mLockUIRender.get()) {
                    Log.d(TAG, "run: mLockUIRender locked");
                    return;
                }

                if (mDrop.get()) {
                    // a frame is already being processed; skip this one
                    Log.w(TAG, "drop frame , net running too slow !!");
                } else {
                    mDrop.set(true);
                    mHandle.post(new Runnable() {
                        @Override
                        public void run() {
                            mDrop.set(false);
                            if (mLockUIRender.get()) {
                                Log.d(TAG, "run: mLockUIRender locked");
                                return;
                            }
                            // Correction angle from camera mount angle plus device rotation
                            // (rear camera convention).
                            int needRotateAngle = (angle + mRotateDegree) % 360;

                            // Convert the raw frame into the input tensor.
                            final MNNImageProcess.Config config = new MNNImageProcess.Config();
                            // No mean/normal set: pixel values are passed through unscaled.
                            if (useImage) {
                                config.source = MNNImageProcess.Format.RGB;     // static test image is RGB bytes
                            } else {
                                config.source = MNNImageProcess.Format.YUV_NV21; // camera preview is YUV_NV21
                            }
                            config.dest = MNNImageProcess.Format.BGR; // tensor channel order fed to the model

                            int MobileInputWidth = 192;  // model input width
                            int MobileInputHeight = 192; // model input height
                            // dst-to-src transform: scale to model size, then rotate about the center
                            Matrix matrix = new Matrix();
                            matrix.postScale(MobileInputWidth / (float) imageWidth, MobileInputHeight / (float) imageHeight);
                            matrix.postRotate(needRotateAngle, MobileInputWidth / 2, MobileInputHeight / 2);

                            MNNImageProcess.convertBuffer(data, imageWidth, imageHeight, mInputTensor, config, matrix);

                            final long startTimestamp = System.nanoTime();

                            // inference
                            mSession.run();

                            // fetch output (null name == first/default output)
                            MNNNetInstance.Session.Tensor output = mSession.getOutput(null);

                            float[] result = output.getFloatData();
                            final long endTimestamp = System.nanoTime();
                            final float inferenceTimeCost = (endTimestamp - startTimestamp) / 1000000.0f;
                            Log.d(TAG, "run() called result：" + Arrays.toString(result));

                            // Parse MoveNet output: 17 keypoints, each emitted as
                            // [y, x, confidence] (normalized), stored here as [x, y, confidence].
                            final float[][] keypoints = new float[17][3];
                            for (int i = 0; i < 17; i++) {
                                keypoints[i][1] = result[i * 3];     // normalized y
                                keypoints[i][0] = result[i * 3 + 1]; // normalized x
                                keypoints[i][2] = result[i * 3 + 2]; // confidence
                            }

                            // Log keypoints in model-input pixel coordinates.
                            for (int i = 0; i < 17; i++) {
                                float normalizedX = keypoints[i][0];
                                float normalizedY = keypoints[i][1];
                                float confidence = keypoints[i][2];

                                int pixelX = (int) (normalizedX * MobileInputWidth);
                                int pixelY = (int) (normalizedY * MobileInputHeight);

                                Log.d(TAG, "Keypoint " + i + ": [" + pixelX + "," + pixelY + "] with confidence: " + confidence);
                            }

                            Log.d(TAG, "run: result: " + Arrays.toString(result));
                            if (result.length > MAX_CLZ_SIZE) {
                                Log.w(TAG, "session result too big (" + result.length + "), model incorrect ?");
                            }

                            // BUGFIX: view updates must run on the UI thread. The keypoint
                            // update was previously posted back onto the worker thread
                            // (mHandle), mutating the ImageView off the main thread.
                            runOnUiThread(new Runnable() {
                                @Override
                                public void run() {
                                    imageView.setKeypoints(keypoints);
                                    mTimeTextView.setText("cost time：" + inferenceTimeCost + "ms");
                                }
                            });

                        }
                    });
                }
            }
        };

        if (useImage) {
            // Debug path: repeatedly feed a bundled RGB image through the same
            // callback at ~10 fps. The handler is created on the UI thread, so
            // the repost runs on the main looper.
            Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.stand);
            imageView.setImageBitmap(bitmap);
            byte[] bytesFromBitmap = BitmapToByteArray.getBytesFromBitmap(bitmap);
            Handler handler = new Handler();
            Runnable publishTask = new Runnable() {
                @Override
                public void run() {
                    if (isDestroyed() || isFinishing()) {
                        return; // stop reposting once the activity is gone
                    }
                    previewCallback.onPreviewFrame(bytesFromBitmap, bitmap.getWidth(), bitmap.getHeight(), 0);
                    handler.postDelayed(this, 100);
                }
            };
            handler.postDelayed(publishTask, 100);
        } else {
            if (mCameraView != null) {
                mCameraView.setPreviewCallback(previewCallback);
            }
        }

    }


    /**
     * Shared spinner handler: updates the session config (forward type /
     * thread count) or launches another demo activity, then rebuilds the net
     * on the worker thread. Note Android fires this once per spinner during
     * initial layout as well.
     */
    @Override
    public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {

        // forward type
        if (mForwardTypeSpinner.getId() == adapterView.getId()) {
            Log.d(TAG, "onItemSelected() called with:  i = [" + i + "], l = [" + l + "]");
            if (i == 0) {
                mConfig.forwardType = MNNForwardType.FORWARD_CPU.type;
            } else if (i == 1) {
                mConfig.forwardType = MNNForwardType.FORWARD_OPENCL.type;
            } else if (i == 2) {
                mConfig.forwardType = MNNForwardType.FORWARD_OPENGL.type;
            } else if (i == 3) {
                mConfig.forwardType = MNNForwardType.FORWARD_VULKAN.type;
            }
        }
        // threads num — entries look like "Thread N"; parse N from the 2nd token
        else if (mThreadNumSpinner.getId() == adapterView.getId()) {

            String[] threadList = getResources().getStringArray(R.array.thread_list);
            mConfig.numThread = Integer.parseInt(threadList[i].split(" ")[1]);
        }
        // model index — selection currently has no effect (single hard-coded model)
        else if (mModelSpinner.getId() == adapterView.getId()) {
            // intentionally empty
        } else if (mMoreDemoSpinner.getId() == adapterView.getId()) {

            if (i == 1) {
                Intent intent = new Intent(VideoActivity.this, ImageActivity.class);
                startActivity(intent);
            } else if (i == 2) {
                Intent intent = new Intent(VideoActivity.this, PortraitActivity.class);
                startActivity(intent);
            } else if (i == 3) {
                Intent intent = new Intent(VideoActivity.this, OpenGLTestActivity.class);
                startActivity(intent);
            }
        }

        // any selection change rebuilds the session with the updated config
        mLockUIRender.set(true);
        clearUIForPrepareNet();

        mHandle.post(new Runnable() {
            @Override
            public void run() {
                prepareNet();
            }
        });

    }

    /** Resets the result labels while the net is being rebuilt. */
    private void clearUIForPrepareNet() {
        mFirstResult.setText("prepare net ...");
        mSecondResult.setText("");
        mThirdResult.setText("");
        mTimeTextView.setText("");
    }


    @Override
    public void onNothingSelected(AdapterView<?> adapterView) {
        // nothing to do
    }

    @Override
    protected void onPause() {
        if (mCameraView != null) {
            mCameraView.onPause();
        }
        super.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (mCameraView != null) {
            mCameraView.onResume();
        }
    }


    @Override
    protected void onDestroy() {
        // BUGFIX: release native resources on the worker thread, then shut the
        // thread down cleanly. Previously the thread was interrupt()ed before
        // the release runnable was posted, the looper was never quit (thread
        // leak), and mSession was never released (native leak).
        mHandle.post(new Runnable() {
            @Override
            public void run() {
                if (mSession != null) {
                    mSession.release();
                    mSession = null;
                }
                if (mNetInstance != null) {
                    mNetInstance.release();
                    mNetInstance = null;
                }
            }
        });
        // Processes already-queued messages (including the release above),
        // then terminates the looper.
        mThread.quitSafely();

        super.onDestroy();
    }
}