package com.orbbec.orbbecsdkexamples.activity;

import android.content.Context;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.text.TextUtils;
import android.util.Log;
import android.view.PixelCopy;
import android.widget.Toast;

import com.orbbec.obsensor.ColorFrame;
import com.orbbec.obsensor.Config;
import com.orbbec.obsensor.DepthFrame;
import com.orbbec.obsensor.Device;
import com.orbbec.obsensor.DeviceChangedCallback;
import com.orbbec.obsensor.DeviceInfo;
import com.orbbec.obsensor.DeviceList;
import com.orbbec.obsensor.FrameSet;
import com.orbbec.obsensor.LogSeverity;
import com.orbbec.obsensor.OBContext;
import com.orbbec.obsensor.OBException;
import com.orbbec.obsensor.Pipeline;
import com.orbbec.obsensor.Sensor;
import com.orbbec.obsensor.SensorType;
import com.orbbec.obsensor.StreamType;
import com.orbbec.obsensor.VideoStreamProfile;
import com.orbbec.orbbecsdkexamples.BuildConfig;
import com.orbbec.orbbecsdkexamples.R;
import com.orbbec.orbbecsdkexamples.view.OBGLView;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Timer;
import java.util.TimerTask;

/**
 * Color Viewer activity.
 *
 * <p>Renders the color stream of the first attached Orbbec device into an
 * {@link OBGLView} and periodically snapshots the view via {@link PixelCopy}
 * to run OpenCV face detection on it. A second (depth) pipeline is prepared
 * but its render thread is currently disabled.
 */
public class ColorViewerActivity extends BaseActivity {
    private static final String TAG = "ColorViewerActivity";

    private Pipeline mPipeline;              // color stream pipeline
    private Pipeline mPipeline2;             // depth stream pipeline
    private Thread mStreamThread;            // renders color frames
    private Thread mStreamThread2;           // would render depth frames (not started, see start())
    private volatile boolean mIsStreamRunning;
    private OBGLView mColorView;
    private OBGLView dpView;
    private Device mDevice;
    private CascadeClassifier mJavaDetector; // OpenCV face detector, loaded by initClassifier()

    // Handler driving the periodic face-detection capture on the main thread.
    private final Handler captureHandler = new Handler(Looper.getMainLooper());
    // Capture period in milliseconds.
    private static final long CAPTURE_INTERVAL = 10000;

    private DeviceChangedCallback mDeviceChangedCallback = new DeviceChangedCallback() {
        @Override
        public void onDeviceAttach(DeviceList deviceList) {
            try {
                if (null == mPipeline) {
                    // 1. Take the first attached device and verify it has a color sensor.
                    mDevice = deviceList.getDevice(0);
                    Sensor colorSensor = mDevice.getSensor(SensorType.COLOR);
                    if (null == colorSensor) {
                        showToast(getString(R.string.device_not_support_color));
                        mDevice.close();
                        mDevice = null;
                        return;
                    }

                    // 2. One pipeline per stream, both backed by the same device.
                    mPipeline = new Pipeline(mDevice);
                    mPipeline2 = new Pipeline(mDevice);

                    // 3. One configuration per pipeline.
                    Config config = new Config();
                    Config config2 = new Config();

                    // 4. Resolve the stream profiles (defaults match 640 wide @ 30fps).
                    VideoStreamProfile streamProfile = getStreamProfile(mPipeline, SensorType.COLOR);
                    // BUGFIX: query the depth profile from the pipeline that will actually
                    // stream it (was mPipeline).
                    VideoStreamProfile streamProfile2 = getStreamProfile(mPipeline2, SensorType.DEPTH);

                    // 5. Enable both profiles.
                    // BUGFIX: the original only null-checked the color profile and then
                    // dereferenced the depth profile unconditionally (NPE risk).
                    if (null != streamProfile && null != streamProfile2) {
                        printStreamProfile(streamProfile.as(StreamType.VIDEO));
                        config.enableStream(streamProfile);
                        streamProfile.close();

                        printStreamProfile(streamProfile2.as(StreamType.VIDEO));
                        config2.enableStream(streamProfile2);
                        streamProfile2.close();
                    } else {
                        // Roll back everything allocated so far.
                        if (null != streamProfile) {
                            streamProfile.close();
                        }
                        if (null != streamProfile2) {
                            streamProfile2.close();
                        }
                        mPipeline.close();
                        mPipeline = null;
                        mPipeline2.close();
                        mPipeline2 = null;
                        mDevice.close();
                        mDevice = null;
                        config.close();
                        config2.close();
                        Log.w(TAG, "No target stream profile!");
                        showToast(getString(R.string.init_stream_profile_failed));
                        return;
                    }

                    // 6. Start both sensor streams.
                    mPipeline.start(config);
                    mPipeline2.start(config2);

                    // 7. Configs are no longer needed once the pipelines are started.
                    config.close();
                    config2.close();

                    // 8. Spin up the frame-pumping thread(s).
                    start();
                }
            } catch (Exception e) {
                Log.e(TAG, "onDeviceAttach failed", e);
            } finally {
                // 9. Always release the device list handed to the callback.
                deviceList.close();
            }
        }

        @Override
        public void onDeviceDetach(DeviceList deviceList) {
            try {
                if (mDevice != null) {
                    DeviceInfo deviceInfo = mDevice.getInfo();
                    try {
                        String myUid = (null != deviceInfo) ? deviceInfo.getUid() : null;
                        for (int i = 0, n = deviceList.getDeviceCount(); i < n; i++) {
                            if (TextUtils.equals(deviceList.getUid(i), myUid)) {
                                stop();
                                if (mPipeline != null) {
                                    mPipeline.stop();
                                    mPipeline.close();
                                    mPipeline = null;
                                }
                                // BUGFIX: the depth pipeline was leaked on detach.
                                if (mPipeline2 != null) {
                                    mPipeline2.stop();
                                    mPipeline2.close();
                                    mPipeline2 = null;
                                }
                                mDevice.close();
                                mDevice = null;
                                // BUGFIX: the original kept iterating and called
                                // getInfo() on the already-closed/nulled device.
                                break;
                            }
                        }
                    } finally {
                        // BUGFIX: only close the info handle when one was returned.
                        if (null != deviceInfo) {
                            deviceInfo.close();
                        }
                    }
                }
            } catch (Exception e) {
                Log.e(TAG, "onDeviceDetach failed", e);
            } finally {
                try {
                    deviceList.close();
                } catch (Exception ignored) {
                    // Best-effort release; nothing more we can do here.
                }
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        init(this);
        super.onCreate(savedInstanceState);
        setTitle("ColorView");
        setContentView(R.layout.activity_color_viewer);
        mColorView = findViewById(R.id.colorview_id);
        dpView = findViewById(R.id.dpview);

        initClassifier();
        // BUGFIX: the original used a never-cancelled java.util.Timer whose only job
        // was to re-post captureRunnable every 10s (double scheduling + Timer leak).
        // The runnable now reschedules itself on captureHandler instead.
        captureHandler.postDelayed(captureRunnable, CAPTURE_INTERVAL);
    }

    @Override
    protected void onDestroy() {
        // Stop the periodic capture so the Handler does not leak this Activity.
        captureHandler.removeCallbacks(captureRunnable);
        super.onDestroy();
    }

    @Override
    protected void onStart() {
        super.onStart();
        initSDK();
    }

    @Override
    protected void onStop() {
        try {
            // Stop the frame-pumping thread(s) first.
            stop();

            // Stop and release each pipeline independently.
            // BUGFIX: mPipeline2 was only guarded by the mPipeline null check (NPE risk).
            if (null != mPipeline) {
                mPipeline.stop();
                mPipeline.close();
                mPipeline = null;
            }
            if (null != mPipeline2) {
                mPipeline2.stop();
                mPipeline2.close();
                mPipeline2 = null;
            }

            // Release the device.
            if (mDevice != null) {
                mDevice.close();
                mDevice = null;
            }
        } catch (Exception e) {
            Log.e(TAG, "onStop release failed", e);
        }

        releaseSDK();
        super.onStop();
    }

    @Override
    protected DeviceChangedCallback getDeviceChangedCallback() {
        return mDeviceChangedCallback;
    }

    /** Shows a short toast on the UI thread. */
    private void showToast(String msg) {
        runOnUiThread(() -> Toast.makeText(ColorViewerActivity.this, msg, Toast.LENGTH_SHORT).show());
    }

    /** Starts the color render thread (and prepares the depth one). */
    private void start() {
        mIsStreamRunning = true;
        if (null == mStreamThread) {
            mStreamThread = new Thread(mStreamRunnable);
            mStreamThread2 = new Thread(mStreamRunnable2);
            mStreamThread.start();
            // NOTE(review): the depth thread is created but deliberately not started
            // in the original code — confirm whether depth preview should be enabled.
            // mStreamThread2.start();
        }
    }

    /** Signals the render threads to exit and waits briefly for them. */
    private void stop() {
        mIsStreamRunning = false;
        if (null != mStreamThread) {
            try {
                mStreamThread.join(300);
                if (null != mStreamThread2) {
                    mStreamThread2.join(300);
                }
            } catch (InterruptedException e) {
                // BUGFIX: preserve the interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
            }
            mStreamThread = null;
            mStreamThread2 = null;
        }
    }

    /** Pumps color frames from mPipeline into mColorView until stopped. */
    private Runnable mStreamRunnable = () -> {
        ByteBuffer buffer = null;
        while (mIsStreamRunning) {
            try {
                // Block up to 100ms waiting for the next frame set.
                FrameSet frameSet = mPipeline.waitForFrameSet(100);

                Log.d(TAG, "frameSet=" + frameSet);
                if (null == frameSet) {
                    continue;
                }

                // Fetch the color frame from the set.
                ColorFrame colorFrame = frameSet.getColorFrame();
                if (null != buffer) {
                    buffer.clear();
                }

                Log.d(TAG, "frameSet=" + frameSet + ", colorFrame=" + colorFrame);
                if (null != colorFrame) {
                    Log.d(TAG, "color frame: " + colorFrame.getSystemTimeStamp());
                    // (Re)allocate the buffer only when the frame size changes.
                    int dataSize = colorFrame.getDataSize();
                    if (null == buffer || buffer.capacity() != dataSize) {
                        buffer = ByteBuffer.allocateDirect(dataSize);
                    }
                    // Copy the pixel data out and hand it to the GL view.
                    colorFrame.getData(buffer);
                    mColorView.update(colorFrame.getWidth(), colorFrame.getHeight(), StreamType.COLOR, colorFrame.getFormat(), buffer, 1.0f);

                    // Release the color frame.
                    colorFrame.close();
                }
                // Release the frame set.
                frameSet.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };

    /**
     * Snapshots the color view and runs face detection on it, then reschedules
     * itself after CAPTURE_INTERVAL ms.
     */
    private final Runnable captureRunnable = new Runnable() {
        @Override
        public void run() {
            int width = mColorView.getWidth();
            int height = mColorView.getHeight();

            // The view may not be laid out yet; skip this round rather than
            // crash in Bitmap.createBitmap with a zero dimension.
            if (width > 0 && height > 0) {
                Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);

                // Copy the GL surface content into the bitmap asynchronously.
                PixelCopy.request(mColorView, bitmap, copyResult -> {
                    if (copyResult == PixelCopy.SUCCESS) {
                        detectFace(bitmap);
                        Log.d("TAG", "Bitmap captured successfully!");
                    } else {
                        Log.e("TAG", "Failed to capture bitmap: " + copyResult);
                    }
                }, new Handler(Looper.getMainLooper()));
            }

            // BUGFIX: self-rescheduling replaces the leaked java.util.Timer.
            captureHandler.postDelayed(this, CAPTURE_INTERVAL);
        }
    };

    /** Pumps depth frames from mPipeline2 (rendering currently disabled). */
    private Runnable mStreamRunnable2 = () -> {
        while (mIsStreamRunning) {
            try {
                // Block up to 100ms waiting for the next frame set.
                FrameSet frameSet = mPipeline2.waitForFrameSet(100);

                if (null == frameSet) {
                    continue;
                }

                // Fetch the depth frame from the set.
                DepthFrame frame = frameSet.getDepthFrame();
                if (frame != null) {
                    byte[] frameData = new byte[frame.getDataSize()];
                    frame.getData(frameData);
                    // NOTE(review): rendering was disabled in the original code:
                    // dpView.update(frame.getWidth(), frame.getHeight(), StreamType.DEPTH, frame.getFormat(), frameData, frame.getValueScale());

                    // Release the depth frame.
                    frame.close();
                }

                // Release the frame set.
                frameSet.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };

    /**
     * Loads the OpenCV runtime: tries the statically packaged library first and
     * falls back to OpenCV Manager.
     */
    public static void init(Context mContext) {
        BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(mContext) {
            @Override
            public void onManagerConnected(int status) {
                // BUGFIX: the original called super twice for non-SUCCESS statuses
                // (once before the switch and again in the default branch).
                super.onManagerConnected(status);
            }
        };
        if (!OpenCVLoader.initDebug()) {
            Log.d("OpenCV", "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION, mContext, mLoaderCallback);
        } else {
            Log.d("OpenCV", "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }

    /**
     * Copies the bundled LBP face cascade from res/raw into app-private storage
     * and loads it into {@link #mJavaDetector}. Must run before detectFace().
     */
    private void initClassifier() {
        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
        // BUGFIX: name the copy after the LBP resource actually bundled; the old
        // target name claimed it was a Haar cascade.
        File cascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
        // BUGFIX: try-with-resources guarantees both streams close even on failure.
        try (InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
             FileOutputStream os = new FileOutputStream(cascadeFile)) {
            byte[] buffer = new byte[4096];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            mJavaDetector = new CascadeClassifier(cascadeFile.getAbsolutePath());
        } catch (Exception e) {
            Log.e("LOG_TAG", "加载文件失败");
            e.printStackTrace();
        }
    }

    /**
     * Runs OpenCV face detection on the given bitmap and logs the face count.
     * Faces smaller than 20% of the image height are ignored.
     */
    private void detectFace(Bitmap orig) {
        Mat rgba = new Mat();
        Mat gray = new Mat();
        MatOfRect faces = new MatOfRect();
        try {
            // Bitmap -> Mat, then to grayscale for the cascade classifier.
            Utils.bitmapToMat(orig, rgba);
            Imgproc.cvtColor(rgba, gray, Imgproc.COLOR_RGB2GRAY);

            // Minimum face size: 20% of the image height.
            int absoluteFaceSize = 0;
            int height = gray.rows();
            if (Math.round(height * 0.2f) > 0) {
                absoluteFaceSize = Math.round(height * 0.2f);
            }
            if (mJavaDetector != null) {
                mJavaDetector.detectMultiScale(gray, faces, 1.1, 2, 2,
                        new Size(absoluteFaceSize, absoluteFaceSize), new Size());
            }
            Rect[] faceArray = faces.toArray();
            Log.d("TAG", "人脸数："+faceArray.length);
        } finally {
            // BUGFIX: Mats wrap native memory — release them explicitly. The original
            // also built an unused "mark" bitmap via matToBitmap; that dead work is gone.
            rgba.release();
            gray.release();
            faces.release();
        }
    }
}