package com.tfc.glassesfree3dplayer;

import static android.os.SystemClock.sleep;

import static com.tfc.glassesfree3dplayer.MainActivity.TAG;
import static com.tfc.glassesfree3dplayer.MainActivity.videoPath;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaPlayer;
import android.os.Build;
import android.util.Log;
import android.view.Surface;
import android.widget.Toast;

import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

import java.io.IOException;
import java.nio.ByteBuffer;

public class OnlineVideoProcessor {
    private MediaExtractor extractor;
    private MediaCodec decoder;
    private Surface outputSurface;
    private Thread processingThread;
    private volatile boolean isRunning = false;
    long startTime;
    long endTime;
    long runTime;
    long sleep_time = 30;   // adaptive per-frame sleep (ms), tuned at the end of each loop iteration
    long videoFTP = 30;     // assumed video frame rate (fps) — NOTE(review): hard-coded; consider reading MediaFormat.KEY_FRAME_RATE in init()
    int nowFrameNum = 0;    // number of frames played so far (drives the video clock)

    /**
     * Opens {@code videoPath}, selects the first video track and starts a decoder.
     * The decoder is deliberately configured WITHOUT an output surface so that
     * {@link MediaCodec#getOutputImage(int)} yields a CPU-readable YUV image for
     * OpenCV post-processing; rendering goes through {@code surface} via a Canvas.
     *
     * @param videoPath path or URL accepted by {@link MediaExtractor#setDataSource(String)}
     * @param surface   surface the processed frames are drawn onto
     * @throws IOException if the source cannot be read or contains no video track
     */
    public void init(String videoPath, Surface surface) throws IOException {
        outputSurface = surface;
        extractor = new MediaExtractor();
        extractor.setDataSource(videoPath);

        // Find the first video track.
        int videoTrackIndex = -1;
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat trackFormat = extractor.getTrackFormat(i);
            String trackMime = trackFormat.getString(MediaFormat.KEY_MIME);
            // FIX: KEY_MIME can be absent for malformed tracks — guard against null
            // before calling startsWith (was a potential NPE).
            if (trackMime != null && trackMime.startsWith("video/")) {
                videoTrackIndex = i;
                break;
            }
        }

        if (videoTrackIndex == -1) {
            throw new IOException("No video track found");
        }

        extractor.selectTrack(videoTrackIndex);
        MediaFormat format = extractor.getTrackFormat(videoTrackIndex);
        String mime = format.getString(MediaFormat.KEY_MIME);

        // Create the decoder; null surface => CPU-accessible output images.
        decoder = MediaCodec.createDecoderByType(mime);
        decoder.configure(format, null, null, 0);
        decoder.start();
    }

    /** Starts the background decode/process/render loop on a worker thread. */
    public void startProcessing() {
        isRunning = true;
        processingThread = new Thread(this::processFrames);
        processingThread.start();
    }

    /** Stops the loop, waits for the worker thread, then releases codec resources. */
    public void stopProcessing() {
        isRunning = false;
        if (processingThread != null) {
            try {
                processingThread.join();
            } catch (InterruptedException e) {
                // Preserve the interrupt status for callers higher up the stack.
                Thread.currentThread().interrupt();
            }
        }
        release();
    }

    /**
     * Worker loop: feeds compressed samples into the decoder, drains decoded
     * frames, pipes them through OpenCV and draws them onto the output surface.
     * A MediaPlayer plays the audio and serves as the master clock; the loop
     * keeps video in step by sleeping (video ahead) or dropping frames (video
     * behind), and continuously tunes {@code sleep_time} toward {@code videoFTP}.
     */
    private void processFrames() {
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        boolean outputDone = false;
        boolean inputDone = false;

        // MediaPlayer provides audio playback and the reference clock.
        MediaPlayer mMediaPlayer = new MediaPlayer();
        try {
            Log.i(TAG,"播放地址："+videoPath);
            mMediaPlayer.setDataSource(videoPath);
            mMediaPlayer.prepare();
            // FIX: the player was prepared but never started, so no audio played
            // and getCurrentPosition() stayed at 0 — the A/V sync below compared
            // against a frozen clock and permanently thought video was ahead.
            mMediaPlayer.start();
        } catch (IOException e) {
            mMediaPlayer.release();
            throw new RuntimeException(e);
        }

        try {
            while (isRunning && !outputDone) {
                // Start-of-frame timestamp.
                startTime = System.currentTimeMillis();

                // Frame counter — advances the synthetic video clock.
                nowFrameNum = nowFrameNum + 1;
                Log.i(TAG,"帧计数："+nowFrameNum);
                // Current audio and video playback positions (ms).
                long nowAudioPlayTime = mMediaPlayer.getCurrentPosition();
                long nowVideoPlayTime = 1000L * nowFrameNum/videoFTP;
                Log.i(TAG,"音频播放进度：" + nowAudioPlayTime);
                Log.i(TAG,"视频播放进度："+nowVideoPlayTime);
                // A/V sync strategy: positive diff means video is ahead of audio.
                double syncDiff = nowVideoPlayTime - nowAudioPlayTime; // ms
                if (Math.abs(syncDiff) > 40) {  // 40 ms sync threshold
                    if (syncDiff > 0) {
                        // Video ahead of audio: wait for audio to catch up.
                        sleep(sleep_time);
                        // Restart the per-frame timer after the extra wait.
                        startTime = System.currentTimeMillis();
                    } else {
                        // Video behind audio: drop this frame. NOTE(review): the
                        // counter above already advanced, so this only moves the
                        // synthetic clock forward without draining the decoder —
                        // confirm this catch-up strategy against real playback.
                        continue;
                    }
                }

                if (!inputDone) {
                    int inputBufferId = decoder.dequeueInputBuffer(10000);
                    if (inputBufferId >= 0) {
                        ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferId);

                        int sampleSize = extractor.readSampleData(inputBuffer, 0);
                        if (sampleSize < 0) {
                            // No more samples: signal end-of-stream to the decoder.
                            decoder.queueInputBuffer(inputBufferId, 0, 0, 0,
                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            inputDone = true;
                        } else {
                            long presentationTimeUs = extractor.getSampleTime();
                            decoder.queueInputBuffer(inputBufferId, 0, sampleSize,
                                    presentationTimeUs, 0);
                            extractor.advance();
                        }
                    }
                }

                int outputBufferId = decoder.dequeueOutputBuffer(bufferInfo, 10000);
                if (outputBufferId >= 0) {
                    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        outputDone = true;
                    }

                    if (outputSurface != null) {
                        // Fetch the decoded image and run the OpenCV pipeline.
                        processFrame(outputBufferId, bufferInfo);
                    }

                    decoder.releaseOutputBuffer(outputBufferId, true);
                } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // Output format changes could be handled here.
                }

                Log.i(TAG,"实际休眠时间："+sleep_time);
                sleep(sleep_time);

                // End-of-frame timestamp and elapsed wall time for this frame.
                endTime = System.currentTimeMillis();
                runTime = endTime - startTime;
                if(runTime!=0){
                    Log.i(TAG, "帧时间：" + runTime );
                    Log.i(TAG, "当前帧率：" + 1000 / runTime );
                }else{
                    Log.i(TAG, "帧时间：0" );
                    Log.i(TAG, "当前帧率：无穷大" );
                }
                Log.i(TAG, "休眠时间：" + sleep_time);
                // Adaptive pacing: the further the measured fps overshoots the
                // target, the bigger the sleep increment; below target, back off.
                if(runTime!=0){
                    double measuredFps = (double) 1000 / runTime;
                    if(measuredFps > videoFTP + 10){
                        sleep_time = sleep_time + 5;
                    }else if(measuredFps > videoFTP + 5){
                        sleep_time = sleep_time + 2;
                    }else if(measuredFps > videoFTP + 1){
                        sleep_time = sleep_time + 1;
                    }else if(measuredFps < videoFTP){
                        if(sleep_time > 1){
                            sleep_time = sleep_time - 1;
                        }
                    }
                }
            }
        } finally {
            // FIX: the MediaPlayer was never released, leaking the audio session
            // (and keeping audio running after the video loop stopped).
            mMediaPlayer.release();
        }
    }

    /**
     * Converts a decoder output {@link Image} (YUV_420_888) into a 3-channel
     * BGR OpenCV Mat. The caller owns (and must release) the returned Mat.
     */
    private Mat imageToMat(Image image) {
        // Step 1: repack the Image into an NV21 byte array.
        byte[] nv21 = yuv420888ToNv21(image);

        // Step 2: wrap it in a single-channel Mat. NV21 is a full-resolution Y
        // plane followed by interleaved VU at half height, hence height * 1.5.
        Mat yuvMat = new Mat(
                image.getHeight() + image.getHeight() / 2,
                image.getWidth(),
                CvType.CV_8UC1
        );
        yuvMat.put(0, 0, nv21); // load the pixel data

        // Step 3: convert to a BGR Mat.
        Mat bgrMat = new Mat();
        Imgproc.cvtColor(yuvMat, bgrMat, Imgproc.COLOR_YUV2BGR_NV21);

        // Release the intermediate buffer.
        yuvMat.release();
        return bgrMat;
    }

    /**
     * Repacks a YUV_420_888 {@link Image} into an NV21 byte array
     * (Y plane followed by interleaved V/U, V first).
     *
     * NOTE(review): assumes rowStride == width for all planes (no row padding);
     * devices that pad rows would need per-row copies — confirm on targets.
     */
    private static byte[] yuv420888ToNv21(Image image) {
        Image.Plane yPlane = image.getPlanes()[0];
        Image.Plane uPlane = image.getPlanes()[1];
        Image.Plane vPlane = image.getPlanes()[2];

        ByteBuffer yBuffer = yPlane.getBuffer();
        ByteBuffer uBuffer = uPlane.getBuffer();
        ByteBuffer vBuffer = vPlane.getBuffer();

        int ySize = yBuffer.remaining();
        int uSize = uBuffer.remaining();
        int vSize = vBuffer.remaining();

        byte[] nv21 = new byte[ySize + uSize + vSize];

        // Copy the luma plane verbatim.
        yBuffer.get(nv21, 0, ySize);

        // YUV_420_888 U/V planes may be interleaved (pixelStride == 2) or planar.
        int pixelStride = vPlane.getPixelStride();

        if (pixelStride == 2) {
            // U and V already interleave in memory; the V buffer, read from its
            // start, is exactly the VUVU... sequence NV21 expects.
            vBuffer.get(nv21, ySize, vSize);
        } else {
            // Planar layout (pixelStride == 1): interleave V and U manually.
            // FIX: the original only copied the V plane contiguously and never
            // wrote any U sample, producing a corrupted chroma plane.
            byte[] vData = new byte[vSize];
            byte[] uData = new byte[uSize];
            vBuffer.get(vData);
            uBuffer.get(uData);
            for (int i = 0; i < vSize && ySize + 2 * i < nv21.length; i++) {
                nv21[ySize + 2 * i] = vData[i];           // V first (NV21)
                if (i < uSize && ySize + 2 * i + 1 < nv21.length) {
                    nv21[ySize + 2 * i + 1] = uData[i];   // then U
                }
            }
        }
        return nv21;
    }


    /**
     * Pulls the decoded image for {@code outputBufferId}, runs it through the
     * OpenCV pipeline and draws the result onto the output surface via Canvas.
     */
    private void processFrame(int outputBufferId, MediaCodec.BufferInfo bufferInfo) {
        Image image = decoder.getOutputImage(outputBufferId);
        if (image != null) {
            try {
                // FIX: the original allocated `new Mat()` and immediately
                // overwrote the reference, leaking a native Mat every frame.
                Mat inputMat = imageToMat(image);                 // BGR
                Mat processedMat = applyOpenCVEffects(inputMat);  // RGBA

                // Render the processed Mat to the Surface.
                if (outputSurface != null) {
                    // FIX: applyOpenCVEffects already returns RGBA, which is what
                    // matToBitmap/ARGB_8888 expect; the original additionally ran
                    // COLOR_RGB2RGBA on the 4-channel Mat, which is invalid.
                    Bitmap bitmap = Bitmap.createBitmap(processedMat.cols(), processedMat.rows(), Bitmap.Config.ARGB_8888);
                    Utils.matToBitmap(processedMat, bitmap);

                    Canvas canvas = outputSurface.lockCanvas(null);
                    try {
                        canvas.drawBitmap(bitmap, 0, 0, null);
                    } finally {
                        // Always post the canvas, even if drawing throws,
                        // so the surface is not left locked.
                        outputSurface.unlockCanvasAndPost(canvas);
                    }

                    bitmap.recycle();
                }

                inputMat.release();
                processedMat.release();
            } finally {
                image.close();
            }
        }
    }

    /**
     * Example effect chain: grayscale -> Canny edge detection -> RGBA.
     *
     * @param input 3-channel BGR frame produced by {@link #imageToMat(Image)}
     * @return a new RGBA Mat owned by the caller (caller must release it)
     */
    private Mat applyOpenCVEffects(Mat input) {
        Mat gray = new Mat();
        Mat edges = new Mat();

        // FIX: input is 3-channel BGR (see imageToMat), but the original used
        // COLOR_RGBA2GRAY, which expects a 4-channel source Mat.
        Imgproc.cvtColor(input, gray, Imgproc.COLOR_BGR2GRAY);

        // Edge detection.
        Imgproc.Canny(gray, edges, 50, 150);

        // Convert the edge map back to RGBA for display.
        Mat result = new Mat();
        Imgproc.cvtColor(edges, result, Imgproc.COLOR_GRAY2RGBA);

        gray.release();
        edges.release();

        return result;
    }

    /** Releases the decoder and extractor. Safe to call more than once. */
    public void release() {
        if (decoder != null) {
            try {
                decoder.stop();
            } catch (IllegalStateException ignored) {
                // Codec may already be in an error/released state; still release.
            }
            decoder.release();
            decoder = null;
        }
        if (extractor != null) {
            extractor.release();
            extractor = null;
        }
    }
}
