#include <jni.h>
#include <string>
#include <iostream>
#include <queue>
#include <mutex>              // std::mutex, std::unique_lock
#include <condition_variable> // std::condition_variable
#include <cstring>            // strcpy/strncpy, memcpy
#include <cstdio>             // snprintf

extern "C" {
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"//SwsContext
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"
#include "libavutil/frame.h"
#include "libavutil/time.h"
}

#include <android/native_window.h>//ANativeWindow
#include <android/native_window_jni.h>
#include <android/log.h>
#include <time.h>
#include <unistd.h>
#include <pthread.h>          // pthread_create / pthread_t

#define LOG_TAG "ANSION"  // 定义日志的Tag
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)

// --- State for the standalone producer/consumer demo (produce()/consume()) ---
std::queue<std::string> testQueue;      // items handed from producer to consumer
std::mutex testMutex;                   // guards testQueue and testFinished
std::condition_variable testCondition;  // signals "item available" / "producer done"
bool testFinished = false;              // set by produce() after its last item
// Producer side of the demo: pushes "1".."5" into testQueue, one item every
// 300 ms, then announces completion. pthread entry point; `arg` is unused.
// Always returns nullptr.
void *produce(void* arg){
    for (int i = 1; i <= 5; ++i) {
        {// Scope the lock so it is released before sleeping.
            std::unique_lock<std::mutex> lock(testMutex);
            LOGD("produce :%d",i);
            testQueue.push(std::to_string(i));
            testCondition.notify_one();
        }
        usleep(300*1000);
    }
    {
        // BUG FIX: testFinished must be written while holding testMutex;
        // the unguarded store raced with the consumer's wait() predicate and
        // could lose the final wake-up.
        std::unique_lock<std::mutex> lock(testMutex);
        testFinished = true;
    }
    testCondition.notify_all();
    return nullptr;
}
// Consumer side of the demo: pops and logs strings from testQueue until the
// producer signals completion AND the queue is drained. pthread entry point;
// `arg` is unused. Always returns nullptr.
void *consume(void* arg) {
    std::unique_lock<std::mutex> lock(testMutex);
    while (true) {
        // Blocks until there is data or the producer is done; wait() only
        // returns (with the lock re-acquired) when the predicate is true.
        testCondition.wait(lock, [] { return !testQueue.empty() || testFinished; });
        // BUG FIX: drain the queue before honouring testFinished — the
        // original broke out as soon as the flag was set, dropping any items
        // still queued.
        if (testQueue.empty()) {
            if (testFinished) break;
            continue;
        }
        std::string str = testQueue.front();
        testQueue.pop();
        LOGD("consume :%s", str.c_str());
    }
    return nullptr;
}

extern "C" {
// --- Global player state, shared between JNI entry points and the threads ---
AVFormatContext *formatContext = nullptr;  // demuxer context for the open file
AVCodecContext *codecContext = nullptr;    // video decoder context
ANativeWindow *nativeWindow = nullptr;     // render target from the Java Surface
SwsContext *swsContext = nullptr;          // pixel-format converter to RGBA
int videoStreamIndex = -1;                 // first video stream index, -1 = none found
// Struct used to pass multiple parameters into the decode thread.
typedef struct {
    char path[256];                        // NUL-terminated media file path
} ThreadArgs;
// One decoded picture plus its presentation timestamp, queued for rendering.
struct Frame {
    AVFrame *frame;                        // freed by the play thread after rendering
    int64_t pts;
};
std::queue<Frame> frameQueue;              // decode -> play hand-off queue
std::mutex queueMutex;                     // guards frameQueue and finished
std::condition_variable queueCondition;    // wakes the play thread
bool finished = false;                     // set by the decode thread at EOF
int fps = 30;                              // default pacing rate; refined from the stream
AVFrame *rgbFrame;                         // reusable RGBA destination frame
int i = 0;                                 // decoded-frame counter used for pacing
// JNI entry point: binds the native renderer to the Java Surface.
// Stores the ANativeWindow in the global `nativeWindow` (released in stop()).
// Returns 0 on success, -1 when no native window could be obtained.
JNIEXPORT jint JNICALL
Java_com_algento_jnitest_MainActivity_init(JNIEnv *env, jobject thiz, jobject surface) {
    nativeWindow = ANativeWindow_fromSurface(env, surface);
    if (nativeWindow == nullptr) {
        return -1; // invalid surface
    }
    LOGE("PLAY INIT !");
    return 0;
}

// Decode thread entry point.
// `arg` is a heap-allocated ThreadArgs* whose ownership transfers here.
// Opens the file, locates the video stream, sets up decoder/scaler/window,
// then pushes decoded AVFrames into frameQueue for playThreadFunction while
// pacing itself to roughly the stream frame rate. Always returns nullptr;
// failures are logged and abort the thread early.
void *decodeThreadFunction(void *arg) {
    ThreadArgs *args = (ThreadArgs *) arg;
    // BUG FIX: copy the path out BEFORE deleting args — the original kept a
    // pointer into the freed struct and handed it to avformat_open_input
    // (use-after-free).
    std::string pathStr(args->path);
    delete args;
    const char *videoPath = pathStr.c_str();
    LOGD("decodeThreadFunction :%s", videoPath);

    formatContext = avformat_alloc_context();
    // Open the media file.
    if (avformat_open_input(&formatContext, videoPath, nullptr, nullptr) != 0) {
        LOGE("PLAY FAIL avformat_open_input FAIL!");
        return nullptr;
    }
//    formatContext->probesize = 4096;
//    formatContext->max_analyze_duration = 1500;
    // Probe the file to discover stream parameters.
    if (avformat_find_stream_info(formatContext, nullptr) < 0) {
        LOGE("PLAY FAIL avformat_find_stream_info FAIL!");
        return nullptr;
    }
    // Redirect FFmpeg's internal logging to logcat.
    auto custom_output = [](void *ptr, int level, const char *fmt, va_list vargs) {
        __android_log_vprint(ANDROID_LOG_INFO, "FFMpeg", fmt, vargs);
    };
    av_log_set_callback(custom_output);
    av_dump_format(formatContext, 0, videoPath, 0);
    // Locate the first video stream (unsigned index: nb_streams is unsigned).
    for (unsigned int s = 0; s < formatContext->nb_streams; ++s) {
        if (formatContext->streams[s]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = (int) s;
            break;
        }
    }
    if (videoStreamIndex == -1) {
        LOGE("PLAY FAIL videoStreamIndex == -1 !");
        return nullptr;
    }
    AVStream *stream = formatContext->streams[videoStreamIndex];
    // Find and open a decoder matching the stream's codec.
    AVCodecParameters *codecPar = stream->codecpar;
    const AVCodec *codec = avcodec_find_decoder(codecPar->codec_id);
    if (!codec) {
        LOGE("PLAY FAIL avcodec_find_decoder FAIL !");
        return nullptr;
    }
    codecContext = avcodec_alloc_context3(codec);
    if (!codecContext) {
        LOGE("PLAY FAIL avcodec_alloc_context3 FAIL !");
        return nullptr;
    }
    if (avcodec_parameters_to_context(codecContext, codecPar) < 0) {
        LOGE("PLAY FAIL avcodec_parameters_to_context FAIL !");
        return nullptr;
    }
    if (avcodec_open2(codecContext, codec, nullptr) < 0) {
        LOGE("PLAY FAIL avcodec_open2 FAIL !");
        return nullptr;
    }
    // RGBA staging frame + backing pixel buffer (released in stop()).
    rgbFrame = av_frame_alloc();
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, codecContext->width,
                                            codecContext->height, 1);
    uint8_t *buffer = static_cast<uint8_t *>(av_malloc((size_t) numBytes));
    av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer, AV_PIX_FMT_RGBA,
                         codecContext->width, codecContext->height, 1);
    swsContext = sws_getContext(codecContext->width, codecContext->height, codecContext->pix_fmt,
                                codecContext->width, codecContext->height, AV_PIX_FMT_RGBA,
                                SWS_FAST_BILINEAR,
                                nullptr, nullptr, nullptr);
    if (!swsContext) {
        LOGD("Failed to allocate swsContext");
        return nullptr;
    }
    ANativeWindow_setBuffersGeometry(nativeWindow, codecContext->width, codecContext->height,
                                     WINDOW_FORMAT_RGBA_8888);
    // BUG FIX: keep the default fps if the stream reports a zero/invalid
    // frame rate — the original divided by it unchecked below.
    int streamFps = (int) av_q2d(stream->r_frame_rate);
    if (streamFps > 0) {
        fps = streamFps;
    }
    AVPacket *packet = av_packet_alloc();
    if (!packet) {
        LOGD("Failed to allocate AVPacket");
        return nullptr;
    }
    i = 0;  // BUG FIX: reset the pacing counter for each playback run
    int64_t startTime = av_gettime();
    while (av_read_frame(formatContext, packet) >= 0) {
        if (packet->stream_index == videoStreamIndex) {
            if (avcodec_send_packet(codecContext, packet) == 0) {
                while (true) {
                    // BUG FIX: allocate a FRESH frame per decoded picture.
                    // The original pushed the same AVFrame* once per
                    // receive_frame() success, so any packet yielding more
                    // than one frame caused double-frees in the play thread
                    // (and the frame itself leaked — its free was commented
                    // out).
                    AVFrame *frame = av_frame_alloc();
                    if (!frame) {
                        LOGD("Failed to allocate AVFrame");
                        break;
                    }
                    if (avcodec_receive_frame(codecContext, frame) != 0) {
                        av_frame_free(&frame);
                        break;
                    }
                    {
                        std::unique_lock<std::mutex> lock(queueMutex);
                        // Hand ownership of `frame` to the queue; the play
                        // thread frees it after rendering.
                        frameQueue.push({frame, frame->pts});
                        LOGD("push AVFrame to queue");
                    }
                    // Wake the play thread: a new frame is available.
                    queueCondition.notify_one();

                    // Pace decoding so the queue fills at roughly real time.
                    ++i;
                    int64_t target_time = startTime + i * (1000000 / fps);
                    int64_t current_time = av_gettime();
                    int64_t delay = target_time - current_time;

                    // BUG FIX: int64_t needs %lld (via cast) — %ld is wrong
                    // on 32-bit ABIs.
                    LOGD("Calculated delay: %lld", (long long) delay);
                    if (delay > 0) {
                        usleep(delay);
                    } else {
                        usleep(1000000 / 30);
                    }
                }
            }
        }
        av_packet_unref(packet);
    }

    // End of file: tell the play thread no more frames are coming.
    {
        // BUG FIX: publish `finished` under queueMutex so the play thread's
        // wait() predicate cannot race with the store and miss the final
        // notification.
        std::unique_lock<std::mutex> lock(queueMutex);
        finished = true;
    }
    queueCondition.notify_all();

    av_packet_free(&packet);
    return nullptr;
}

// Play thread entry point. Pops decoded frames from frameQueue, converts them
// to RGBA with swsContext and blits them into the ANativeWindow, looping until
// the decode thread sets `finished` and the queue is drained. `arg` is unused.
// Always returns nullptr.
void *playThreadFunction(void *arg) {
    ANativeWindow_Buffer windowBuffer;

    while (true) {
        Frame frame{nullptr, 0};  // BUG FIX: was left uninitialized
        {
            std::unique_lock<std::mutex> lock(queueMutex);
            queueCondition.wait(lock, [] { return !frameQueue.empty() || finished; });
            if (finished && frameQueue.empty()) {
                LOGD("finish !");
                break;
            }
            LOGD("get AVFrame from queue!");
            if (!frameQueue.empty()) {
                frame = frameQueue.front();
                frameQueue.pop();
            }
        }
        // BUG FIX: the original fell through and used the (uninitialized)
        // frame even when nothing was popped; skip this iteration instead.
        if (!frame.frame) {
            continue;
        }
        LOGD("before sws_scale!");
        // Convert the decoded picture (typically YUV) to RGBA.
        sws_scale(swsContext, frame.frame->data, frame.frame->linesize, 0, codecContext->height,
                  rgbFrame->data, rgbFrame->linesize);
        LOGD("after sws_scale!");
        // Render into the window buffer row by row.
        if (ANativeWindow_lock(nativeWindow, &windowBuffer, nullptr) == 0) {
            uint8_t *dst = (uint8_t *) windowBuffer.bits;
            uint8_t *src = rgbFrame->data[0];
            int dstStride = windowBuffer.stride * 4;  // stride is in pixels, 4 bytes each
            int srcStride = rgbFrame->linesize[0];
            // BUG FIX: copy at most one destination row per line — copying
            // srcStride bytes overflowed the window buffer whenever
            // dstStride < srcStride.
            int rowBytes = srcStride < dstStride ? srcStride : dstStride;
            for (int h = 0; h < codecContext->height; h++) {
                memcpy(dst + h * dstStride, src + h * srcStride, rowBytes);
            }
            LOGD("start to render on window!");
            ANativeWindow_unlockAndPost(nativeWindow);
        }
        // This thread owns the popped frame; release it after rendering.
        av_frame_free(&frame.frame);
    }
    return nullptr;
}
pthread_t decodeThread, playThread;
void *startDecodeAndPlay(void *arg) {
    ThreadArgs *args;
    args = static_cast<ThreadArgs *>(arg);
    // 获取参数
    const char *videoPath = args->path;
    LOGD("startDecodeAndPlay :%s",videoPath);
    // 启动解码线程
    if (pthread_create(&decodeThread, nullptr, decodeThreadFunction, args) != 0) {
        LOGD("Failed to create decode thread");
        return nullptr;
    }
    // 启动播放线程
    if (pthread_create(&playThread, nullptr, playThreadFunction, nullptr) != 0) {
        LOGD("Failed to create play thread");
        return nullptr;
    }
    // 等待线程完成
//    pthread_join(decodeThread, nullptr);
//    pthread_join(playThread, nullptr);
    LOGE("PLAY SUCCESS !");
    return NULL;
}

// JNI entry point: starts playback of the media file at `path`.
// Copies the Java string into a heap-allocated ThreadArgs and kicks off the
// decode/play threads via startDecodeAndPlay (which owns the args from then
// on). Paths longer than 255 bytes are truncated.
JNIEXPORT void JNICALL
Java_com_algento_jnitest_MainActivity_play(JNIEnv *env, jobject thiz, jstring path) {
    const char *videoPath = env->GetStringUTFChars(path, nullptr);
    if (!videoPath) {
        return;  // BUG FIX: GetStringUTFChars can return null (OOM)
    }
    LOGD("Native string: %s", videoPath);
    ThreadArgs *args = new ThreadArgs();
    // BUG FIX: bounded copy — the original strcpy overflowed args->path for
    // paths of 256 bytes or more. snprintf always NUL-terminates.
    snprintf(args->path, sizeof(args->path), "%s", videoPath);
    startDecodeAndPlay(args);
    env->ReleaseStringUTFChars(path, videoPath);
}
// JNI entry point: releases the global decoding/rendering resources.
// NOTE(review): this does not stop or join the decode/play threads; calling
// it while they are running frees contexts they may still be using. A proper
// shutdown (set `finished` under queueMutex, notify, then pthread_join both
// threads) should precede this teardown — confirm against the Java caller.
JNIEXPORT void JNICALL
Java_com_algento_jnitest_MainActivity_stop(JNIEnv *env, jobject thiz) {
    if (codecContext) {
        avcodec_free_context(&codecContext);  // also nulls codecContext
    }

    if (formatContext) {
        avformat_close_input(&formatContext);  // also nulls formatContext
    }

    if (swsContext) {
        sws_freeContext(swsContext);
        swsContext = nullptr;
    }

    // BUG FIX: the RGBA staging frame and its av_malloc'd pixel buffer were
    // leaked by the original stop(). data[0] is the buffer installed by
    // av_image_fill_arrays in the decode thread.
    if (rgbFrame) {
        if (rgbFrame->data[0]) {
            av_free(rgbFrame->data[0]);
        }
        av_frame_free(&rgbFrame);  // also nulls rgbFrame
    }

    if (nativeWindow) {
        ANativeWindow_release(nativeWindow);
        nativeWindow = nullptr;
    }
    LOGE("PLAY STOP !");
}
}