// C / POSIX system headers
#include <dirent.h>
#include <sys/stat.h>
#include <unistd.h>

// C++ standard library
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

// FFmpeg (C API)
extern "C" {
    #include "libavutil/avutil.h"
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libavutil/imgutils.h"
    #include "libswscale/swscale.h"
    #include "libswresample/swresample.h"
    #include "libavfilter/avfilter.h"
    #include "libavdevice/avdevice.h"
}

using namespace std;

int videoToImagesMp4(const std::string &videoPath, const std::string &outputDir, int intervalMs)
{
    // av_register_all();

    AVFormatContext *fmtCtx = nullptr;
    if (avformat_open_input(&fmtCtx, videoPath.c_str(), nullptr, nullptr) != 0) {
        std::cerr << "Failed to open video: " << videoPath << std::endl;
        return -1;
    }

    if (avformat_find_stream_info(fmtCtx, nullptr) < 0) {
        std::cerr << "Failed to find stream info\n";
        avformat_close_input(&fmtCtx);
        return -2;
    }

    int videoStreamIndex = -1;
    for (unsigned int i = 0; i < fmtCtx->nb_streams; ++i) {
        if (fmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            break;
        }
    }

    if (videoStreamIndex == -1) {
        std::cerr << "No video stream found\n";
        avformat_close_input(&fmtCtx);
        return -3;
    }

    AVCodecParameters *codecPar = fmtCtx->streams[videoStreamIndex]->codecpar;
    const AVCodec *codec = avcodec_find_decoder(codecPar->codec_id);
    if (!codec) {
        std::cerr << "Unsupported codec\n";
        avformat_close_input(&fmtCtx);
        return -4;
    }

    AVCodecContext *codecCtx = avcodec_alloc_context3(codec);
    avcodec_parameters_to_context(codecCtx, codecPar);

    if (avcodec_open2(codecCtx, codec, nullptr) < 0) {
        std::cerr << "Could not open codec\n";
        avcodec_free_context(&codecCtx);
        avformat_close_input(&fmtCtx);
        return -5;
    }

    AVPacket *packet = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();
    AVFrame *rgbFrame = av_frame_alloc();

    int width = codecCtx->width;
    int height = codecCtx->height;
    SwsContext *swsCtx = sws_getContext(width, height, codecCtx->pix_fmt,
                                        width, height, AV_PIX_FMT_RGB24,
                                        SWS_BICUBIC, nullptr, nullptr, nullptr);

    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, width, height, 1);
    uint8_t *buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer,
                         AV_PIX_FMT_RGB24, width, height, 1);

    AVRational timeBase = fmtCtx->streams[videoStreamIndex]->time_base;
    int64_t intervalPts = av_rescale_q(intervalMs, AVRational{1, 1000}, timeBase);
    int64_t nextPts = 0;
    int frameCount = 0;

    char filename[512];

    while (av_read_frame(fmtCtx, packet) >= 0) {
        if (packet->stream_index == videoStreamIndex) {
            if (avcodec_send_packet(codecCtx, packet) == 0) {
                while (avcodec_receive_frame(codecCtx, frame) == 0) {
                    if (frame->pts >= nextPts) {
                        sws_scale(swsCtx, frame->data, frame->linesize, 0, height,
                                  rgbFrame->data, rgbFrame->linesize);

                        snprintf(filename, sizeof(filename), "%s/frame_%05d.jpg", outputDir.c_str(), frameCount++);
                        FILE *jpgFile = fopen(filename, "wb");
                        if (jpgFile) {
                            const AVCodec *jpegCodec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
                            AVCodecContext *jpegCtx = avcodec_alloc_context3(jpegCodec);
                            jpegCtx->bit_rate = 400000;
                            jpegCtx->width = width;
                            jpegCtx->height = height;
                            jpegCtx->pix_fmt = AV_PIX_FMT_RGB24;
                            jpegCtx->time_base = (AVRational){1, 25};

                            if (avcodec_open2(jpegCtx, jpegCodec, nullptr) == 0) {
                                AVPacket *jpegPacket = av_packet_alloc();
                                if (!jpegPacket) return -6;

                                if (avcodec_send_frame(jpegCtx, rgbFrame) == 0) {
                                    if (avcodec_receive_packet(jpegCtx, jpegPacket) == 0) {
                                        fwrite(jpegPacket->data, 1, jpegPacket->size, jpgFile);
                                        av_packet_unref(jpegPacket);
                                    }
                                }
                                av_packet_free(&jpegPacket);
                                avcodec_free_context(&jpegCtx);
                            }
                            fclose(jpgFile);
                        }

                        nextPts = frame->pts + intervalPts;
                    }
                }
            }
        }
        av_packet_unref(packet);
    }

    av_free(buffer);
    sws_freeContext(swsCtx);
    av_frame_free(&frame);
    av_frame_free(&rgbFrame);
    av_packet_free(&packet);
    avcodec_free_context(&codecCtx);
    avformat_close_input(&fmtCtx);

    return frameCount;
}

int videoToImages(const std::string& videoPath, const std::string& outputDir, int intervalMs) {
    // avformat_network_init();
    AVFormatContext* fmtCtx = nullptr;
    if (avformat_open_input(&fmtCtx, videoPath.c_str(), nullptr, nullptr) < 0) {
        std::cerr << "Failed to open video: " << videoPath << std::endl;
        return -1;
    }

    if (avformat_find_stream_info(fmtCtx, nullptr) < 0) {
        std::cerr << "Failed to find stream info." << std::endl;
        avformat_close_input(&fmtCtx);
        return -2;
    }

    int videoStream = -1;
    for (unsigned i = 0; i < fmtCtx->nb_streams; i++) {
        if (fmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }

    if (videoStream == -1) {
        std::cerr << "No video stream found." << std::endl;
        avformat_close_input(&fmtCtx);
        return -3;
    }

    AVCodecParameters* codecpar = fmtCtx->streams[videoStream]->codecpar;
    const AVCodec* decoder = avcodec_find_decoder(codecpar->codec_id);
    AVCodecContext* decoderCtx = avcodec_alloc_context3(decoder);
    avcodec_parameters_to_context(decoderCtx, codecpar);
    avcodec_open2(decoderCtx, decoder, nullptr);

    // JPEG encoder
    const AVCodec* jpegCodec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
    if (!jpegCodec) {
        std::cerr << "JPEG encoder not found!" << std::endl;
        return -4;
    }

    AVCodecContext* jpegCtx = avcodec_alloc_context3(jpegCodec);
    jpegCtx->bit_rate = 400000;
    jpegCtx->width = decoderCtx->width;
    jpegCtx->height = decoderCtx->height;
    jpegCtx->pix_fmt = AV_PIX_FMT_YUVJ420P;  // JPEG支持的格式
    jpegCtx->time_base = {1, 25};

    if (avcodec_open2(jpegCtx, jpegCodec, nullptr) < 0) {
        std::cerr << "Failed to open JPEG encoder." << std::endl;
        return -5;
    }

    AVPacket* packet = av_packet_alloc();
    AVFrame* frame = av_frame_alloc();
    AVFrame* yuvFrame = av_frame_alloc();

    // 输出图像帧所需的缓冲
    yuvFrame->format = jpegCtx->pix_fmt;
    yuvFrame->width = jpegCtx->width;
    yuvFrame->height = jpegCtx->height;
    av_frame_get_buffer(yuvFrame, 0);

    SwsContext* swsCtx = sws_getContext(
        decoderCtx->width, decoderCtx->height, decoderCtx->pix_fmt,
        jpegCtx->width, jpegCtx->height, jpegCtx->pix_fmt,
        SWS_BICUBIC, nullptr, nullptr, nullptr);

    int64_t lastPtsMs = 0;
    int frameCount = 0;

    AVRational timeBase = fmtCtx->streams[videoStream]->time_base;

    while (av_read_frame(fmtCtx, packet) >= 0) {
        if (packet->stream_index == videoStream) {
            if (avcodec_send_packet(decoderCtx, packet) >= 0) {
                while (avcodec_receive_frame(decoderCtx, frame) >= 0) {
                    int64_t ptsMs = av_rescale_q(frame->pts, timeBase, {1, 1000});
                    if (ptsMs - lastPtsMs >= intervalMs) {
                        sws_scale(swsCtx, frame->data, frame->linesize, 0,
                                  decoderCtx->height, yuvFrame->data, yuvFrame->linesize);

                        // 编码 JPEG
                        AVPacket jpgPkt;
                        av_init_packet(&jpgPkt);
                        jpgPkt.data = nullptr;
                        jpgPkt.size = 0;

                        if (avcodec_send_frame(jpegCtx, yuvFrame) >= 0) {
                            if (avcodec_receive_packet(jpegCtx, &jpgPkt) >= 0) {
                                char filename[512];
                                snprintf(filename, sizeof(filename), "%s/%10d.jpg", outputDir.c_str(), frameCount++);
                                FILE* outFile = fopen(filename, "wb");
                                if (outFile) {
                                    fwrite(jpgPkt.data, 1, jpgPkt.size, outFile);
                                    fclose(outFile);
                                }
                                av_packet_unref(&jpgPkt);
                            }
                        }

                        lastPtsMs = ptsMs;
                    }
                }
            }
        }
        av_packet_unref(packet);
    }

    sws_freeContext(swsCtx);
    av_frame_free(&frame);
    av_frame_free(&yuvFrame);
    av_packet_free(&packet);
    avcodec_free_context(&decoderCtx);
    avcodec_free_context(&jpegCtx);
    avformat_close_input(&fmtCtx);

    return 0;
}


int main()
{
    // Extract one frame every 500 ms from test.mp4 into ./output.
    // Propagate failure through the process exit status — the previous
    // revision silently discarded the error return.
    const int rc = videoToImages("test.mp4", "output", 500);
    if (rc < 0) {
        std::cerr << "videoToImages failed with code " << rc << std::endl;
        return 1;
    }
    return 0;
}
