#include <stdio.h>

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>

int main(int argc, char **argv)
{
    const char *filename;
    filename = argv[1];
    avformat_network_init();

    AVFormatContext *formatContext = avformat_alloc_context();
    //if (avformat_open_input(&formatContext, "rtsp://your_rtsp_url", NULL, NULL) != 0) {
    if (avformat_open_input(&formatContext, filename, NULL, NULL) != 0) {
        printf("Failed to open RTSP stream\n");
        return -1;
    }
    if (avformat_find_stream_info(formatContext, NULL) < 0) {
        printf("Failed to find stream information\n");
        return -1;
    }

    int videoStreamIndex = -1;
    for (int i = 0; i < formatContext->nb_streams; i++) {
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            break;
        }
    }
    if (videoStreamIndex == -1) {
        printf("Failed to find video stream\n");
        return -1;
    }

    AVCodecParameters *codecParameters = formatContext->streams[videoStreamIndex]->codecpar;
    AVCodec *codec = avcodec_find_decoder(codecParameters->codec_id);
    AVCodecContext *codecContext = avcodec_alloc_context3(codec);
    avcodec_parameters_to_context(codecContext, codecParameters);
    if (avcodec_open2(codecContext, codec, NULL) < 0) {
        printf("Failed to open codec\n");
        return -1;
    }

    AVPacket *packet = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();

    int frameCount = 0;
    printf ("code pix_fmt:%d", codecContext->pix_fmt);
    struct SwsContext *swsContext = sws_getContext(
        codecContext->width, codecContext->height, codecContext->pix_fmt,
        codecContext->width, codecContext->height, AV_PIX_FMT_RGB24,
        SWS_BILINEAR, NULL, NULL, NULL);
    while (av_read_frame(formatContext, packet) >= 0 && frameCount < 5) {
        if (packet->stream_index == videoStreamIndex) {
            avcodec_send_packet(codecContext, packet);
            while (avcodec_receive_frame(codecContext, frame) == 0) {
              AVFrame *frameRGB = av_frame_alloc();
              frameRGB->format = AV_PIX_FMT_RGB24;
              frameRGB->width = frame->width;
              frameRGB->height = frame->height;
                // 分配内存用于保存RGB图像数据
                uint8_t *rgbBuffer = av_malloc(
                    av_image_get_buffer_size(AV_PIX_FMT_RGB24, frame->width,
                      frame->height, 1));
                // 关联RGB图像数据和帧对象
                av_image_fill_arrays(frame->data, frame->linesize, rgbBuffer,
                    AV_PIX_FMT_RGB24, frameRGB->width, frameRGB->height, 1);
                // 将YUV图像转换为RGB图像
                sws_scale(swsContext, frame->data, frame->linesize, 0,
                    frameRGB->height, frameRGB->data, frameRGB->linesize);
                // 将RGB图像保存为PPM文件
                char filename[20];
                sprintf(filename, "frame%d.ppm", frameCount);
                FILE *file = fopen(filename, "wb");
                fprintf(file, "P6\n%d %d\n255\n",
                    codecContext->width, codecContext->height);
                fwrite(rgbBuffer, 1,
                    av_image_get_buffer_size(AV_PIX_FMT_RGB24, codecContext->width,
                      codecContext->height, 1), file);
                fclose(file);
                av_freep(&rgbBuffer);
                frameCount++;
            }
        }
        av_packet_unref(packet);
    }

    sws_freeContext(swsContext);
    av_frame_free(&frame);
    av_packet_free(&packet);
    avcodec_close(codecContext);
    avformat_close_input(&formatContext);

    return 0;
}

