//
// Created by shen on 2024/10/30.
//

#include <in6addr.h>
#include "VideoDecoder.h"
#include <png.h>
/**
 * Dump one packed RGB24 frame to a binary PPM (P6) file on disk.
 *
 * @param pFrame  frame whose data[0] plane holds packed RGB24 pixels
 *                (3 bytes per pixel). NOTE(review): raw decoder output is
 *                typically planar YUV — caller must convert first; confirm.
 * @param width   frame width in pixels
 * @param height  frame height in pixels
 * @param iFrame  sequence number used in the output filename
 */
void VideoDecoder::saveFrame(AVFrame *pFrame, int width, int height, int iFrame)
{
    FILE *pFile;
    char szFilename[64];
    int y;

    // Build the output path; snprintf bounds the write (the old sprintf into
    // a 32-byte buffer could overflow for large/negative frame numbers).
    snprintf(szFilename, sizeof(szFilename), "D:\\cutpic\\frame%d.ppm", iFrame);
    pFile = fopen(szFilename, "wb");
    if (pFile == NULL)
        return;

    // PPM header: magic, dimensions, max channel value.
    fprintf(pFile, "P6\n%d %d\n255\n", width, height);

    // Rows may be padded: copy exactly width*3 bytes per row, stepping by
    // the plane's linesize (stride), not by width.
    for (y = 0; y < height; y++)
        fwrite(pFrame->data[0] + y * pFrame->linesize[0], 1, (size_t)width * 3, pFile);

    fclose(pFile);
}

/**
 * Convert one decoded YUV frame to RGB24 and write it out as a PNG file,
 * also forwarding the RGB frame to the video renderer.
 *
 * @param pFrame     decoded source frame (pixel format from pCodecCtx)
 * @param pCodecCtx  codec context providing width/height/pix_fmt
 * @param i          sequence number used in the output filename
 * @return 1 on success, -1 on any failure
 *
 * Fixes vs. the previous version:
 *  - fopen() result is checked before use
 *  - out_buffer (av_malloc) is released — it previously leaked every frame
 *  - pFrameRGB / png structs / fp are released on every error path
 *  - the non-standard VLA of row pointers is replaced by png_write_row()
 *  - a failed out_buffer allocation no longer returns success (1)
 */
int VideoDecoder::yuv_png(AVFrame *pFrame, AVCodecContext *pCodecCtx, int i) {
    const int width = pCodecCtx->width;
    const int height = pCodecCtx->height;

    char filename[1024];
    snprintf(filename, sizeof(filename), "D:\\cutpic\\frame%d.png", i);

    FILE *fp = fopen(filename, "wb");
    if (fp == NULL) {
        printf("fopen error\r\n");
        return -1;
    }

    AVFrame *pFrameRGB = av_frame_alloc();
    uint8_t *out_buffer = (uint8_t *)av_malloc(
            av_image_get_buffer_size(AV_PIX_FMT_RGB24, width, height, 1));
    png_structp png_ptr = png_create_write_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
    png_infop info_ptr = (png_ptr != NULL) ? png_create_info_struct(png_ptr) : NULL;

    // volatile: ret is read after a possible longjmp back into setjmp().
    volatile int ret = -1;

    // setjmp() must be armed before any libpng call that can longjmp here.
    if (pFrameRGB != NULL && out_buffer != NULL && png_ptr != NULL && info_ptr != NULL
            && !setjmp(png_jmpbuf(png_ptr))) {
        av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, out_buffer,
                             AV_PIX_FMT_RGB24, width, height, 1);

        struct SwsContext *img_convert_ctx = sws_getContext(
                width, height, pCodecCtx->pix_fmt,
                width, height, AV_PIX_FMT_RGB24,
                SWS_BICUBIC, NULL, NULL, NULL);
        if (img_convert_ctx == NULL) {
            printf("sws_getContext error\r\n");
        } else {
            // Convert the whole frame to packed RGB24.
            sws_scale(img_convert_ctx, (const uint8_t *const *)pFrame->data,
                      pFrame->linesize, 0, height, pFrameRGB->data, pFrameRGB->linesize);
            sws_freeContext(img_convert_ctx);

            m_VideoRender->RenderVideoFrame(pFrameRGB);

            png_init_io(png_ptr, fp);
            png_set_IHDR(png_ptr, info_ptr, width, height, 8, PNG_COLOR_TYPE_RGB,
                         PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_BASE, PNG_FILTER_TYPE_BASE);
            png_write_info(png_ptr, info_ptr);

            // Write row by row, honoring the RGB plane's stride; this avoids
            // the previous variable-length row-pointer array (non-standard C++).
            for (int y = 0; y < height; y++)
                png_write_row(png_ptr, pFrameRGB->data[0] + y * pFrameRGB->linesize[0]);

            png_write_end(png_ptr, NULL);
            printf("save frame%d.png\r\n", i);
            ret = 1;
        }
    } else {
        printf("yuv_png setup error (alloc/libpng)\r\n");
    }

    // Single cleanup path for success and every failure mode.
    if (png_ptr != NULL)
        png_destroy_write_struct(&png_ptr, (info_ptr != NULL) ? &info_ptr : NULL);
    if (out_buffer != NULL)
        av_free(out_buffer);               // was leaked on every call before
    av_frame_free(&pFrameRGB);             // safe on NULL
    fclose(fp);
    return ret;
}


// Global frame counter incremented once per decoded frame in
// OnFrameAvailable() to number the dumped image files.
// NOTE(review): consider making this a VideoDecoder member — a file-scope
// global named `i` is shared across all decoder instances.
int i=0;
/**
 * Convert a planar YUV420P buffer to packed RGB24 (BT.601 coefficients).
 *
 * @param yuv420p  input: width*height Y bytes, then width*height/4 U bytes,
 *                 then width*height/4 V bytes (width/height assumed even)
 * @param rgb24    output: width*height*3 bytes, R,G,B per pixel
 * @param width    frame width in pixels
 * @param height   frame height in pixels
 *
 * Fix: converted channel values are now clamped to [0,255]. Previously the
 * double result was implicitly narrowed to unsigned char, so out-of-range
 * values wrapped (e.g. 433 -> 177), corrupting saturated colors.
 */
void YUV420P_TO_RGB24(unsigned char *yuv420p, unsigned char *rgb24, int width, int height) {
    // Clamp-then-truncate, matching the original truncation for in-range values.
    auto clamp_u8 = [](double v) -> unsigned char {
        if (v <= 0.0) return 0;
        if (v >= 255.0) return 255;
        return (unsigned char)v;
    };

    const int frameSize = width * height;
    int out = 0;
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            // 2x2 luma pixels share one U and one V sample (4:2:0 subsampling).
            const int indexY = y * width + x;
            const int indexU = frameSize + y / 2 * width / 2 + x / 2;
            const int indexV = frameSize + frameSize / 4 + y / 2 * width / 2 + x / 2;

            const int Y = yuv420p[indexY];
            const int U = yuv420p[indexU];
            const int V = yuv420p[indexV];

            rgb24[out++] = clamp_u8(Y + 1.402 * (V - 128));                           // R
            rgb24[out++] = clamp_u8(Y - 0.34413 * (U - 128) - 0.71414 * (V - 128));   // G
            rgb24[out++] = clamp_u8(Y + 1.772 * (U - 128));                           // B
        }
    }
}


//bool RGB24_TO_PNG(const char *filename, u_char *rgbData, int width, int height,
//                           int quality) {
//    FILE *fp;
//    png_structp png_ptr = NULL;
//    png_infop info_ptr = NULL;
//    fp = fopen(filename, "wb");
//    if (fp) {
//        png_ptr = png_create_write_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
//        info_ptr = png_create_info_struct(png_ptr);
//        if (png_ptr == NULL || info_ptr == NULL) {
//            fclose(fp);
//            return false;
//        }
//        png_init_io(png_ptr, fp);
//        png_set_IHDR(png_ptr, info_ptr, width, height, 8, PNG_COLOR_TYPE_RGB, PNG_INTERLACE_NONE,
//                     PNG_COMPRESSION_TYPE_BASE, PNG_FILTER_TYPE_BASE);
//        png_write_info(png_ptr, info_ptr);
//        png_set_packing(png_ptr);
//        uint8_t *data = new uint8_t[width * 3];
//        memset(data, 255, width * 3);
//        int nv_start = width * height;
//        uint32_t rgb_index = 0, pos;
//        int r, g, b, nv_index, y, ru, rv;
//        for (uint32_t i = 0; i < height; ++i) {
//            for (uint32_t j = 0; j < width; ++j) {
//                data[j * 3 + 0] = rgbData[rgb_index++];
//                data[j * 3 + 1] = rgbData[rgb_index++];
//                data[j * 3 + 2] = rgbData[rgb_index++];
//            }
//            png_write_row(png_ptr, data);
//        }
//
//        delete data;
//        png_write_end(png_ptr, info_ptr);
//        png_destroy_write_struct(&png_ptr, &info_ptr);
//        fclose(fp);
//        return true;
//    }
//    return false;
//}
//
//
//void YUV420P_TO_PNG(const char *yuvPath, const char *pngPath, int width, int height) {
//    FILE *file = fopen(yuvPath, "rb");
//
//    unsigned char *yuv420p = new unsigned char[width * height * 3 / 2];
//
//    fread(yuv420p, 1, width * height * 3 / 2, file);
//
//    unsigned char *rgb24 = new unsigned char[width * height * 3];
//
//    //YUV420P转RGB24
//    YUV420P_TO_RGB24(yuv420p, rgb24, width, height);
//
//    //RGB24转PNG文件
//    RGB24_TO_PNG(pngPath, rgb24, width, height, 100);
//
//    //释放内存
//    delete[] yuv420p;
//    delete[] rgb24;
//
//    //关闭文件句柄
//    fclose(file);
//}

// Per-frame decoder callback: logs frame metadata and stream timing, then
// dumps the frame to a numbered PNG via yuv_png() (also renders it there).
// NOTE(review): frame_number / coded_picture_number / display_picture_number
// are deprecated in newer FFmpeg versions — confirm against the FFmpeg
// version in use. The large commented-out section below is an earlier
// render path kept for reference.
void VideoDecoder::OnFrameAvailable(AVFrame *frame) {
//    m_VideoRender->RenderVideoFrame(frame);
  // Log picture type, timestamps and dimensions for debugging.
  printf(
                            "Frame %c (%d) pts %lld dts %lld key_frame %d [coded_picture_number %d, display_picture_number %d, %dx%d]\n",
                            av_get_picture_type_char(frame->pict_type),
                            GetCodecContext()->frame_number,
                            frame->pts,
                            frame->pkt_dts,
                            frame->key_frame,
                            frame->coded_picture_number,
                            frame->display_picture_number,
                            GetCodecContext()->width,
                            GetCodecContext()->height);
//    std::unique_lock<std::mutex> lock(m_Mutex1);
    // Average frame rate of the stream (num/den). ("帧率" = frame rate)
    printf("帧率 %d %d \n", GetAVFormatContext()->streams[m_StreamIndex]->avg_frame_rate.num, GetAVFormatContext()->streams[m_StreamIndex]->avg_frame_rate.den);

    // Stream time base (num/den), the unit of pts/dts. ("时间" = time)
    printf("时间 %d %d \n",GetAVFormatContext()->streams[m_StreamIndex]->time_base.num, GetAVFormatContext()->streams[m_StreamIndex]->time_base.den);
    // Convert + save + render this frame; `i` is the file-scope frame counter.
    yuv_png(frame,GetCodecContext(),i++);

//    int w=GetCodecContext()->width;
//    int h=GetCodecContext()->height;
//
//
//    fwrite(frame->data[0], 1, w * h, fp);//y
//    fwrite(frame->data[1], 1, w * h / 4, fp);//u
//    fwrite(frame->data[2], 1, w * h / 4, fp);//v
//    saveFrame(frame,w,h,i++);

//    if(m_VideoRender != nullptr && frame != nullptr) {
//        NativeImage image;
//        if(m_VideoRender->GetRenderType() == VIDEO_RENDER_SDL) {
//            //AV_PIX_FMT_YUV420P
//            AVCodecContext  *m_AVCodecContext1=GetCodecContext();
//
//            if(GetCodecContext()->pix_fmt == AV_PIX_FMT_YUV420P || GetCodecContext()->pix_fmt == AV_PIX_FMT_YUVJ420P) {
//                image.format = IMAGE_FORMAT_I420;
//                image.width = frame->width;
//                image.height = frame->height;
//                image.pLineSize[0] = frame->linesize[0];
//                image.pLineSize[1] = frame->linesize[1];
//                image.pLineSize[2] = frame->linesize[2];
//                image.ppPlane[0] = frame->data[0];
//                image.ppPlane[1] = frame->data[1];
//                image.ppPlane[2] = frame->data[2];
//                if (frame->data[0] && frame->data[1] && !frame->data[2] && frame->linesize[0] == frame->linesize[1] &&
//                    frame->linesize[2] == 0) {
//                    // on some android device, output of h264 mediacodec decoder is NV12 (handles format mismatch on some devices)
//                    image.format = IMAGE_FORMAT_NV12;
//                }
//            }else if (GetCodecContext()->pix_fmt == AV_PIX_FMT_NV12) {
//                    image.format = IMAGE_FORMAT_NV12;
//                    image.width = frame->width;
//                    image.height = frame->height;
//                    image.pLineSize[0] = frame->linesize[0];
//                    image.pLineSize[1] = frame->linesize[1];
//                    image.ppPlane[0] = frame->data[0];
//                    image.ppPlane[1] = frame->data[1];
//            } else if (GetCodecContext()->pix_fmt == AV_PIX_FMT_NV21) {
//                    image.format = IMAGE_FORMAT_NV21;
//                    image.width = frame->width;
//                    image.height = frame->height;
//                    image.pLineSize[0] = frame->linesize[0];
//                    image.pLineSize[1] = frame->linesize[1];
//                    image.ppPlane[0] = frame->data[0];
//                    image.ppPlane[1] = frame->data[1];
//            } else if (GetCodecContext()->pix_fmt == AV_PIX_FMT_RGBA) {
//                    image.format = IMAGE_FORMAT_RGBA;
//                    image.width = frame->width;
//                    image.height = frame->height;
//                    image.pLineSize[0] = frame->linesize[0];
//                    image.ppPlane[0] = frame->data[0];
//            } else {
//                    sws_scale(m_SwsContext, frame->data, frame->linesize, 0,
//                              m_VideoHeight, m_RGBAFrame->data, m_RGBAFrame->linesize);
//                    image.format = IMAGE_FORMAT_RGBA;
//                    image.width = m_RenderWidth;
//                    image.height = m_RenderHeight;
//                    image.ppPlane[0] = m_RGBAFrame->data[0];
//                    image.pLineSize[0] = image.width * 4;
//            }
//                m_VideoRender->RenderVideoFrame(&image);
//        }
//
////        m_VideoRender->RenderVideoFrame(&image);
//
//        }
}
/**
 * Decoder-ready callback: caches the video dimensions, initializes the
 * renderer, and allocates the RGBA staging frame + SwsContext used to
 * convert decoded frames to DST_PIXEL_FORMAT for rendering.
 *
 * Counterpart teardown lives in OnDecoderDone(). Allocation failures are
 * now detected and logged instead of being silently dereferenced later.
 */
void VideoDecoder::OnDecoderReady() {
//    fp=fopen("D:\\tools\\result22.yuv","w+b");
//    if(fp==NULL){
//        printf("Cannot open file.\n");
//    }
    m_VideoWidth = GetCodecContext()->width;
    m_VideoHeight = GetCodecContext()->height;
    if(m_VideoRender != nullptr) {
        int dstSize[2] = {0};
        // Renderer reports the actual render target size via dstSize.
        m_VideoRender->Init(m_VideoWidth, m_VideoHeight, dstSize);
        m_RenderWidth = dstSize[0];
        m_RenderHeight = dstSize[1];

        m_RGBAFrame = av_frame_alloc();
        int bufferSize = av_image_get_buffer_size(DST_PIXEL_FORMAT, m_RenderWidth, m_RenderHeight, 1);
        if (m_RGBAFrame == nullptr || bufferSize <= 0) {
            printf("OnDecoderReady: frame alloc failed (bufferSize=%d)\n", bufferSize);
            return;
        }

        // NOTE: av_malloc'd — must be released with av_free() in OnDecoderDone().
        m_FrameBuffer = (uint8_t *) av_malloc(bufferSize * sizeof(uint8_t));
        if (m_FrameBuffer == nullptr) {
            printf("OnDecoderReady: av_malloc(%d) failed\n", bufferSize);
            return;
        }
        av_image_fill_arrays(m_RGBAFrame->data, m_RGBAFrame->linesize,
                                   m_FrameBuffer, DST_PIXEL_FORMAT, m_RenderWidth, m_RenderHeight, 1);

        m_SwsContext = sws_getContext(m_VideoWidth, m_VideoHeight, GetCodecContext()->pix_fmt,
                                   m_RenderWidth, m_RenderHeight, DST_PIXEL_FORMAT,
                                   SWS_FAST_BILINEAR, NULL, NULL, NULL);
        if (m_SwsContext == nullptr)
            printf("OnDecoderReady: sws_getContext failed\n");
    }
}
void VideoDecoder::OnDecoderDone() {

    if(m_VideoRender)
        m_VideoRender->UnInit();

    if(m_RGBAFrame != nullptr) {
        av_frame_free(&m_RGBAFrame);
        m_RGBAFrame = nullptr;
    }

    if(m_FrameBuffer != nullptr) {
        free(m_FrameBuffer);
        m_FrameBuffer = nullptr;
    }

    if(m_SwsContext != nullptr) {
        sws_freeContext(m_SwsContext);
        m_SwsContext = nullptr;
    }



}
