#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libavcodec/avcodec.h"
#include "libavdevice/avdevice.h"
#include "libavutil/imgutils.h"
#include "libswscale/swscale.h"

/*
 * Ask the avfoundation input device to print the list of available capture
 * devices (it logs them itself during avformat_open_input).
 *
 * Note: with "list_devices=true" avfoundation intentionally fails the open
 * (typically with AVERROR_EXIT) after printing the list, so an error return
 * here is the expected path and only logged at DEBUG-worthy severity.
 */
void avfoundationListDevices() {

    AVInputFormat *inFmt = av_find_input_format("avfoundation");
    if (inFmt == NULL) {
        av_log(NULL, AV_LOG_ERROR, "av_find_input_format failed \n");
        return;
    }

    /* list_devices=true makes the device enumerate instead of capture. */
    AVDictionary *options = NULL;
    av_dict_set(&options, "list_devices", "true", 0);

    /* avformat_open_input allocates the context itself when given NULL,
     * and frees it again on failure, so no manual alloc is needed. */
    AVFormatContext *inFmtCtx = NULL;
    int ret = avformat_open_input(&inFmtCtx, "", inFmt, &options);
    av_dict_free(&options); /* was leaked before: unconsumed entries remain owned by us */
    if (ret != 0) {
        av_log(NULL, AV_LOG_ERROR, "avformat_open_input failed:%s \n", av_err2str(ret));
        return;
    }

    /* avformat_close_input both closes and frees, and NULLs the pointer;
     * the old extra avformat_free_context call was redundant. */
    avformat_close_input(&inFmtCtx);
}

/*
 * Feed one packet to the decoder, convert every decoded frame to the
 * destination pixel format via swsCtx, and append the planar YUV420P
 * planes (Y full size, U and V quarter size) to dest_fp.
 *
 * Pass packet == NULL to flush the decoder and drain remaining frames.
 *
 * NOTE(review): the fwrite sizes assume destFrame has no linesize padding
 * (width == linesize), which holds for the av_image_fill_arrays(..., 1)
 * setup in main — confirm if the allocation changes.
 */
void decodeVideo(struct SwsContext *swsCtx, AVCodecContext *decoderCtx, AVFrame *destFrame, AVPacket *packet, FILE *dest_fp) {
    if (avcodec_send_packet(decoderCtx, packet) == 0) {
        AVFrame *frame = av_frame_alloc();
        if (frame == NULL) {
            av_log(NULL, AV_LOG_ERROR, "av_frame_alloc failed \n");
            return;
        }
        while (avcodec_receive_frame(decoderCtx, frame) >= 0) {

            /* Convert (e.g. packed UYVY422) source frame into planar YUV420P. */
            sws_scale(swsCtx, (const uint8_t *const *)frame->data, frame->linesize, 0, decoderCtx->height, destFrame->data, destFrame->linesize);

            /* YUV420P: full-resolution Y plane, then 2x2-subsampled U and V. */
            fwrite(destFrame->data[0], 1, decoderCtx->width * decoderCtx->height, dest_fp);
            fwrite(destFrame->data[1], 1, decoderCtx->width * decoderCtx->height / 4, dest_fp);
            fwrite(destFrame->data[2], 1, decoderCtx->width * decoderCtx->height / 4, dest_fp);
        }
        /* av_frame_unref only dropped the buffer refs and leaked the frame
         * struct itself each call; av_frame_free releases both. */
        av_frame_free(&frame);
    }
}

// Usage: ./metadata code.yuv
//p 23 2:00:00
// ffmpeg -f avfoundation -list_devices true -i ""        -- list the devices available for capture
// ffmpeg -f avfoundation -framerate 30 -i 0 out.yuv      -- capture from device 0 (the Mac FaceTime camera, 640x480) into out.yuv
// ffplay -video_size 640x480 -pixel_format uyvy422 out.yuv -- play back the recorded raw video
// TODO: this program converts the captured video from yuv422 to yuv420;
//       the converted playback quality still has issues and needs tuning

int main(int argc, char **argv) {
    av_log_set_level(AV_LOG_INFO);
    if (argc < 2) {
        av_log(NULL, AV_LOG_ERROR, "Usage: %s <outFileName>. \n", argv[0]);
        return -1;
    }
    const char *outFileName = argv[1];

    avdevice_register_all();
    avfoundationListDevices();

    AVFormatContext *inFmtCtx = avformat_alloc_context();
    AVInputFormat  *inFmt = av_find_input_format("avfoundation");
    if (inFmt == NULL) {
        av_log(NULL, AV_LOG_ERROR, "av_find_input_format failed\n");
        goto end;
    }

    AVDictionary *options = NULL;
    av_dict_set(&options, "framerate", "30", 0);
    int ret = avformat_open_input(&inFmtCtx, "0", inFmt, &options);
    if (ret != 0) {
        av_log(NULL, AV_LOG_ERROR, "avformat_open_input failed:%s \n", av_err2str(ret));
        goto end;
    }

    ret = avformat_find_stream_info(inFmtCtx, NULL);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "avformat_find_stream_info failed: %x\n", av_err2str(ret));
        goto end;
    }

    ret = av_find_best_stream(inFmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "av_find_best_stream failed: %s\n", av_err2str(ret));
        goto end;
    }

    int videoIndex = ret;
    AVCodecContext *decoderCtx = avcodec_alloc_context3(NULL);
    ret = avcodec_parameters_to_context(decoderCtx, inFmtCtx->streams[videoIndex]->codecpar);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "avcodec_parameters_to_context failed:%s \n", av_err2str(ret));
        goto end;
    }

    AVCodec *decoder = avcodec_find_decoder(decoderCtx->codec_id);
    if (decoder == NULL) {
        av_log(NULL, AV_LOG_ERROR, "avcodec_find_decoder failed:%s\n", av_err2str(ret));
        goto end;
    }

    ret = avcodec_open2(decoderCtx, decoder, NULL);
    if (ret != 0) {
        av_log(NULL, AV_LOG_ERROR, "avcodec_open2 failed:%s\n", av_err2str(ret));
        goto end;
    }

    //将采集到视频从yuv422 转成yuv420
    //申请空间
    AVFrame *destFrame = av_frame_alloc();
    enum AVPixelFormat destPixFmt = AV_PIX_FMT_YUV420P;

    uint8_t *outBuffer = av_malloc(av_image_get_buffer_size(destPixFmt, decoderCtx->width, decoderCtx->height, 1));
    av_image_fill_arrays(destFrame->data, destFrame->linesize, outBuffer, destPixFmt, decoderCtx->width, decoderCtx->height, 1);

    struct SwsContext *swsCtx = sws_getContext(decoderCtx->width, decoderCtx->height, decoderCtx->pix_fmt, decoderCtx->width, decoderCtx->height,destPixFmt, 0, NULL, NULL, NULL);
    if (swsCtx == NULL) {
        av_log(NULL, AV_LOG_ERROR, "sws_getContext failed\n");
        ret = -1;
        goto end;
    }

    FILE *dest_fp = fopen(outFileName, "web+");
    if (dest_fp == NULL) {
        av_log(NULL, AV_LOG_ERROR, "fopen failed:%s\n",outFileName);
        goto end;
    }

    AVPacket *packet = av_packet_alloc();
    if (!packet) {
        av_log(NULL, AV_LOG_ERROR, "av_packet_alloc failed \n");
        goto end;
    }

    while (1) {
        if (av_read_frame(inFmtCtx, packet) == 0) {
            if (packet->stream_index == videoIndex) {
                decodeVideo(swsCtx, decoderCtx, destFrame,packet, dest_fp);
            }
        }
        av_packet_unref(packet);
    }
    decodeVideo(swsCtx,decoderCtx,destFrame, NULL, dest_fp);


    end:
    if (inFmtCtx) {
        avformat_free_context(inFmtCtx);
    }
    if (decoderCtx) {
        avcodec_free_context(&decoderCtx);
    }
    if (dest_fp) {
        fclose(dest_fp);
    }
    if (outBuffer) {
        av_freep(&outBuffer);
    }

    return ret;
}

